diff --git a/CHANGELOG.md b/CHANGELOG.md
index 62c47b9..9baf032 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -5,6 +5,13 @@ All notable changes to this project will be documented in this file.
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
 
+## [0.2.0] - 2024-02-07
+
+- Support `2023-12-01-preview` API version and enable `response_format: {"type": "json_object"}` by default in code completion
+- Fix triple-backticks issue in code-completion response
+- Add `API parameters` setting to support customization and parameter tweaking, such as temperature
+- Update logo (generated by DALL-E)
+
 ## [0.1.11] - 2024-01-30
 
 - Add setting to allow self-signed certificate which could be useful in dev / test stages
diff --git a/assets/logo.png b/assets/logo.png
index eac799e..4340beb 100644
Binary files a/assets/logo.png and b/assets/logo.png differ
diff --git a/assets/logo_v0.png b/assets/logo_v0.png
new file mode 100644
index 0000000..eac799e
Binary files /dev/null and b/assets/logo_v0.png differ
diff --git a/package.json b/package.json
index 3af47be..b79a8cb 100644
--- a/package.json
+++ b/package.json
@@ -3,7 +3,7 @@
   "displayName": "GAI Choy",
   "description": "G̲enerative A̲I̲ empowered, C̲ode H̲elper O̲n Y̲our side. Yet another Copilot for coding, with built-in integration capability with Azure OpenAI models or, local LLM such as CodeShell.",
   "publisher": "carusyte",
-  "version": "0.1.11",
+  "version": "0.2.0",
   "icon": "assets/logo.png",
   "pricing": "Free",
   "keywords": [
@@ -63,6 +63,7 @@
             "gpt-4",
             "gpt-4-32k"
           ],
+          "when": "config.GAIChoy.RunEnvForLLMs == 'Azure OpenAI'",
           "type": "string",
           "order": 4
         },
@@ -75,6 +76,7 @@
             "gpt-4",
             "gpt-4-32k"
           ],
+          "when": "config.GAIChoy.RunEnvForLLMs == 'Azure OpenAI'",
           "type": "string",
           "order": 5
         },
@@ -83,6 +85,7 @@
           "type": "null",
           "scope": "application",
           "markdownDescription": "[Set API Key](command:gaichoy.set_api_key)",
+          "when": "config.GAIChoy.RunEnvForLLMs == 'Azure OpenAI'",
           "order": 6
         },
         "GAIChoy.ApiVersion": {
@@ -93,16 +96,25 @@
             "2023-06-01-preview",
             "2023-07-01-preview",
             "2023-08-01-preview",
-            "2023-09-01-preview"
+            "2023-09-01-preview",
+            "2023-12-01-preview"
           ],
+          "when": "config.GAIChoy.RunEnvForLLMs == 'Azure OpenAI'",
           "type": "string",
           "order": 7
         },
+        "GAIChoy.ApiParameters": {
+          "description": "The API parameters for Azure OpenAI. Format: key=value pairs delimited by semicolons.",
+          "default": "temperature=0.2",
+          "when": "config.GAIChoy.RunEnvForLLMs == 'Azure OpenAI'",
+          "type": "string",
+          "order": 8
+        },
         "GAIChoy.AutoTriggerCompletion": {
           "description": "Whether or not to automatically trigger completion when typing.",
           "default": false,
           "type": "boolean",
-          "order": 8
+          "order": 9
         },
         "GAIChoy.AutoCompletionDelay": {
           "description": "The delay in seconds before automatic code completion triggers.",
@@ -113,7 +125,7 @@
             3
           ],
           "default": 2,
-          "order": 9
+          "order": 10
         },
         "GAIChoy.CompletionMaxTokens": {
           "description": "Maximum number of tokens for which suggestions will be displayed",
@@ -128,7 +140,7 @@
             4096
           ],
           "default": 64,
-          "order": 10
+          "order": 11
         },
         "GAIChoy.ChatMaxTokens": {
           "description": "Maximum number of tokens for which chat messages will be displayed",
@@ -142,20 +154,20 @@
             32768
           ],
           "default": 2048,
-          "order": 11
+          "order": 12
         },
         "GAIChoy.EnableDebugMessage": {
           "description": "Prints debug message to extension output.",
           "type": "boolean",
           "default": false,
-          "order": 12
+          "order": 13
         },
         "GAIChoy.ClearChatHistory": {
           "description": "Clear the chat history",
           "type": "null",
           "scope": "application",
           "markdownDescription": "[Clear chat history](command:gaichoy.clear_chat_history)",
-          "order": 13
+          "order": 14
         }
       }
     },
diff --git a/src/llm/AzureOAI.ts b/src/llm/AzureOAI.ts
index 36016ff..99ff51f 100644
--- a/src/llm/AzureOAI.ts
+++ b/src/llm/AzureOAI.ts
@@ -30,6 +30,7 @@ export class AzureOAI {
         const serverAddress = workspace.getConfiguration("GAIChoy").get("ServerAddress") as string;
         const model = workspace.getConfiguration("GAIChoy").get("ChatModel") as string;
         const api_version = workspace.getConfiguration("GAIChoy").get("ApiVersion") as string;
+        const parameters = workspace.getConfiguration("GAIChoy").get("ApiParameters") as string;
 
         // get API key from secret storage
         let api_key = await ExtensionResource.instance.getApiKey();
@@ -43,15 +44,15 @@
             "api-key": api_key
         }
 
-        let data = {
+        let data: any = {
             "temperature": 0.2,
             "messages": [
                 {
                     "role": "system",
-                    "content": `Your role is an AI code interpreter.
+                    "content": `Your role is an AI code generator.
 Your task is to provide executable and functional code fragments AS-IS, based on the context provided by the user.
-The context and metadata of the code fragment will be provided by user in the following format, as surrounded by triple backticks (actual input
-does not contain the triple backticks):
+The context and metadata of the code fragment will be provided by the user in the following format, surrounded by triple backticks.
+The actual input will not include the beginning and trailing triple backticks:
 \`\`\`
 {
     "file_name": "the file name of the program including file extension, which indicates the program type",
@@ -60,7 +61,8 @@ does not contain the triple backticks):
 }
 \`\`\`
 
-You must reply the generated code in a JSON format, as surrounded by triple backticks (your response shall not include the triple backticks):
+You must reply with the generated code in JSON format, as illustrated in the following code block (your final output
+MUST NOT include the beginning and trailing triple backticks; reply in JSON format):
 \`\`\`
 { "generated_code": "the code to be generated" }
 \`\`\`
@@ -108,20 +110,62 @@ Expected response in JSON format:
                 }
             ]
         };
+
+        this.mergeParameters(data, parameters)
+
+        // Conditionally add "response_format": {"type": "json_object"} when api_version is '2023-12-01-preview'.
+        data = api_version === '2023-12-01-preview' ? {
+            ...data, response_format: { type: 'json_object' }
+        } : data
+
         ExtensionResource.instance.debugMessage("request.data: " + data)
         const uri = "/openai/deployments/" + model + "/chat/completions?api-version=" + api_version
         const response = await axiosInstance.post(serverAddress + uri, data, { headers: headers });
         ExtensionResource.instance.debugMessage("response.data: " + response.data)
         const content = response.data.choices[0].message.content;
         ExtensionResource.instance.debugMessage("response.data.choices[0].message.content: " + content)
-        let contentJSON = JSON.parse(AzureOAI.trimTripleBackticks(content));
+        let contentJSON = JSON.parse(this.trimTripleBackticks(content));
         return contentJSON.generated_code;
     }
 
     static trimTripleBackticks(str: string){
-        if (str.startsWith('```') && str.endsWith('```')) {
-            return str.slice(3, -3);
+        // If the string starts or ends with triple backticks, remove them
+        return str.replace(/^\`{3}|\`{3}$/g, '');
+    }
+
+    // Attempt to convert a string to its appropriate type
+    static parseValue(value: string): any {
+        value = value.trim();
+
+        // Check if the value is numeric
+        if (!isNaN(value as any) && value !== '') {
+            return parseFloat(value);
+        }
+
+        // Check if the value is a quoted string and remove the quotes
+        const match = value.match(/^'(.*)'$/);
+        if (match) {
+            return match[1];
+        }
+
+        // Return the value as-is for any other case (e.g. booleans or unquoted strings)
+        return value;
+    }
+
+    static mergeParameters(data: any, parameters: string) {
+        if (parameters) {
+            // Define the type for the accumulator object
+            type ParamMapType = { [key: string]: any };
+
+            const paramMap = parameters.split(';')
+                .filter(p => p)
+                .map(p => p.split('='))
+                .reduce((acc: ParamMapType, [key, value]) => {
+                    acc[key.trim()] = this.parseValue(value);
+                    return acc;
+                }, {} as ParamMapType); // Initialize the accumulator with the correct type
+
+            Object.assign(data, paramMap);
         }
-        return str;
     }
 }
\ No newline at end of file
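
For reference, a minimal standalone TypeScript sketch of how the new `ApiParameters` setting is intended to behave, mirroring the `parseValue` and `mergeParameters` helpers added in this patch. The setting value and the `top_p` / `user` parameters in the example are hypothetical illustrations; only `temperature=0.2` is the shipped default.

// Illustrative sketch only: mirrors the ApiParameters handling added in src/llm/AzureOAI.ts.
// The parameter names top_p and user below are hypothetical examples, not extension defaults.

type ParamMap = { [key: string]: any };

// Convert a raw string value to a number, strip single quotes, or pass it through unchanged.
function parseValue(value: string): any {
    value = value.trim();
    if (!isNaN(value as any) && value !== '') {
        return parseFloat(value);             // numeric values, e.g. "0.95" -> 0.95
    }
    const match = value.match(/^'(.*)'$/);
    return match ? match[1] : value;          // 'quoted' -> quoted; everything else as-is
}

// Split "key=value;key=value" pairs and shallow-merge them into the request payload.
function mergeParameters(data: ParamMap, parameters: string): void {
    if (!parameters) { return; }
    const paramMap = parameters.split(';')
        .filter(p => p)
        .map(p => p.split('='))
        .reduce((acc: ParamMap, [key, value]) => {
            acc[key.trim()] = parseValue(value);
            return acc;
        }, {} as ParamMap);
    Object.assign(data, paramMap);            // user-supplied keys override the defaults
}

// Example: the hard-coded temperature of 0.2 is overridden by the setting string.
const payload: ParamMap = { temperature: 0.2, messages: [] };
mergeParameters(payload, "temperature=0.7;top_p=0.95;user='gai-choy'");
console.log(payload); // { temperature: 0.7, messages: [], top_p: 0.95, user: 'gai-choy' }

Because the merge uses Object.assign, any key supplied through the setting overrides the request defaults built into the extension, which is what makes temperature (and similar) tweaking possible without a code change.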