From e9902ee8296239f5b1573971e9f300b946cac0a6 Mon Sep 17 00:00:00 2001 From: Rob Chambers Date: Tue, 9 Jan 2024 15:59:06 -0800 Subject: [PATCH 01/30] added `ai dev new` templates (#142) * added openai-functions-streaming-py template * remove extra line * remove unnecessary templating * function refactoring symmetry across languages and templates * added openai-functions-streaming-cs template * remove search template for now; fixed some warnings; better output * made c# templates more alike * updated javascript samples * more javascript <-> javascript template refactoring to match * updated env vars; upgraded python streaming chat template to v1.0 * upgrade python examples to 1.0 sdk from openai * better symmetry in python samples * add weather func to demonstrate passing params * more pythonic variable names * fix python refactor bug * more symmetry in C# templates * even more c# symmetry and incorporate OpenAI into key class names * javascript PR feedback (name changes, etc.) --- .../OpenAIChatWithAzureAISearch.csproj._ | 0 .../OpenAIChatWithAzureAISearchClass.cs | 0 .../openai-chat-with-azure-ai-search/_.json | 0 ...de.python.script.function_call_evaluate.py | 2 +- src/ai/.x/templates/openai-chat-go/_.json | 4 +- .../openai_chat_completions_hello_world.go | 12 +- src/ai/.x/templates/openai-chat-java/_.json | 4 +- .../src/OpenAIQuickstart.java | 8 +- .../openai-chat-js/ChatCompletion.js | 48 --- src/ai/.x/templates/openai-chat-js/Main.js | 41 ++ .../OpenAIChatCompletionsClass.js | 31 ++ src/ai/.x/templates/openai-chat-js/_.json | 5 +- .../.x/templates/openai-chat-js/package.json | 2 +- src/ai/.x/templates/openai-chat-py/_.json | 6 +- .../openai-chat-py/openai_chat_completions.py | 42 +- .../templates/openai-chat-py/requirements.txt | 2 +- .../openai-chat-streaming-java/_.json | 4 +- .../src/OpenAIQuickstartStreaming.java | 8 +- .../ChatCompletionsStreaming.js | 85 ---- .../openai-chat-streaming-js/Main.js | 44 ++ .../OpenAIChatCompletionsStreamingClass.js | 47 +++ 
.../templates/openai-chat-streaming-js/_.json | 5 +- .../openai-chat-streaming-js/package.json | 2 +- .../templates/openai-chat-streaming-py/_.json | 6 +- .../chat_completions_streaming.py | 48 +++ .../openai-chat-streaming-py/main.py | 33 ++ .../openai_chat_completions_streaming.py | 56 --- .../openai-chat-streaming-py/requirements.txt | 2 +- .../OpenAIChatCompletionsStreamingClass.cs | 64 ++- .../openai-chat-streaming/Program.cs | 34 ++ .../.x/templates/openai-chat-streaming/_.json | 6 +- .../openai-chat/OpenAIChatCompletionsClass.cs | 54 +-- src/ai/.x/templates/openai-chat/Program.cs | 31 ++ src/ai/.x/templates/openai-chat/_.json | 6 +- .../FunctionCallContext.cs | 59 +++ .../FunctionFactory.cs | 384 ++++++++++++++++++ .../HelperFunctionDescriptionAttribute.cs | 13 + ...erFunctionParameterDescriptionAttribute.cs | 13 + .../OpenAIChatCompletionsCustomFunctions.cs | 24 ++ ...ChatCompletionsFunctionsStreaming.csproj._ | 21 + ...IChatCompletionsFunctionsStreamingClass.cs | 83 ++++ .../openai-functions-streaming-cs/Program.cs | 37 ++ .../openai-functions-streaming-cs/_.json | 9 + .../openai-functions-streaming-js/Main.js | 24 +- ...> OpenAIChatCompletionsCustomFunctions.js} | 28 +- ...ChatCompletionsFunctionsStreamingClass.js} | 16 +- .../openai-functions-streaming-js/_.json | 5 +- .../openai-functions-streaming-py/_.json | 9 + .../chat_completions_custom_functions.py | 66 +++ .../chat_completions_functions_streaming.py | 61 +++ .../function_call_context.py | 42 ++ .../function_factory.py | 16 + .../openai-functions-streaming-py/main.py | 34 ++ .../requirements.txt | 1 + src/ai/.x/templates/openai-webpage/.env | 8 +- src/ai/.x/templates/openai-webpage/_.json | 5 +- .../src/ChatCompletionsCustomFunctions.js | 16 - .../OpenAIChatCompletionsCustomFunctions.js | 62 +++ ...ChatCompletionsFunctionsStreamingClass.js} | 16 +- .../.x/templates/openai-webpage/src/script.js | 30 +- src/ai/helpers/config_environment_helpers.cs | 4 + 61 files changed, 1431 insertions(+), 397 
deletions(-) rename {src/ai/.x/templates => ideas}/openai-chat-with-azure-ai-search/OpenAIChatWithAzureAISearch.csproj._ (100%) rename {src/ai/.x/templates => ideas}/openai-chat-with-azure-ai-search/OpenAIChatWithAzureAISearchClass.cs (100%) rename {src/ai/.x/templates => ideas}/openai-chat-with-azure-ai-search/_.json (100%) delete mode 100644 src/ai/.x/templates/openai-chat-js/ChatCompletion.js create mode 100644 src/ai/.x/templates/openai-chat-js/Main.js create mode 100644 src/ai/.x/templates/openai-chat-js/OpenAIChatCompletionsClass.js delete mode 100644 src/ai/.x/templates/openai-chat-streaming-js/ChatCompletionsStreaming.js create mode 100644 src/ai/.x/templates/openai-chat-streaming-js/Main.js create mode 100644 src/ai/.x/templates/openai-chat-streaming-js/OpenAIChatCompletionsStreamingClass.js create mode 100644 src/ai/.x/templates/openai-chat-streaming-py/chat_completions_streaming.py create mode 100644 src/ai/.x/templates/openai-chat-streaming-py/main.py delete mode 100644 src/ai/.x/templates/openai-chat-streaming-py/openai_chat_completions_streaming.py create mode 100644 src/ai/.x/templates/openai-chat-streaming/Program.cs create mode 100644 src/ai/.x/templates/openai-chat/Program.cs create mode 100644 src/ai/.x/templates/openai-functions-streaming-cs/FunctionCallContext.cs create mode 100644 src/ai/.x/templates/openai-functions-streaming-cs/FunctionFactory.cs create mode 100644 src/ai/.x/templates/openai-functions-streaming-cs/HelperFunctionDescriptionAttribute.cs create mode 100644 src/ai/.x/templates/openai-functions-streaming-cs/HelperFunctionParameterDescriptionAttribute.cs create mode 100644 src/ai/.x/templates/openai-functions-streaming-cs/OpenAIChatCompletionsCustomFunctions.cs create mode 100644 src/ai/.x/templates/openai-functions-streaming-cs/OpenAIChatCompletionsFunctionsStreaming.csproj._ create mode 100644 src/ai/.x/templates/openai-functions-streaming-cs/OpenAIChatCompletionsFunctionsStreamingClass.cs create mode 100644 
src/ai/.x/templates/openai-functions-streaming-cs/Program.cs create mode 100644 src/ai/.x/templates/openai-functions-streaming-cs/_.json rename src/ai/.x/templates/openai-functions-streaming-js/{ChatCompletionsCustomFunctions.js => OpenAIChatCompletionsCustomFunctions.js} (60%) rename src/ai/.x/templates/{openai-webpage/src/ChatCompletionsFunctionsStreaming.js => openai-functions-streaming-js/OpenAIChatCompletionsFunctionsStreamingClass.js} (79%) create mode 100644 src/ai/.x/templates/openai-functions-streaming-py/_.json create mode 100644 src/ai/.x/templates/openai-functions-streaming-py/chat_completions_custom_functions.py create mode 100644 src/ai/.x/templates/openai-functions-streaming-py/chat_completions_functions_streaming.py create mode 100644 src/ai/.x/templates/openai-functions-streaming-py/function_call_context.py create mode 100644 src/ai/.x/templates/openai-functions-streaming-py/function_factory.py create mode 100644 src/ai/.x/templates/openai-functions-streaming-py/main.py create mode 100644 src/ai/.x/templates/openai-functions-streaming-py/requirements.txt delete mode 100644 src/ai/.x/templates/openai-webpage/src/ChatCompletionsCustomFunctions.js create mode 100644 src/ai/.x/templates/openai-webpage/src/OpenAIChatCompletionsCustomFunctions.js rename src/ai/.x/templates/{openai-functions-streaming-js/ChatCompletionsFunctionsStreaming.js => openai-webpage/src/OpenAIChatCompletionsFunctionsStreamingClass.js} (79%) diff --git a/src/ai/.x/templates/openai-chat-with-azure-ai-search/OpenAIChatWithAzureAISearch.csproj._ b/ideas/openai-chat-with-azure-ai-search/OpenAIChatWithAzureAISearch.csproj._ similarity index 100% rename from src/ai/.x/templates/openai-chat-with-azure-ai-search/OpenAIChatWithAzureAISearch.csproj._ rename to ideas/openai-chat-with-azure-ai-search/OpenAIChatWithAzureAISearch.csproj._ diff --git a/src/ai/.x/templates/openai-chat-with-azure-ai-search/OpenAIChatWithAzureAISearchClass.cs 
b/ideas/openai-chat-with-azure-ai-search/OpenAIChatWithAzureAISearchClass.cs similarity index 100% rename from src/ai/.x/templates/openai-chat-with-azure-ai-search/OpenAIChatWithAzureAISearchClass.cs rename to ideas/openai-chat-with-azure-ai-search/OpenAIChatWithAzureAISearchClass.cs diff --git a/src/ai/.x/templates/openai-chat-with-azure-ai-search/_.json b/ideas/openai-chat-with-azure-ai-search/_.json similarity index 100% rename from src/ai/.x/templates/openai-chat-with-azure-ai-search/_.json rename to ideas/openai-chat-with-azure-ai-search/_.json diff --git a/src/ai/.x/help/include.python.script.function_call_evaluate.py b/src/ai/.x/help/include.python.script.function_call_evaluate.py index 147b8a3a..ea3f9c86 100755 --- a/src/ai/.x/help/include.python.script.function_call_evaluate.py +++ b/src/ai/.x/help/include.python.script.function_call_evaluate.py @@ -236,7 +236,7 @@ def dont_call_this_method(kwargs): "api_version": os.getenv("OPENAI_API_VERSION"), "api_base": os.getenv("OPENAI_API_BASE"), "api_type": os.getenv("OPENAI_API_TYPE"), - "api_key": os.getenv("OPENAI_API_KEY"), + "api_key": os.getenv("AZURE_OPENAI_KEY"), "deployment_id": os.getenv("AZURE_OPENAI_EVALUATION_DEPLOYMENT") }, tracking_uri=client.tracking_uri, diff --git a/src/ai/.x/templates/openai-chat-go/_.json b/src/ai/.x/templates/openai-chat-go/_.json index c9e6bfff..d33c5448 100644 --- a/src/ai/.x/templates/openai-chat-go/_.json +++ b/src/ai/.x/templates/openai-chat-go/_.json @@ -1,8 +1,8 @@ { "_Name": "OpenAI Chat Completions in Go", "_Language": "Go", - "OPENAI_ENDPOINT": "", - "OPENAI_API_KEY": "", + "AZURE_OPENAI_ENDPOINT": "", + "AZURE_OPENAI_KEY": "", "AZURE_OPENAI_CHAT_DEPLOYMENT": "", "AZURE_OPENAI_SYSTEM_PROMPT": "You are a helpful AI assistant." 
} \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-go/openai_chat_completions_hello_world.go b/src/ai/.x/templates/openai-chat-go/openai_chat_completions_hello_world.go index 469d35f5..e0f423e6 100644 --- a/src/ai/.x/templates/openai-chat-go/openai_chat_completions_hello_world.go +++ b/src/ai/.x/templates/openai-chat-go/openai_chat_completions_hello_world.go @@ -1,7 +1,7 @@ <#@ template hostspecific="true" #> <#@ output extension=".go" encoding="utf-8" #> -<#@ parameter type="System.String" name="OPENAI_ENDPOINT" #> -<#@ parameter type="System.String" name="OPENAI_API_KEY" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_ENDPOINT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_KEY" #> <#@ parameter type="System.String" name="AZURE_OPENAI_CHAT_DEPLOYMENT" #> <#@ parameter type="System.String" name="AZURE_OPENAI_SYSTEM_PROMPT" #> package main @@ -19,13 +19,13 @@ import ( ) func main() { - azureOpenAIKey := os.Getenv("OPENAI_API_KEY") + azureOpenAIKey := os.Getenv("AZURE_OPENAI_KEY") if azureOpenAIKey == "" { - azureOpenAIKey = "<#= OPENAI_API_KEY #>" + azureOpenAIKey = "<#= AZURE_OPENAI_KEY #>" } - azureOpenAIEndpoint := os.Getenv("OPENAI_ENDPOINT") + azureOpenAIEndpoint := os.Getenv("AZURE_OPENAI_ENDPOINT") if azureOpenAIEndpoint == "" { - azureOpenAIEndpoint = "<#= OPENAI_ENDPOINT #>" + azureOpenAIEndpoint = "<#= AZURE_OPENAI_ENDPOINT #>" } modelDeploymentID := os.Getenv("AZURE_OPENAI_CHAT_DEPLOYMENT") if modelDeploymentID == "" { diff --git a/src/ai/.x/templates/openai-chat-java/_.json b/src/ai/.x/templates/openai-chat-java/_.json index e11606dd..c706443f 100644 --- a/src/ai/.x/templates/openai-chat-java/_.json +++ b/src/ai/.x/templates/openai-chat-java/_.json @@ -1,8 +1,8 @@ { "_Name": "OpenAI Chat Completions in Java", "_Language": "Java", - "OPENAI_ENDPOINT": "", - "OPENAI_API_KEY": "", + "AZURE_OPENAI_ENDPOINT": "", + "AZURE_OPENAI_KEY": "", "AZURE_OPENAI_CHAT_DEPLOYMENT": "", "AZURE_OPENAI_SYSTEM_PROMPT": "You are a 
helpful AI assistant." } \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-java/src/OpenAIQuickstart.java b/src/ai/.x/templates/openai-chat-java/src/OpenAIQuickstart.java index 39087733..dbe10257 100644 --- a/src/ai/.x/templates/openai-chat-java/src/OpenAIQuickstart.java +++ b/src/ai/.x/templates/openai-chat-java/src/OpenAIQuickstart.java @@ -1,8 +1,8 @@ <#@ template hostspecific="true" #> <#@ output extension=".java" encoding="utf-8" #> <#@ parameter type="System.String" name="ClassName" #> -<#@ parameter type="System.String" name="OPENAI_ENDPOINT" #> -<#@ parameter type="System.String" name="OPENAI_API_KEY" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_ENDPOINT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_KEY" #> <#@ parameter type="System.String" name="AZURE_OPENAI_CHAT_DEPLOYMENT" #> <#@ parameter type="System.String" name="AZURE_OPENAI_SYSTEM_PROMPT" #> import com.azure.ai.openai.OpenAIClient; @@ -24,8 +24,8 @@ public class OpenAIQuickstart { private OpenAIClient client; private ChatCompletionsOptions options; - private String key = (System.getenv("OPENAI_API_KEY") != null) ? System.getenv("OPENAI_API_KEY") : "<#= OPENAI_API_KEY #>"; - private String endpoint = (System.getenv("OPENAI_ENDPOINT") != null) ? System.getenv("OPENAI_ENDPOINT") : "<#= OPENAI_ENDPOINT #>"; + private String key = (System.getenv("AZURE_OPENAI_KEY") != null) ? System.getenv("AZURE_OPENAI_KEY") : "<#= AZURE_OPENAI_KEY #>"; + private String endpoint = (System.getenv("AZURE_OPENAI_ENDPOINT") != null) ? System.getenv("AZURE_OPENAI_ENDPOINT") : "<#= AZURE_OPENAI_ENDPOINT #>"; private String deploymentName = (System.getenv("AZURE_OPENAI_CHAT_DEPLOYMENT") != null) ? System.getenv("AZURE_OPENAI_CHAT_DEPLOYMENT") : "<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>"; private String systemPrompt = (System.getenv("AZURE_OPENAI_SYSTEM_PROMPT") != null) ? 
System.getenv("AZURE_OPENAI_SYSTEM_PROMPT") : "<#= AZURE_OPENAI_SYSTEM_PROMPT #>"; diff --git a/src/ai/.x/templates/openai-chat-js/ChatCompletion.js b/src/ai/.x/templates/openai-chat-js/ChatCompletion.js deleted file mode 100644 index 2256a152..00000000 --- a/src/ai/.x/templates/openai-chat-js/ChatCompletion.js +++ /dev/null @@ -1,48 +0,0 @@ -<#@ template hostspecific="true" #> -<#@ output extension=".js" encoding="utf-8" #> -<#@ parameter type="System.String" name="OPENAI_ENDPOINT" #> -<#@ parameter type="System.String" name="OPENAI_API_KEY" #> -<#@ parameter type="System.String" name="AZURE_OPENAI_CHAT_DEPLOYMENT" #> -<#@ parameter type="System.String" name="AZURE_OPENAI_SYSTEM_PROMPT" #> -const { OpenAIClient, AzureKeyCredential } = require("@azure/openai"); -const readline = require('readline'); -const rl = readline.createInterface({ - input: process.stdin, - output: process.stdout -}); - -const endpoint = process.env["OPENAI_ENDPOINT"] || "<#= OPENAI_ENDPOINT #>"; -const azureApiKey = process.env["OPENAI_API_KEY"] || "<#= OPENAI_API_KEY #>"; -const deploymentName = process.env["AZURE_OPENAI_CHAT_DEPLOYMENT"] || "<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>" ; -const systemPrompt = process.env["AZURE_OPENAI_SYSTEM_PROMPT"] || "<#= AZURE_OPENAI_SYSTEM_PROMPT #>" ; - -messages = [ - { role: "system", content: systemPrompt }, -]; - -async function main() { - - const client = new OpenAIClient(endpoint, new AzureKeyCredential(azureApiKey)); - - while (true) { - - const input = await new Promise(resolve => rl.question('User: ', resolve)); - if (input === 'exit' || input === '') break; - - messages.push({ role: "user", content: input }); - const result = await client.getChatCompletions(deploymentName, messages); - - const response_content = result.choices[0].message.content; - messages.push({ role: "assistant", content: response_content }); - - console.log(`\nAssistant: ${response_content}\n`); - } - - console.log('Bye!'); -} - -main().catch((err) => { - console.error("The 
sample encountered an error:", err); -}); - -module.exports = { main }; diff --git a/src/ai/.x/templates/openai-chat-js/Main.js b/src/ai/.x/templates/openai-chat-js/Main.js new file mode 100644 index 00000000..b26d878c --- /dev/null +++ b/src/ai/.x/templates/openai-chat-js/Main.js @@ -0,0 +1,41 @@ +<#@ template hostspecific="true" #> +<#@ output extension=".js" encoding="utf-8" #> +<#@ parameter type="System.String" name="ClassName" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_ENDPOINT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_KEY" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_CHAT_DEPLOYMENT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_SYSTEM_PROMPT" #> +const { <#= ClassName #> } = require("./OpenAIChatCompletionsClass"); + +const readline = require('readline'); +const rl = readline.createInterface({ + input: process.stdin, + output: process.stdout +}); + +async function main() { + + const endpoint = process.env["AZURE_OPENAI_ENDPOINT"] || "<#= AZURE_OPENAI_ENDPOINT #>"; + const azureApiKey = process.env["AZURE_OPENAI_KEY"] || "<#= AZURE_OPENAI_KEY #>"; + const deploymentName = process.env["AZURE_OPENAI_CHAT_DEPLOYMENT"] || "<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>" ; + const systemPrompt = process.env["AZURE_OPENAI_SYSTEM_PROMPT"] || "<#= AZURE_OPENAI_SYSTEM_PROMPT #>" ; + + const chat = new <#= ClassName #>(systemPrompt, endpoint, azureApiKey, deploymentName); + + while (true) { + + const input = await new Promise(resolve => rl.question('User: ', resolve)); + if (input === 'exit' || input === '') break; + + let response = await chat.getChatCompletions(input); + console.log(`\nAssistant: ${response}\n`); + } + + console.log('Bye!'); +} + +main().catch((err) => { + console.error("The sample encountered an error:", err); +}); + +module.exports = { main }; diff --git a/src/ai/.x/templates/openai-chat-js/OpenAIChatCompletionsClass.js b/src/ai/.x/templates/openai-chat-js/OpenAIChatCompletionsClass.js new file mode 
100644 index 00000000..6c88c22e --- /dev/null +++ b/src/ai/.x/templates/openai-chat-js/OpenAIChatCompletionsClass.js @@ -0,0 +1,31 @@ +<#@ template hostspecific="true" #> +<#@ output extension=".js" encoding="utf-8" #> +<#@ parameter type="System.String" name="ClassName" #> +const { OpenAIClient, AzureKeyCredential } = require("@azure/openai"); + +class <#= ClassName #> { + constructor(systemPrompt, endpoint, azureApiKey, deploymentName) { + this.systemPrompt = systemPrompt; + this.deploymentName = deploymentName; + this.client = new OpenAIClient(endpoint, new AzureKeyCredential(azureApiKey)); + this.clearConversation(); + } + + clearConversation() { + this.messages = [ + { role: 'system', content: this.systemPrompt } + ]; + } + + async getChatCompletions(userInput) { + this.messages.push({ role: 'user', content: userInput }); + + const result = await this.client.getChatCompletions(this.deploymentName, this.messages); + const responseContent = result.choices[0].message.content; + + this.messages.push({ role: 'assistant', content: responseContent }); + return responseContent; + } +} + +exports.<#= ClassName #> = <#= ClassName #>; \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-js/_.json b/src/ai/.x/templates/openai-chat-js/_.json index fb6365fd..e91e08b4 100644 --- a/src/ai/.x/templates/openai-chat-js/_.json +++ b/src/ai/.x/templates/openai-chat-js/_.json @@ -1,8 +1,9 @@ { "_Name": "OpenAI Chat Completions in JavaScript", "_Language": "JavaScript", - "OPENAI_ENDPOINT": "", - "OPENAI_API_KEY": "", + "ClassName": "OpenAIChatCompletionsClass", + "AZURE_OPENAI_ENDPOINT": "", + "AZURE_OPENAI_KEY": "", "AZURE_OPENAI_CHAT_DEPLOYMENT": "", "AZURE_OPENAI_SYSTEM_PROMPT": "You are a helpful AI assistant." 
} \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-js/package.json b/src/ai/.x/templates/openai-chat-js/package.json index 63b70f82..ca7c4681 100644 --- a/src/ai/.x/templates/openai-chat-js/package.json +++ b/src/ai/.x/templates/openai-chat-js/package.json @@ -2,7 +2,7 @@ "name": "openai-chat", "version": "1.0.0", "description": "", - "main": "ChatCompletions.js", + "main": "Main.js", "scripts": { "test": "echo \"Error: no test specified\" && exit 1" }, diff --git a/src/ai/.x/templates/openai-chat-py/_.json b/src/ai/.x/templates/openai-chat-py/_.json index 42d9549b..27e5f6b8 100644 --- a/src/ai/.x/templates/openai-chat-py/_.json +++ b/src/ai/.x/templates/openai-chat-py/_.json @@ -1,9 +1,9 @@ { "_Name": "OpenAI Chat Completions in Python", "_Language": "Python", - "OPENAI_ENDPOINT": "", - "OPENAI_API_KEY": "", - "OPENAI_API_VERSION": "2023-12-01-preview", + "AZURE_OPENAI_ENDPOINT": "", + "AZURE_OPENAI_KEY": "", + "AZURE_OPENAI_API_VERSION": "2023-12-01-preview", "AZURE_OPENAI_CHAT_DEPLOYMENT": "", "AZURE_OPENAI_SYSTEM_PROMPT": "You are a helpful AI assistant." 
} \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-py/openai_chat_completions.py b/src/ai/.x/templates/openai-chat-py/openai_chat_completions.py index 810615ee..fb29317e 100644 --- a/src/ai/.x/templates/openai-chat-py/openai_chat_completions.py +++ b/src/ai/.x/templates/openai-chat-py/openai_chat_completions.py @@ -1,42 +1,46 @@ <#@ template hostspecific="true" #> <#@ output extension=".py" encoding="utf-8" #> -<#@ parameter type="System.String" name="OPENAI_ENDPOINT" #> -<#@ parameter type="System.String" name="OPENAI_API_KEY" #> -<#@ parameter type="System.String" name="OPENAI_API_VERSION" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_ENDPOINT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_KEY" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_API_VERSION" #> <#@ parameter type="System.String" name="AZURE_OPENAI_CHAT_DEPLOYMENT" #> <#@ parameter type="System.String" name="AZURE_OPENAI_SYSTEM_PROMPT" #> import os -import openai +from openai import AzureOpenAI -openai.api_type = "azure" -openai.api_base = os.getenv("OPENAI_ENDPOINT") or "<#= OPENAI_ENDPOINT #>" -openai.api_key = os.getenv("OPENAI_API_KEY") or "<#= OPENAI_API_KEY #>" -openai.api_version = os.getenv("OPENAI_API_VERSION") or "<#= OPENAI_API_VERSION #>" +api_key = os.getenv("AZURE_OPENAI_KEY") or "<#= AZURE_OPENAI_KEY #>" +endpoint = os.getenv("AZURE_OPENAI_ENDPOINT") or "<#= AZURE_OPENAI_ENDPOINT #>" +api_version = os.getenv("AZURE_OPENAI_API_VERSION") or "<#= AZURE_OPENAI_API_VERSION #>" +deployment_name = os.getenv("AZURE_OPENAI_CHAT_DEPLOYMENT") or "<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>" +system_prompt = os.getenv("AZURE_OPENAI_SYSTEM_PROMPT") or "<#= AZURE_OPENAI_SYSTEM_PROMPT #>" -deploymentName = os.getenv("AZURE_OPENAI_CHAT_DEPLOYMENT") or "<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>" -systemPrompt = os.getenv("AZURE_OPENAI_SYSTEM_PROMPT") or "<#= AZURE_OPENAI_SYSTEM_PROMPT #>" +client = AzureOpenAI( + api_key=api_key, + api_version=api_version, + 
azure_endpoint = endpoint +) messages=[ - {"role": "system", "content": systemPrompt}, + {"role": "system", "content": system_prompt}, ] -def getChatCompletions() -> str: - messages.append({"role": "user", "content": userPrompt}) +def get_chat_completions(user_input) -> str: + messages.append({"role": "user", "content": user_input}) - response = openai.ChatCompletion.create( - engine=deploymentName, + response = client.chat.completions.create( + model=deployment_name, messages=messages, ) - response_content = response["choices"][0]["message"]["content"] + response_content = response.choices[0].message.content messages.append({"role": "assistant", "content": response_content}) return response_content while True: - userPrompt = input("User: ") - if userPrompt == "" or userPrompt == "exit": + user_input = input("User: ") + if user_input == "" or user_input == "exit": break - response_content = getChatCompletions() + response_content = get_chat_completions(user_input) print(f"\nAssistant: {response_content}\n") \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-py/requirements.txt b/src/ai/.x/templates/openai-chat-py/requirements.txt index d008bb14..7a06be70 100644 --- a/src/ai/.x/templates/openai-chat-py/requirements.txt +++ b/src/ai/.x/templates/openai-chat-py/requirements.txt @@ -1 +1 @@ -openai==0.28.1 +openai==1.0.0 diff --git a/src/ai/.x/templates/openai-chat-streaming-java/_.json b/src/ai/.x/templates/openai-chat-streaming-java/_.json index acb799d8..8a4336b2 100644 --- a/src/ai/.x/templates/openai-chat-streaming-java/_.json +++ b/src/ai/.x/templates/openai-chat-streaming-java/_.json @@ -1,8 +1,8 @@ { "_Name": "OpenAI Chat Completions (Streaming) in Java", "_Language": "Java", - "OPENAI_ENDPOINT": "", - "OPENAI_API_KEY": "", + "AZURE_OPENAI_ENDPOINT": "", + "AZURE_OPENAI_KEY": "", "AZURE_OPENAI_CHAT_DEPLOYMENT": "", "AZURE_OPENAI_SYSTEM_PROMPT": "You are a helpful AI assistant." 
} \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-streaming-java/src/OpenAIQuickstartStreaming.java b/src/ai/.x/templates/openai-chat-streaming-java/src/OpenAIQuickstartStreaming.java index 88c3db27..aee6adcb 100644 --- a/src/ai/.x/templates/openai-chat-streaming-java/src/OpenAIQuickstartStreaming.java +++ b/src/ai/.x/templates/openai-chat-streaming-java/src/OpenAIQuickstartStreaming.java @@ -1,8 +1,8 @@ <#@ template hostspecific="true" #> <#@ output extension=".java" encoding="utf-8" #> <#@ parameter type="System.String" name="ClassName" #> -<#@ parameter type="System.String" name="OPENAI_ENDPOINT" #> -<#@ parameter type="System.String" name="OPENAI_API_KEY" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_ENDPOINT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_KEY" #> <#@ parameter type="System.String" name="AZURE_OPENAI_CHAT_DEPLOYMENT" #> <#@ parameter type="System.String" name="AZURE_OPENAI_SYSTEM_PROMPT" #> import com.azure.ai.openai.OpenAIAsyncClient; @@ -37,9 +37,9 @@ public class OpenAIQuickstartStreaming { private OpenAIAsyncClient client; private ChatCompletionsOptions options; - private String key = (System.getenv("OPENAI_API_KEY") != null) ? System.getenv("OPENAI_API_KEY") + private String key = (System.getenv("AZURE_OPENAI_KEY") != null) ? System.getenv("AZURE_OPENAI_KEY") : ""; - private String endpoint = (System.getenv("OPENAI_ENDPOINT") != null) ? System.getenv("OPENAI_ENDPOINT") + private String endpoint = (System.getenv("AZURE_OPENAI_ENDPOINT") != null) ? System.getenv("AZURE_OPENAI_ENDPOINT") : ""; private String deploymentName = (System.getenv("AZURE_OPENAI_CHAT_DEPLOYMENT") != null) ? 
System.getenv("AZURE_OPENAI_CHAT_DEPLOYMENT") diff --git a/src/ai/.x/templates/openai-chat-streaming-js/ChatCompletionsStreaming.js b/src/ai/.x/templates/openai-chat-streaming-js/ChatCompletionsStreaming.js deleted file mode 100644 index be985d45..00000000 --- a/src/ai/.x/templates/openai-chat-streaming-js/ChatCompletionsStreaming.js +++ /dev/null @@ -1,85 +0,0 @@ -<#@ template hostspecific="true" #> -<#@ output extension=".js" encoding="utf-8" #> -<#@ parameter type="System.String" name="OPENAI_ENDPOINT" #> -<#@ parameter type="System.String" name="OPENAI_API_KEY" #> -<#@ parameter type="System.String" name="AZURE_OPENAI_CHAT_DEPLOYMENT" #> -<#@ parameter type="System.String" name="AZURE_OPENAI_SYSTEM_PROMPT" #> -const { OpenAIClient, AzureKeyCredential } = require("@azure/openai"); - -class OpenAIStreamingChatCompletions { - constructor(systemPrompt, endpoint, azureApiKey, deploymentName) { - this.systemPrompt = systemPrompt; - this.endpoint = endpoint; - this.azureApiKey = azureApiKey; - this.deploymentName = deploymentName; - this.client = new OpenAIClient(this.endpoint, new AzureKeyCredential(this.azureApiKey)); - this.clearConversation(); - } - - clearConversation() { - this.messages = [ - { role: 'system', content: this.systemPrompt } - ]; - } - - async getChatCompletions(userInput, callback) { - this.messages.push({ role: 'user', content: userInput }); - - const events = this.client.listChatCompletions(this.deploymentName, this.messages); - - let contentComplete = ''; - for await (const event of events) { - for (const choice of event.choices) { - - let content = choice.delta?.content; - if (choice.finishReason === 'length') { - content = `${content}\nERROR: Exceeded token limit!`; - } - - if (content != null) { - callback(content); - await new Promise(r => setTimeout(r, 50)); // delay to simulate real-time output, word by word - contentComplete += content; - } - } - } - - this.messages.push({ role: 'assistant', content: contentComplete }); - return 
contentComplete; - } -} - -const readline = require('readline'); -const rl = readline.createInterface({ - input: process.stdin, - output: process.stdout -}); - -async function main() { - const endpoint = process.env["OPENAI_ENDPOINT"] || "<#= OPENAI_ENDPOINT #>"; - const azureApiKey = process.env["OPENAI_API_KEY"] || "<#= OPENAI_API_KEY #>"; - const deploymentName = process.env["AZURE_OPENAI_CHAT_DEPLOYMENT"] || "<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>" ; - const systemPrompt = process.env["AZURE_OPENAI_SYSTEM_PROMPT"] || "<#= AZURE_OPENAI_SYSTEM_PROMPT #>" ; - - const streamingChatCompletions = new OpenAIStreamingChatCompletions(systemPrompt, endpoint, azureApiKey, deploymentName); - - while (true) { - - const input = await new Promise(resolve => rl.question('User: ', resolve)); - if (input === 'exit' || input === '') break; - - let response = await streamingChatCompletions.getChatCompletions(input, (content) => { - console.log(`assistant-streaming: ${content}`); - }); - - console.log(`\nAssistant: ${response}\n`); - } - - console.log('Bye!'); -} - -main().catch((err) => { - console.error("The sample encountered an error:", err); -}); - -module.exports = { main }; diff --git a/src/ai/.x/templates/openai-chat-streaming-js/Main.js b/src/ai/.x/templates/openai-chat-streaming-js/Main.js new file mode 100644 index 00000000..e7ff0ef6 --- /dev/null +++ b/src/ai/.x/templates/openai-chat-streaming-js/Main.js @@ -0,0 +1,44 @@ +<#@ template hostspecific="true" #> +<#@ output extension=".js" encoding="utf-8" #> +<#@ parameter type="System.String" name="ClassName" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_ENDPOINT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_KEY" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_CHAT_DEPLOYMENT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_SYSTEM_PROMPT" #> +const { <#= ClassName #> } = require("./OpenAIChatCompletionsStreamingClass"); + +const readline = require('readline'); +const rl = 
readline.createInterface({ + input: process.stdin, + output: process.stdout +}); + +async function main() { + + const endpoint = process.env["AZURE_OPENAI_ENDPOINT"] || "<#= AZURE_OPENAI_ENDPOINT #>"; + const azureApiKey = process.env["AZURE_OPENAI_KEY"] || "<#= AZURE_OPENAI_KEY #>"; + const deploymentName = process.env["AZURE_OPENAI_CHAT_DEPLOYMENT"] || "<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>" ; + const systemPrompt = process.env["AZURE_OPENAI_SYSTEM_PROMPT"] || "<#= AZURE_OPENAI_SYSTEM_PROMPT #>" ; + + const chat = new <#= ClassName #>(systemPrompt, endpoint, azureApiKey, deploymentName); + + while (true) { + + const input = await new Promise(resolve => rl.question('User: ', resolve)); + if (input === 'exit' || input === '') break; + + let response = await chat.getChatCompletions(input, (content) => { + console.log(`assistant-streaming: ${content}`); + }); + + console.log(`\nAssistant: ${response}\n`); + } + + console.log('Bye!'); +} + +main().catch((err) => { + console.error("The sample encountered an error:", err); +}); + +module.exports = { main }; diff --git a/src/ai/.x/templates/openai-chat-streaming-js/OpenAIChatCompletionsStreamingClass.js b/src/ai/.x/templates/openai-chat-streaming-js/OpenAIChatCompletionsStreamingClass.js new file mode 100644 index 00000000..31c55957 --- /dev/null +++ b/src/ai/.x/templates/openai-chat-streaming-js/OpenAIChatCompletionsStreamingClass.js @@ -0,0 +1,47 @@ +<#@ template hostspecific="true" #> +<#@ output extension=".js" encoding="utf-8" #> +<#@ parameter type="System.String" name="ClassName" #> +const { OpenAIClient, AzureKeyCredential } = require("@azure/openai"); + +class <#= ClassName #> { + constructor(systemPrompt, endpoint, azureApiKey, deploymentName) { + this.systemPrompt = systemPrompt; + this.deploymentName = deploymentName; + this.client = new OpenAIClient(endpoint, new AzureKeyCredential(azureApiKey)); + this.clearConversation(); + } + + clearConversation() { + this.messages = [ + { role: 'system', content: 
this.systemPrompt } + ]; + } + + async getChatCompletions(userInput, callback) { + this.messages.push({ role: 'user', content: userInput }); + + let contentComplete = ''; + const events = this.client.listChatCompletions(this.deploymentName, this.messages); + + for await (const event of events) { + for (const choice of event.choices) { + + let content = choice.delta?.content; + if (choice.finishReason === 'length') { + content = `${content}\nERROR: Exceeded token limit!`; + } + + if (content != null) { + callback(content); + await new Promise(r => setTimeout(r, 50)); // delay to simulate real-time output, word by word + contentComplete += content; + } + } + } + + this.messages.push({ role: 'assistant', content: contentComplete }); + return contentComplete; + } +} + +exports.<#= ClassName #> = <#= ClassName #>; \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-streaming-js/_.json b/src/ai/.x/templates/openai-chat-streaming-js/_.json index 0fa5ea1b..b14e23cc 100644 --- a/src/ai/.x/templates/openai-chat-streaming-js/_.json +++ b/src/ai/.x/templates/openai-chat-streaming-js/_.json @@ -1,8 +1,9 @@ { "_Name": "OpenAI Chat Completions (Streaming) in JavaScript", "_Language": "JavaScript", - "OPENAI_ENDPOINT": "", - "OPENAI_API_KEY": "", + "ClassName": "OpenAIChatCompletionsStreamingClass", + "AZURE_OPENAI_ENDPOINT": "", + "AZURE_OPENAI_KEY": "", "AZURE_OPENAI_CHAT_DEPLOYMENT": "", "AZURE_OPENAI_SYSTEM_PROMPT": "You are a helpful AI assistant." 
} \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-streaming-js/package.json b/src/ai/.x/templates/openai-chat-streaming-js/package.json index 1d208557..c94810af 100644 --- a/src/ai/.x/templates/openai-chat-streaming-js/package.json +++ b/src/ai/.x/templates/openai-chat-streaming-js/package.json @@ -2,7 +2,7 @@ "name": "openai-chat-streaming", "version": "1.0.0", "description": "", - "main": "ChatCompletionsStreaming.js", + "main": "Main.js", "scripts": { "test": "echo \"Error: no test specified\" && exit 1" }, diff --git a/src/ai/.x/templates/openai-chat-streaming-py/_.json b/src/ai/.x/templates/openai-chat-streaming-py/_.json index fb3a3ab9..3526d90b 100644 --- a/src/ai/.x/templates/openai-chat-streaming-py/_.json +++ b/src/ai/.x/templates/openai-chat-streaming-py/_.json @@ -1,9 +1,9 @@ { "_Name": "OpenAI Chat Completions (Streaming) in Python", "_Language": "Python", - "OPENAI_ENDPOINT": "", - "OPENAI_API_KEY": "", - "OPENAI_API_VERSION": "2023-12-01-preview", + "AZURE_OPENAI_ENDPOINT": "", + "AZURE_OPENAI_KEY": "", + "AZURE_OPENAI_API_VERSION": "2023-12-01-preview", "AZURE_OPENAI_CHAT_DEPLOYMENT": "", "AZURE_OPENAI_SYSTEM_PROMPT": "You are a helpful AI assistant." 
} \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-streaming-py/chat_completions_streaming.py b/src/ai/.x/templates/openai-chat-streaming-py/chat_completions_streaming.py new file mode 100644 index 00000000..984f6370 --- /dev/null +++ b/src/ai/.x/templates/openai-chat-streaming-py/chat_completions_streaming.py @@ -0,0 +1,48 @@ +from openai import AzureOpenAI + +class ChatCompletionsStreaming: + def __init__(self, system_prompt, endpoint, azure_api_key, azure_api_version, deployment_name): + self.system_prompt = system_prompt + self.endpoint = endpoint + self.azure_api_key = azure_api_key + self.azure_api_version = azure_api_version + self.deployment_name = deployment_name + self.client = AzureOpenAI( + api_key=self.azure_api_key, + api_version=self.azure_api_version, + azure_endpoint = endpoint + ) + self.clear_conversation() + + def clear_conversation(self): + self.messages = [ + {'role': 'system', 'content': self.system_prompt} + ] + + def get_chat_completions(self, user_input, callback): + self.messages.append({'role': 'user', 'content': user_input}) + + complete_content = "" + response = self.client.chat.completions.create( + model=self.deployment_name, + messages=self.messages, + stream=True) + + for chunk in response: + + choice0 = chunk.choices[0] if hasattr(chunk, 'choices') and chunk.choices else None + delta = choice0.delta if choice0 and hasattr(choice0, 'delta') else None + + content = delta.content if delta and hasattr(delta, 'content') else "" + if content is None: continue + + if content is not None: + callback(content) + complete_content += content + + finish_reason = choice0.finish_reason if choice0 and hasattr(choice0, 'finish_reason') else None + if finish_reason == "length": + content += f"{content}\nERROR: Exceeded max token length!" 
+ + self.messages.append({"role": "assistant", "content": complete_content}) + return complete_content diff --git a/src/ai/.x/templates/openai-chat-streaming-py/main.py b/src/ai/.x/templates/openai-chat-streaming-py/main.py new file mode 100644 index 00000000..47c287d8 --- /dev/null +++ b/src/ai/.x/templates/openai-chat-streaming-py/main.py @@ -0,0 +1,33 @@ +<#@ template hostspecific="true" #> +<#@ output extension=".py" encoding="utf-8" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_ENDPOINT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_KEY" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_API_VERSION" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_CHAT_DEPLOYMENT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_SYSTEM_PROMPT" #> +from chat_completions_streaming import ChatCompletionsStreaming +import os + +def main(): + azure_api_key = os.getenv('AZURE_OPENAI_KEY', '<#= AZURE_OPENAI_KEY #>') + endpoint = os.getenv('AZURE_OPENAI_ENDPOINT', '<#= AZURE_OPENAI_ENDPOINT #>') + api_version = os.getenv("AZURE_OPENAI_API_VERSION") or "<#= AZURE_OPENAI_API_VERSION #>" + deployment_name = os.getenv('AZURE_OPENAI_CHAT_DEPLOYMENT', '<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>') + system_prompt = os.getenv('AZURE_OPENAI_SYSTEM_PROMPT', '<#= AZURE_OPENAI_SYSTEM_PROMPT #>') + + chat = ChatCompletionsStreaming(system_prompt, endpoint, azure_api_key, api_version, deployment_name) + + while True: + user_input = input('User: ') + if user_input == 'exit' or user_input == '': + break + + print("\nAssistant: ", end="") + response = chat.get_chat_completions(user_input, lambda content: print(content, end="")) + print("\n") + +if __name__ == '__main__': + try: + main() + except Exception as e: + print(f'The sample encountered an error: {e}') \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-streaming-py/openai_chat_completions_streaming.py 
b/src/ai/.x/templates/openai-chat-streaming-py/openai_chat_completions_streaming.py deleted file mode 100644 index 43b23c16..00000000 --- a/src/ai/.x/templates/openai-chat-streaming-py/openai_chat_completions_streaming.py +++ /dev/null @@ -1,56 +0,0 @@ -<#@ template hostspecific="true" #> -<#@ output extension=".py" encoding="utf-8" #> -<#@ parameter type="System.String" name="OPENAI_ENDPOINT" #> -<#@ parameter type="System.String" name="OPENAI_API_KEY" #> -<#@ parameter type="System.String" name="OPENAI_API_VERSION" #> -<#@ parameter type="System.String" name="AZURE_OPENAI_CHAT_DEPLOYMENT" #> -<#@ parameter type="System.String" name="AZURE_OPENAI_SYSTEM_PROMPT" #> -import os -import openai - -openai.api_type = "azure" -openai.api_base = os.getenv("OPENAI_ENDPOINT") or "<#= OPENAI_ENDPOINT #>" -openai.api_key = os.getenv("OPENAI_API_KEY") or "<#= OPENAI_API_KEY #>" -openai.api_version = os.getenv("OPENAI_API_VERSION") or "<#= OPENAI_API_VERSION #>" - -deploymentName = os.getenv("AZURE_OPENAI_CHAT_DEPLOYMENT") or "<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>" -systemPrompt = os.getenv("AZURE_OPENAI_SYSTEM_PROMPT") or "<#= AZURE_OPENAI_SYSTEM_PROMPT #>" - -messages=[ - {"role": "system", "content": systemPrompt}, -] - -def getChatStreamingCompletions() -> str: - messages.append({"role": "user", "content": userPrompt}) - - response_content = "" - response = openai.ChatCompletion.create( - engine=deploymentName, - messages=messages, - stream=True) - - for update in response: - - choices = update["choices"] if "choices" in update else [] - choice0 = choices[0] if len(choices) > 0 else {} - delta = choice0["delta"] if "delta" in choice0 else {} - - content = delta["content"] if "content" in delta else "" - response_content += content - print(content, end="") - - finish_reason = choice0["finish_reason"] if "finish_reason" in choice0 else "" - if finish_reason == "length": - content += f"{content}\nERROR: Exceeded max token length!" 
- - messages.append({"role": "assistant", "content": response_content}) - return response_content - -while True: - userPrompt = input("User: ") - if userPrompt == "" or userPrompt == "exit": - break - - print("\nAssistant: ", end="") - response_content = getChatStreamingCompletions() - print("\n") diff --git a/src/ai/.x/templates/openai-chat-streaming-py/requirements.txt b/src/ai/.x/templates/openai-chat-streaming-py/requirements.txt index d008bb14..7a06be70 100644 --- a/src/ai/.x/templates/openai-chat-streaming-py/requirements.txt +++ b/src/ai/.x/templates/openai-chat-streaming-py/requirements.txt @@ -1 +1 @@ -openai==0.28.1 +openai==1.0.0 diff --git a/src/ai/.x/templates/openai-chat-streaming/OpenAIChatCompletionsStreamingClass.cs b/src/ai/.x/templates/openai-chat-streaming/OpenAIChatCompletionsStreamingClass.cs index 3eff8a12..3aaee1b4 100644 --- a/src/ai/.x/templates/openai-chat-streaming/OpenAIChatCompletionsStreamingClass.cs +++ b/src/ai/.x/templates/openai-chat-streaming/OpenAIChatCompletionsStreamingClass.cs @@ -1,10 +1,6 @@ <#@ template hostspecific="true" #> <#@ output extension=".cs" encoding="utf-8" #> <#@ parameter type="System.String" name="ClassName" #> -<#@ parameter type="System.String" name="OPENAI_ENDPOINT" #> -<#@ parameter type="System.String" name="OPENAI_API_KEY" #> -<#@ parameter type="System.String" name="AZURE_OPENAI_CHAT_DEPLOYMENT" #> -<#@ parameter type="System.String" name="AZURE_OPENAI_SYSTEM_PROMPT" #> using Azure; using Azure.AI.OpenAI; using Azure.Identity; @@ -12,35 +8,34 @@ public class <#= ClassName #> { - private OpenAIClient client; - private ChatCompletionsOptions options; - - public <#= ClassName #>() + public <#= ClassName #>(string systemPrompt, string endpoint, string azureApiKey, string deploymentName) { - var key = Environment.GetEnvironmentVariable("OPENAI_API_KEY") ?? "<#= OPENAI_API_KEY #>"; - var endpoint = Environment.GetEnvironmentVariable("OPENAI_ENDPOINT") ?? 
"<#= OPENAI_ENDPOINT #>"; - var deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_CHAT_DEPLOYMENT") ?? "<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>"; - var systemPrompt = Environment.GetEnvironmentVariable("AZURE_OPENAI_SYSTEM_PROMPT") ?? "<#= AZURE_OPENAI_SYSTEM_PROMPT #>"; + _systemPrompt = systemPrompt; - client = string.IsNullOrEmpty(key) + _client = string.IsNullOrEmpty(azureApiKey) ? new OpenAIClient(new Uri(endpoint), new DefaultAzureCredential()) - : new OpenAIClient(new Uri(endpoint), new AzureKeyCredential(key)); + : new OpenAIClient(new Uri(endpoint), new AzureKeyCredential(azureApiKey)); + + _options = new ChatCompletionsOptions(); + _options.DeploymentName = deploymentName; + + ClearConversation(); + } - options = new ChatCompletionsOptions(); - options.DeploymentName = deploymentName; - options.Messages.Add(new ChatRequestSystemMessage(systemPrompt)); + public void ClearConversation() + { + _options.Messages.Clear(); + _options.Messages.Add(new ChatRequestSystemMessage(_systemPrompt)); } - public async Task GetChatCompletionsStreamingAsync(string userPrompt, Action callback = null) + public async Task GetChatCompletionsStreamingAsync(string userPrompt, Action? 
callback = null) { - options.Messages.Add(new ChatRequestUserMessage(userPrompt)); + _options.Messages.Add(new ChatRequestUserMessage(userPrompt)); var responseContent = string.Empty; - var response = await client.GetChatCompletionsStreamingAsync(options); + var response = await _client.GetChatCompletionsStreamingAsync(_options); await foreach (var update in response.EnumerateValues()) { - callback(update); - var content = update.ContentUpdate; if (update.FinishReason == CompletionsFinishReason.ContentFiltered) { @@ -53,28 +48,15 @@ public async Task GetChatCompletionsStreamingAsync(string userPrompt, Ac if (string.IsNullOrEmpty(content)) continue; + if (callback != null) callback(update); responseContent += content; } - options.Messages.Add(new ChatRequestAssistantMessage(responseContent)); + _options.Messages.Add(new ChatRequestAssistantMessage(responseContent)); return responseContent; } - public static async Task Main(string[] args) - { - var chat = new <#= ClassName #>(); - - while (true) - { - Console.Write("User: "); - var userPrompt = Console.ReadLine(); - if (string.IsNullOrEmpty(userPrompt) || userPrompt == "exit") break; - - Console.Write("\nAssistant: "); - var response = await chat.GetChatCompletionsStreamingAsync(userPrompt, update => - Console.Write(update.ContentUpdate) - ); - Console.WriteLine("\n"); - } - } -} + private string _systemPrompt; + private ChatCompletionsOptions _options; + private OpenAIClient _client; +} \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-streaming/Program.cs b/src/ai/.x/templates/openai-chat-streaming/Program.cs new file mode 100644 index 00000000..ab353757 --- /dev/null +++ b/src/ai/.x/templates/openai-chat-streaming/Program.cs @@ -0,0 +1,34 @@ +<#@ template hostspecific="true" #> +<#@ output extension=".cs" encoding="utf-8" #> +<#@ parameter type="System.String" name="ClassName" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_ENDPOINT" #> +<#@ parameter type="System.String" 
name="AZURE_OPENAI_KEY" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_CHAT_DEPLOYMENT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_SYSTEM_PROMPT" #> +using System; + +public class Program +{ + public static async Task Main(string[] args) + { + var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? "<#= AZURE_OPENAI_ENDPOINT #>"; + var azureApiKey = Environment.GetEnvironmentVariable("AZURE_OPENAI_KEY") ?? "<#= AZURE_OPENAI_KEY #>"; + var deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_CHAT_DEPLOYMENT") ?? "<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>"; + var systemPrompt = Environment.GetEnvironmentVariable("AZURE_OPENAI_SYSTEM_PROMPT") ?? "<#= AZURE_OPENAI_SYSTEM_PROMPT #>"; + + var chat = new <#= ClassName #>(systemPrompt, endpoint, azureApiKey, deploymentName); + + while (true) + { + Console.Write("User: "); + var userPrompt = Console.ReadLine(); + if (string.IsNullOrEmpty(userPrompt) || userPrompt == "exit") break; + + Console.Write("\nAssistant: "); + var response = await chat.GetChatCompletionsStreamingAsync(userPrompt, update => + Console.Write(update.ContentUpdate) + ); + Console.WriteLine("\n"); + } + } +} \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-streaming/_.json b/src/ai/.x/templates/openai-chat-streaming/_.json index 252f0afd..e873fff3 100644 --- a/src/ai/.x/templates/openai-chat-streaming/_.json +++ b/src/ai/.x/templates/openai-chat-streaming/_.json @@ -1,9 +1,9 @@ { "_Name": "OpenAI Chat Completions (Streaming) in C#", "_Language": "C#", - "ClassName": "OpenAIHelloWorldStreamingClass", - "OPENAI_ENDPOINT": "", - "OPENAI_API_KEY": "", + "ClassName": "OpenAIChatCompletionsStreamingClass", + "AZURE_OPENAI_ENDPOINT": "", + "AZURE_OPENAI_KEY": "", "AZURE_OPENAI_CHAT_DEPLOYMENT": "", "AZURE_OPENAI_SYSTEM_PROMPT": "You are a helpful AI assistant." 
} \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat/OpenAIChatCompletionsClass.cs b/src/ai/.x/templates/openai-chat/OpenAIChatCompletionsClass.cs index 060ae8ce..a3bcd430 100644 --- a/src/ai/.x/templates/openai-chat/OpenAIChatCompletionsClass.cs +++ b/src/ai/.x/templates/openai-chat/OpenAIChatCompletionsClass.cs @@ -1,10 +1,6 @@ <#@ template hostspecific="true" #> <#@ output extension=".cs" encoding="utf-8" #> <#@ parameter type="System.String" name="ClassName" #> -<#@ parameter type="System.String" name="OPENAI_ENDPOINT" #> -<#@ parameter type="System.String" name="OPENAI_API_KEY" #> -<#@ parameter type="System.String" name="AZURE_OPENAI_CHAT_DEPLOYMENT" #> -<#@ parameter type="System.String" name="AZURE_OPENAI_SYSTEM_PROMPT" #> using Azure; using Azure.AI.OpenAI; using Azure.Identity; @@ -12,48 +8,38 @@ public class <#= ClassName #> { - private OpenAIClient client; - private ChatCompletionsOptions options; - - public <#= ClassName #>() + public <#= ClassName #>(string systemPrompt, string endpoint, string azureApiKey, string deploymentName) { - var key = Environment.GetEnvironmentVariable("OPENAI_API_KEY") ?? "<#= OPENAI_API_KEY #>"; - var endpoint = Environment.GetEnvironmentVariable("OPENAI_ENDPOINT") ?? "<#= OPENAI_ENDPOINT #>"; - var deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_CHAT_DEPLOYMENT") ?? "<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>"; - var systemPrompt = Environment.GetEnvironmentVariable("AZURE_OPENAI_SYSTEM_PROMPT") ?? "<#= AZURE_OPENAI_SYSTEM_PROMPT #>"; + _systemPrompt = systemPrompt; - client = string.IsNullOrEmpty(key) + _client = string.IsNullOrEmpty(azureApiKey) ? 
new OpenAIClient(new Uri(endpoint), new DefaultAzureCredential()) - : new OpenAIClient(new Uri(endpoint), new AzureKeyCredential(key)); + : new OpenAIClient(new Uri(endpoint), new AzureKeyCredential(azureApiKey)); + + _options = new ChatCompletionsOptions(); + _options.DeploymentName = deploymentName; + + ClearConversation(); + } - options = new ChatCompletionsOptions(); - options.DeploymentName = deploymentName; - options.Messages.Add(new ChatRequestSystemMessage(systemPrompt)); + public void ClearConversation() + { + _options.Messages.Clear(); + _options.Messages.Add(new ChatRequestSystemMessage(_systemPrompt)); } public string GetChatCompletion(string userPrompt) { - options.Messages.Add(new ChatRequestUserMessage(userPrompt)); + _options.Messages.Add(new ChatRequestUserMessage(userPrompt)); - var response = client.GetChatCompletions(options); + var response = _client.GetChatCompletions(_options); var responseContent = response.Value.Choices[0].Message.Content; - options.Messages.Add(new ChatRequestAssistantMessage(responseContent)); + _options.Messages.Add(new ChatRequestAssistantMessage(responseContent)); return responseContent; } - public static void Main(string[] args) - { - var chat = new OpenAIHelloWorldClass(); - - while (true) - { - Console.Write("User: "); - var userPrompt = Console.ReadLine(); - if (string.IsNullOrEmpty(userPrompt) || userPrompt == "exit") break; - - var response = chat.GetChatCompletion(userPrompt); - Console.WriteLine($"\nAssistant: {response}\n"); - } - } + private string _systemPrompt; + private ChatCompletionsOptions _options; + private OpenAIClient _client; } \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat/Program.cs b/src/ai/.x/templates/openai-chat/Program.cs new file mode 100644 index 00000000..860d3d8a --- /dev/null +++ b/src/ai/.x/templates/openai-chat/Program.cs @@ -0,0 +1,31 @@ +<#@ template hostspecific="true" #> +<#@ output extension=".cs" encoding="utf-8" #> +<#@ parameter type="System.String" 
name="ClassName" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_ENDPOINT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_KEY" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_CHAT_DEPLOYMENT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_SYSTEM_PROMPT" #> +using System; + +public class Program +{ + public static void Main(string[] args) + { + var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? "<#= AZURE_OPENAI_ENDPOINT #>"; + var azureApiKey = Environment.GetEnvironmentVariable("AZURE_OPENAI_KEY") ?? "<#= AZURE_OPENAI_KEY #>"; + var deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_CHAT_DEPLOYMENT") ?? "<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>"; + var systemPrompt = Environment.GetEnvironmentVariable("AZURE_OPENAI_SYSTEM_PROMPT") ?? "<#= AZURE_OPENAI_SYSTEM_PROMPT #>"; + + var chat = new <#= ClassName #>(systemPrompt, endpoint, azureApiKey, deploymentName); + + while (true) + { + Console.Write("User: "); + var userPrompt = Console.ReadLine(); + if (string.IsNullOrEmpty(userPrompt) || userPrompt == "exit") break; + + var response = chat.GetChatCompletion(userPrompt); + Console.WriteLine($"\nAssistant: {response}\n"); + } + } +} \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat/_.json b/src/ai/.x/templates/openai-chat/_.json index a27cd1e1..1fb11394 100644 --- a/src/ai/.x/templates/openai-chat/_.json +++ b/src/ai/.x/templates/openai-chat/_.json @@ -1,9 +1,9 @@ { "_Name": "OpenAI Chat Completions in C#", "_Language": "C#", - "ClassName": "OpenAIHelloWorldClass", - "OPENAI_ENDPOINT": "", - "OPENAI_API_KEY": "", + "ClassName": "OpenAIChatCompletionsClass", + "AZURE_OPENAI_ENDPOINT": "", + "AZURE_OPENAI_KEY": "", "AZURE_OPENAI_CHAT_DEPLOYMENT": "", "AZURE_OPENAI_SYSTEM_PROMPT": "You are a helpful AI assistant." 
} \ No newline at end of file diff --git a/src/ai/.x/templates/openai-functions-streaming-cs/FunctionCallContext.cs b/src/ai/.x/templates/openai-functions-streaming-cs/FunctionCallContext.cs new file mode 100644 index 00000000..0b5031ce --- /dev/null +++ b/src/ai/.x/templates/openai-functions-streaming-cs/FunctionCallContext.cs @@ -0,0 +1,59 @@ +using Azure.AI.OpenAI; +using System; +using System.Collections.Generic; + +public class FunctionCallContext +{ + private FunctionFactory _functionFactory; + private IList _messages; + private string _functionName = ""; + private string _functionArguments = ""; + + public FunctionCallContext(FunctionFactory functionFactory, IList messages) + { + _functionFactory = functionFactory; + _messages = messages; + } + + + public bool CheckForUpdate(StreamingChatCompletionsUpdate update) + { + var updated = false; + + var name = update?.FunctionName; + if (name != null) + { + _functionName = name; + updated = true; + } + + var args = update?.FunctionArgumentsUpdate; + if (args != null) + { + _functionArguments += args; + updated = true; + } + + return updated; + } + + public string? 
TryCallFunction() + { + var ok = _functionFactory.TryCallFunction(_functionName, _functionArguments, out var result); + if (!ok) return null; + + Console.WriteLine($"\rassistant-function: {_functionName}({_functionArguments}) => {result}"); + Console.Write("\nAssistant: "); + + _messages.Add(new ChatRequestAssistantMessage("") { FunctionCall = new FunctionCall(_functionName, _functionArguments) }); + _messages.Add(new ChatRequestFunctionMessage(_functionName, result)); + + return result; + } + + public void Clear() + { + _functionName = ""; + _functionArguments = ""; + } +} \ No newline at end of file diff --git a/src/ai/.x/templates/openai-functions-streaming-cs/FunctionFactory.cs b/src/ai/.x/templates/openai-functions-streaming-cs/FunctionFactory.cs new file mode 100644 index 00000000..79b572e2 --- /dev/null +++ b/src/ai/.x/templates/openai-functions-streaming-cs/FunctionFactory.cs @@ -0,0 +1,384 @@ +using System.Reflection; +using Azure.AI.OpenAI; +using System.Collections; +using Newtonsoft.Json.Linq; +using Newtonsoft.Json; + +public class FunctionFactory +{ + public FunctionFactory() + { + } + + public FunctionFactory(Assembly assembly) + { + AddFunctions(assembly); + } + + public FunctionFactory(Type type1, params Type[] types) + { + AddFunctions(type1, types); + } + + public FunctionFactory(IEnumerable types) + { + AddFunctions(types); + } + + public FunctionFactory(Type type) + { + AddFunctions(type); + } + + public void AddFunctions(Assembly assembly) + { + AddFunctions(assembly.GetTypes()); + } + + public void AddFunctions(Type type1, params Type[] types) + { + AddFunctions(new List { type1 }); + AddFunctions(types); + } + + public void AddFunctions(IEnumerable types) + { + foreach (var type in types) + { + AddFunctions(type); + } + } + + public void AddFunctions(Type type) + { + var methods = type.GetMethods(BindingFlags.Static | BindingFlags.Public); + foreach (var method in methods) + { + AddFunction(method); + } + } + + public void 
AddFunction(MethodInfo method) + { + var attributes = method.GetCustomAttributes(typeof(HelperFunctionDescriptionAttribute), false); + if (attributes.Length > 0) + { + var funcDescriptionAttrib = attributes[0] as HelperFunctionDescriptionAttribute; + var funcDescription = funcDescriptionAttrib!.Description; + + string json = GetMethodParametersJsonSchema(method); + _functions.TryAdd(method, new FunctionDefinition(method.Name) + { + Description = funcDescription, + Parameters = new BinaryData(json) + }); + } + } + + public IEnumerable GetFunctionDefinitions() + { + return _functions.Values; + } + + public bool TryCallFunction(string functionName, string functionArguments, out string? result) + { + result = null; + if (!string.IsNullOrEmpty(functionName) && !string.IsNullOrEmpty(functionArguments)) + { + var function = _functions.FirstOrDefault(x => x.Value.Name == functionName); + if (function.Key != null) + { + result = CallFunction(function.Key, function.Value, functionArguments); + return true; + } + } + return false; + } + + // operator to add to FunctionFactories together + public static FunctionFactory operator +(FunctionFactory a, FunctionFactory b) + { + var newFactory = new FunctionFactory(); + a._functions.ToList().ForEach(x => newFactory._functions.Add(x.Key, x.Value)); + b._functions.ToList().ForEach(x => newFactory._functions.Add(x.Key, x.Value)); + return newFactory; + } + + private static string? 
CallFunction(MethodInfo methodInfo, FunctionDefinition functionDefinition, string argumentsAsJson) + { + var jObject = JObject.Parse(argumentsAsJson); + var arguments = new List(); + + var parameters = methodInfo.GetParameters(); + foreach (var parameter in parameters) + { + var parameterName = parameter.Name; + if (parameterName == null) continue; + + var parameterValue = jObject[parameterName]?.ToString(); + if (parameterValue == null) continue; + + var parsed = ParseParameterValue(parameterValue, parameter.ParameterType); + arguments.Add(parsed); + } + + var args = arguments.ToArray(); + var result = CallFunction(methodInfo, args); + return ConvertFunctionResultToString(result); + } + + private static object? CallFunction(MethodInfo methodInfo, object[] args) + { + var t = methodInfo.ReturnType; + return t == typeof(Task) + ? CallVoidAsyncFunction(methodInfo, args) + : t.IsGenericType && t.GetGenericTypeDefinition() == typeof(Task<>) + ? CallAsyncFunction(methodInfo, args) + : t.Name != "Void" + ? CallSyncFunction(methodInfo, args) + : CallVoidFunction(methodInfo, args); + } + + private static object? CallVoidAsyncFunction(MethodInfo methodInfo, object[] args) + { + var task = methodInfo.Invoke(null, args) as Task; + task!.Wait(); + return true; + } + + private static object? CallAsyncFunction(MethodInfo methodInfo, object[] args) + { + var task = methodInfo.Invoke(null, args) as Task; + task!.Wait(); + return task.GetType().GetProperty("Result")?.GetValue(task); + } + + private static object? CallSyncFunction(MethodInfo methodInfo, object[] args) + { + return methodInfo.Invoke(null, args); + } + + private static object? CallVoidFunction(MethodInfo methodInfo, object[] args) + { + methodInfo.Invoke(null, args); + return true; + } + + private static string? ConvertFunctionResultToString(object? 
result) + { + if (result is IEnumerable enumerable && !(result is string)) + { + var array = new JArray(); + foreach (var item in enumerable) + { + var str = item.ToString(); + array.Add(str); + } + return array.ToString(); + } + return result?.ToString(); + } + + private static object ParseParameterValue(string parameterValue, Type parameterType) + { + if (IsArrayType(parameterType)) + { + Type elementType = parameterType.GetElementType()!; + return CreateGenericCollectionFromJsonArray(parameterValue, typeof(Array), elementType); + } + + if (IsTuppleType(parameterType)) + { + Type elementType = parameterType.GetGenericArguments()[0]; + return CreateTuppleTypeFromJsonArray(parameterValue, elementType); + } + + if (IsGenericListOrEquivalentType(parameterType)) + { + Type elementType = parameterType.GetGenericArguments()[0]; + return CreateGenericCollectionFromJsonArray(parameterValue, typeof(List<>), elementType); + } + + switch (Type.GetTypeCode(parameterType)) + { + case TypeCode.Boolean: return bool.Parse(parameterValue!); + case TypeCode.Byte: return byte.Parse(parameterValue!); + case TypeCode.Decimal: return decimal.Parse(parameterValue!); + case TypeCode.Double: return double.Parse(parameterValue!); + case TypeCode.Single: return float.Parse(parameterValue!); + case TypeCode.Int16: return short.Parse(parameterValue!); + case TypeCode.Int32: return int.Parse(parameterValue!); + case TypeCode.Int64: return long.Parse(parameterValue!); + case TypeCode.SByte: return sbyte.Parse(parameterValue!); + case TypeCode.UInt16: return ushort.Parse(parameterValue!); + case TypeCode.UInt32: return uint.Parse(parameterValue!); + case TypeCode.UInt64: return ulong.Parse(parameterValue!); + case TypeCode.String: return parameterValue!; + default: return Convert.ChangeType(parameterValue!, parameterType); + } + } + + private static object CreateGenericCollectionFromJsonArray(string parameterValue, Type collectionType, Type elementType) + { + var array = 
JArray.Parse(parameterValue); + + if (collectionType == typeof(Array)) + { + var collection = Array.CreateInstance(elementType, array.Count); + for (int i = 0; i < array.Count; i++) + { + var parsed = ParseParameterValue(array[i].ToString(), elementType); + if (parsed != null) collection.SetValue(parsed, i); + } + return collection; + } + else if (collectionType == typeof(List<>)) + { + var collection = Activator.CreateInstance(collectionType.MakeGenericType(elementType)); + var list = collection as IList; + foreach (var item in array) + { + var parsed = ParseParameterValue(item.ToString(), elementType); + if (parsed != null) list!.Add(parsed); + } + return collection!; + } + + return array; + } + + private static object CreateTuppleTypeFromJsonArray(string parameterValue, Type elementType) + { + var list = new List(); + + var array = JArray.Parse(parameterValue); + foreach (var item in array) + { + var parsed = ParseParameterValue(item.ToString(), elementType); + if (parsed != null) list!.Add(parsed); + } + + var collection = list.Count() switch + { + 1 => Activator.CreateInstance(typeof(Tuple<>).MakeGenericType(elementType), list[0]), + 2 => Activator.CreateInstance(typeof(Tuple<,>).MakeGenericType(elementType, elementType), list[0], list[1]), + 3 => Activator.CreateInstance(typeof(Tuple<,,>).MakeGenericType(elementType, elementType, elementType), list[0], list[1], list[2]), + 4 => Activator.CreateInstance(typeof(Tuple<,,,>).MakeGenericType(elementType, elementType, elementType, elementType), list[0], list[1], list[2], list[3]), + 5 => Activator.CreateInstance(typeof(Tuple<,,,,>).MakeGenericType(elementType, elementType, elementType, elementType, elementType), list[0], list[1], list[2], list[3], list[4]), + 6 => Activator.CreateInstance(typeof(Tuple<,,,,,>).MakeGenericType(elementType, elementType, elementType, elementType, elementType, elementType), list[0], list[1], list[2], list[3], list[4], list[5]), + 7 => 
Activator.CreateInstance(typeof(Tuple<,,,,,,>).MakeGenericType(elementType, elementType, elementType, elementType, elementType, elementType, elementType), list[0], list[1], list[2], list[3], list[4], list[5], list[6]), + _ => throw new Exception("Tuples with more than 7 elements are not supported") + }; + return collection!; + } + + private static string GetMethodParametersJsonSchema(MethodInfo method) + { + var schema = new JObject(); + schema["type"] = "object"; + + var properties = new JObject(); + schema["properties"] = properties; + + var required = new JArray(); + foreach (var parameter in method.GetParameters()) + { + if (parameter.Name == null) continue; + + properties[parameter.Name] = GetJsonSchemaForParameterWithDescription(parameter); + if (!parameter.IsOptional) + { + required.Add(parameter.Name); + } + } + + schema["required"] = required; + + return schema.ToString(Formatting.None); + } + + private static JToken GetJsonSchemaForParameterWithDescription(ParameterInfo parameter) + { + var schema = GetJsonSchemaForType(parameter.ParameterType); + schema["description"] = GetParameterDescription(parameter); + return schema; + } + + private static string GetParameterDescription(ParameterInfo parameter) + { + var attributes = parameter.GetCustomAttributes(typeof(HelperFunctionParameterDescriptionAttribute), false); + var paramDescriptionAttrib = attributes.Length > 0 ? (attributes[0] as HelperFunctionParameterDescriptionAttribute) : null; + return paramDescriptionAttrib?.Description ?? $"The {parameter.Name} parameter"; + } + + private static JObject GetJsonSchemaForType(Type t) + { + return IsJsonArrayEquivalentType(t) + ? 
GetJsonArraySchemaFromType(t) + : GetJsonPrimativeSchemaFromType(t); + } + + private static JObject GetJsonArraySchemaFromType(Type containerType) + { + var schema = new JObject(); + schema["type"] = "array"; + schema["items"] = GetJsonArrayItemSchemaFromType(containerType); + return schema; + } + + private static JObject GetJsonArrayItemSchemaFromType(Type containerType) + { + var itemType = containerType.IsArray + ? containerType.GetElementType()! + : containerType.GetGenericArguments()[0]; + return GetJsonSchemaForType(itemType); + } + + private static JObject GetJsonPrimativeSchemaFromType(Type primativeType) + { + var schema = new JObject(); + schema["type"] = GetJsonTypeFromPrimitiveType(primativeType); + return schema; + } + + private static string GetJsonTypeFromPrimitiveType(Type primativeType) + { + return Type.GetTypeCode(primativeType) switch + { + TypeCode.Boolean => "boolean", + TypeCode.Byte or TypeCode.SByte or TypeCode.Int16 or TypeCode.Int32 or TypeCode.Int64 or + TypeCode.UInt16 or TypeCode.UInt32 or TypeCode.UInt64 => "integer", + TypeCode.Decimal or TypeCode.Double or TypeCode.Single => "number", + TypeCode.String => "string", + _ => "string" + }; + } + + private static bool IsJsonArrayEquivalentType(Type t) + { + return IsArrayType(t) || IsTuppleType(t) || IsGenericListOrEquivalentType(t); + } + + private static bool IsArrayType(Type t) + { + return t.IsArray; + } + + private static bool IsTuppleType(Type parameterType) + { + return parameterType.IsGenericType && parameterType.GetGenericTypeDefinition().Name.StartsWith("Tuple"); + } + + private static bool IsGenericListOrEquivalentType(Type t) + { + return t.IsGenericType && + (t.GetGenericTypeDefinition() == typeof(List<>) || + t.GetGenericTypeDefinition() == typeof(ICollection<>) || + t.GetGenericTypeDefinition() == typeof(IEnumerable<>) || + t.GetGenericTypeDefinition() == typeof(IList<>) || + t.GetGenericTypeDefinition() == typeof(IReadOnlyCollection<>) || + t.GetGenericTypeDefinition() == 
typeof(IReadOnlyList<>)); + } + + private Dictionary _functions = new(); +} diff --git a/src/ai/.x/templates/openai-functions-streaming-cs/HelperFunctionDescriptionAttribute.cs b/src/ai/.x/templates/openai-functions-streaming-cs/HelperFunctionDescriptionAttribute.cs new file mode 100644 index 00000000..c6678de8 --- /dev/null +++ b/src/ai/.x/templates/openai-functions-streaming-cs/HelperFunctionDescriptionAttribute.cs @@ -0,0 +1,13 @@ +public class HelperFunctionDescriptionAttribute : Attribute +{ + public HelperFunctionDescriptionAttribute() + { + } + + public HelperFunctionDescriptionAttribute(string description) + { + Description = description; + } + + public string? Description { get; set; } +} diff --git a/src/ai/.x/templates/openai-functions-streaming-cs/HelperFunctionParameterDescriptionAttribute.cs b/src/ai/.x/templates/openai-functions-streaming-cs/HelperFunctionParameterDescriptionAttribute.cs new file mode 100644 index 00000000..36e672a4 --- /dev/null +++ b/src/ai/.x/templates/openai-functions-streaming-cs/HelperFunctionParameterDescriptionAttribute.cs @@ -0,0 +1,13 @@ +public class HelperFunctionParameterDescriptionAttribute : Attribute +{ + public HelperFunctionParameterDescriptionAttribute() + { + } + + public HelperFunctionParameterDescriptionAttribute(string? description = null) + { + Description = description; + } + + public string? 
Description { get; set; } +} diff --git a/src/ai/.x/templates/openai-functions-streaming-cs/OpenAIChatCompletionsCustomFunctions.cs b/src/ai/.x/templates/openai-functions-streaming-cs/OpenAIChatCompletionsCustomFunctions.cs new file mode 100644 index 00000000..55b7daad --- /dev/null +++ b/src/ai/.x/templates/openai-functions-streaming-cs/OpenAIChatCompletionsCustomFunctions.cs @@ -0,0 +1,24 @@ +using System; + +public class OpenAIChatCompletionsCustomFunctions +{ + [HelperFunctionDescription("Gets the current weather for a location.")] + public static string GetCurrentWeather(string location) + { + return $"The weather in {location} is 72 degrees and sunny."; + } + + [HelperFunctionDescription("Gets the current date.")] + public static string GetCurrentDate() + { + var date = DateTime.Now; + return $"{date.Year}-{date.Month}-{date.Day}"; + } + + [HelperFunctionDescription("Gets the current time.")] + public static string GetCurrentTime() + { + var date = DateTime.Now; + return $"{date.Hour}:{date.Minute}:{date.Second}"; + } +} diff --git a/src/ai/.x/templates/openai-functions-streaming-cs/OpenAIChatCompletionsFunctionsStreaming.csproj._ b/src/ai/.x/templates/openai-functions-streaming-cs/OpenAIChatCompletionsFunctionsStreaming.csproj._ new file mode 100644 index 00000000..06e19a27 --- /dev/null +++ b/src/ai/.x/templates/openai-functions-streaming-cs/OpenAIChatCompletionsFunctionsStreaming.csproj._ @@ -0,0 +1,21 @@ +<#@ template hostspecific="true" #> +<#@ output extension=".csproj" encoding="utf-8" #> +<#@ parameter name="AICLIExtensionReferencePath" type="System.String" #> + + + + net7.0 + enable + enable + true + Exe + + + + + + + + + + \ No newline at end of file diff --git a/src/ai/.x/templates/openai-functions-streaming-cs/OpenAIChatCompletionsFunctionsStreamingClass.cs b/src/ai/.x/templates/openai-functions-streaming-cs/OpenAIChatCompletionsFunctionsStreamingClass.cs new file mode 100644 index 00000000..a0dad85d --- /dev/null +++ 
b/src/ai/.x/templates/openai-functions-streaming-cs/OpenAIChatCompletionsFunctionsStreamingClass.cs @@ -0,0 +1,83 @@ +<#@ template hostspecific="true" #> +<#@ output extension=".cs" encoding="utf-8" #> +<#@ parameter type="System.String" name="ClassName" #> +using Azure; +using Azure.AI.OpenAI; +using Azure.Identity; +using System; + +public class <#= ClassName #> +{ + public <#= ClassName #>(string systemPrompt, string endpoint, string azureApiKey, string deploymentName, FunctionFactory factory) + { + _systemPrompt = systemPrompt; + _functionFactory = factory; + + _client = string.IsNullOrEmpty(azureApiKey) + ? new OpenAIClient(new Uri(endpoint), new DefaultAzureCredential()) + : new OpenAIClient(new Uri(endpoint), new AzureKeyCredential(azureApiKey)); + + _options = new ChatCompletionsOptions(); + _options.DeploymentName = deploymentName; + + foreach (var function in _functionFactory.GetFunctionDefinitions()) + { + _options.Functions.Add(function); + // _options.Tools.Add(new ChatCompletionsFunctionToolDefinition(function)); + } + + _functionCallContext = new(_functionFactory, _options.Messages); + ClearConversation(); + } + + public void ClearConversation() + { + _options.Messages.Clear(); + _options.Messages.Add(new ChatRequestSystemMessage(_systemPrompt)); + } + + public async Task GetChatCompletionsStreamingAsync(string userPrompt, Action? 
callback = null) + { + _options.Messages.Add(new ChatRequestUserMessage(userPrompt)); + + var responseContent = string.Empty; + while (true) + { + var response = await _client.GetChatCompletionsStreamingAsync(_options); + await foreach (var update in response.EnumerateValues()) + { + _functionCallContext.CheckForUpdate(update); + + var content = update.ContentUpdate; + if (update.FinishReason == CompletionsFinishReason.ContentFiltered) + { + content = $"{content}\nWARNING: Content filtered!"; + } + else if (update.FinishReason == CompletionsFinishReason.TokenLimitReached) + { + content = $"{content}\nERROR: Exceeded token limit!"; + } + + if (string.IsNullOrEmpty(content)) continue; + + if (callback != null) callback(update); + responseContent += content; + } + + if (_functionCallContext.TryCallFunction() != null) + { + _functionCallContext.Clear(); + continue; + } + + _options.Messages.Add(new ChatRequestAssistantMessage(responseContent)); + return responseContent; + } + } + + private string _systemPrompt; + private FunctionFactory _functionFactory; + private FunctionCallContext _functionCallContext; + private ChatCompletionsOptions _options; + private OpenAIClient _client; +} diff --git a/src/ai/.x/templates/openai-functions-streaming-cs/Program.cs b/src/ai/.x/templates/openai-functions-streaming-cs/Program.cs new file mode 100644 index 00000000..55eda1ac --- /dev/null +++ b/src/ai/.x/templates/openai-functions-streaming-cs/Program.cs @@ -0,0 +1,37 @@ +<#@ template hostspecific="true" #> +<#@ output extension=".cs" encoding="utf-8" #> +<#@ parameter type="System.String" name="ClassName" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_ENDPOINT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_KEY" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_CHAT_DEPLOYMENT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_SYSTEM_PROMPT" #> +using System; + +public class Program +{ + public static async Task Main(string[] args) + { + var 
endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? "<#= AZURE_OPENAI_ENDPOINT #>"; + var azureApiKey = Environment.GetEnvironmentVariable("AZURE_OPENAI_KEY") ?? "<#= AZURE_OPENAI_KEY #>"; + var deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_CHAT_DEPLOYMENT") ?? "<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>"; + var systemPrompt = Environment.GetEnvironmentVariable("AZURE_OPENAI_SYSTEM_PROMPT") ?? "<#= AZURE_OPENAI_SYSTEM_PROMPT #>"; + + var factory = new FunctionFactory(); + factory.AddFunctions(typeof(OpenAIChatCompletionsCustomFunctions)); + + var chat = new <#= ClassName #>(systemPrompt, endpoint, azureApiKey, deploymentName, factory); + + while (true) + { + Console.Write("User: "); + var userPrompt = Console.ReadLine(); + if (string.IsNullOrEmpty(userPrompt) || userPrompt == "exit") break; + + Console.Write("\nAssistant: "); + var response = await chat.GetChatCompletionsStreamingAsync(userPrompt, update => + Console.Write(update.ContentUpdate) + ); + Console.WriteLine("\n"); + } + } +} \ No newline at end of file diff --git a/src/ai/.x/templates/openai-functions-streaming-cs/_.json b/src/ai/.x/templates/openai-functions-streaming-cs/_.json new file mode 100644 index 00000000..7cead786 --- /dev/null +++ b/src/ai/.x/templates/openai-functions-streaming-cs/_.json @@ -0,0 +1,9 @@ +{ + "_Name": "OpenAI Chat Completions (Functions) in C#", + "_Language": "C#", + "ClassName": "OpenAIChatCompletionsFunctionsStreamingClass", + "AZURE_OPENAI_ENDPOINT": "", + "AZURE_OPENAI_KEY": "", + "AZURE_OPENAI_CHAT_DEPLOYMENT": "", + "AZURE_OPENAI_SYSTEM_PROMPT": "You are a helpful AI assistant." 
+} \ No newline at end of file diff --git a/src/ai/.x/templates/openai-functions-streaming-js/Main.js b/src/ai/.x/templates/openai-functions-streaming-js/Main.js index 786a26af..55a5c456 100644 --- a/src/ai/.x/templates/openai-functions-streaming-js/Main.js +++ b/src/ai/.x/templates/openai-functions-streaming-js/Main.js @@ -1,14 +1,12 @@ <#@ template hostspecific="true" #> <#@ output extension=".js" encoding="utf-8" #> -<#@ parameter type="System.String" name="OPENAI_ENDPOINT" #> -<#@ parameter type="System.String" name="OPENAI_API_KEY" #> +<#@ parameter type="System.String" name="ClassName" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_ENDPOINT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_KEY" #> <#@ parameter type="System.String" name="AZURE_OPENAI_CHAT_DEPLOYMENT" #> <#@ parameter type="System.String" name="AZURE_OPENAI_SYSTEM_PROMPT" #> -const customFunctions = require("./ChatCompletionsCustomFunctions"); -const { getCurrentWeatherSchema, getCurrentWeather } = customFunctions; -const { getCurrentDateSchema, getCurrentDate } = customFunctions; -const { FunctionFactory } = require("./FunctionFactory"); -const { ChatCompletionsFunctionsStreaming } = require("./ChatCompletionsFunctionsStreaming"); +const { factory } = require("./OpenAIChatCompletionsCustomFunctions"); +const { <#= ClassName #> } = require("./OpenAIChatCompletionsFunctionsStreamingClass"); const readline = require('readline'); const rl = readline.createInterface({ @@ -18,23 +16,19 @@ const rl = readline.createInterface({ async function main() { - let factory = new FunctionFactory(); - factory.addFunction(getCurrentWeatherSchema, getCurrentWeather); - factory.addFunction(getCurrentDateSchema, getCurrentDate); - - const endpoint = process.env["OPENAI_ENDPOINT"] || "<#= OPENAI_ENDPOINT #>"; - const azureApiKey = process.env["OPENAI_API_KEY"] || "<#= OPENAI_API_KEY #>"; + const endpoint = process.env["AZURE_OPENAI_ENDPOINT"] || "<#= AZURE_OPENAI_ENDPOINT #>"; + const 
azureApiKey = process.env["AZURE_OPENAI_KEY"] || "<#= AZURE_OPENAI_KEY #>"; const deploymentName = process.env["AZURE_OPENAI_CHAT_DEPLOYMENT"] || "<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>" ; const systemPrompt = process.env["AZURE_OPENAI_SYSTEM_PROMPT"] || "<#= AZURE_OPENAI_SYSTEM_PROMPT #>" ; - const streamingChatCompletions = new ChatCompletionsFunctionsStreaming(systemPrompt, endpoint, azureApiKey, deploymentName, factory); + const chat = new <#= ClassName #>(systemPrompt, endpoint, azureApiKey, deploymentName, factory); while (true) { const input = await new Promise(resolve => rl.question('User: ', resolve)); if (input === 'exit' || input === '') break; - let response = await streamingChatCompletions.getChatCompletions(input, (content) => { + let response = await chat.getChatCompletions(input, (content) => { console.log(`assistant-streaming: ${content}`); }); diff --git a/src/ai/.x/templates/openai-functions-streaming-js/ChatCompletionsCustomFunctions.js b/src/ai/.x/templates/openai-functions-streaming-js/OpenAIChatCompletionsCustomFunctions.js similarity index 60% rename from src/ai/.x/templates/openai-functions-streaming-js/ChatCompletionsCustomFunctions.js rename to src/ai/.x/templates/openai-functions-streaming-js/OpenAIChatCompletionsCustomFunctions.js index ad3c3e8d..15ed3234 100644 --- a/src/ai/.x/templates/openai-functions-streaming-js/ChatCompletionsCustomFunctions.js +++ b/src/ai/.x/templates/openai-functions-streaming-js/OpenAIChatCompletionsCustomFunctions.js @@ -1,3 +1,6 @@ +const { FunctionFactory } = require("./FunctionFactory"); +let factory = new FunctionFactory(); + function getCurrentWeather(function_arguments) { const location = JSON.parse(function_arguments).location; return `The weather in ${location} is 72 degrees and sunny.`; @@ -22,6 +25,8 @@ const getCurrentWeatherSchema = { }, }; +factory.addFunction(getCurrentWeatherSchema, getCurrentWeather); + function getCurrentDate() { const date = new Date(); return 
`${date.getFullYear()}-${date.getMonth() + 1}-${date.getDate()}`; @@ -36,7 +41,22 @@ const getCurrentDateSchema = { }, }; -exports.getCurrentWeather = getCurrentWeather; -exports.getCurrentWeatherSchema = getCurrentWeatherSchema; -exports.getCurrentDate = getCurrentDate; -exports.getCurrentDateSchema = getCurrentDateSchema; +factory.addFunction(getCurrentDateSchema, getCurrentDate); + +function getCurrentTime() { + const date = new Date(); + return `${date.getHours()}:${date.getMinutes()}:${date.getSeconds()}`; +} + +const getCurrentTimeSchema = { + name: "get_current_time", + description: "Get the current time", + parameters: { + type: "object", + properties: {}, + }, +}; + +factory.addFunction(getCurrentTimeSchema, getCurrentTime); + +exports.factory = factory; \ No newline at end of file diff --git a/src/ai/.x/templates/openai-webpage/src/ChatCompletionsFunctionsStreaming.js b/src/ai/.x/templates/openai-functions-streaming-js/OpenAIChatCompletionsFunctionsStreamingClass.js similarity index 79% rename from src/ai/.x/templates/openai-webpage/src/ChatCompletionsFunctionsStreaming.js rename to src/ai/.x/templates/openai-functions-streaming-js/OpenAIChatCompletionsFunctionsStreamingClass.js index b379f063..776edd0a 100644 --- a/src/ai/.x/templates/openai-webpage/src/ChatCompletionsFunctionsStreaming.js +++ b/src/ai/.x/templates/openai-functions-streaming-js/OpenAIChatCompletionsFunctionsStreamingClass.js @@ -1,15 +1,15 @@ +<#@ template hostspecific="true" #> +<#@ output extension=".js" encoding="utf-8" #> +<#@ parameter type="System.String" name="ClassName" #> const { OpenAIClient, AzureKeyCredential } = require("@azure/openai"); -const { FunctionFactory } = require("./FunctionFactory"); const { FunctionCallContext } = require("./FunctionCallContext"); -class ChatCompletionsFunctionsStreaming { +class <#= ClassName #> { constructor(systemPrompt, endpoint, azureApiKey, deploymentName, functionFactory) { this.systemPrompt = systemPrompt; - this.endpoint = endpoint; - 
this.azureApiKey = azureApiKey; this.deploymentName = deploymentName; - this.client = new OpenAIClient(this.endpoint, new AzureKeyCredential(this.azureApiKey)); - this.functionFactory = functionFactory || new FunctionFactory(); + this.client = new OpenAIClient(endpoint, new AzureKeyCredential(azureApiKey)); + this.functionFactory = functionFactory; this.clearConversation(); } @@ -23,7 +23,7 @@ class ChatCompletionsFunctionsStreaming { async getChatCompletions(userInput, callback) { this.messages.push({ role: 'user', content: userInput }); - let contentComplete = ""; + let contentComplete = ''; while (true) { const events = this.client.listChatCompletions(this.deploymentName, this.messages, { functions: this.functionFactory.getFunctionSchemas(), @@ -58,4 +58,4 @@ class ChatCompletionsFunctionsStreaming { } } -exports.ChatCompletionsFunctionsStreaming = ChatCompletionsFunctionsStreaming; \ No newline at end of file +exports.<#= ClassName #> = <#= ClassName #>; \ No newline at end of file diff --git a/src/ai/.x/templates/openai-functions-streaming-js/_.json b/src/ai/.x/templates/openai-functions-streaming-js/_.json index 7223f71b..0786c0a9 100644 --- a/src/ai/.x/templates/openai-functions-streaming-js/_.json +++ b/src/ai/.x/templates/openai-functions-streaming-js/_.json @@ -1,8 +1,9 @@ { "_Name": "OpenAI Chat Completions (Functions) in JavaScript", "_Language": "JavaScript", - "OPENAI_ENDPOINT": "", - "OPENAI_API_KEY": "", + "ClassName": "OpenAIChatCompletionsFunctionsStreamingClass", + "AZURE_OPENAI_ENDPOINT": "", + "AZURE_OPENAI_KEY": "", "AZURE_OPENAI_CHAT_DEPLOYMENT": "", "AZURE_OPENAI_SYSTEM_PROMPT": "You are a helpful AI assistant." 
} \ No newline at end of file diff --git a/src/ai/.x/templates/openai-functions-streaming-py/_.json b/src/ai/.x/templates/openai-functions-streaming-py/_.json new file mode 100644 index 00000000..5325ff87 --- /dev/null +++ b/src/ai/.x/templates/openai-functions-streaming-py/_.json @@ -0,0 +1,9 @@ +{ + "_Name": "OpenAI Chat Completions (Functions) in Python", + "_Language": "Python", + "AZURE_OPENAI_ENDPOINT": "", + "AZURE_OPENAI_KEY": "", + "AZURE_OPENAI_API_VERSION": "2023-12-01-preview", + "AZURE_OPENAI_CHAT_DEPLOYMENT": "", + "AZURE_OPENAI_SYSTEM_PROMPT": "You are a helpful AI assistant." +} \ No newline at end of file diff --git a/src/ai/.x/templates/openai-functions-streaming-py/chat_completions_custom_functions.py b/src/ai/.x/templates/openai-functions-streaming-py/chat_completions_custom_functions.py new file mode 100644 index 00000000..91283bdb --- /dev/null +++ b/src/ai/.x/templates/openai-functions-streaming-py/chat_completions_custom_functions.py @@ -0,0 +1,66 @@ +from function_factory import FunctionFactory +factory = FunctionFactory() + +def ignore_args_decorator(func): + def wrapper(*args, **kwargs): + return func() + return wrapper + +@ignore_args_decorator +def get_current_date(): + from datetime import date + today = date.today() + return f'{today.year}-{today.month}-{today.day}' + +get_current_date_schema = { + 'name': 'get_current_date', + 'description': 'Get the current date', + 'parameters': { + 'type': 'object', + 'properties': {}, + }, +} + +factory.add_function(get_current_date_schema, get_current_date) + +@ignore_args_decorator +def get_current_time(): + from datetime import datetime + now = datetime.now() + return f'{now.hour}:{now.minute}' + +get_current_time_schema = { + 'name': 'get_current_time', + 'description': 'Get the current time', + 'parameters': { + 'type': 'object', + 'properties': {}, + }, +} + +factory.add_function(get_current_time_schema, get_current_time) + +def get_current_weather(function_arguments): + location = 
function_arguments.get('location') + return f'The weather in {location} is 72 degrees and sunny.' + +get_current_weather_schema = { + 'name': 'get_current_weather', + 'description': 'Get the current weather in a given location', + 'parameters': { + 'type': 'object', + 'properties': { + 'location': { + 'type': 'string', + 'description': 'The city and state, e.g. San Francisco, CA', + }, + 'unit': { + 'type': 'string', + 'enum': ['celsius', 'fahrenheit'], + }, + }, + 'required': ['location'], + }, +} + +factory.add_function(get_current_weather_schema, get_current_weather) diff --git a/src/ai/.x/templates/openai-functions-streaming-py/chat_completions_functions_streaming.py b/src/ai/.x/templates/openai-functions-streaming-py/chat_completions_functions_streaming.py new file mode 100644 index 00000000..09bbb8bf --- /dev/null +++ b/src/ai/.x/templates/openai-functions-streaming-py/chat_completions_functions_streaming.py @@ -0,0 +1,61 @@ +from openai import AzureOpenAI +from function_call_context import FunctionCallContext + +class ChatCompletionsFunctionsStreaming: + def __init__(self, system_prompt, endpoint, azure_api_key, azure_api_version, deployment_name, function_factory): + self.system_prompt = system_prompt + self.endpoint = endpoint + self.azure_api_key = azure_api_key + self.azure_api_version = azure_api_version + self.deployment_name = deployment_name + self.function_factory = function_factory + self.client = AzureOpenAI( + api_key=self.azure_api_key, + api_version=self.azure_api_version, + azure_endpoint = endpoint + ) + self.clear_conversation() + + def clear_conversation(self): + self.messages = [ + {'role': 'system', 'content': self.system_prompt} + ] + self.function_call_context = FunctionCallContext(self.function_factory, self.messages) + + def get_chat_completions(self, user_input, callback): + self.messages.append({'role': 'user', 'content': user_input}) + + complete_content = "" + functions = self.function_factory.get_function_schemas() + + while 
True: + response = self.client.chat.completions.create( + model=self.deployment_name, + messages=self.messages, + stream=True, + functions=functions, + function_call="auto") + + for chunk in response: + + choice0 = chunk.choices[0] if hasattr(chunk, 'choices') and chunk.choices else None + self.function_call_context.check_for_update(choice0) + + delta = choice0.delta if choice0 and hasattr(choice0, 'delta') else None + content = delta.content if delta and hasattr(delta, 'content') else "" + if content is None: continue + + if content is not None: + callback(content) + complete_content += content + + finish_reason = choice0.finish_reason if choice0 and hasattr(choice0, 'finish_reason') else None + if finish_reason == "length": + content += f"{content}\nERROR: Exceeded max token length!" + + if self.function_call_context.try_call_function() is not None: + self.function_call_context.clear() + continue + + self.messages.append({"role": "assistant", "content": complete_content}) + return complete_content diff --git a/src/ai/.x/templates/openai-functions-streaming-py/function_call_context.py b/src/ai/.x/templates/openai-functions-streaming-py/function_call_context.py new file mode 100644 index 00000000..5948fbfa --- /dev/null +++ b/src/ai/.x/templates/openai-functions-streaming-py/function_call_context.py @@ -0,0 +1,42 @@ +import json +import logging + +class FunctionCallContext: + def __init__(self, function_factory, messages): + self.function_factory = function_factory + self.messages = messages + self.function_name = '' + self.function_arguments = '' + + def check_for_update(self, choice): + updated = False + + delta = choice.delta if choice and hasattr(choice, 'delta') else {} + name = delta.function_call.name if delta and hasattr(delta, 'function_call') and delta.function_call and hasattr(delta.function_call, 'name') else None + if name is not None: + self.function_name = name + updated = True + + args = delta.function_call.arguments if delta and hasattr(delta, 
'function_call') and delta.function_call and hasattr(delta.function_call, 'arguments') else None + if args is not None: + self.function_arguments = f'{self.function_arguments}{args}' + updated = True + + return updated + + def try_call_function(self): + + dict = json.loads(self.function_arguments) if self.function_arguments != '' else None + if dict is None: return None + + result = self.function_factory.try_call_function(self.function_name, dict) + if result is None: return None + + self.messages.append({'role': 'assistant', 'content': None, 'function_call': {'name': self.function_name, 'arguments': self.function_arguments}}) + self.messages.append({'role': 'function', 'content': result, 'name': self.function_name}) + + return result + + def clear(self): + self.function_name = '' + self.function_arguments = '' \ No newline at end of file diff --git a/src/ai/.x/templates/openai-functions-streaming-py/function_factory.py b/src/ai/.x/templates/openai-functions-streaming-py/function_factory.py new file mode 100644 index 00000000..50ffb085 --- /dev/null +++ b/src/ai/.x/templates/openai-functions-streaming-py/function_factory.py @@ -0,0 +1,16 @@ +class FunctionFactory: + def __init__(self): + self.functions = {} + + def add_function(self, schema, func): + self.functions[schema['name']] = {'schema': schema, 'function': func} + + def get_function_schemas(self): + return [value['schema'] for value in self.functions.values()] + + def try_call_function(self, function_name, function_arguments): + function_info = self.functions.get(function_name) + if function_info is None: + return None + + return function_info['function'](function_arguments) \ No newline at end of file diff --git a/src/ai/.x/templates/openai-functions-streaming-py/main.py b/src/ai/.x/templates/openai-functions-streaming-py/main.py new file mode 100644 index 00000000..ce299d78 --- /dev/null +++ b/src/ai/.x/templates/openai-functions-streaming-py/main.py @@ -0,0 +1,34 @@ +<#@ template hostspecific="true" #> 
+<#@ output extension=".py" encoding="utf-8" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_ENDPOINT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_KEY" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_API_VERSION" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_CHAT_DEPLOYMENT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_SYSTEM_PROMPT" #> +from chat_completions_custom_functions import factory +from chat_completions_functions_streaming import ChatCompletionsFunctionsStreaming +import os + +def main(): + azure_api_key = os.getenv('AZURE_OPENAI_KEY', '<#= AZURE_OPENAI_KEY #>') + endpoint = os.getenv('AZURE_OPENAI_ENDPOINT', '<#= AZURE_OPENAI_ENDPOINT #>') + api_version = os.getenv("AZURE_OPENAI_API_VERSION") or "<#= AZURE_OPENAI_API_VERSION #>" + deployment_name = os.getenv('AZURE_OPENAI_CHAT_DEPLOYMENT', '<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>') + system_prompt = os.getenv('AZURE_OPENAI_SYSTEM_PROMPT', '<#= AZURE_OPENAI_SYSTEM_PROMPT #>') + + chat = ChatCompletionsFunctionsStreaming(system_prompt, endpoint, azure_api_key, api_version, deployment_name, factory) + + while True: + user_input = input('User: ') + if user_input == 'exit' or user_input == '': + break + + print("\nAssistant: ", end="") + response = chat.get_chat_completions(user_input, lambda content: print(content, end="")) + print("\n") + +if __name__ == '__main__': + try: + main() + except Exception as e: + print(f'The sample encountered an error: {e}') \ No newline at end of file diff --git a/src/ai/.x/templates/openai-functions-streaming-py/requirements.txt b/src/ai/.x/templates/openai-functions-streaming-py/requirements.txt new file mode 100644 index 00000000..7a06be70 --- /dev/null +++ b/src/ai/.x/templates/openai-functions-streaming-py/requirements.txt @@ -0,0 +1 @@ +openai==1.0.0 diff --git a/src/ai/.x/templates/openai-webpage/.env b/src/ai/.x/templates/openai-webpage/.env index 191f56b3..bd323058 100644 --- 
a/src/ai/.x/templates/openai-webpage/.env +++ b/src/ai/.x/templates/openai-webpage/.env @@ -1,10 +1,10 @@ <#@ template hostspecific="true" #> <#@ output extension=".env" encoding="utf-8" #> -<#@ parameter type="System.String" name="OPENAI_ENDPOINT" #> -<#@ parameter type="System.String" name="OPENAI_API_KEY" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_ENDPOINT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_KEY" #> <#@ parameter type="System.String" name="AZURE_OPENAI_CHAT_DEPLOYMENT" #> <#@ parameter type="System.String" name="AZURE_OPENAI_SYSTEM_PROMPT" #> AZURE_OPENAI_CHAT_DEPLOYMENT=<#= AZURE_OPENAI_CHAT_DEPLOYMENT #> -OPENAI_API_KEY=<#= OPENAI_API_KEY #> -OPENAI_ENDPOINT=<#= OPENAI_ENDPOINT #> +AZURE_OPENAI_KEY=<#= AZURE_OPENAI_KEY #> +AZURE_OPENAI_ENDPOINT=<#= AZURE_OPENAI_ENDPOINT #> AZURE_OPENAI_SYSTEM_PROMPT=<#= AZURE_OPENAI_SYSTEM_PROMPT #> diff --git a/src/ai/.x/templates/openai-webpage/_.json b/src/ai/.x/templates/openai-webpage/_.json index b996236c..de5efd3d 100644 --- a/src/ai/.x/templates/openai-webpage/_.json +++ b/src/ai/.x/templates/openai-webpage/_.json @@ -1,8 +1,9 @@ { "_Name": "OpenAI Webpage (Streaming + Functions)", "_Language": "Javascript", - "OPENAI_ENDPOINT": "", - "OPENAI_API_KEY": "", + "ClassName": "OpenAIChatCompletionsFunctionsStreamingClass", + "AZURE_OPENAI_ENDPOINT": "", + "AZURE_OPENAI_KEY": "", "AZURE_OPENAI_CHAT_DEPLOYMENT": "", "AZURE_OPENAI_SYSTEM_PROMPT": "You are a helpful AI assistant." 
} \ No newline at end of file diff --git a/src/ai/.x/templates/openai-webpage/src/ChatCompletionsCustomFunctions.js b/src/ai/.x/templates/openai-webpage/src/ChatCompletionsCustomFunctions.js deleted file mode 100644 index 1776c03e..00000000 --- a/src/ai/.x/templates/openai-webpage/src/ChatCompletionsCustomFunctions.js +++ /dev/null @@ -1,16 +0,0 @@ -function getCurrentDate() { - const date = new Date(); - return `${date.getFullYear()}-${date.getMonth() + 1}-${date.getDate()}`; -} - -const getCurrentDateSchema = { - name: "get_current_date", - description: "Get the current date", - parameters: { - type: "object", - properties: {}, - }, -}; - -exports.getCurrentDate = getCurrentDate; -exports.getCurrentDateSchema = getCurrentDateSchema; diff --git a/src/ai/.x/templates/openai-webpage/src/OpenAIChatCompletionsCustomFunctions.js b/src/ai/.x/templates/openai-webpage/src/OpenAIChatCompletionsCustomFunctions.js new file mode 100644 index 00000000..15ed3234 --- /dev/null +++ b/src/ai/.x/templates/openai-webpage/src/OpenAIChatCompletionsCustomFunctions.js @@ -0,0 +1,62 @@ +const { FunctionFactory } = require("./FunctionFactory"); +let factory = new FunctionFactory(); + +function getCurrentWeather(function_arguments) { + const location = JSON.parse(function_arguments).location; + return `The weather in ${location} is 72 degrees and sunny.`; + }; + +const getCurrentWeatherSchema = { + name: "get_current_weather", + description: "Get the current weather in a given location", + parameters: { + type: "object", + properties: { + location: { + type: "string", + description: "The city and state, e.g. 
San Francisco, CA", + }, + unit: { + type: "string", + enum: ["celsius", "fahrenheit"], + }, + }, + required: ["location"], + }, +}; + +factory.addFunction(getCurrentWeatherSchema, getCurrentWeather); + +function getCurrentDate() { + const date = new Date(); + return `${date.getFullYear()}-${date.getMonth() + 1}-${date.getDate()}`; +} + +const getCurrentDateSchema = { + name: "get_current_date", + description: "Get the current date", + parameters: { + type: "object", + properties: {}, + }, +}; + +factory.addFunction(getCurrentDateSchema, getCurrentDate); + +function getCurrentTime() { + const date = new Date(); + return `${date.getHours()}:${date.getMinutes()}:${date.getSeconds()}`; +} + +const getCurrentTimeSchema = { + name: "get_current_time", + description: "Get the current time", + parameters: { + type: "object", + properties: {}, + }, +}; + +factory.addFunction(getCurrentTimeSchema, getCurrentTime); + +exports.factory = factory; \ No newline at end of file diff --git a/src/ai/.x/templates/openai-functions-streaming-js/ChatCompletionsFunctionsStreaming.js b/src/ai/.x/templates/openai-webpage/src/OpenAIChatCompletionsFunctionsStreamingClass.js similarity index 79% rename from src/ai/.x/templates/openai-functions-streaming-js/ChatCompletionsFunctionsStreaming.js rename to src/ai/.x/templates/openai-webpage/src/OpenAIChatCompletionsFunctionsStreamingClass.js index b379f063..776edd0a 100644 --- a/src/ai/.x/templates/openai-functions-streaming-js/ChatCompletionsFunctionsStreaming.js +++ b/src/ai/.x/templates/openai-webpage/src/OpenAIChatCompletionsFunctionsStreamingClass.js @@ -1,15 +1,15 @@ +<#@ template hostspecific="true" #> +<#@ output extension=".js" encoding="utf-8" #> +<#@ parameter type="System.String" name="ClassName" #> const { OpenAIClient, AzureKeyCredential } = require("@azure/openai"); -const { FunctionFactory } = require("./FunctionFactory"); const { FunctionCallContext } = require("./FunctionCallContext"); -class ChatCompletionsFunctionsStreaming { 
+class <#= ClassName #> { constructor(systemPrompt, endpoint, azureApiKey, deploymentName, functionFactory) { this.systemPrompt = systemPrompt; - this.endpoint = endpoint; - this.azureApiKey = azureApiKey; this.deploymentName = deploymentName; - this.client = new OpenAIClient(this.endpoint, new AzureKeyCredential(this.azureApiKey)); - this.functionFactory = functionFactory || new FunctionFactory(); + this.client = new OpenAIClient(endpoint, new AzureKeyCredential(azureApiKey)); + this.functionFactory = functionFactory; this.clearConversation(); } @@ -23,7 +23,7 @@ class ChatCompletionsFunctionsStreaming { async getChatCompletions(userInput, callback) { this.messages.push({ role: 'user', content: userInput }); - let contentComplete = ""; + let contentComplete = ''; while (true) { const events = this.client.listChatCompletions(this.deploymentName, this.messages, { functions: this.functionFactory.getFunctionSchemas(), @@ -58,4 +58,4 @@ class ChatCompletionsFunctionsStreaming { } } -exports.ChatCompletionsFunctionsStreaming = ChatCompletionsFunctionsStreaming; \ No newline at end of file +exports.<#= ClassName #> = <#= ClassName #>; \ No newline at end of file diff --git a/src/ai/.x/templates/openai-webpage/src/script.js b/src/ai/.x/templates/openai-webpage/src/script.js index 477ce271..962bceef 100644 --- a/src/ai/.x/templates/openai-webpage/src/script.js +++ b/src/ai/.x/templates/openai-webpage/src/script.js @@ -1,34 +1,36 @@ +<#@ template hostspecific="true" #> +<#@ output extension=".js" encoding="utf-8" #> +<#@ parameter type="System.String" name="ClassName" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_ENDPOINT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_KEY" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_CHAT_DEPLOYMENT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_SYSTEM_PROMPT" #> const marked = require("marked"); const hljs = require("highlight.js"); -const customFunctions = 
require("./ChatCompletionsCustomFunctions"); -const { getCurrentDateSchema, getCurrentDate } = customFunctions; -const { FunctionFactory } = require("./FunctionFactory"); +const { factory } = require("./OpenAIChatCompletionsCustomFunctions"); -const { ChatCompletionsFunctionsStreaming } = require('./ChatCompletionsFunctionsStreaming'); +const { <#= ClassName #> } = require('./OpenAIChatCompletionsFunctionsStreamingClass'); let streamingChatCompletions; function streamingChatCompletionsInit() { - let factory = new FunctionFactory(); - factory.addFunction(getCurrentDateSchema, getCurrentDate); - - const endpoint = process.env.OPENAI_ENDPOINT; - const azureApiKey = process.env.OPENAI_API_KEY; - const deploymentName = process.env.AZURE_OPENAI_CHAT_DEPLOYMENT; - const systemPrompt = "You are a helpful AI assistant."; + const endpoint = process.env.AZURE_OPENAI_ENDPOINT || "<#= AZURE_OPENAI_ENDPOINT #>"; + const azureApiKey = process.env.AZURE_OPENAI_KEY || "<#= AZURE_OPENAI_KEY #>"; + const deploymentName = process.env.AZURE_OPENAI_CHAT_DEPLOYMENT || "<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>" ; + const systemPrompt = process.env.AZURE_OPENAI_SYSTEM_PROMPT || "<#= AZURE_OPENAI_SYSTEM_PROMPT #>"; if (!endpoint || endpoint.startsWith('(systemPrompt, endpoint, azureApiKey, deploymentName, factory); } function streamingChatCompletionsClear() { diff --git a/src/ai/helpers/config_environment_helpers.cs b/src/ai/helpers/config_environment_helpers.cs index e89529e9..f2d0efe1 100644 --- a/src/ai/helpers/config_environment_helpers.cs +++ b/src/ai/helpers/config_environment_helpers.cs @@ -22,6 +22,10 @@ public static Dictionary GetEnvironment(INamedValues values) env.Add("AZURE_AI_PROJECT_NAME", ReadConfig(values, "project")); env.Add("AZURE_AI_RESOURCE_NAME", ReadConfig(values, "resource")); + env.Add("AZURE_OPENAI_ENDPOINT", ReadConfig(values, "chat.endpoint")); + env.Add("AZURE_OPENAI_KEY", ReadConfig(values, "chat.key")); + env.Add("AZURE_OPENAI_API_VERSION", 
ChatCommand.GetOpenAIClientVersionNumber()); + env.Add("AZURE_OPENAI_CHAT_DEPLOYMENT", ReadConfig(values, "chat.deployment")); env.Add("AZURE_OPENAI_EVALUATION_DEPLOYMENT", ReadConfig(values, "chat.evaluation.model.deployment.name") ?? ReadConfig(values, "chat.deployment")); env.Add("AZURE_OPENAI_EMBEDDING_DEPLOYMENT", ReadConfig(values, "search.embedding.model.deployment.name")); From 2c576e2fa4aa9d364e9a3da056943cf9df300855 Mon Sep 17 00:00:00 2001 From: Rob Chambers Date: Thu, 11 Jan 2024 16:22:30 -0800 Subject: [PATCH 02/30] add openai go template(s) and more (#144) ## PRIMARY FEATURES: - added ability to `ai dev new openai-chat-streaming-go` - added ability to `ai dev new openai-functions-streaming-go` ## Also... - upgraded javascript openai templates to use beta 10 of client SDK instead of beta 8 - slight optimizations to `ai dev new ...` template instantiation outputs --- src/ai/.x/templates/openai-chat-go/_.json | 1 + src/ai/.x/templates/openai-chat-go/go.mod | 2 +- src/ai/.x/templates/openai-chat-go/main.go | 71 +++++++++++ .../openai_chat_completions_hello_world.go | 119 +++++++----------- .../.x/templates/openai-chat-js/package.json | 2 +- .../templates/openai-chat-streaming-go/_.json | 9 ++ .../templates/openai-chat-streaming-go/go.mod | 6 + .../openai-chat-streaming-go/main.go | 73 +++++++++++ ..._chat_completions_streaming_hello_world.go | 93 ++++++++++++++ .../OpenAIChatCompletionsStreamingClass.js | 2 +- .../openai-chat-streaming-js/package.json | 2 +- .../openai-functions-streaming-go/_.json | 9 ++ .../function_call_context.go | 66 ++++++++++ .../function_factory.go | 41 ++++++ .../openai-functions-streaming-go/go.mod | 6 + .../openai-functions-streaming-go/main.go | 74 +++++++++++ ...penai_chat_completions_custom_functions.go | 66 ++++++++++ ...letions_functions_streaming_hello_world.go | 113 +++++++++++++++++ ...IChatCompletionsFunctionsStreamingClass.js | 2 +- .../package.json | 2 +- src/common/details/helpers/file_helpers.cs | 4 +- 
.../template_extension/TemplateFactory.cs | 15 ++- 22 files changed, 690 insertions(+), 88 deletions(-) create mode 100644 src/ai/.x/templates/openai-chat-go/main.go create mode 100644 src/ai/.x/templates/openai-chat-streaming-go/_.json create mode 100644 src/ai/.x/templates/openai-chat-streaming-go/go.mod create mode 100644 src/ai/.x/templates/openai-chat-streaming-go/main.go create mode 100644 src/ai/.x/templates/openai-chat-streaming-go/openai_chat_completions_streaming_hello_world.go create mode 100644 src/ai/.x/templates/openai-functions-streaming-go/_.json create mode 100644 src/ai/.x/templates/openai-functions-streaming-go/function_call_context.go create mode 100644 src/ai/.x/templates/openai-functions-streaming-go/function_factory.go create mode 100644 src/ai/.x/templates/openai-functions-streaming-go/go.mod create mode 100644 src/ai/.x/templates/openai-functions-streaming-go/main.go create mode 100644 src/ai/.x/templates/openai-functions-streaming-go/openai_chat_completions_custom_functions.go create mode 100644 src/ai/.x/templates/openai-functions-streaming-go/openai_chat_completions_functions_streaming_hello_world.go diff --git a/src/ai/.x/templates/openai-chat-go/_.json b/src/ai/.x/templates/openai-chat-go/_.json index d33c5448..99fca6e3 100644 --- a/src/ai/.x/templates/openai-chat-go/_.json +++ b/src/ai/.x/templates/openai-chat-go/_.json @@ -1,6 +1,7 @@ { "_Name": "OpenAI Chat Completions in Go", "_Language": "Go", + "ClassName": "OpenAIChatCompletionsExample", "AZURE_OPENAI_ENDPOINT": "", "AZURE_OPENAI_KEY": "", "AZURE_OPENAI_CHAT_DEPLOYMENT": "", diff --git a/src/ai/.x/templates/openai-chat-go/go.mod b/src/ai/.x/templates/openai-chat-go/go.mod index 5b9b4d4a..6bcebc87 100644 --- a/src/ai/.x/templates/openai-chat-go/go.mod +++ b/src/ai/.x/templates/openai-chat-go/go.mod @@ -1,4 +1,4 @@ -module openai_chat_completions_hello_world.go +module openai_chat_completions_hello_world require ( github.com/Azure/azure-sdk-for-go/sdk/ai/azopenai v0.3.0 diff --git 
a/src/ai/.x/templates/openai-chat-go/main.go b/src/ai/.x/templates/openai-chat-go/main.go new file mode 100644 index 00000000..302c4db1 --- /dev/null +++ b/src/ai/.x/templates/openai-chat-go/main.go @@ -0,0 +1,71 @@ +<#@ template hostspecific="true" #> +<#@ output extension=".go" encoding="utf-8" #> +<#@ parameter type="System.String" name="ClassName" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_ENDPOINT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_KEY" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_CHAT_DEPLOYMENT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_SYSTEM_PROMPT" #> +package main + +import ( + "bufio" + "fmt" + "log" + "os" + "strings" +) + +func main() { + azureOpenAIEndpoint := os.Getenv("AZURE_OPENAI_ENDPOINT") + if azureOpenAIEndpoint == "" { + azureOpenAIEndpoint = "<#= AZURE_OPENAI_ENDPOINT #>" + } + azureOpenAIKey := os.Getenv("AZURE_OPENAI_KEY") + if azureOpenAIKey == "" { + azureOpenAIKey = "<#= AZURE_OPENAI_KEY #>" + } + deploymentName := os.Getenv("AZURE_OPENAI_CHAT_DEPLOYMENT") + if deploymentName == "" { + deploymentName = "<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>" + } + systemPrompt := os.Getenv("OPENAI_SYSTEM_PROMPT") + if systemPrompt == "" { + systemPrompt = "<#= AZURE_OPENAI_SYSTEM_PROMPT #>" + } + + if azureOpenAIEndpoint == "" || azureOpenAIKey == "" || deploymentName == "" || systemPrompt == "" { + fmt.Println("Please set the environment variables.") + os.Exit(1) + } + + chat, err := New<#= ClassName #>(systemPrompt, azureOpenAIEndpoint, azureOpenAIKey, deploymentName) + if err != nil { + log.Fatalf("ERROR: %s", err) + } + + for { + fmt.Print("User: ") + input, _ := getUserInput() + if input == "exit" || input == "" { + break + } + + response, err := chat.GetChatCompletions(input) + if err != nil { + log.Fatalf("ERROR: %s", err) + } + + fmt.Printf("Assistant: %s\n\n", response) + } +} + +func getUserInput() (string, error) { + reader := bufio.NewReader(os.Stdin) + userInput, err := 
reader.ReadString('\n') + if err != nil { + return "", err + } + userInput = strings.TrimSuffix(userInput, "\n") + userInput = strings.TrimSuffix(userInput, "\r") + return userInput, nil +} diff --git a/src/ai/.x/templates/openai-chat-go/openai_chat_completions_hello_world.go b/src/ai/.x/templates/openai-chat-go/openai_chat_completions_hello_world.go index e0f423e6..8bd57384 100644 --- a/src/ai/.x/templates/openai-chat-go/openai_chat_completions_hello_world.go +++ b/src/ai/.x/templates/openai-chat-go/openai_chat_completions_hello_world.go @@ -1,90 +1,59 @@ <#@ template hostspecific="true" #> <#@ output extension=".go" encoding="utf-8" #> -<#@ parameter type="System.String" name="AZURE_OPENAI_ENDPOINT" #> -<#@ parameter type="System.String" name="AZURE_OPENAI_KEY" #> -<#@ parameter type="System.String" name="AZURE_OPENAI_CHAT_DEPLOYMENT" #> -<#@ parameter type="System.String" name="AZURE_OPENAI_SYSTEM_PROMPT" #> +<#@ parameter type="System.String" name="ClassName" #> package main import ( - "bufio" - "context" - "fmt" - "log" - "os" - "strings" + "context" - "github.com/Azure/azure-sdk-for-go/sdk/ai/azopenai" - "github.com/Azure/azure-sdk-for-go/sdk/azcore/to" + "github.com/Azure/azure-sdk-for-go/sdk/ai/azopenai" + "github.com/Azure/azure-sdk-for-go/sdk/azcore/to" ) -func main() { - azureOpenAIKey := os.Getenv("AZURE_OPENAI_KEY") - if azureOpenAIKey == "" { - azureOpenAIKey = "<#= AZURE_OPENAI_KEY #>" - } - azureOpenAIEndpoint := os.Getenv("AZURE_OPENAI_ENDPOINT") - if azureOpenAIEndpoint == "" { - azureOpenAIEndpoint = "<#= AZURE_OPENAI_ENDPOINT #>" - } - modelDeploymentID := os.Getenv("AZURE_OPENAI_CHAT_DEPLOYMENT") - if modelDeploymentID == "" { - modelDeploymentID = "<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>" - } - systemPrompt := os.Getenv("OPENAI_SYSTEM_PROMPT") - if systemPrompt == "" { - systemPrompt = "<#= AZURE_OPENAI_SYSTEM_PROMPT #>" - } - - keyCredential, err := azopenai.NewKeyCredential(azureOpenAIKey) - if err != nil { - log.Fatalf("ERROR: %s", err) - } - 
client, err := azopenai.NewClientWithKeyCredential(azureOpenAIEndpoint, keyCredential, nil) - if err != nil { - log.Fatalf("ERROR: %s", err) - } - - options := azopenai.ChatCompletionsOptions{ - Deployment: modelDeploymentID, - Messages: []azopenai.ChatMessage{ - {Role: to.Ptr(azopenai.ChatRoleSystem), Content: to.Ptr(systemPrompt)}, - }, - } - - for { - fmt.Print("User: ") +type <#= ClassName #> struct { + client *azopenai.Client + options *azopenai.ChatCompletionsOptions +} - userPrompt, err := getUserInput() - if err != nil { - fmt.Println("Error reading input:", err) - break - } - if userPrompt == "exit" || userPrompt == "" { - break - } +func New<#= ClassName #>(systemPrompt string, endpoint string, azureApiKey string, deploymentName string) (*<#= ClassName #>, error) { + keyCredential, err := azopenai.NewKeyCredential(azureApiKey) + if err != nil { + return nil, err + } + client, err := azopenai.NewClientWithKeyCredential(endpoint, keyCredential, nil) + if err != nil { + return nil, err + } + + messages := []azopenai.ChatMessage{ + {Role: to.Ptr(azopenai.ChatRoleSystem), Content: to.Ptr(systemPrompt)}, + } + + options := &azopenai.ChatCompletionsOptions{ + Deployment: deploymentName, + Messages: messages, + } + + return &<#= ClassName #> { + client: client, + options: options, + }, nil +} - options.Messages = append(options.Messages, azopenai.ChatMessage{Role: to.Ptr(azopenai.ChatRoleUser), Content: to.Ptr(userPrompt)}) +func (chat *<#= ClassName #>) ClearConversation() { + chat.options.Messages = chat.options.Messages[:1] +} - resp, err := client.GetChatCompletions(context.TODO(), options, nil) - if err != nil { - log.Fatalf("ERROR: %s", err) - } +func (chat *<#= ClassName #>) GetChatCompletions(userPrompt string) (string, error) { + chat.options.Messages = append(chat.options.Messages, azopenai.ChatMessage{Role: to.Ptr(azopenai.ChatRoleUser), Content: to.Ptr(userPrompt)}) - responseContent := *resp.Choices[0].Message.Content - options.Messages = 
append(options.Messages, azopenai.ChatMessage{Role: to.Ptr(azopenai.ChatRoleAssistant), Content: to.Ptr(responseContent)}) + resp, err := chat.client.GetChatCompletions(context.TODO(), *chat.options, nil) + if err != nil { + return "", err + } - fmt.Printf("\nAssistant: %s\n\n", responseContent) - } -} + responseContent := *resp.Choices[0].Message.Content + chat.options.Messages = append(chat.options.Messages, azopenai.ChatMessage{Role: to.Ptr(azopenai.ChatRoleAssistant), Content: to.Ptr(responseContent)}) -func getUserInput() (string, error) { - reader := bufio.NewReader(os.Stdin) - userInput, err := reader.ReadString('\n') - if err != nil { - return "", err - } - userInput = strings.TrimSuffix(userInput, "\n") - userInput = strings.TrimSuffix(userInput, "\r") - return userInput, nil + return responseContent, nil } diff --git a/src/ai/.x/templates/openai-chat-js/package.json b/src/ai/.x/templates/openai-chat-js/package.json index ca7c4681..90d9afe9 100644 --- a/src/ai/.x/templates/openai-chat-js/package.json +++ b/src/ai/.x/templates/openai-chat-js/package.json @@ -9,7 +9,7 @@ "author": "", "license": "MIT", "dependencies": { - "@azure/openai": "1.0.0-beta.8" + "@azure/openai": "1.0.0-beta.10" } } \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-streaming-go/_.json b/src/ai/.x/templates/openai-chat-streaming-go/_.json new file mode 100644 index 00000000..e36b1131 --- /dev/null +++ b/src/ai/.x/templates/openai-chat-streaming-go/_.json @@ -0,0 +1,9 @@ +{ + "_Name": "OpenAI Chat Completions (Streaming) in Go", + "_Language": "Go", + "ClassName": "OpenAIChatCompletionsStreamingExample", + "AZURE_OPENAI_ENDPOINT": "", + "AZURE_OPENAI_KEY": "", + "AZURE_OPENAI_CHAT_DEPLOYMENT": "", + "AZURE_OPENAI_SYSTEM_PROMPT": "You are a helpful AI assistant." 
+} \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-streaming-go/go.mod b/src/ai/.x/templates/openai-chat-streaming-go/go.mod new file mode 100644 index 00000000..3d12c4f5 --- /dev/null +++ b/src/ai/.x/templates/openai-chat-streaming-go/go.mod @@ -0,0 +1,6 @@ +module openai_chat_completions_streaming_hello_world + +require ( + github.com/Azure/azure-sdk-for-go/sdk/ai/azopenai v0.3.0 + github.com/Azure/azure-sdk-for-go/sdk/azcore v1.9.0 +) diff --git a/src/ai/.x/templates/openai-chat-streaming-go/main.go b/src/ai/.x/templates/openai-chat-streaming-go/main.go new file mode 100644 index 00000000..27441124 --- /dev/null +++ b/src/ai/.x/templates/openai-chat-streaming-go/main.go @@ -0,0 +1,73 @@ +<#@ template hostspecific="true" #> +<#@ output extension=".go" encoding="utf-8" #> +<#@ parameter type="System.String" name="ClassName" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_ENDPOINT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_KEY" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_CHAT_DEPLOYMENT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_SYSTEM_PROMPT" #> +package main + +import ( + "bufio" + "fmt" + "log" + "os" + "strings" +) + +func main() { + azureOpenAIEndpoint := os.Getenv("AZURE_OPENAI_ENDPOINT") + if azureOpenAIEndpoint == "" { + azureOpenAIEndpoint = "<#= AZURE_OPENAI_ENDPOINT #>" + } + azureOpenAIKey := os.Getenv("AZURE_OPENAI_KEY") + if azureOpenAIKey == "" { + azureOpenAIKey = "<#= AZURE_OPENAI_KEY #>" + } + deploymentName := os.Getenv("AZURE_OPENAI_CHAT_DEPLOYMENT") + if deploymentName == "" { + deploymentName = "<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>" + } + systemPrompt := os.Getenv("OPENAI_SYSTEM_PROMPT") + if systemPrompt == "" { + systemPrompt = "<#= AZURE_OPENAI_SYSTEM_PROMPT #>" + } + + if azureOpenAIEndpoint == "" || azureOpenAIKey == "" || deploymentName == "" || systemPrompt == "" { + fmt.Println("Please set the environment variables.") + os.Exit(1) + } + + chat, err := 
New<#= ClassName #>(systemPrompt, azureOpenAIEndpoint, azureOpenAIKey, deploymentName) + if err != nil { + log.Fatalf("ERROR: %s", err) + } + + for { + fmt.Print("User: ") + input, _ := getUserInput() + if input == "exit" || input == "" { + break + } + + fmt.Printf("\nAssistant: ") + _, err := chat.GetChatCompletionsStream(input, func(content string) { + fmt.Printf("%s", content) + }) + if err != nil { + log.Fatalf("ERROR: %s", err) + } + fmt.Printf("\n\n") + } +} + +func getUserInput() (string, error) { + reader := bufio.NewReader(os.Stdin) + userInput, err := reader.ReadString('\n') + if err != nil { + return "", err + } + userInput = strings.TrimSuffix(userInput, "\n") + userInput = strings.TrimSuffix(userInput, "\r") + return userInput, nil +} diff --git a/src/ai/.x/templates/openai-chat-streaming-go/openai_chat_completions_streaming_hello_world.go b/src/ai/.x/templates/openai-chat-streaming-go/openai_chat_completions_streaming_hello_world.go new file mode 100644 index 00000000..29e78875 --- /dev/null +++ b/src/ai/.x/templates/openai-chat-streaming-go/openai_chat_completions_streaming_hello_world.go @@ -0,0 +1,93 @@ +<#@ template hostspecific="true" #> +<#@ output extension=".go" encoding="utf-8" #> +<#@ parameter type="System.String" name="ClassName" #> +package main + +import ( + "context" + "errors" + "io" + + "github.com/Azure/azure-sdk-for-go/sdk/ai/azopenai" + "github.com/Azure/azure-sdk-for-go/sdk/azcore/to" +) + +type <#= ClassName #> struct { + client *azopenai.Client + options *azopenai.ChatCompletionsOptions +} + +func New<#= ClassName #>(systemPrompt string, endpoint string, azureApiKey string, deploymentName string) (*<#= ClassName #>, error) { + keyCredential, err := azopenai.NewKeyCredential(azureApiKey) + if err != nil { + return nil, err + } + client, err := azopenai.NewClientWithKeyCredential(endpoint, keyCredential, nil) + if err != nil { + return nil, err + } + + messages := []azopenai.ChatMessage{ + {Role: to.Ptr(azopenai.ChatRoleSystem), 
Content: to.Ptr(systemPrompt)}, + } + + options := &azopenai.ChatCompletionsOptions{ + Deployment: deploymentName, + Messages: messages, + } + + return &<#= ClassName #> { + client: client, + options: options, + }, nil +} + +func (chat *<#= ClassName #>) ClearConversation() { + chat.options.Messages = chat.options.Messages[:1] +} + +func (chat *<#= ClassName #>) GetChatCompletionsStream(userPrompt string, callback func(content string)) (string, error) { + chat.options.Messages = append(chat.options.Messages, azopenai.ChatMessage{Role: to.Ptr(azopenai.ChatRoleUser), Content: to.Ptr(userPrompt)}) + + resp, err := chat.client.GetChatCompletionsStream(context.TODO(), *chat.options, nil) + if err != nil { + return "", err + } + defer resp.ChatCompletionsStream.Close() + + responseContent := "" + for { + chatCompletions, err := resp.ChatCompletionsStream.Read() + if errors.Is(err, io.EOF) { + break + } + if err != nil { + return "", err + } + + for _, choice := range chatCompletions.Choices { + + content := "" + if choice.Delta.Content != nil { + content = *choice.Delta.Content + } + + if choice.FinishReason != nil { + finishReason := *choice.FinishReason + if finishReason == azopenai.CompletionsFinishReasonLength { + content = content + "\nWARNING: Exceeded token limit!" 
+ } + } + + if content == "" { + continue + } + + callback(content) + responseContent += content + } + } + + chat.options.Messages = append(chat.options.Messages, azopenai.ChatMessage{Role: to.Ptr(azopenai.ChatRoleAssistant), Content: to.Ptr(responseContent)}) + return responseContent, nil +} diff --git a/src/ai/.x/templates/openai-chat-streaming-js/OpenAIChatCompletionsStreamingClass.js b/src/ai/.x/templates/openai-chat-streaming-js/OpenAIChatCompletionsStreamingClass.js index 31c55957..f9c499f1 100644 --- a/src/ai/.x/templates/openai-chat-streaming-js/OpenAIChatCompletionsStreamingClass.js +++ b/src/ai/.x/templates/openai-chat-streaming-js/OpenAIChatCompletionsStreamingClass.js @@ -21,7 +21,7 @@ class <#= ClassName #> { this.messages.push({ role: 'user', content: userInput }); let contentComplete = ''; - const events = this.client.listChatCompletions(this.deploymentName, this.messages); + const events = await this.client.streamChatCompletions(this.deploymentName, this.messages); for await (const event of events) { for (const choice of event.choices) { diff --git a/src/ai/.x/templates/openai-chat-streaming-js/package.json b/src/ai/.x/templates/openai-chat-streaming-js/package.json index c94810af..17195509 100644 --- a/src/ai/.x/templates/openai-chat-streaming-js/package.json +++ b/src/ai/.x/templates/openai-chat-streaming-js/package.json @@ -9,7 +9,7 @@ "author": "", "license": "MIT", "dependencies": { - "@azure/openai": "1.0.0-beta.8" + "@azure/openai": "1.0.0-beta.10" } } \ No newline at end of file diff --git a/src/ai/.x/templates/openai-functions-streaming-go/_.json b/src/ai/.x/templates/openai-functions-streaming-go/_.json new file mode 100644 index 00000000..6f686170 --- /dev/null +++ b/src/ai/.x/templates/openai-functions-streaming-go/_.json @@ -0,0 +1,9 @@ +{ + "_Name": "OpenAI Chat Completions (Functions) in Go", + "_Language": "Go", + "ClassName": "OpenAIChatCompletionsStreamingExample", + "AZURE_OPENAI_ENDPOINT": "", + "AZURE_OPENAI_KEY": "", + 
"AZURE_OPENAI_CHAT_DEPLOYMENT": "", + "AZURE_OPENAI_SYSTEM_PROMPT": "You are a helpful AI assistant." +} \ No newline at end of file diff --git a/src/ai/.x/templates/openai-functions-streaming-go/function_call_context.go b/src/ai/.x/templates/openai-functions-streaming-go/function_call_context.go new file mode 100644 index 00000000..0f69a4c4 --- /dev/null +++ b/src/ai/.x/templates/openai-functions-streaming-go/function_call_context.go @@ -0,0 +1,66 @@ +package main + +import ( + "fmt" + + "github.com/Azure/azure-sdk-for-go/sdk/ai/azopenai" + "github.com/Azure/azure-sdk-for-go/sdk/azcore/to" +) + +type FunctionCallContext struct { + functionFactory *FunctionFactory + options *azopenai.ChatCompletionsOptions + functionName string + functionArguments string +} + +func NewFunctionCallContext(functionFactory *FunctionFactory, options *azopenai.ChatCompletionsOptions) *FunctionCallContext { + return &FunctionCallContext{ + functionFactory: functionFactory, + options: options, + functionName: "", + functionArguments: "", + } +} + +func (fcc *FunctionCallContext) CheckForUpdate(choice azopenai.ChatChoice) bool { + updated := false + + if choice.Delta != nil && choice.Delta.FunctionCall != nil { + name := choice.Delta.FunctionCall.Name + if name != nil && *name != "" { + fcc.functionName = *name + updated = true + } + } + + if choice.Delta != nil && choice.Delta.FunctionCall != nil { + args := choice.Delta.FunctionCall.Arguments + if args != nil && *args != "" { + fcc.functionArguments = *args + updated = true + } + } + + return updated +} + +func (fcc *FunctionCallContext) TryCallFunction() string { + result := fcc.functionFactory.TryCallFunction(fcc.functionName, fcc.functionArguments) + if result == "" { + return "" + } + + fmt.Printf("\rassistant-function: %s(%s) => %s\n", fcc.functionName, fcc.functionArguments, result) + fmt.Printf("\nAssistant: ") + + fcc.options.Messages = append(fcc.options.Messages, azopenai.ChatMessage{Role: to.Ptr(azopenai.ChatRoleAssistant), 
Content: to.Ptr(""), FunctionCall: &azopenai.ChatMessageFunctionCall{Name: to.Ptr(fcc.functionName), Arguments: to.Ptr(fcc.functionArguments)}}) + fcc.options.Messages = append(fcc.options.Messages, azopenai.ChatMessage{Role: to.Ptr(azopenai.ChatRoleFunction), Content: to.Ptr(result), Name: to.Ptr(fcc.functionName)}) + + return result +} + +func (fcc *FunctionCallContext) Clear() { + fcc.functionName = "" + fcc.functionArguments = "" +} diff --git a/src/ai/.x/templates/openai-functions-streaming-go/function_factory.go b/src/ai/.x/templates/openai-functions-streaming-go/function_factory.go new file mode 100644 index 00000000..0a952fb8 --- /dev/null +++ b/src/ai/.x/templates/openai-functions-streaming-go/function_factory.go @@ -0,0 +1,41 @@ +package main + +import ( + "github.com/Azure/azure-sdk-for-go/sdk/ai/azopenai" +) + +type FunctionInfo struct { + Schema azopenai.FunctionDefinition + Function func(string) string +} + +type FunctionFactory struct { + functions map[string]FunctionInfo +} + +func NewFunctionFactory() *FunctionFactory { + return &FunctionFactory{ + functions: make(map[string]FunctionInfo), + } +} + +func (ff *FunctionFactory) AddFunction(schema azopenai.FunctionDefinition, fun func(string) string) { + ff.functions[*schema.Name] = FunctionInfo{Schema: schema, Function: fun} +} + +func (ff *FunctionFactory) GetFunctionSchemas() []azopenai.FunctionDefinition { + schemas := []azopenai.FunctionDefinition{} + for _, functionInfo := range ff.functions { + schemas = append(schemas, functionInfo.Schema) + } + return schemas +} + +func (ff *FunctionFactory) TryCallFunction(functionName string, functionArguments string) string { + functionInfo, exists := ff.functions[functionName] + if !exists { + return "" + } + + return functionInfo.Function(functionArguments) +} diff --git a/src/ai/.x/templates/openai-functions-streaming-go/go.mod b/src/ai/.x/templates/openai-functions-streaming-go/go.mod new file mode 100644 index 00000000..34f300c4 --- /dev/null +++ 
b/src/ai/.x/templates/openai-functions-streaming-go/go.mod @@ -0,0 +1,6 @@ +module openai_chat_completions_functions_streaming_hello_world + +require ( + github.com/Azure/azure-sdk-for-go/sdk/ai/azopenai v0.3.0 + github.com/Azure/azure-sdk-for-go/sdk/azcore v1.9.0 +) diff --git a/src/ai/.x/templates/openai-functions-streaming-go/main.go b/src/ai/.x/templates/openai-functions-streaming-go/main.go new file mode 100644 index 00000000..36ae263a --- /dev/null +++ b/src/ai/.x/templates/openai-functions-streaming-go/main.go @@ -0,0 +1,74 @@ +<#@ template hostspecific="true" #> +<#@ output extension=".go" encoding="utf-8" #> +<#@ parameter type="System.String" name="ClassName" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_ENDPOINT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_KEY" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_CHAT_DEPLOYMENT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_SYSTEM_PROMPT" #> +package main + +import ( + "bufio" + "fmt" + "log" + "os" + "strings" +) + +func main() { + azureOpenAIEndpoint := os.Getenv("AZURE_OPENAI_ENDPOINT") + if azureOpenAIEndpoint == "" { + azureOpenAIEndpoint = "<#= AZURE_OPENAI_ENDPOINT #>" + } + azureOpenAIKey := os.Getenv("AZURE_OPENAI_KEY") + if azureOpenAIKey == "" { + azureOpenAIKey = "<#= AZURE_OPENAI_KEY #>" + } + deploymentName := os.Getenv("AZURE_OPENAI_CHAT_DEPLOYMENT") + if deploymentName == "" { + deploymentName = "<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>" + } + systemPrompt := os.Getenv("OPENAI_SYSTEM_PROMPT") + if systemPrompt == "" { + systemPrompt = "<#= AZURE_OPENAI_SYSTEM_PROMPT #>" + } + + if azureOpenAIEndpoint == "" || azureOpenAIKey == "" || deploymentName == "" || systemPrompt == "" { + fmt.Println("Please set the environment variables.") + os.Exit(1) + } + + factory := NewFunctionFactoryWithCustomFunctions() + chat, err := New<#= ClassName #>(systemPrompt, azureOpenAIEndpoint, azureOpenAIKey, deploymentName, factory) + if err != nil { + log.Fatalf("ERROR: 
%s", err) + } + + for { + fmt.Print("User: ") + input, _ := getUserInput() + if input == "exit" || input == "" { + break + } + + fmt.Printf("\nAssistant: ") + _, err := chat.GetChatCompletionsStream(input, func(content string) { + fmt.Printf("%s", content) + }) + if err != nil { + log.Fatalf("ERROR: %s", err) + } + fmt.Printf("\n\n") + } +} + +func getUserInput() (string, error) { + reader := bufio.NewReader(os.Stdin) + userInput, err := reader.ReadString('\n') + if err != nil { + return "", err + } + userInput = strings.TrimSuffix(userInput, "\n") + userInput = strings.TrimSuffix(userInput, "\r") + return userInput, nil +} diff --git a/src/ai/.x/templates/openai-functions-streaming-go/openai_chat_completions_custom_functions.go b/src/ai/.x/templates/openai-functions-streaming-go/openai_chat_completions_custom_functions.go new file mode 100644 index 00000000..cf6e8500 --- /dev/null +++ b/src/ai/.x/templates/openai-functions-streaming-go/openai_chat_completions_custom_functions.go @@ -0,0 +1,66 @@ +package main + +import ( + "encoding/json" + "fmt" + "time" + + "github.com/Azure/azure-sdk-for-go/sdk/ai/azopenai" + "github.com/Azure/azure-sdk-for-go/sdk/azcore/to" +) + +func GetCurrentWeather(functionArguments string) string { + var args map[string]string + json.Unmarshal([]byte(functionArguments), &args) + location, _ := args["location"] + return fmt.Sprintf("The weather in %s is 72 degrees and sunny.", location) +} + +var GetCurrentWeatherSchema = azopenai.FunctionDefinition{ + Name: to.Ptr("get_current_weather"), + Description: to.Ptr("Get the current weather in a given location"), + Parameters: map[string]any{ + "type": "object", + "properties": map[string]any{ + "location": map[string]any{ + "type": "string", + "description": "The city and state, e.g. 
San Francisco, CA", + }, + }, + "required": []string{"location"}, + }, +} + +func GetCurrentDate(_ string) string { + return time.Now().Format("2006-01-02") +} + +var GetCurrentDateSchema = azopenai.FunctionDefinition{ + Name: to.Ptr("get_current_date"), + Description: to.Ptr("Get the current date"), + Parameters: map[string]any{ + "type": "object", + "properties": map[string]any{}, + }, +} + +func GetCurrentTime(_ string) string { + return time.Now().Format("15:04:05") +} + +var GetCurrentTimeSchema = azopenai.FunctionDefinition{ + Name: to.Ptr("get_current_time"), + Description: to.Ptr("Get the current time"), + Parameters: map[string]any{ + "type": "object", + "properties": map[string]any{}, + }, +} + +func NewFunctionFactoryWithCustomFunctions() *FunctionFactory { + factory := NewFunctionFactory() + factory.AddFunction(GetCurrentWeatherSchema, GetCurrentWeather) + factory.AddFunction(GetCurrentDateSchema, GetCurrentDate) + factory.AddFunction(GetCurrentTimeSchema, GetCurrentTime) + return factory +} diff --git a/src/ai/.x/templates/openai-functions-streaming-go/openai_chat_completions_functions_streaming_hello_world.go b/src/ai/.x/templates/openai-functions-streaming-go/openai_chat_completions_functions_streaming_hello_world.go new file mode 100644 index 00000000..8e5ad832 --- /dev/null +++ b/src/ai/.x/templates/openai-functions-streaming-go/openai_chat_completions_functions_streaming_hello_world.go @@ -0,0 +1,113 @@ +<#@ template hostspecific="true" #> +<#@ output extension=".go" encoding="utf-8" #> +<#@ parameter type="System.String" name="ClassName" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_ENDPOINT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_KEY" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_CHAT_DEPLOYMENT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_SYSTEM_PROMPT" #> +package main + +import ( + "context" + "errors" + "io" + + "github.com/Azure/azure-sdk-for-go/sdk/ai/azopenai" + 
"github.com/Azure/azure-sdk-for-go/sdk/azcore/to" +) + +type <#= ClassName #> struct { + client *azopenai.Client + options *azopenai.ChatCompletionsOptions + functionFactory *FunctionFactory + functionCallContext *FunctionCallContext +} + +func New<#= ClassName #>(systemPrompt string, endpoint string, azureApiKey string, deploymentName string, functionFactory *FunctionFactory) (*<#= ClassName #>, error) { + keyCredential, err := azopenai.NewKeyCredential(azureApiKey) + if err != nil { + return nil, err + } + client, err := azopenai.NewClientWithKeyCredential(endpoint, keyCredential, nil) + if err != nil { + return nil, err + } + + messages := []azopenai.ChatMessage{ + {Role: to.Ptr(azopenai.ChatRoleSystem), Content: to.Ptr(systemPrompt)}, + } + + options := &azopenai.ChatCompletionsOptions{ + Deployment: deploymentName, + Messages: messages, + FunctionCall: &azopenai.ChatCompletionsOptionsFunctionCall{ + Value: to.Ptr("auto"), + }, + Functions: functionFactory.GetFunctionSchemas(), + } + + return &<#= ClassName #>{ + client: client, + options: options, + functionCallContext: NewFunctionCallContext(functionFactory, options), + }, nil +} + +func (chat *<#= ClassName #>) ClearConversation() { + chat.options.Messages = chat.options.Messages[:1] +} + +func (chat *<#= ClassName #>) GetChatCompletionsStream(userPrompt string, callback func(content string)) (string, error) { + chat.options.Messages = append(chat.options.Messages, azopenai.ChatMessage{Role: to.Ptr(azopenai.ChatRoleUser), Content: to.Ptr(userPrompt)}) + + responseContent := "" + for { + resp, err := chat.client.GetChatCompletionsStream(context.TODO(), *chat.options, nil) + if err != nil { + return "", err + } + defer resp.ChatCompletionsStream.Close() + + for { + chatCompletions, err := resp.ChatCompletionsStream.Read() + if errors.Is(err, io.EOF) { + break + } + if err != nil { + return "", err + } + + for _, choice := range chatCompletions.Choices { + + chat.functionCallContext.CheckForUpdate(choice) + + 
content := "" + if choice.Delta.Content != nil { + content = *choice.Delta.Content + } + + if choice.FinishReason != nil { + finishReason := *choice.FinishReason + if finishReason == azopenai.CompletionsFinishReasonLength { + content = content + "\nWARNING: Exceeded token limit!" + } + } + + if content == "" { + continue + } + + callback(content) + responseContent += content + } + } + + if chat.functionCallContext.TryCallFunction() != "" { + chat.functionCallContext.Clear() + continue + } + + chat.options.Messages = append(chat.options.Messages, azopenai.ChatMessage{Role: to.Ptr(azopenai.ChatRoleAssistant), Content: to.Ptr(responseContent)}) + return responseContent, nil + } +} diff --git a/src/ai/.x/templates/openai-functions-streaming-js/OpenAIChatCompletionsFunctionsStreamingClass.js b/src/ai/.x/templates/openai-functions-streaming-js/OpenAIChatCompletionsFunctionsStreamingClass.js index 776edd0a..a2153256 100644 --- a/src/ai/.x/templates/openai-functions-streaming-js/OpenAIChatCompletionsFunctionsStreamingClass.js +++ b/src/ai/.x/templates/openai-functions-streaming-js/OpenAIChatCompletionsFunctionsStreamingClass.js @@ -25,7 +25,7 @@ class <#= ClassName #> { let contentComplete = ''; while (true) { - const events = this.client.listChatCompletions(this.deploymentName, this.messages, { + const events = await this.client.streamChatCompletions(this.deploymentName, this.messages, { functions: this.functionFactory.getFunctionSchemas(), }); diff --git a/src/ai/.x/templates/openai-functions-streaming-js/package.json b/src/ai/.x/templates/openai-functions-streaming-js/package.json index 9532927e..5eae9dd5 100644 --- a/src/ai/.x/templates/openai-functions-streaming-js/package.json +++ b/src/ai/.x/templates/openai-functions-streaming-js/package.json @@ -9,7 +9,7 @@ "author": "", "license": "MIT", "dependencies": { - "@azure/openai": "1.0.0-beta.8" + "@azure/openai": "1.0.0-beta.10" } } \ No newline at end of file diff --git a/src/common/details/helpers/file_helpers.cs 
b/src/common/details/helpers/file_helpers.cs index b3340b5b..ab4c2117 100644 --- a/src/common/details/helpers/file_helpers.cs +++ b/src/common/details/helpers/file_helpers.cs @@ -55,7 +55,9 @@ public static IEnumerable Combine(string path1, IEnumerable path public static string NormalizePath(string outputDirectory) { - return new DirectoryInfo(outputDirectory).FullName; + var normalized = new DirectoryInfo(outputDirectory).FullName; + var cwd = Directory.GetCurrentDirectory(); + return normalized.StartsWith(cwd) ? normalized.Substring(cwd.Length + 1) : normalized; } } diff --git a/src/extensions/template_extension/TemplateFactory.cs b/src/extensions/template_extension/TemplateFactory.cs index 75008553..43401b2a 100644 --- a/src/extensions/template_extension/TemplateFactory.cs +++ b/src/extensions/template_extension/TemplateFactory.cs @@ -72,14 +72,14 @@ public static bool GenerateTemplateFiles(string templateName, string instruction { var root = FileHelpers.FileNameFromResourceName("templates") + "/"; - templateName = templateName.Replace('-', '_'); + var normalizedTemplateName = templateName.Replace('-', '_'); var generator = new TemplateGenerator(); - var files = GetTemplateFileNames(templateName, generator); + var files = GetTemplateFileNames(normalizedTemplateName, generator).ToList(); if (files.Count() == 0) { - templateName = templateName.Replace(" ", "_"); - files = GetTemplateFileNames(templateName, generator); + normalizedTemplateName = normalizedTemplateName.Replace(" ", "_"); + files = GetTemplateFileNames(normalizedTemplateName, generator).ToList(); if (files.Count() == 0) { return false; @@ -87,10 +87,13 @@ public static bool GenerateTemplateFiles(string templateName, string instruction } outputDirectory = PathHelpers.NormalizePath(outputDirectory); - var message = $"Generating '{templateName}' in '{outputDirectory}' ({files.Count()} files)..."; + var message = templateName != outputDirectory + ? 
$"Generating '{templateName}' in '{outputDirectory}' ({files.Count()} files)..." + : $"Generating '{templateName}' ({files.Count()} files)..."; if (!quiet) Console.WriteLine($"{message}\n"); - var generated = ProcessTemplates(templateName, generator, files, outputDirectory); + files.Sort(); + var generated = ProcessTemplates(normalizedTemplateName, generator, files, outputDirectory); foreach (var item in generated) { var file = item.Replace(outputDirectory, string.Empty).Trim('\\', '/'); From 768b7264c9e3b88e8cb3c27dcf6894ea5af988ae Mon Sep 17 00:00:00 2001 From: Christopher Schraer <32145632+chschrae@users.noreply.github.com> Date: Fri, 12 Jan 2024 12:47:11 -0800 Subject: [PATCH 03/30] c# use your own data template (#143) * refactored search C# template * renamed template, and fixed hard coded name in chat completions class. * responded to Rob's comments * updated variable * variable updates * fixed some merge issues and updated variables * updated callback to be at the end of async functions * added system prompt and refactored ClearConversation * moved extension back into constructor * added system prompt to top of program.cs * renamed template classes and refactored for PR * removed citation work and made embedding consistent * added ? 
to paramter * removed unnecessary variable --------- Co-authored-by: Chris Schraer --- .../OpenAIChatWithAzureAISearchClass.cs | 63 -------------- ideas/openai-chat-with-azure-ai-search/_.json | 11 --- .../OpenAIChatCompletionsStreamingClass.cs | 17 ++-- .../openai-chat-streaming/Program.cs | 10 +-- ...nAIChatCompletionsWithDataStreamingClass._ | 1 + ...AIChatCompletionsWithDataStreamingClass.cs | 84 +++++++++++++++++++ .../openai-chat-with-data/Program.cs | 46 ++++++++++ .../.x/templates/openai-chat-with-data/_.json | 15 ++++ 8 files changed, 161 insertions(+), 86 deletions(-) delete mode 100644 ideas/openai-chat-with-azure-ai-search/OpenAIChatWithAzureAISearchClass.cs delete mode 100644 ideas/openai-chat-with-azure-ai-search/_.json rename ideas/openai-chat-with-azure-ai-search/OpenAIChatWithAzureAISearch.csproj._ => src/ai/.x/templates/openai-chat-with-data/OpenAIChatCompletionsWithDataStreamingClass._ (95%) create mode 100644 src/ai/.x/templates/openai-chat-with-data/OpenAIChatCompletionsWithDataStreamingClass.cs create mode 100644 src/ai/.x/templates/openai-chat-with-data/Program.cs create mode 100644 src/ai/.x/templates/openai-chat-with-data/_.json diff --git a/ideas/openai-chat-with-azure-ai-search/OpenAIChatWithAzureAISearchClass.cs b/ideas/openai-chat-with-azure-ai-search/OpenAIChatWithAzureAISearchClass.cs deleted file mode 100644 index e6847165..00000000 --- a/ideas/openai-chat-with-azure-ai-search/OpenAIChatWithAzureAISearchClass.cs +++ /dev/null @@ -1,63 +0,0 @@ -<#@ template hostspecific="true" #> -<#@ output extension=".cs" encoding="utf-8" #> -<#@ parameter type="System.String" name="ClassName" #> -<#@ parameter type="System.String" name="OpenAIEndpoint" #> -<#@ parameter type="System.String" name="OpenAIDeploymentName" #> -<#@ parameter type="System.String" name="SearchEndpoint" #> -<#@ parameter type="System.String" name="SearchApiKey" #> -<#@ parameter type="System.String" name="SearchIndexName" #> -using Azure; -using Azure.AI.OpenAI; -using 
Azure.Identity; -using System; -using System.Collections.Generic; -using System.IO; -using System.Threading.Tasks; - -public class <#= ClassName #> -{ - private static string _openAIEndpoint = "<#= OpenAIEndpoint #>"; - private static string _openAIDeploymentName = "<#= OpenAIDeploymentName #>"; - private static string _searchEndpoint = "<#= SearchEndpoint #>"; - private static string _searchApiKey = "<#= SearchApiKey #>"; - private static string _searchIndexName = "<#= SearchIndexName #>"; - - public async Task ChatUsingYourOwnData() - { - var client = new OpenAIClient(new Uri(_openAIEndpoint), new DefaultAzureCredential()); - - var contosoExtensionConfig = new AzureCognitiveSearchChatExtensionConfiguration() - { - SearchEndpoint = new Uri(_searchEndpoint), - Key = _searchApiKey, - IndexName = _searchIndexName, - }; - - ChatCompletionsOptions chatCompletionsOptions = new() - { - DeploymentName = _openAIDeploymentName, - Messages = - { - new ChatRequestSystemMessage("You are a helpful assistant that answers questions about the Contoso product database."), - new ChatRequestUserMessage("What are the best-selling Contoso products this month?") - }, - - AzureExtensionsOptions = new() - { - Extensions = { contosoExtensionConfig } - } - }; - - Response response = await client.GetChatCompletionsAsync(chatCompletionsOptions); - var message = response.Value.Choices[0].Message; - - Console.WriteLine($"{message.Role}: {message.Content}"); - - Console.WriteLine("Citations and other information:"); - - foreach (var contextMessage in message.AzureExtensionsContext.Messages) - { - Console.WriteLine($"{contextMessage.Role}: {contextMessage.Content}"); - } - } -} \ No newline at end of file diff --git a/ideas/openai-chat-with-azure-ai-search/_.json b/ideas/openai-chat-with-azure-ai-search/_.json deleted file mode 100644 index f06ecb0a..00000000 --- a/ideas/openai-chat-with-azure-ai-search/_.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "_Name": "OpenAI Chat w/ Azure AI Search Class 
Library", - "_Language": "C#", - "AICLIExtensionReferencePath": "", - "ClassName": "OpenAIChatWithAzureAISearchClass", - "OpenAIEndpoint": "https://myaccount.openai.azure.com/", - "SearchEndpoint": "https://your-contoso-search-resource.search.windows.net", - "SearchApiKey": "your-search-api-key", - "SearchIndexName": "contoso-products-index", - "OpenAIDeploymentName": "gpt-35-turbo-0613" -} \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-streaming/OpenAIChatCompletionsStreamingClass.cs b/src/ai/.x/templates/openai-chat-streaming/OpenAIChatCompletionsStreamingClass.cs index 3aaee1b4..8f02e6e0 100644 --- a/src/ai/.x/templates/openai-chat-streaming/OpenAIChatCompletionsStreamingClass.cs +++ b/src/ai/.x/templates/openai-chat-streaming/OpenAIChatCompletionsStreamingClass.cs @@ -8,17 +8,16 @@ public class <#= ClassName #> { - public <#= ClassName #>(string systemPrompt, string endpoint, string azureApiKey, string deploymentName) + public <#= ClassName #>(string systemPrompt, string azureApiKey, string openAIEndpoint, string openAIDeploymentName) { _systemPrompt = systemPrompt; _client = string.IsNullOrEmpty(azureApiKey) - ? new OpenAIClient(new Uri(endpoint), new DefaultAzureCredential()) - : new OpenAIClient(new Uri(endpoint), new AzureKeyCredential(azureApiKey)); + ? 
new OpenAIClient(new Uri(openAIEndpoint), new DefaultAzureCredential()) + : new OpenAIClient(new Uri(openAIEndpoint), new AzureKeyCredential(azureApiKey)); _options = new ChatCompletionsOptions(); - _options.DeploymentName = deploymentName; - + _options.DeploymentName = openAIDeploymentName; ClearConversation(); } @@ -36,6 +35,7 @@ public async Task GetChatCompletionsStreamingAsync(string userPrompt, Ac var response = await _client.GetChatCompletionsStreamingAsync(_options); await foreach (var update in response.EnumerateValues()) { + var content = update.ContentUpdate; if (update.FinishReason == CompletionsFinishReason.ContentFiltered) { @@ -48,8 +48,11 @@ public async Task GetChatCompletionsStreamingAsync(string userPrompt, Ac if (string.IsNullOrEmpty(content)) continue; - if (callback != null) callback(update); responseContent += content; + if (callback != null) + { + callback(update); + } } _options.Messages.Add(new ChatRequestAssistantMessage(responseContent)); @@ -57,6 +60,6 @@ public async Task GetChatCompletionsStreamingAsync(string userPrompt, Ac } private string _systemPrompt; - private ChatCompletionsOptions _options; private OpenAIClient _client; + private ChatCompletionsOptions _options; } \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-streaming/Program.cs b/src/ai/.x/templates/openai-chat-streaming/Program.cs index ab353757..4983484c 100644 --- a/src/ai/.x/templates/openai-chat-streaming/Program.cs +++ b/src/ai/.x/templates/openai-chat-streaming/Program.cs @@ -11,12 +11,12 @@ public class Program { public static async Task Main(string[] args) { - var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? "<#= AZURE_OPENAI_ENDPOINT #>"; - var azureApiKey = Environment.GetEnvironmentVariable("AZURE_OPENAI_KEY") ?? "<#= AZURE_OPENAI_KEY #>"; - var deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_CHAT_DEPLOYMENT") ?? 
"<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>"; + var azureOpenApiKey = Environment.GetEnvironmentVariable("AZURE_OPENAI_KEY") ?? "<#= AZURE_OPENAI_KEY #>"; + var azureOpenAIEndpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? "<#= AZURE_OPENAI_ENDPOINT #>"; + var azureOpenAIDeploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_CHAT_DEPLOYMENT") ?? "<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>"; var systemPrompt = Environment.GetEnvironmentVariable("AZURE_OPENAI_SYSTEM_PROMPT") ?? "<#= AZURE_OPENAI_SYSTEM_PROMPT #>"; - - var chat = new <#= ClassName #>(systemPrompt, endpoint, azureApiKey, deploymentName); + + var chat = new <#= ClassName #>(systemPrompt, azureOpenApiKey, azureOpenAIEndpoint, azureOpenAIDeploymentName); while (true) { diff --git a/ideas/openai-chat-with-azure-ai-search/OpenAIChatWithAzureAISearch.csproj._ b/src/ai/.x/templates/openai-chat-with-data/OpenAIChatCompletionsWithDataStreamingClass._ similarity index 95% rename from ideas/openai-chat-with-azure-ai-search/OpenAIChatWithAzureAISearch.csproj._ rename to src/ai/.x/templates/openai-chat-with-data/OpenAIChatCompletionsWithDataStreamingClass._ index 51ad3f8f..4677a2e0 100644 --- a/ideas/openai-chat-with-azure-ai-search/OpenAIChatWithAzureAISearch.csproj._ +++ b/src/ai/.x/templates/openai-chat-with-data/OpenAIChatCompletionsWithDataStreamingClass._ @@ -8,6 +8,7 @@ enable enable true + Exe diff --git a/src/ai/.x/templates/openai-chat-with-data/OpenAIChatCompletionsWithDataStreamingClass.cs b/src/ai/.x/templates/openai-chat-with-data/OpenAIChatCompletionsWithDataStreamingClass.cs new file mode 100644 index 00000000..81e36c57 --- /dev/null +++ b/src/ai/.x/templates/openai-chat-with-data/OpenAIChatCompletionsWithDataStreamingClass.cs @@ -0,0 +1,84 @@ +<#@ template hostspecific="true" #> +<#@ output extension=".cs" encoding="utf-8" #> +<#@ parameter type="System.String" name="ClassName" #> +<#@ parameter type="System.Boolean" name="OPTION_INCLUDE_CITATIONS" #> +using Azure; +using 
Azure.AI.OpenAI; +using Azure.Identity; +using System; +using System.Collections.Generic; +using System.IO; +using System.Threading.Tasks; + +public class <#= ClassName #> +{ + public <#= ClassName #>( + string systemPrompt, string openAIKey, string openAIEndpoint, string openAIDeploymentName, string searchEndpoint, string searchApiKey, string searchIndexName, string embeddingsEndpoint) + { + _systemPrompt = systemPrompt; + _client = string.IsNullOrEmpty(openAIKey) + ? new OpenAIClient(new Uri(openAIEndpoint), new DefaultAzureCredential()) + : new OpenAIClient(new Uri(openAIEndpoint), new AzureKeyCredential(openAIKey)); + + var extensionConfig = new AzureCognitiveSearchChatExtensionConfiguration() + { + SearchEndpoint = new Uri(searchEndpoint), + Key = searchApiKey, + IndexName = searchIndexName, + QueryType = AzureCognitiveSearchQueryType.VectorSimpleHybrid, // Use VectorSimpleHybrid to get the best of both vector and keyword types. + EmbeddingEndpoint = new Uri(embeddingsEndpoint), + EmbeddingKey = openAIKey, + }; + _options = new ChatCompletionsOptions() + { + DeploymentName = openAIDeploymentName, + + AzureExtensionsOptions = new() + { + Extensions = { extensionConfig } + } + }; + ClearConversation(); + } + + public void ClearConversation() + { + _options.Messages.Clear(); + _options.Messages.Add(new ChatRequestSystemMessage(_systemPrompt)); + } + + public async Task GetChatCompletionsStreamingAsync(string userPrompt, Action? 
callback = null) + { + _options.Messages.Add(new ChatRequestUserMessage(userPrompt)); + + var responseContent = string.Empty; + var response = await _client.GetChatCompletionsStreamingAsync(_options); + await foreach (var update in response.EnumerateValues()) + { + var content = update.ContentUpdate; + if (update.FinishReason == CompletionsFinishReason.ContentFiltered) + { + content = $"{content}\nWARNING: Content filtered!"; + } + else if (update.FinishReason == CompletionsFinishReason.TokenLimitReached) + { + content = $"{content}\nERROR: Exceeded token limit!"; + } + + if (string.IsNullOrEmpty(content)) continue; + + responseContent += content; + if (callback != null) + { + callback(update); + } + } + + _options.Messages.Add(new ChatRequestAssistantMessage(responseContent)); + return responseContent; + } + + private string _systemPrompt; + private OpenAIClient _client; + private ChatCompletionsOptions _options; +} \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-with-data/Program.cs b/src/ai/.x/templates/openai-chat-with-data/Program.cs new file mode 100644 index 00000000..730d8edb --- /dev/null +++ b/src/ai/.x/templates/openai-chat-with-data/Program.cs @@ -0,0 +1,46 @@ +<#@ template hostspecific="true" #> +<#@ output extension=".cs" encoding="utf-8" #> +<#@ parameter type="System.String" name="ClassName" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_ENDPOINT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_KEY" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_CHAT_DEPLOYMENT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_SYSTEM_PROMPT" #> +<#@ parameter type="System.String" name="AZURE_AI_SEARCH_ENDPOINT" #> +<#@ parameter type="System.String" name="AZURE_AI_SEARCH_KEY" #> +<#@ parameter type="System.String" name="AZURE_AI_SEARCH_INDEX_NAME" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_EMBEDDING_DEPLOYMENT" #> +<#@ parameter type="System.String" name="OPENAI_API_VERSION" #> +using 
System; + +public class Program +{ + public static async Task Main(string[] args) + { + var azureOpenApiKey = Environment.GetEnvironmentVariable("AZURE_OPENAI_KEY") ?? "<#= AZURE_OPENAI_KEY #>"; + var azureOpenAIEndpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? "<#= AZURE_OPENAI_ENDPOINT #>"; + var azureOpenAIDeploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_CHAT_DEPLOYMENT") ?? "<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>"; + var systemPrompt = Environment.GetEnvironmentVariable("AZURE_OPENAI_SYSTEM_PROMPT") ?? "<#= AZURE_OPENAI_SYSTEM_PROMPT #>"; + var searchEndpoint = Environment.GetEnvironmentVariable("AZURE_AI_SEARCH_ENDPOINT") ?? "<#= AZURE_AI_SEARCH_ENDPOINT #>"; + var searchApiKey = Environment.GetEnvironmentVariable("AZURE_AI_SEARCH_KEY") ?? "<#= AZURE_AI_SEARCH_KEY #>"; + var searchIndexName = Environment.GetEnvironmentVariable("AZURE_AI_SEARCH_INDEX_NAME") ?? "<#= AZURE_AI_SEARCH_INDEX_NAME #>"; + var embeddingsDeployment = Environment.GetEnvironmentVariable("AZURE_OPENAI_EMBEDDING_DEPLOYMENT") ?? "<#= AZURE_OPENAI_EMBEDDING_DEPLOYMENT #>"; + var azureOpenAIApiVersion = Environment.GetEnvironmentVariable("OPENAI_API_VERSION") ?? 
"<#= OPENAI_API_VERSION #>"; + var embeddingsEndpoint = $"{azureOpenAIEndpoint.Trim('/')}/openai/deployments/{embeddingsDeployment}/embeddings?api-version={azureOpenAIApiVersion}"; + + var chat = new <#= ClassName #>( + systemPrompt, azureOpenApiKey, azureOpenAIEndpoint, azureOpenAIDeploymentName, searchEndpoint, searchApiKey, searchIndexName, embeddingsEndpoint); + + while (true) + { + Console.Write("User: "); + var userPrompt = Console.ReadLine(); + if (string.IsNullOrEmpty(userPrompt) || userPrompt == "exit") break; + + Console.Write("\nAssistant: "); + var response = await chat.GetChatCompletionsStreamingAsync(userPrompt, update => + Console.Write(update.ContentUpdate) + ); + Console.WriteLine("\n"); + } + } +} \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-with-data/_.json b/src/ai/.x/templates/openai-chat-with-data/_.json new file mode 100644 index 00000000..84c6fbbe --- /dev/null +++ b/src/ai/.x/templates/openai-chat-with-data/_.json @@ -0,0 +1,15 @@ +{ + "_Name": "OpenAI Chat w/ your own data", + "_Language": "C#", + "AICLIExtensionReferencePath": "", + "ClassName": "OpenAIChatCompletionsWithDataStreamingClass", + "AZURE_OPENAI_KEY": "", + "AZURE_OPENAI_ENDPOINT": "", + "AZURE_OPENAI_SYSTEM_PROMPT": "", + "AZURE_AI_SEARCH_ENDPOINT": "", + "AZURE_AI_SEARCH_KEY": "", + "AZURE_AI_SEARCH_INDEX_NAME": "", + "AZURE_OPENAI_CHAT_DEPLOYMENT": "", + "AZURE_OPENAI_EMBEDDING_DEPLOYMENT": "", + "OPENAI_API_VERSION": "" +} \ No newline at end of file From eb67add1009245eac8ab379dec6f2f4865b5ab80 Mon Sep 17 00:00:00 2001 From: Rob Chambers Date: Fri, 12 Jan 2024 15:29:47 -0800 Subject: [PATCH 04/30] =?UTF-8?q?added=20better=20`dev=20new=20list`=20out?= =?UTF-8?q?put,=20grouped=20by=20long=20name,=20and=20added=20`=E2=80=A6?= =?UTF-8?q?=20(#145)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * added better `dev new list` output, grouped by long name, and added `--language` and shortcuts for each lang, 
e.g. `--c#` * updated dev new with --language support; improved output errors with dev new * rename py files to reflect they're openai samples/templates * move directory * renamed and moved some stuff around... * small refactor --- src/ai/.x/templates/helper_functions/_.json | 1 + .../OpenAIChatCompletions.csproj._ | 0 .../OpenAIChatCompletionsClass.cs | 0 .../Program.cs | 0 .../{openai-chat => openai-chat-cs}/_.json | 3 +- src/ai/.x/templates/openai-chat-go/_.json | 3 +- src/ai/.x/templates/openai-chat-java/_.json | 3 +- src/ai/.x/templates/openai-chat-js/_.json | 3 +- src/ai/.x/templates/openai-chat-py/_.json | 3 +- .../OpenAIChatCompletionsStreaming.csproj._ | 0 .../OpenAIChatCompletionsStreamingClass.cs | 0 .../Program.cs | 0 .../_.json | 3 +- .../templates/openai-chat-streaming-go/_.json | 3 +- .../openai-chat-streaming-java/_.json | 3 +- .../templates/openai-chat-streaming-js/_.json | 3 +- .../templates/openai-chat-streaming-py/_.json | 3 +- .../openai-chat-streaming-py/main.py | 4 +- ...y => openai_chat_completions_streaming.py} | 2 +- ...nAIChatCompletionsWithDataStreamingClass._ | 0 ...AIChatCompletionsWithDataStreamingClass.cs | 0 .../Program.cs | 6 +- .../_.json | 12 +- .../openai-functions-streaming-cs/_.json | 3 +- .../openai-functions-streaming-go/_.json | 3 +- .../openai-functions-streaming-js/_.json | 3 +- .../openai-functions-streaming-py/_.json | 3 +- .../openai-functions-streaming-py/main.py | 6 +- ...enai_chat_completions_custom_functions.py} | 0 ...i_chat_completions_functions_streaming.py} | 2 +- src/ai/.x/templates/openai-webpage/_.json | 5 +- src/ai/commands/dev_command.cs | 59 ++++++-- src/ai/commands/parsers/dev_command_parser.cs | 1 + .../tokens/programming_language_token.cs | 46 +++++++ .../template_extension/TemplateFactory.cs | 130 +++++++++++++----- 35 files changed, 237 insertions(+), 79 deletions(-) rename src/ai/.x/templates/{openai-chat => openai-chat-cs}/OpenAIChatCompletions.csproj._ (100%) rename 
src/ai/.x/templates/{openai-chat => openai-chat-cs}/OpenAIChatCompletionsClass.cs (100%) rename src/ai/.x/templates/{openai-chat => openai-chat-cs}/Program.cs (100%) rename src/ai/.x/templates/{openai-chat => openai-chat-cs}/_.json (83%) rename src/ai/.x/templates/{openai-chat-streaming => openai-chat-streaming-cs}/OpenAIChatCompletionsStreaming.csproj._ (100%) rename src/ai/.x/templates/{openai-chat-streaming => openai-chat-streaming-cs}/OpenAIChatCompletionsStreamingClass.cs (100%) rename src/ai/.x/templates/{openai-chat-streaming => openai-chat-streaming-cs}/Program.cs (100%) rename src/ai/.x/templates/{openai-chat-streaming => openai-chat-streaming-cs}/_.json (79%) rename src/ai/.x/templates/openai-chat-streaming-py/{chat_completions_streaming.py => openai_chat_completions_streaming.py} (97%) rename src/ai/.x/templates/{openai-chat-with-data => openai-data-ai-search-cs}/OpenAIChatCompletionsWithDataStreamingClass._ (100%) rename src/ai/.x/templates/{openai-chat-with-data => openai-data-ai-search-cs}/OpenAIChatCompletionsWithDataStreamingClass.cs (100%) rename src/ai/.x/templates/{openai-chat-with-data => openai-data-ai-search-cs}/Program.cs (95%) rename src/ai/.x/templates/{openai-chat-with-data => openai-data-ai-search-cs}/_.json (79%) rename src/ai/.x/templates/openai-functions-streaming-py/{chat_completions_custom_functions.py => openai_chat_completions_custom_functions.py} (100%) rename src/ai/.x/templates/openai-functions-streaming-py/{chat_completions_functions_streaming.py => openai_chat_completions_functions_streaming.py} (98%) create mode 100644 src/common/details/named_values/tokens/programming_language_token.cs diff --git a/src/ai/.x/templates/helper_functions/_.json b/src/ai/.x/templates/helper_functions/_.json index ce746bb2..311ba8b0 100644 --- a/src/ai/.x/templates/helper_functions/_.json +++ b/src/ai/.x/templates/helper_functions/_.json @@ -1,5 +1,6 @@ { "_Name": "Helper Function Class Library", + "_Short": "helper-functions", "_Language": "C#", 
"ClassName": "HelperFunctionClass", "AICLIExtensionReferencePath": "" diff --git a/src/ai/.x/templates/openai-chat/OpenAIChatCompletions.csproj._ b/src/ai/.x/templates/openai-chat-cs/OpenAIChatCompletions.csproj._ similarity index 100% rename from src/ai/.x/templates/openai-chat/OpenAIChatCompletions.csproj._ rename to src/ai/.x/templates/openai-chat-cs/OpenAIChatCompletions.csproj._ diff --git a/src/ai/.x/templates/openai-chat/OpenAIChatCompletionsClass.cs b/src/ai/.x/templates/openai-chat-cs/OpenAIChatCompletionsClass.cs similarity index 100% rename from src/ai/.x/templates/openai-chat/OpenAIChatCompletionsClass.cs rename to src/ai/.x/templates/openai-chat-cs/OpenAIChatCompletionsClass.cs diff --git a/src/ai/.x/templates/openai-chat/Program.cs b/src/ai/.x/templates/openai-chat-cs/Program.cs similarity index 100% rename from src/ai/.x/templates/openai-chat/Program.cs rename to src/ai/.x/templates/openai-chat-cs/Program.cs diff --git a/src/ai/.x/templates/openai-chat/_.json b/src/ai/.x/templates/openai-chat-cs/_.json similarity index 83% rename from src/ai/.x/templates/openai-chat/_.json rename to src/ai/.x/templates/openai-chat-cs/_.json index 1fb11394..4b49d063 100644 --- a/src/ai/.x/templates/openai-chat/_.json +++ b/src/ai/.x/templates/openai-chat-cs/_.json @@ -1,5 +1,6 @@ { - "_Name": "OpenAI Chat Completions in C#", + "_Name": "OpenAI Chat Completions", + "_Short": "openai-chat", "_Language": "C#", "ClassName": "OpenAIChatCompletionsClass", "AZURE_OPENAI_ENDPOINT": "", diff --git a/src/ai/.x/templates/openai-chat-go/_.json b/src/ai/.x/templates/openai-chat-go/_.json index 99fca6e3..63aeffdd 100644 --- a/src/ai/.x/templates/openai-chat-go/_.json +++ b/src/ai/.x/templates/openai-chat-go/_.json @@ -1,5 +1,6 @@ { - "_Name": "OpenAI Chat Completions in Go", + "_Name": "OpenAI Chat Completions", + "_Short": "openai-chat", "_Language": "Go", "ClassName": "OpenAIChatCompletionsExample", "AZURE_OPENAI_ENDPOINT": "", diff --git 
a/src/ai/.x/templates/openai-chat-java/_.json b/src/ai/.x/templates/openai-chat-java/_.json index c706443f..df715882 100644 --- a/src/ai/.x/templates/openai-chat-java/_.json +++ b/src/ai/.x/templates/openai-chat-java/_.json @@ -1,5 +1,6 @@ { - "_Name": "OpenAI Chat Completions in Java", + "_Name": "OpenAI Chat Completions", + "_Short": "openai-chat", "_Language": "Java", "AZURE_OPENAI_ENDPOINT": "", "AZURE_OPENAI_KEY": "", diff --git a/src/ai/.x/templates/openai-chat-js/_.json b/src/ai/.x/templates/openai-chat-js/_.json index e91e08b4..67960b6f 100644 --- a/src/ai/.x/templates/openai-chat-js/_.json +++ b/src/ai/.x/templates/openai-chat-js/_.json @@ -1,5 +1,6 @@ { - "_Name": "OpenAI Chat Completions in JavaScript", + "_Name": "OpenAI Chat Completions", + "_Short": "openai-chat", "_Language": "JavaScript", "ClassName": "OpenAIChatCompletionsClass", "AZURE_OPENAI_ENDPOINT": "", diff --git a/src/ai/.x/templates/openai-chat-py/_.json b/src/ai/.x/templates/openai-chat-py/_.json index 27e5f6b8..c8ea01f4 100644 --- a/src/ai/.x/templates/openai-chat-py/_.json +++ b/src/ai/.x/templates/openai-chat-py/_.json @@ -1,5 +1,6 @@ { - "_Name": "OpenAI Chat Completions in Python", + "_Name": "OpenAI Chat Completions", + "_Short": "openai-chat", "_Language": "Python", "AZURE_OPENAI_ENDPOINT": "", "AZURE_OPENAI_KEY": "", diff --git a/src/ai/.x/templates/openai-chat-streaming/OpenAIChatCompletionsStreaming.csproj._ b/src/ai/.x/templates/openai-chat-streaming-cs/OpenAIChatCompletionsStreaming.csproj._ similarity index 100% rename from src/ai/.x/templates/openai-chat-streaming/OpenAIChatCompletionsStreaming.csproj._ rename to src/ai/.x/templates/openai-chat-streaming-cs/OpenAIChatCompletionsStreaming.csproj._ diff --git a/src/ai/.x/templates/openai-chat-streaming/OpenAIChatCompletionsStreamingClass.cs b/src/ai/.x/templates/openai-chat-streaming-cs/OpenAIChatCompletionsStreamingClass.cs similarity index 100% rename from 
src/ai/.x/templates/openai-chat-streaming/OpenAIChatCompletionsStreamingClass.cs rename to src/ai/.x/templates/openai-chat-streaming-cs/OpenAIChatCompletionsStreamingClass.cs diff --git a/src/ai/.x/templates/openai-chat-streaming/Program.cs b/src/ai/.x/templates/openai-chat-streaming-cs/Program.cs similarity index 100% rename from src/ai/.x/templates/openai-chat-streaming/Program.cs rename to src/ai/.x/templates/openai-chat-streaming-cs/Program.cs diff --git a/src/ai/.x/templates/openai-chat-streaming/_.json b/src/ai/.x/templates/openai-chat-streaming-cs/_.json similarity index 79% rename from src/ai/.x/templates/openai-chat-streaming/_.json rename to src/ai/.x/templates/openai-chat-streaming-cs/_.json index e873fff3..fb98c135 100644 --- a/src/ai/.x/templates/openai-chat-streaming/_.json +++ b/src/ai/.x/templates/openai-chat-streaming-cs/_.json @@ -1,5 +1,6 @@ { - "_Name": "OpenAI Chat Completions (Streaming) in C#", + "_Name": "OpenAI Chat Completions (Streaming)", + "_Short": "openai-chat-streaming", "_Language": "C#", "ClassName": "OpenAIChatCompletionsStreamingClass", "AZURE_OPENAI_ENDPOINT": "", diff --git a/src/ai/.x/templates/openai-chat-streaming-go/_.json b/src/ai/.x/templates/openai-chat-streaming-go/_.json index e36b1131..fc1cb1c1 100644 --- a/src/ai/.x/templates/openai-chat-streaming-go/_.json +++ b/src/ai/.x/templates/openai-chat-streaming-go/_.json @@ -1,5 +1,6 @@ { - "_Name": "OpenAI Chat Completions (Streaming) in Go", + "_Name": "OpenAI Chat Completions (Streaming)", + "_Short": "openai-chat-streaming", "_Language": "Go", "ClassName": "OpenAIChatCompletionsStreamingExample", "AZURE_OPENAI_ENDPOINT": "", diff --git a/src/ai/.x/templates/openai-chat-streaming-java/_.json b/src/ai/.x/templates/openai-chat-streaming-java/_.json index 8a4336b2..3716658b 100644 --- a/src/ai/.x/templates/openai-chat-streaming-java/_.json +++ b/src/ai/.x/templates/openai-chat-streaming-java/_.json @@ -1,5 +1,6 @@ { - "_Name": "OpenAI Chat Completions (Streaming) in Java", 
+ "_Name": "OpenAI Chat Completions (Streaming)", + "_Short": "openai-chat-streaming", "_Language": "Java", "AZURE_OPENAI_ENDPOINT": "", "AZURE_OPENAI_KEY": "", diff --git a/src/ai/.x/templates/openai-chat-streaming-js/_.json b/src/ai/.x/templates/openai-chat-streaming-js/_.json index b14e23cc..b635a394 100644 --- a/src/ai/.x/templates/openai-chat-streaming-js/_.json +++ b/src/ai/.x/templates/openai-chat-streaming-js/_.json @@ -1,5 +1,6 @@ { - "_Name": "OpenAI Chat Completions (Streaming) in JavaScript", + "_Name": "OpenAI Chat Completions (Streaming)", + "_Short": "openai-chat-streaming", "_Language": "JavaScript", "ClassName": "OpenAIChatCompletionsStreamingClass", "AZURE_OPENAI_ENDPOINT": "", diff --git a/src/ai/.x/templates/openai-chat-streaming-py/_.json b/src/ai/.x/templates/openai-chat-streaming-py/_.json index 3526d90b..9d98d550 100644 --- a/src/ai/.x/templates/openai-chat-streaming-py/_.json +++ b/src/ai/.x/templates/openai-chat-streaming-py/_.json @@ -1,5 +1,6 @@ { - "_Name": "OpenAI Chat Completions (Streaming) in Python", + "_Name": "OpenAI Chat Completions (Streaming)", + "_Short": "openai-chat-streaming", "_Language": "Python", "AZURE_OPENAI_ENDPOINT": "", "AZURE_OPENAI_KEY": "", diff --git a/src/ai/.x/templates/openai-chat-streaming-py/main.py b/src/ai/.x/templates/openai-chat-streaming-py/main.py index 47c287d8..6f329791 100644 --- a/src/ai/.x/templates/openai-chat-streaming-py/main.py +++ b/src/ai/.x/templates/openai-chat-streaming-py/main.py @@ -5,7 +5,7 @@ <#@ parameter type="System.String" name="AZURE_OPENAI_API_VERSION" #> <#@ parameter type="System.String" name="AZURE_OPENAI_CHAT_DEPLOYMENT" #> <#@ parameter type="System.String" name="AZURE_OPENAI_SYSTEM_PROMPT" #> -from chat_completions_streaming import ChatCompletionsStreaming +from openai_chat_completions_streaming import OpenAIChatCompletionsStreaming import os def main(): @@ -15,7 +15,7 @@ def main(): deployment_name = os.getenv('AZURE_OPENAI_CHAT_DEPLOYMENT', '<#= 
AZURE_OPENAI_CHAT_DEPLOYMENT #>') system_prompt = os.getenv('AZURE_OPENAI_SYSTEM_PROMPT', '<#= AZURE_OPENAI_SYSTEM_PROMPT #>') - chat = ChatCompletionsStreaming(system_prompt, endpoint, azure_api_key, api_version, deployment_name) + chat = OpenAIChatCompletionsStreaming(system_prompt, endpoint, azure_api_key, api_version, deployment_name) while True: user_input = input('User: ') diff --git a/src/ai/.x/templates/openai-chat-streaming-py/chat_completions_streaming.py b/src/ai/.x/templates/openai-chat-streaming-py/openai_chat_completions_streaming.py similarity index 97% rename from src/ai/.x/templates/openai-chat-streaming-py/chat_completions_streaming.py rename to src/ai/.x/templates/openai-chat-streaming-py/openai_chat_completions_streaming.py index 984f6370..32c4dc11 100644 --- a/src/ai/.x/templates/openai-chat-streaming-py/chat_completions_streaming.py +++ b/src/ai/.x/templates/openai-chat-streaming-py/openai_chat_completions_streaming.py @@ -1,6 +1,6 @@ from openai import AzureOpenAI -class ChatCompletionsStreaming: +class OpenAIChatCompletionsStreaming: def __init__(self, system_prompt, endpoint, azure_api_key, azure_api_version, deployment_name): self.system_prompt = system_prompt self.endpoint = endpoint diff --git a/src/ai/.x/templates/openai-chat-with-data/OpenAIChatCompletionsWithDataStreamingClass._ b/src/ai/.x/templates/openai-data-ai-search-cs/OpenAIChatCompletionsWithDataStreamingClass._ similarity index 100% rename from src/ai/.x/templates/openai-chat-with-data/OpenAIChatCompletionsWithDataStreamingClass._ rename to src/ai/.x/templates/openai-data-ai-search-cs/OpenAIChatCompletionsWithDataStreamingClass._ diff --git a/src/ai/.x/templates/openai-chat-with-data/OpenAIChatCompletionsWithDataStreamingClass.cs b/src/ai/.x/templates/openai-data-ai-search-cs/OpenAIChatCompletionsWithDataStreamingClass.cs similarity index 100% rename from src/ai/.x/templates/openai-chat-with-data/OpenAIChatCompletionsWithDataStreamingClass.cs rename to 
src/ai/.x/templates/openai-data-ai-search-cs/OpenAIChatCompletionsWithDataStreamingClass.cs diff --git a/src/ai/.x/templates/openai-chat-with-data/Program.cs b/src/ai/.x/templates/openai-data-ai-search-cs/Program.cs similarity index 95% rename from src/ai/.x/templates/openai-chat-with-data/Program.cs rename to src/ai/.x/templates/openai-data-ai-search-cs/Program.cs index 730d8edb..0a827ae7 100644 --- a/src/ai/.x/templates/openai-chat-with-data/Program.cs +++ b/src/ai/.x/templates/openai-data-ai-search-cs/Program.cs @@ -1,15 +1,15 @@ <#@ template hostspecific="true" #> <#@ output extension=".cs" encoding="utf-8" #> <#@ parameter type="System.String" name="ClassName" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_API_VERSION" #> <#@ parameter type="System.String" name="AZURE_OPENAI_ENDPOINT" #> <#@ parameter type="System.String" name="AZURE_OPENAI_KEY" #> <#@ parameter type="System.String" name="AZURE_OPENAI_CHAT_DEPLOYMENT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_EMBEDDING_DEPLOYMENT" #> <#@ parameter type="System.String" name="AZURE_OPENAI_SYSTEM_PROMPT" #> <#@ parameter type="System.String" name="AZURE_AI_SEARCH_ENDPOINT" #> <#@ parameter type="System.String" name="AZURE_AI_SEARCH_KEY" #> <#@ parameter type="System.String" name="AZURE_AI_SEARCH_INDEX_NAME" #> -<#@ parameter type="System.String" name="AZURE_OPENAI_EMBEDDING_DEPLOYMENT" #> -<#@ parameter type="System.String" name="OPENAI_API_VERSION" #> using System; public class Program @@ -24,7 +24,7 @@ public static async Task Main(string[] args) var searchApiKey = Environment.GetEnvironmentVariable("AZURE_AI_SEARCH_KEY") ?? "<#= AZURE_AI_SEARCH_KEY #>"; var searchIndexName = Environment.GetEnvironmentVariable("AZURE_AI_SEARCH_INDEX_NAME") ?? "<#= AZURE_AI_SEARCH_INDEX_NAME #>"; var embeddingsDeployment = Environment.GetEnvironmentVariable("AZURE_OPENAI_EMBEDDING_DEPLOYMENT") ?? 
"<#= AZURE_OPENAI_EMBEDDING_DEPLOYMENT #>"; - var azureOpenAIApiVersion = Environment.GetEnvironmentVariable("OPENAI_API_VERSION") ?? "<#= OPENAI_API_VERSION #>"; + var azureOpenAIApiVersion = Environment.GetEnvironmentVariable("AZURE_OPENAI_API_VERSION") ?? "<#= AZURE_OPENAI_API_VERSION #>"; var embeddingsEndpoint = $"{azureOpenAIEndpoint.Trim('/')}/openai/deployments/{embeddingsDeployment}/embeddings?api-version={azureOpenAIApiVersion}"; var chat = new <#= ClassName #>( diff --git a/src/ai/.x/templates/openai-chat-with-data/_.json b/src/ai/.x/templates/openai-data-ai-search-cs/_.json similarity index 79% rename from src/ai/.x/templates/openai-chat-with-data/_.json rename to src/ai/.x/templates/openai-data-ai-search-cs/_.json index 84c6fbbe..f7c2b2b8 100644 --- a/src/ai/.x/templates/openai-chat-with-data/_.json +++ b/src/ai/.x/templates/openai-data-ai-search-cs/_.json @@ -1,15 +1,15 @@ { - "_Name": "OpenAI Chat w/ your own data", + "_Name": "OpenAI Chat Completions (w/ Data + AI Search)", + "_Short": "openai-data-ai-search", "_Language": "C#", - "AICLIExtensionReferencePath": "", "ClassName": "OpenAIChatCompletionsWithDataStreamingClass", - "AZURE_OPENAI_KEY": "", "AZURE_OPENAI_ENDPOINT": "", + "AZURE_OPENAI_KEY": "", + "AZURE_OPENAI_CHAT_DEPLOYMENT": "", + "AZURE_OPENAI_EMBEDDING_DEPLOYMENT": "", "AZURE_OPENAI_SYSTEM_PROMPT": "", "AZURE_AI_SEARCH_ENDPOINT": "", "AZURE_AI_SEARCH_KEY": "", "AZURE_AI_SEARCH_INDEX_NAME": "", - "AZURE_OPENAI_CHAT_DEPLOYMENT": "", - "AZURE_OPENAI_EMBEDDING_DEPLOYMENT": "", - "OPENAI_API_VERSION": "" + "AZURE_OPENAI_API_VERSION": "" } \ No newline at end of file diff --git a/src/ai/.x/templates/openai-functions-streaming-cs/_.json b/src/ai/.x/templates/openai-functions-streaming-cs/_.json index 7cead786..1e753473 100644 --- a/src/ai/.x/templates/openai-functions-streaming-cs/_.json +++ b/src/ai/.x/templates/openai-functions-streaming-cs/_.json @@ -1,5 +1,6 @@ { - "_Name": "OpenAI Chat Completions (Functions) in C#", + "_Name": "OpenAI 
Chat Completions (w/ Functions)", + "_Short": "openai-functions-streaming", "_Language": "C#", "ClassName": "OpenAIChatCompletionsFunctionsStreamingClass", "AZURE_OPENAI_ENDPOINT": "", diff --git a/src/ai/.x/templates/openai-functions-streaming-go/_.json b/src/ai/.x/templates/openai-functions-streaming-go/_.json index 6f686170..a8967394 100644 --- a/src/ai/.x/templates/openai-functions-streaming-go/_.json +++ b/src/ai/.x/templates/openai-functions-streaming-go/_.json @@ -1,5 +1,6 @@ { - "_Name": "OpenAI Chat Completions (Functions) in Go", + "_Name": "OpenAI Chat Completions (w/ Functions)", + "_Short": "openai-functions-streaming", "_Language": "Go", "ClassName": "OpenAIChatCompletionsStreamingExample", "AZURE_OPENAI_ENDPOINT": "", diff --git a/src/ai/.x/templates/openai-functions-streaming-js/_.json b/src/ai/.x/templates/openai-functions-streaming-js/_.json index 0786c0a9..88126255 100644 --- a/src/ai/.x/templates/openai-functions-streaming-js/_.json +++ b/src/ai/.x/templates/openai-functions-streaming-js/_.json @@ -1,5 +1,6 @@ { - "_Name": "OpenAI Chat Completions (Functions) in JavaScript", + "_Name": "OpenAI Chat Completions (w/ Functions)", + "_Short": "openai-functions-streaming", "_Language": "JavaScript", "ClassName": "OpenAIChatCompletionsFunctionsStreamingClass", "AZURE_OPENAI_ENDPOINT": "", diff --git a/src/ai/.x/templates/openai-functions-streaming-py/_.json b/src/ai/.x/templates/openai-functions-streaming-py/_.json index 5325ff87..372dbeea 100644 --- a/src/ai/.x/templates/openai-functions-streaming-py/_.json +++ b/src/ai/.x/templates/openai-functions-streaming-py/_.json @@ -1,5 +1,6 @@ { - "_Name": "OpenAI Chat Completions (Functions) in Python", + "_Name": "OpenAI Chat Completions (w/ Functions)", + "_Short": "openai-functions-streaming", "_Language": "Python", "AZURE_OPENAI_ENDPOINT": "", "AZURE_OPENAI_KEY": "", diff --git a/src/ai/.x/templates/openai-functions-streaming-py/main.py b/src/ai/.x/templates/openai-functions-streaming-py/main.py index 
ce299d78..7199959b 100644 --- a/src/ai/.x/templates/openai-functions-streaming-py/main.py +++ b/src/ai/.x/templates/openai-functions-streaming-py/main.py @@ -5,8 +5,8 @@ <#@ parameter type="System.String" name="AZURE_OPENAI_API_VERSION" #> <#@ parameter type="System.String" name="AZURE_OPENAI_CHAT_DEPLOYMENT" #> <#@ parameter type="System.String" name="AZURE_OPENAI_SYSTEM_PROMPT" #> -from chat_completions_custom_functions import factory -from chat_completions_functions_streaming import ChatCompletionsFunctionsStreaming +from openai_chat_completions_custom_functions import factory +from openai_chat_completions_functions_streaming import OpenAIChatCompletionsFunctionsStreaming import os def main(): @@ -16,7 +16,7 @@ def main(): deployment_name = os.getenv('AZURE_OPENAI_CHAT_DEPLOYMENT', '<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>') system_prompt = os.getenv('AZURE_OPENAI_SYSTEM_PROMPT', '<#= AZURE_OPENAI_SYSTEM_PROMPT #>') - chat = ChatCompletionsFunctionsStreaming(system_prompt, endpoint, azure_api_key, api_version, deployment_name, factory) + chat = OpenAIChatCompletionsFunctionsStreaming(system_prompt, endpoint, azure_api_key, api_version, deployment_name, factory) while True: user_input = input('User: ') diff --git a/src/ai/.x/templates/openai-functions-streaming-py/chat_completions_custom_functions.py b/src/ai/.x/templates/openai-functions-streaming-py/openai_chat_completions_custom_functions.py similarity index 100% rename from src/ai/.x/templates/openai-functions-streaming-py/chat_completions_custom_functions.py rename to src/ai/.x/templates/openai-functions-streaming-py/openai_chat_completions_custom_functions.py diff --git a/src/ai/.x/templates/openai-functions-streaming-py/chat_completions_functions_streaming.py b/src/ai/.x/templates/openai-functions-streaming-py/openai_chat_completions_functions_streaming.py similarity index 98% rename from src/ai/.x/templates/openai-functions-streaming-py/chat_completions_functions_streaming.py rename to 
src/ai/.x/templates/openai-functions-streaming-py/openai_chat_completions_functions_streaming.py index 09bbb8bf..ec13f85a 100644 --- a/src/ai/.x/templates/openai-functions-streaming-py/chat_completions_functions_streaming.py +++ b/src/ai/.x/templates/openai-functions-streaming-py/openai_chat_completions_functions_streaming.py @@ -1,7 +1,7 @@ from openai import AzureOpenAI from function_call_context import FunctionCallContext -class ChatCompletionsFunctionsStreaming: +class OpenAIChatCompletionsFunctionsStreaming: def __init__(self, system_prompt, endpoint, azure_api_key, azure_api_version, deployment_name, function_factory): self.system_prompt = system_prompt self.endpoint = endpoint diff --git a/src/ai/.x/templates/openai-webpage/_.json b/src/ai/.x/templates/openai-webpage/_.json index de5efd3d..ad03afed 100644 --- a/src/ai/.x/templates/openai-webpage/_.json +++ b/src/ai/.x/templates/openai-webpage/_.json @@ -1,6 +1,7 @@ { - "_Name": "OpenAI Webpage (Streaming + Functions)", - "_Language": "Javascript", + "_Name": "OpenAI Webpage (w/ Functions)", + "_Short": "openai-webpage", + "_Language": "JavaScript", "ClassName": "OpenAIChatCompletionsFunctionsStreamingClass", "AZURE_OPENAI_ENDPOINT": "", "AZURE_OPENAI_KEY": "", diff --git a/src/ai/commands/dev_command.cs b/src/ai/commands/dev_command.cs index 59ca3525..fae52eaa 100644 --- a/src/ai/commands/dev_command.cs +++ b/src/ai/commands/dev_command.cs @@ -62,10 +62,11 @@ private void DoCommand(string command) private void DoNew() { var newWhat = string.Join(" ", ArgXToken.GetArgs(_values)); + var language = ProgrammingLanguageToken.Data().GetOrDefault(_values); switch (newWhat) { case ".env": DoNewEnv(); break; - default: DoNewTemplate(newWhat); break; + default: DoNewTemplate(newWhat, language); break; } } @@ -80,18 +81,14 @@ private void DoNewEnv() ConfigEnvironmentHelpers.PrintEnvironment(env); } - private void DoNewTemplate(string templateName) + private void DoNewTemplate(string templateName, string language) { var 
filesInDirAlready = FileHelpers.FindFiles(".", "*").Count() > 0; - var outputDirectory = !filesInDirAlready ? "." : templateName; + var outputDirectory = !filesInDirAlready ? "." : templateName + ProgrammingLanguageToken.GetSuffix(language); var instructions = InstructionsToken.Data().GetOrDefault(_values); - if (!TemplateFactory.GenerateTemplateFiles(templateName, instructions, outputDirectory, _quiet, _verbose)) - { - _values.AddThrowError("WARNING:", $"Template '{templateName}' not found", - "", - "TRY:", $"{Program.Name} dev new list"); - } + var found = TemplateFactory.GenerateTemplateFiles(templateName, language, instructions, outputDirectory, _quiet, _verbose); + CheckGenerateTemplateFileWarnings(templateName, language, found); } private void DoNewList() @@ -161,6 +158,50 @@ private void DisplayBanner(string which) } } + private void CheckGenerateTemplateFileWarnings(string templateName, string language, object check) + { + if (check != null && check is TemplateFactory.Group) + { + var group = check as TemplateFactory.Group; + var groupHasZeroLanguages = string.IsNullOrEmpty(group.Languages); + var groupHasMultipleLanguages = group.Languages.Contains(','); + var groupHasOneLanguage = !groupHasZeroLanguages && !groupHasMultipleLanguages; + + var languageSupplied = !string.IsNullOrEmpty(language); + if (languageSupplied) + { + if (groupHasZeroLanguages || groupHasOneLanguage) + { + _values.AddThrowError("WARNING:", $"Template '{templateName}' does not support language '{language}'.", + "", + "TRY:", $"{Program.Name} dev new {templateName}"); + } + else + { + _values.AddThrowError("WARNING:", $"Template '{templateName}' doesn't support language '{language}'.", + "", + "TRY:", $"{Program.Name} dev new {templateName} --LANGUAGE", + "", + "WHERE:", $"LANGUAGE is one of {group.Languages}"); + } + } + else + { + _values.AddThrowError("WARNING:", $"Template '{templateName}' supports multiple languages.", + "", + "TRY:", $"{Program.Name} dev new {templateName} 
--LANGUAGE", + "", + "WHERE:", $"LANGUAGE is one of {group.Languages}"); + } + } + if (check == null) + { + _values.AddThrowError("WARNING:", $"Template '{templateName}' not found.", + "", + "TRY:", $"{Program.Name} dev new list"); + } + } + private readonly bool _quiet; private readonly bool _verbose; } diff --git a/src/ai/commands/parsers/dev_command_parser.cs b/src/ai/commands/parsers/dev_command_parser.cs index 31f124dd..ad9281fa 100644 --- a/src/ai/commands/parsers/dev_command_parser.cs +++ b/src/ai/commands/parsers/dev_command_parser.cs @@ -79,6 +79,7 @@ public CommonDevNamedValueTokenParsers() : base( new CommonDevNamedValueTokenParsers(), ArgXToken.Parser(), InstructionsToken.Parser(), + ProgrammingLanguageToken.Parser(), }; private static INamedValueTokenParser[] _devShellParsers = { diff --git a/src/common/details/named_values/tokens/programming_language_token.cs b/src/common/details/named_values/tokens/programming_language_token.cs new file mode 100644 index 00000000..f8f0d80f --- /dev/null +++ b/src/common/details/named_values/tokens/programming_language_token.cs @@ -0,0 +1,46 @@ +// +// Copyright (c) Microsoft. All rights reserved. +// Licensed under the MIT license. See LICENSE.md file in the project root for full license information. 
+// + +namespace Azure.AI.Details.Common.CLI +{ + public class ProgrammingLanguageToken + { + public static string GetExtension(string language) + { + return language?.ToLower() switch + { + "c#" => ".cs", + "go" => ".go", + "java" => ".java", + "javascript" => ".js", + "python" => ".py", + _ => string.Empty + }; + } + + public static string GetSuffix(string language) + { + return GetExtension(language).Replace(".", "-"); + } + + public static NamedValueTokenData Data() => new NamedValueTokenData(_optionName, _fullName, _optionExample, _requiredDisplayName); + public static INamedValueTokenParser Parser() => new NamedValueTokenParserList( + new NamedValueTokenParser(_optionName, _fullName, "01", "1", "C#;c#;cs;Go;go;Java;java;JavaScript;javascript;js;Python;python;py"), + new NamedValueTokenParser("--C#", "programming.language.csharp", "001", "0", null, null, "C#", _fullName), + new NamedValueTokenParser("--CS", "programming.language.csharp", "001", "0", null, null, "C#", _fullName), + new NamedValueTokenParser("--Go", "programming.language.go", "001", "0", null, null, "Go", _fullName), + new NamedValueTokenParser("--Java", "programming.language.java", "001", "0", null, null, "Java", _fullName), + new NamedValueTokenParser("--JavaScript", "programming.language.javascript", "001", "0", null, null, "JavaScript", _fullName), + new NamedValueTokenParser("--JS", "programming.language.javascript", "001", "0", null, null, "JavaScript", _fullName), + new NamedValueTokenParser("--Python", "programming.language.python", "001", "0", null, null, "Python", _fullName), + new NamedValueTokenParser("--PY", "programming.language.python", "001", "0", null, null, "Python", _fullName) + ); + + private const string _requiredDisplayName = "programming language"; + private const string _optionName = "--language"; + private const string _optionExample = "LANGUAGE"; + private const string _fullName = "programming.language"; + } +} diff --git 
a/src/extensions/template_extension/TemplateFactory.cs b/src/extensions/template_extension/TemplateFactory.cs index 43401b2a..bb26a79e 100644 --- a/src/extensions/template_extension/TemplateFactory.cs +++ b/src/extensions/template_extension/TemplateFactory.cs @@ -14,67 +14,68 @@ namespace Azure.AI.Details.Common.CLI.Extensions.Templates { public class TemplateFactory { - public static bool ListTemplates() + public class Item { - var root = FileHelpers.FileNameFromResourceName("templates") + "/"; - var files = FileHelpers.FindFilesInTemplatePath("*", null).ToList(); - - var templateShortNames = files - .Select(x => x.Replace(root, string.Empty)) - .Where(x => x.EndsWith("_.json")) - .Select(x => x.Split(new char[] { '\\', '/' }).FirstOrDefault()) - .Where(x => x != null) - .Select(x => x!) - .Distinct() - .ToList(); - templateShortNames.Sort(); - - var templateLongNames = new List(); - var languages = new List(); - foreach (var item in templateShortNames) - { - var parameters = GetParameters(item); - var longName = parameters["_Name"]; - var language = parameters["_Language"]; + public string LongName { get; set; } = string.Empty; + public string ShortName { get; set; } = string.Empty; + public string Language { get; set; } = string.Empty; + public string UniqueName { get; set; } = string.Empty; + } - templateLongNames.Add(longName); - languages.Add(language); - } + public class Group + { + public string LongName { get; set; } = string.Empty; + public string ShortName { get; set; } = String.Empty; + public string Languages { get { return string.Join(", ", Items.OrderBy(x => x.Language).Select(x => x.Language)); } } + public List Items { get; set; } = new List(); + } - templateShortNames.Insert(0, ".env"); - templateLongNames.Insert(0, "Environment Variables"); - languages.Insert(0, ""); + public static bool ListTemplates() + { + var groups = GetTemplateGroups(); var longNameLabel = "Name"; var shortNameLabel = "Short Name"; var languageLabel = "Language"; var widths 
= new int[3]; - widths[0] = Math.Max(longNameLabel.Length, templateLongNames.Max(x => x.Length)); - widths[1] = Math.Max(shortNameLabel.Length, templateShortNames.Max(x => x.Length)); - widths[2] = Math.Max(languageLabel.Length, languages.Max(x => x.Length)); + widths[0] = Math.Max(longNameLabel.Length, groups.Max(x => x.LongName.Length)); + widths[1] = Math.Max(shortNameLabel.Length, groups.Max(x => x.ShortName.Length)); + widths[2] = Math.Max(languageLabel.Length, groups.Max(x => x.Languages.Length)); Console.WriteLine($"{longNameLabel.PadRight(widths[0])} {shortNameLabel.PadRight(widths[1])} {languageLabel.PadRight(widths[2])}"); Console.WriteLine($"{"-".PadRight(widths[0], '-')} {"-".PadRight(widths[1], '-')} {"-".PadRight(widths[2], '-')}"); - for (int i = 0; i < templateShortNames.Count; i++) + for (int i = 0; i < groups.Count; i++) { - var longName = templateLongNames[i]; - var shortName = templateShortNames[i].Replace('_', '-'); - var language = languages[i]; - Console.WriteLine($"{longName.PadRight(widths[0])} {shortName.PadRight(widths[1])} {language.PadRight(widths[2])}"); + var longName = groups[i].LongName; + var shortName = groups[i].ShortName.Replace('_', '-'); + var languages = groups[i].Languages; + Console.WriteLine($"{longName.PadRight(widths[0])} {shortName.PadRight(widths[1])} {languages.PadRight(widths[2])}"); } return true; } - public static bool GenerateTemplateFiles(string templateName, string instructions, string outputDirectory, bool quiet, bool verbose) + public static object? GenerateTemplateFiles(string templateName, string language, string instructions, string outputDirectory, bool quiet, bool verbose) { - var root = FileHelpers.FileNameFromResourceName("templates") + "/"; + var groups = GetTemplateGroups(); + var groupFound = groups.Where(x => x.ShortName == templateName).FirstOrDefault() + ?? 
groups.Where(x => x.LongName == templateName).FirstOrDefault(); + if (groupFound == null) return null; + + var templateFound = !string.IsNullOrEmpty(language) + ? groupFound.Items.Where(x => x.Language == language).FirstOrDefault() + : groupFound.Items.Count != 1 + ? groupFound.Items.Where(x => x.Language == string.Empty).FirstOrDefault() + : groupFound.Items.FirstOrDefault(); + if (templateFound == null) return groupFound; + + templateName = templateFound.UniqueName; var normalizedTemplateName = templateName.Replace('-', '_'); var generator = new TemplateGenerator(); - + var files = GetTemplateFileNames(normalizedTemplateName, generator).ToList(); if (files.Count() == 0) { @@ -135,6 +136,59 @@ public static bool GenerateTemplateFiles(string templateName, string instruction return true; } + private static List GetTemplateGroups() + { + var root = FileHelpers.FileNameFromResourceName("templates") + "/"; + var files = FileHelpers.FindFilesInTemplatePath("*", null).ToList(); + + var uniqueNames = files + .Select(x => x.Replace(root, string.Empty)) + .Where(x => x.EndsWith("_.json")) + .Select(x => x.Split(new char[] { '\\', '/' }).FirstOrDefault()) + .Where(x => x != null) + .Select(x => x!) 
+ .Distinct() + .ToList(); + uniqueNames.Sort(); + + var templates = new List(); + foreach (var uniqueName in uniqueNames) + { + var parameters = GetParameters(uniqueName); + var longName = parameters["_Name"]; + var shortName = parameters["_Short"]; + var language = parameters["_Language"]; + + templates.Add(new Item() + { + LongName = longName, + ShortName = shortName, + Language = language, + UniqueName = uniqueName + }); + } + + templates.Add(new Item() + { + LongName = "Environment Variables", + ShortName = ".env", + Language = string.Empty, + UniqueName = ".env" + }); + + var grouped = templates + .GroupBy(x => x.LongName) + .Select(x => new Group() + { + LongName = x.Key, + ShortName = x.First().ShortName, + Items = x.ToList() + }) + .OrderBy(x => x.ShortName) + .ToList(); + return grouped; + } + private static IEnumerable GetTemplateFileNames(string templateName, TemplateGenerator generator) { var files = FileHelpers.FindFilesInTemplatePath($"{templateName}/*", null).ToList(); From 8b7f3f98d6982419f0b37e124871740871988665 Mon Sep 17 00:00:00 2001 From: Rob Chambers Date: Fri, 12 Jan 2024 18:01:58 -0800 Subject: [PATCH 05/30] updated names and added filtering (#147) * updated names and added filtering * make smaller width if required (ai dev new list) * only use compact form if output not redirected --- ...nAIChatCompletionsWithDataStreamingClass._ | 0 ...AIChatCompletionsWithDataStreamingClass.cs | 0 .../Program.cs | 0 .../_.json | 2 +- .../FunctionCallContext.cs | 0 .../FunctionFactory.cs | 0 .../HelperFunctionDescriptionAttribute.cs | 0 ...erFunctionParameterDescriptionAttribute.cs | 0 .../OpenAIChatCompletionsCustomFunctions.cs | 0 ...ChatCompletionsFunctionsStreaming.csproj._ | 0 ...IChatCompletionsFunctionsStreamingClass.cs | 0 .../Program.cs | 0 .../_.json | 2 +- .../_.json | 2 +- .../function_call_context.go | 0 .../function_factory.go | 0 .../go.mod | 0 .../main.go | 0 ...penai_chat_completions_custom_functions.go | 0 
...letions_functions_streaming_hello_world.go | 0 .../FunctionCallContext.js | 0 .../FunctionFactory.js | 0 .../Main.js | 0 .../OpenAIChatCompletionsCustomFunctions.js | 0 ...IChatCompletionsFunctionsStreamingClass.js | 0 .../_.json | 2 +- .../package.json | 0 .../_.json | 2 +- .../function_call_context.py | 0 .../function_factory.py | 0 .../main.py | 0 ...penai_chat_completions_custom_functions.py | 0 ...ai_chat_completions_functions_streaming.py | 0 .../requirements.txt | 0 src/ai/commands/dev_command.cs | 5 +- .../template_extension/TemplateFactory.cs | 51 +++++++++++++++++-- 36 files changed, 55 insertions(+), 11 deletions(-) rename src/ai/.x/templates/{openai-data-ai-search-cs => openai-chat-streaming-with-data-cs}/OpenAIChatCompletionsWithDataStreamingClass._ (100%) rename src/ai/.x/templates/{openai-data-ai-search-cs => openai-chat-streaming-with-data-cs}/OpenAIChatCompletionsWithDataStreamingClass.cs (100%) rename src/ai/.x/templates/{openai-data-ai-search-cs => openai-chat-streaming-with-data-cs}/Program.cs (100%) rename src/ai/.x/templates/{openai-data-ai-search-cs => openai-chat-streaming-with-data-cs}/_.json (94%) rename src/ai/.x/templates/{openai-functions-streaming-cs => openai-chat-streaming-with-functions-cs}/FunctionCallContext.cs (100%) rename src/ai/.x/templates/{openai-functions-streaming-cs => openai-chat-streaming-with-functions-cs}/FunctionFactory.cs (100%) rename src/ai/.x/templates/{openai-functions-streaming-cs => openai-chat-streaming-with-functions-cs}/HelperFunctionDescriptionAttribute.cs (100%) rename src/ai/.x/templates/{openai-functions-streaming-cs => openai-chat-streaming-with-functions-cs}/HelperFunctionParameterDescriptionAttribute.cs (100%) rename src/ai/.x/templates/{openai-functions-streaming-cs => openai-chat-streaming-with-functions-cs}/OpenAIChatCompletionsCustomFunctions.cs (100%) rename src/ai/.x/templates/{openai-functions-streaming-cs => 
openai-chat-streaming-with-functions-cs}/OpenAIChatCompletionsFunctionsStreaming.csproj._ (100%) rename src/ai/.x/templates/{openai-functions-streaming-cs => openai-chat-streaming-with-functions-cs}/OpenAIChatCompletionsFunctionsStreamingClass.cs (100%) rename src/ai/.x/templates/{openai-functions-streaming-cs => openai-chat-streaming-with-functions-cs}/Program.cs (100%) rename src/ai/.x/templates/{openai-functions-streaming-cs => openai-chat-streaming-with-functions-cs}/_.json (88%) rename src/ai/.x/templates/{openai-functions-streaming-go => openai-chat-streaming-with-functions-go}/_.json (88%) rename src/ai/.x/templates/{openai-functions-streaming-go => openai-chat-streaming-with-functions-go}/function_call_context.go (100%) rename src/ai/.x/templates/{openai-functions-streaming-go => openai-chat-streaming-with-functions-go}/function_factory.go (100%) rename src/ai/.x/templates/{openai-functions-streaming-go => openai-chat-streaming-with-functions-go}/go.mod (100%) rename src/ai/.x/templates/{openai-functions-streaming-go => openai-chat-streaming-with-functions-go}/main.go (100%) rename src/ai/.x/templates/{openai-functions-streaming-go => openai-chat-streaming-with-functions-go}/openai_chat_completions_custom_functions.go (100%) rename src/ai/.x/templates/{openai-functions-streaming-go => openai-chat-streaming-with-functions-go}/openai_chat_completions_functions_streaming_hello_world.go (100%) rename src/ai/.x/templates/{openai-functions-streaming-js => openai-chat-streaming-with-functions-js}/FunctionCallContext.js (100%) rename src/ai/.x/templates/{openai-functions-streaming-js => openai-chat-streaming-with-functions-js}/FunctionFactory.js (100%) rename src/ai/.x/templates/{openai-functions-streaming-js => openai-chat-streaming-with-functions-js}/Main.js (100%) rename src/ai/.x/templates/{openai-functions-streaming-js => openai-chat-streaming-with-functions-js}/OpenAIChatCompletionsCustomFunctions.js (100%) rename 
src/ai/.x/templates/{openai-functions-streaming-js => openai-chat-streaming-with-functions-js}/OpenAIChatCompletionsFunctionsStreamingClass.js (100%) rename src/ai/.x/templates/{openai-functions-streaming-js => openai-chat-streaming-with-functions-js}/_.json (88%) rename src/ai/.x/templates/{openai-functions-streaming-js => openai-chat-streaming-with-functions-js}/package.json (100%) rename src/ai/.x/templates/{openai-functions-streaming-py => openai-chat-streaming-with-functions-py}/_.json (88%) rename src/ai/.x/templates/{openai-functions-streaming-py => openai-chat-streaming-with-functions-py}/function_call_context.py (100%) rename src/ai/.x/templates/{openai-functions-streaming-py => openai-chat-streaming-with-functions-py}/function_factory.py (100%) rename src/ai/.x/templates/{openai-functions-streaming-py => openai-chat-streaming-with-functions-py}/main.py (100%) rename src/ai/.x/templates/{openai-functions-streaming-py => openai-chat-streaming-with-functions-py}/openai_chat_completions_custom_functions.py (100%) rename src/ai/.x/templates/{openai-functions-streaming-py => openai-chat-streaming-with-functions-py}/openai_chat_completions_functions_streaming.py (100%) rename src/ai/.x/templates/{openai-functions-streaming-py => openai-chat-streaming-with-functions-py}/requirements.txt (100%) diff --git a/src/ai/.x/templates/openai-data-ai-search-cs/OpenAIChatCompletionsWithDataStreamingClass._ b/src/ai/.x/templates/openai-chat-streaming-with-data-cs/OpenAIChatCompletionsWithDataStreamingClass._ similarity index 100% rename from src/ai/.x/templates/openai-data-ai-search-cs/OpenAIChatCompletionsWithDataStreamingClass._ rename to src/ai/.x/templates/openai-chat-streaming-with-data-cs/OpenAIChatCompletionsWithDataStreamingClass._ diff --git a/src/ai/.x/templates/openai-data-ai-search-cs/OpenAIChatCompletionsWithDataStreamingClass.cs b/src/ai/.x/templates/openai-chat-streaming-with-data-cs/OpenAIChatCompletionsWithDataStreamingClass.cs similarity index 100% rename 
from src/ai/.x/templates/openai-data-ai-search-cs/OpenAIChatCompletionsWithDataStreamingClass.cs rename to src/ai/.x/templates/openai-chat-streaming-with-data-cs/OpenAIChatCompletionsWithDataStreamingClass.cs diff --git a/src/ai/.x/templates/openai-data-ai-search-cs/Program.cs b/src/ai/.x/templates/openai-chat-streaming-with-data-cs/Program.cs similarity index 100% rename from src/ai/.x/templates/openai-data-ai-search-cs/Program.cs rename to src/ai/.x/templates/openai-chat-streaming-with-data-cs/Program.cs diff --git a/src/ai/.x/templates/openai-data-ai-search-cs/_.json b/src/ai/.x/templates/openai-chat-streaming-with-data-cs/_.json similarity index 94% rename from src/ai/.x/templates/openai-data-ai-search-cs/_.json rename to src/ai/.x/templates/openai-chat-streaming-with-data-cs/_.json index f7c2b2b8..2e01f5af 100644 --- a/src/ai/.x/templates/openai-data-ai-search-cs/_.json +++ b/src/ai/.x/templates/openai-chat-streaming-with-data-cs/_.json @@ -1,6 +1,6 @@ { "_Name": "OpenAI Chat Completions (w/ Data + AI Search)", - "_Short": "openai-data-ai-search", + "_Short": "openai-chat-streaming-with-data", "_Language": "C#", "ClassName": "OpenAIChatCompletionsWithDataStreamingClass", "AZURE_OPENAI_ENDPOINT": "", diff --git a/src/ai/.x/templates/openai-functions-streaming-cs/FunctionCallContext.cs b/src/ai/.x/templates/openai-chat-streaming-with-functions-cs/FunctionCallContext.cs similarity index 100% rename from src/ai/.x/templates/openai-functions-streaming-cs/FunctionCallContext.cs rename to src/ai/.x/templates/openai-chat-streaming-with-functions-cs/FunctionCallContext.cs diff --git a/src/ai/.x/templates/openai-functions-streaming-cs/FunctionFactory.cs b/src/ai/.x/templates/openai-chat-streaming-with-functions-cs/FunctionFactory.cs similarity index 100% rename from src/ai/.x/templates/openai-functions-streaming-cs/FunctionFactory.cs rename to src/ai/.x/templates/openai-chat-streaming-with-functions-cs/FunctionFactory.cs diff --git 
a/src/ai/.x/templates/openai-functions-streaming-cs/HelperFunctionDescriptionAttribute.cs b/src/ai/.x/templates/openai-chat-streaming-with-functions-cs/HelperFunctionDescriptionAttribute.cs similarity index 100% rename from src/ai/.x/templates/openai-functions-streaming-cs/HelperFunctionDescriptionAttribute.cs rename to src/ai/.x/templates/openai-chat-streaming-with-functions-cs/HelperFunctionDescriptionAttribute.cs diff --git a/src/ai/.x/templates/openai-functions-streaming-cs/HelperFunctionParameterDescriptionAttribute.cs b/src/ai/.x/templates/openai-chat-streaming-with-functions-cs/HelperFunctionParameterDescriptionAttribute.cs similarity index 100% rename from src/ai/.x/templates/openai-functions-streaming-cs/HelperFunctionParameterDescriptionAttribute.cs rename to src/ai/.x/templates/openai-chat-streaming-with-functions-cs/HelperFunctionParameterDescriptionAttribute.cs diff --git a/src/ai/.x/templates/openai-functions-streaming-cs/OpenAIChatCompletionsCustomFunctions.cs b/src/ai/.x/templates/openai-chat-streaming-with-functions-cs/OpenAIChatCompletionsCustomFunctions.cs similarity index 100% rename from src/ai/.x/templates/openai-functions-streaming-cs/OpenAIChatCompletionsCustomFunctions.cs rename to src/ai/.x/templates/openai-chat-streaming-with-functions-cs/OpenAIChatCompletionsCustomFunctions.cs diff --git a/src/ai/.x/templates/openai-functions-streaming-cs/OpenAIChatCompletionsFunctionsStreaming.csproj._ b/src/ai/.x/templates/openai-chat-streaming-with-functions-cs/OpenAIChatCompletionsFunctionsStreaming.csproj._ similarity index 100% rename from src/ai/.x/templates/openai-functions-streaming-cs/OpenAIChatCompletionsFunctionsStreaming.csproj._ rename to src/ai/.x/templates/openai-chat-streaming-with-functions-cs/OpenAIChatCompletionsFunctionsStreaming.csproj._ diff --git a/src/ai/.x/templates/openai-functions-streaming-cs/OpenAIChatCompletionsFunctionsStreamingClass.cs 
b/src/ai/.x/templates/openai-chat-streaming-with-functions-cs/OpenAIChatCompletionsFunctionsStreamingClass.cs similarity index 100% rename from src/ai/.x/templates/openai-functions-streaming-cs/OpenAIChatCompletionsFunctionsStreamingClass.cs rename to src/ai/.x/templates/openai-chat-streaming-with-functions-cs/OpenAIChatCompletionsFunctionsStreamingClass.cs diff --git a/src/ai/.x/templates/openai-functions-streaming-cs/Program.cs b/src/ai/.x/templates/openai-chat-streaming-with-functions-cs/Program.cs similarity index 100% rename from src/ai/.x/templates/openai-functions-streaming-cs/Program.cs rename to src/ai/.x/templates/openai-chat-streaming-with-functions-cs/Program.cs diff --git a/src/ai/.x/templates/openai-functions-streaming-cs/_.json b/src/ai/.x/templates/openai-chat-streaming-with-functions-cs/_.json similarity index 88% rename from src/ai/.x/templates/openai-functions-streaming-cs/_.json rename to src/ai/.x/templates/openai-chat-streaming-with-functions-cs/_.json index 1e753473..a0ee3b39 100644 --- a/src/ai/.x/templates/openai-functions-streaming-cs/_.json +++ b/src/ai/.x/templates/openai-chat-streaming-with-functions-cs/_.json @@ -1,6 +1,6 @@ { "_Name": "OpenAI Chat Completions (w/ Functions)", - "_Short": "openai-functions-streaming", + "_Short": "openai-chat-streaming-with-functions", "_Language": "C#", "ClassName": "OpenAIChatCompletionsFunctionsStreamingClass", "AZURE_OPENAI_ENDPOINT": "", diff --git a/src/ai/.x/templates/openai-functions-streaming-go/_.json b/src/ai/.x/templates/openai-chat-streaming-with-functions-go/_.json similarity index 88% rename from src/ai/.x/templates/openai-functions-streaming-go/_.json rename to src/ai/.x/templates/openai-chat-streaming-with-functions-go/_.json index a8967394..abd00f8a 100644 --- a/src/ai/.x/templates/openai-functions-streaming-go/_.json +++ b/src/ai/.x/templates/openai-chat-streaming-with-functions-go/_.json @@ -1,6 +1,6 @@ { "_Name": "OpenAI Chat Completions (w/ Functions)", - "_Short": 
"openai-functions-streaming", + "_Short": "openai-chat-streaming-with-functions", "_Language": "Go", "ClassName": "OpenAIChatCompletionsStreamingExample", "AZURE_OPENAI_ENDPOINT": "", diff --git a/src/ai/.x/templates/openai-functions-streaming-go/function_call_context.go b/src/ai/.x/templates/openai-chat-streaming-with-functions-go/function_call_context.go similarity index 100% rename from src/ai/.x/templates/openai-functions-streaming-go/function_call_context.go rename to src/ai/.x/templates/openai-chat-streaming-with-functions-go/function_call_context.go diff --git a/src/ai/.x/templates/openai-functions-streaming-go/function_factory.go b/src/ai/.x/templates/openai-chat-streaming-with-functions-go/function_factory.go similarity index 100% rename from src/ai/.x/templates/openai-functions-streaming-go/function_factory.go rename to src/ai/.x/templates/openai-chat-streaming-with-functions-go/function_factory.go diff --git a/src/ai/.x/templates/openai-functions-streaming-go/go.mod b/src/ai/.x/templates/openai-chat-streaming-with-functions-go/go.mod similarity index 100% rename from src/ai/.x/templates/openai-functions-streaming-go/go.mod rename to src/ai/.x/templates/openai-chat-streaming-with-functions-go/go.mod diff --git a/src/ai/.x/templates/openai-functions-streaming-go/main.go b/src/ai/.x/templates/openai-chat-streaming-with-functions-go/main.go similarity index 100% rename from src/ai/.x/templates/openai-functions-streaming-go/main.go rename to src/ai/.x/templates/openai-chat-streaming-with-functions-go/main.go diff --git a/src/ai/.x/templates/openai-functions-streaming-go/openai_chat_completions_custom_functions.go b/src/ai/.x/templates/openai-chat-streaming-with-functions-go/openai_chat_completions_custom_functions.go similarity index 100% rename from src/ai/.x/templates/openai-functions-streaming-go/openai_chat_completions_custom_functions.go rename to src/ai/.x/templates/openai-chat-streaming-with-functions-go/openai_chat_completions_custom_functions.go diff 
--git a/src/ai/.x/templates/openai-functions-streaming-go/openai_chat_completions_functions_streaming_hello_world.go b/src/ai/.x/templates/openai-chat-streaming-with-functions-go/openai_chat_completions_functions_streaming_hello_world.go similarity index 100% rename from src/ai/.x/templates/openai-functions-streaming-go/openai_chat_completions_functions_streaming_hello_world.go rename to src/ai/.x/templates/openai-chat-streaming-with-functions-go/openai_chat_completions_functions_streaming_hello_world.go diff --git a/src/ai/.x/templates/openai-functions-streaming-js/FunctionCallContext.js b/src/ai/.x/templates/openai-chat-streaming-with-functions-js/FunctionCallContext.js similarity index 100% rename from src/ai/.x/templates/openai-functions-streaming-js/FunctionCallContext.js rename to src/ai/.x/templates/openai-chat-streaming-with-functions-js/FunctionCallContext.js diff --git a/src/ai/.x/templates/openai-functions-streaming-js/FunctionFactory.js b/src/ai/.x/templates/openai-chat-streaming-with-functions-js/FunctionFactory.js similarity index 100% rename from src/ai/.x/templates/openai-functions-streaming-js/FunctionFactory.js rename to src/ai/.x/templates/openai-chat-streaming-with-functions-js/FunctionFactory.js diff --git a/src/ai/.x/templates/openai-functions-streaming-js/Main.js b/src/ai/.x/templates/openai-chat-streaming-with-functions-js/Main.js similarity index 100% rename from src/ai/.x/templates/openai-functions-streaming-js/Main.js rename to src/ai/.x/templates/openai-chat-streaming-with-functions-js/Main.js diff --git a/src/ai/.x/templates/openai-functions-streaming-js/OpenAIChatCompletionsCustomFunctions.js b/src/ai/.x/templates/openai-chat-streaming-with-functions-js/OpenAIChatCompletionsCustomFunctions.js similarity index 100% rename from src/ai/.x/templates/openai-functions-streaming-js/OpenAIChatCompletionsCustomFunctions.js rename to src/ai/.x/templates/openai-chat-streaming-with-functions-js/OpenAIChatCompletionsCustomFunctions.js diff --git 
a/src/ai/.x/templates/openai-functions-streaming-js/OpenAIChatCompletionsFunctionsStreamingClass.js b/src/ai/.x/templates/openai-chat-streaming-with-functions-js/OpenAIChatCompletionsFunctionsStreamingClass.js similarity index 100% rename from src/ai/.x/templates/openai-functions-streaming-js/OpenAIChatCompletionsFunctionsStreamingClass.js rename to src/ai/.x/templates/openai-chat-streaming-with-functions-js/OpenAIChatCompletionsFunctionsStreamingClass.js diff --git a/src/ai/.x/templates/openai-functions-streaming-js/_.json b/src/ai/.x/templates/openai-chat-streaming-with-functions-js/_.json similarity index 88% rename from src/ai/.x/templates/openai-functions-streaming-js/_.json rename to src/ai/.x/templates/openai-chat-streaming-with-functions-js/_.json index 88126255..9bebaa1a 100644 --- a/src/ai/.x/templates/openai-functions-streaming-js/_.json +++ b/src/ai/.x/templates/openai-chat-streaming-with-functions-js/_.json @@ -1,6 +1,6 @@ { "_Name": "OpenAI Chat Completions (w/ Functions)", - "_Short": "openai-functions-streaming", + "_Short": "openai-chat-streaming-with-functions", "_Language": "JavaScript", "ClassName": "OpenAIChatCompletionsFunctionsStreamingClass", "AZURE_OPENAI_ENDPOINT": "", diff --git a/src/ai/.x/templates/openai-functions-streaming-js/package.json b/src/ai/.x/templates/openai-chat-streaming-with-functions-js/package.json similarity index 100% rename from src/ai/.x/templates/openai-functions-streaming-js/package.json rename to src/ai/.x/templates/openai-chat-streaming-with-functions-js/package.json diff --git a/src/ai/.x/templates/openai-functions-streaming-py/_.json b/src/ai/.x/templates/openai-chat-streaming-with-functions-py/_.json similarity index 88% rename from src/ai/.x/templates/openai-functions-streaming-py/_.json rename to src/ai/.x/templates/openai-chat-streaming-with-functions-py/_.json index 372dbeea..45d6379b 100644 --- a/src/ai/.x/templates/openai-functions-streaming-py/_.json +++ 
b/src/ai/.x/templates/openai-chat-streaming-with-functions-py/_.json @@ -1,6 +1,6 @@ { "_Name": "OpenAI Chat Completions (w/ Functions)", - "_Short": "openai-functions-streaming", + "_Short": "openai-chat-streaming-with-functions", "_Language": "Python", "AZURE_OPENAI_ENDPOINT": "", "AZURE_OPENAI_KEY": "", diff --git a/src/ai/.x/templates/openai-functions-streaming-py/function_call_context.py b/src/ai/.x/templates/openai-chat-streaming-with-functions-py/function_call_context.py similarity index 100% rename from src/ai/.x/templates/openai-functions-streaming-py/function_call_context.py rename to src/ai/.x/templates/openai-chat-streaming-with-functions-py/function_call_context.py diff --git a/src/ai/.x/templates/openai-functions-streaming-py/function_factory.py b/src/ai/.x/templates/openai-chat-streaming-with-functions-py/function_factory.py similarity index 100% rename from src/ai/.x/templates/openai-functions-streaming-py/function_factory.py rename to src/ai/.x/templates/openai-chat-streaming-with-functions-py/function_factory.py diff --git a/src/ai/.x/templates/openai-functions-streaming-py/main.py b/src/ai/.x/templates/openai-chat-streaming-with-functions-py/main.py similarity index 100% rename from src/ai/.x/templates/openai-functions-streaming-py/main.py rename to src/ai/.x/templates/openai-chat-streaming-with-functions-py/main.py diff --git a/src/ai/.x/templates/openai-functions-streaming-py/openai_chat_completions_custom_functions.py b/src/ai/.x/templates/openai-chat-streaming-with-functions-py/openai_chat_completions_custom_functions.py similarity index 100% rename from src/ai/.x/templates/openai-functions-streaming-py/openai_chat_completions_custom_functions.py rename to src/ai/.x/templates/openai-chat-streaming-with-functions-py/openai_chat_completions_custom_functions.py diff --git a/src/ai/.x/templates/openai-functions-streaming-py/openai_chat_completions_functions_streaming.py 
b/src/ai/.x/templates/openai-chat-streaming-with-functions-py/openai_chat_completions_functions_streaming.py similarity index 100% rename from src/ai/.x/templates/openai-functions-streaming-py/openai_chat_completions_functions_streaming.py rename to src/ai/.x/templates/openai-chat-streaming-with-functions-py/openai_chat_completions_functions_streaming.py diff --git a/src/ai/.x/templates/openai-functions-streaming-py/requirements.txt b/src/ai/.x/templates/openai-chat-streaming-with-functions-py/requirements.txt similarity index 100% rename from src/ai/.x/templates/openai-functions-streaming-py/requirements.txt rename to src/ai/.x/templates/openai-chat-streaming-with-functions-py/requirements.txt diff --git a/src/ai/commands/dev_command.cs b/src/ai/commands/dev_command.cs index fae52eaa..0c97cea1 100644 --- a/src/ai/commands/dev_command.cs +++ b/src/ai/commands/dev_command.cs @@ -93,7 +93,10 @@ private void DoNewTemplate(string templateName, string language) private void DoNewList() { - TemplateFactory.ListTemplates(); + var newWhat = string.Join(" ", ArgXToken.GetArgs(_values)); + var language = ProgrammingLanguageToken.Data().GetOrDefault(_values); + + TemplateFactory.ListTemplates(newWhat, language); } private void DoDevShell() diff --git a/src/extensions/template_extension/TemplateFactory.cs b/src/extensions/template_extension/TemplateFactory.cs index bb26a79e..672b5b26 100644 --- a/src/extensions/template_extension/TemplateFactory.cs +++ b/src/extensions/template_extension/TemplateFactory.cs @@ -9,6 +9,7 @@ using System.Text; using System.Threading.Tasks; using Azure.AI.Details.Common.CLI; +using Azure.AI.Details.Common.CLI.ConsoleGui; namespace Azure.AI.Details.Common.CLI.Extensions.Templates { @@ -30,9 +31,14 @@ public class Group public List Items { get; set; } = new List(); } - public static bool ListTemplates() + public static bool ListTemplates(string? templateFilter, string? 
languageFilter) { - var groups = GetTemplateGroups(); + var groups = GetFilteredTemplateGroups(templateFilter, languageFilter); + if (groups.Count == 0) + { + Console.WriteLine($"No matching templates found\n"); + groups = GetTemplateGroups(); + } var longNameLabel = "Name"; var shortNameLabel = "Short Name"; @@ -43,15 +49,22 @@ public static bool ListTemplates() widths[1] = Math.Max(shortNameLabel.Length, groups.Max(x => x.ShortName.Length)); widths[2] = Math.Max(languageLabel.Length, groups.Max(x => x.Languages.Length)); - Console.WriteLine($"{longNameLabel.PadRight(widths[0])} {shortNameLabel.PadRight(widths[1])} {languageLabel.PadRight(widths[2])}"); - Console.WriteLine($"{"-".PadRight(widths[0], '-')} {"-".PadRight(widths[1], '-')} {"-".PadRight(widths[2], '-')}"); + var hideLongName = !Console.IsOutputRedirected && Screen.GetRightColumn() < widths.Sum() + 4 * 2 + 1; + + if (!hideLongName) Console.Write($"{longNameLabel.PadRight(widths[0])} "); + Console.WriteLine($"{shortNameLabel.PadRight(widths[1])} {languageLabel.PadRight(widths[2])}"); + + if (!hideLongName) Console.Write($"{"-".PadRight(widths[0], '-')} "); + Console.WriteLine($"{"-".PadRight(widths[1], '-')} {"-".PadRight(widths[2], '-')}"); for (int i = 0; i < groups.Count; i++) { var longName = groups[i].LongName; var shortName = groups[i].ShortName.Replace('_', '-'); var languages = groups[i].Languages; - Console.WriteLine($"{longName.PadRight(widths[0])} {shortName.PadRight(widths[1])} {languages.PadRight(widths[2])}"); + + if (!hideLongName) Console.Write($"{longName.PadRight(widths[0])} "); + Console.WriteLine($"{shortName.PadRight(widths[1])} {languages.PadRight(widths[2])}"); } return true; @@ -189,6 +202,34 @@ private static List GetTemplateGroups() return grouped; } + private static List GetFilteredTemplateGroups(string? templateFilter, string? 
languageFilter) + { + var groups = GetTemplateGroups(); + if (string.IsNullOrEmpty(templateFilter) && string.IsNullOrEmpty(languageFilter)) return groups; + + var filtered = groups + .Where(x => string.IsNullOrEmpty(templateFilter) || x.ShortName.Contains(templateFilter) || x.LongName.Contains(templateFilter)) + .Where(x => string.IsNullOrEmpty(languageFilter) || x.Languages.Split(", ").Contains(languageFilter) || x.Languages == string.Empty) + .ToList(); + + if (filtered.Count > 0 && !string.IsNullOrEmpty(languageFilter)) + { + groups.Clear(); + foreach (var item in filtered) + { + groups.Add(new Group() + { + LongName = item.LongName, + ShortName = item.ShortName, + Items = item.Items.Where(x => x.Language == languageFilter).ToList() + }); + } + return groups; + } + + return filtered; + } + private static IEnumerable GetTemplateFileNames(string templateName, TemplateGenerator generator) { var files = FileHelpers.FindFilesInTemplatePath($"{templateName}/*", null).ToList(); From da55357491bbfc40f3e0f5affb811f82cf521301 Mon Sep 17 00:00:00 2001 From: Rob Chambers Date: Sat, 13 Jan 2024 12:28:49 -0800 Subject: [PATCH 06/30] Robch/2401 jan12 bugs (#148) * updated names and added filtering * make smaller width if required (ai dev new list) * only use compact form if output not redirected * fixed a bug * CR feedback and making C# templates more consistent * CR feedback on python templates, better naming symmetry * javascript template name symmetry * go template naming symmetry --- ideas/template-generator/system.prompt.md | 4 +- src/ai/.x/templates/helper_functions/_.json | 4 +- .../OpenAIChatCompletionsClass.cs | 18 ++++----- src/ai/.x/templates/openai-chat-cs/Program.cs | 12 +++--- src/ai/.x/templates/openai-chat-cs/_.json | 6 +-- src/ai/.x/templates/openai-chat-go/_.json | 4 +- src/ai/.x/templates/openai-chat-go/main.go | 28 ++++++------- .../openai_chat_completions_hello_world.go | 10 ++--- src/ai/.x/templates/openai-chat-java/_.json | 4 +- 
src/ai/.x/templates/openai-chat-js/Main.js | 10 ++--- .../OpenAIChatCompletionsClass.js | 12 +++--- src/ai/.x/templates/openai-chat-js/_.json | 4 +- src/ai/.x/templates/openai-chat-py/_.json | 4 +- .../openai-chat-py/openai_chat_completions.py | 28 ++++++------- .../OpenAIChatCompletionsStreamingClass.cs | 23 +++++------ .../openai-chat-streaming-cs/Program.cs | 10 ++--- .../templates/openai-chat-streaming-cs/_.json | 6 +-- .../templates/openai-chat-streaming-go/_.json | 4 +- .../openai-chat-streaming-go/main.go | 28 ++++++------- ..._chat_completions_streaming_hello_world.go | 10 ++--- .../openai-chat-streaming-java/_.json | 4 +- .../openai-chat-streaming-js/Main.js | 10 ++--- .../OpenAIChatCompletionsStreamingClass.js | 12 +++--- .../templates/openai-chat-streaming-js/_.json | 4 +- .../templates/openai-chat-streaming-py/_.json | 4 +- .../openai-chat-streaming-py/main.py | 14 +++---- .../openai_chat_completions_streaming.py | 38 ++++++++---------- ...ChatCompletionsWithDataStreaming.csproj._} | 0 ...AIChatCompletionsWithDataStreamingClass.cs | 24 +++++------- .../Program.cs | 19 ++++----- .../openai-chat-streaming-with-data-cs/_.json | 26 ++++++------- ...IChatCompletionsFunctionsStreamingClass.cs | 22 +++++------ .../Program.cs | 10 ++--- .../_.json | 4 +- .../_.json | 6 +-- .../main.go | 28 ++++++------- ...letions_functions_streaming_hello_world.go | 10 ++--- .../Main.js | 10 ++--- ...IChatCompletionsFunctionsStreamingClass.js | 12 +++--- .../_.json | 4 +- .../_.json | 4 +- .../main.py | 14 +++---- ...ai_chat_completions_functions_streaming.py | 39 +++++++++---------- src/ai/.x/templates/openai-webpage/_.json | 4 +- .../.x/templates/openai-webpage/package.json | 2 +- ...IChatCompletionsFunctionsStreamingClass.js | 12 +++--- .../.x/templates/openai-webpage/src/script.js | 16 ++++---- src/ai/commands/dev_command.cs | 3 +- src/common/details/helpers/file_helpers.cs | 4 +- .../template_extension/TemplateFactory.cs | 4 +- 50 files changed, 290 insertions(+), 302 
deletions(-) rename src/ai/.x/templates/openai-chat-streaming-with-data-cs/{OpenAIChatCompletionsWithDataStreamingClass._ => OpenAIChatCompletionsWithDataStreaming.csproj._} (100%) diff --git a/ideas/template-generator/system.prompt.md b/ideas/template-generator/system.prompt.md index f2db0074..08112f09 100644 --- a/ideas/template-generator/system.prompt.md +++ b/ideas/template-generator/system.prompt.md @@ -13,13 +13,13 @@ Are comprised of multiple project template files: ## `_.json` project template file: -`"_Name"` is the long descriptive name of the project template. This is required. +`"_LongName"` is the long descriptive name of the project template. This is required. Example: ```json { - "_Name": "OpenAI Chat Completions Class Library", + "_LongName": "OpenAI Chat Completions Class Library", "ClassName": "OpenAIChatCompletionsClass", "AICLIExtensionReferencePath": "" } diff --git a/src/ai/.x/templates/helper_functions/_.json b/src/ai/.x/templates/helper_functions/_.json index 311ba8b0..b911b99a 100644 --- a/src/ai/.x/templates/helper_functions/_.json +++ b/src/ai/.x/templates/helper_functions/_.json @@ -1,6 +1,6 @@ { - "_Name": "Helper Function Class Library", - "_Short": "helper-functions", + "_LongName": "Helper Function Class Library", + "_ShortName": "helper-functions", "_Language": "C#", "ClassName": "HelperFunctionClass", "AICLIExtensionReferencePath": "" diff --git a/src/ai/.x/templates/openai-chat-cs/OpenAIChatCompletionsClass.cs b/src/ai/.x/templates/openai-chat-cs/OpenAIChatCompletionsClass.cs index a3bcd430..e352c9d0 100644 --- a/src/ai/.x/templates/openai-chat-cs/OpenAIChatCompletionsClass.cs +++ b/src/ai/.x/templates/openai-chat-cs/OpenAIChatCompletionsClass.cs @@ -8,24 +8,24 @@ public class <#= ClassName #> { - public <#= ClassName #>(string systemPrompt, string endpoint, string azureApiKey, string deploymentName) + public <#= ClassName #>(string openAIEndpoint, string openAIKey, string openAIChatDeploymentName, string openAISystemPrompt) { - 
_systemPrompt = systemPrompt; + _openAISystemPrompt = openAISystemPrompt; - _client = string.IsNullOrEmpty(azureApiKey) - ? new OpenAIClient(new Uri(endpoint), new DefaultAzureCredential()) - : new OpenAIClient(new Uri(endpoint), new AzureKeyCredential(azureApiKey)); + _client = string.IsNullOrEmpty(openAIKey) + ? new OpenAIClient(new Uri(openAIEndpoint), new DefaultAzureCredential()) + : new OpenAIClient(new Uri(openAIEndpoint), new AzureKeyCredential(openAIKey)); _options = new ChatCompletionsOptions(); - _options.DeploymentName = deploymentName; - + _options.DeploymentName = openAIChatDeploymentName; + ClearConversation(); } public void ClearConversation() { _options.Messages.Clear(); - _options.Messages.Add(new ChatRequestSystemMessage(_systemPrompt)); + _options.Messages.Add(new ChatRequestSystemMessage(_openAISystemPrompt)); } public string GetChatCompletion(string userPrompt) @@ -39,7 +39,7 @@ public string GetChatCompletion(string userPrompt) return responseContent; } - private string _systemPrompt; + private string _openAISystemPrompt; private ChatCompletionsOptions _options; private OpenAIClient _client; } \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-cs/Program.cs b/src/ai/.x/templates/openai-chat-cs/Program.cs index 860d3d8a..021f22ef 100644 --- a/src/ai/.x/templates/openai-chat-cs/Program.cs +++ b/src/ai/.x/templates/openai-chat-cs/Program.cs @@ -11,12 +11,12 @@ public class Program { public static void Main(string[] args) { - var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? "<#= AZURE_OPENAI_ENDPOINT #>"; - var azureApiKey = Environment.GetEnvironmentVariable("AZURE_OPENAI_KEY") ?? "<#= AZURE_OPENAI_KEY #>"; - var deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_CHAT_DEPLOYMENT") ?? "<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>"; - var systemPrompt = Environment.GetEnvironmentVariable("AZURE_OPENAI_SYSTEM_PROMPT") ?? 
"<#= AZURE_OPENAI_SYSTEM_PROMPT #>"; - - var chat = new <#= ClassName #>(systemPrompt, endpoint, azureApiKey, deploymentName); + var openAIEndpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? "<#= AZURE_OPENAI_ENDPOINT #>"; + var openAIKey = Environment.GetEnvironmentVariable("AZURE_OPENAI_KEY") ?? "<#= AZURE_OPENAI_KEY #>"; + var openAIChatDeploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_CHAT_DEPLOYMENT") ?? "<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>"; + var openAISystemPrompt = Environment.GetEnvironmentVariable("AZURE_OPENAI_SYSTEM_PROMPT") ?? "<#= AZURE_OPENAI_SYSTEM_PROMPT #>"; + + var chat = new <#= ClassName #>(openAIEndpoint, openAIKey, openAIChatDeploymentName, openAISystemPrompt); while (true) { diff --git a/src/ai/.x/templates/openai-chat-cs/_.json b/src/ai/.x/templates/openai-chat-cs/_.json index 4b49d063..e3d0f633 100644 --- a/src/ai/.x/templates/openai-chat-cs/_.json +++ b/src/ai/.x/templates/openai-chat-cs/_.json @@ -1,10 +1,10 @@ { - "_Name": "OpenAI Chat Completions", - "_Short": "openai-chat", + "_LongName": "OpenAI Chat Completions", + "_ShortName": "openai-chat", "_Language": "C#", "ClassName": "OpenAIChatCompletionsClass", "AZURE_OPENAI_ENDPOINT": "", "AZURE_OPENAI_KEY": "", - "AZURE_OPENAI_CHAT_DEPLOYMENT": "", + "AZURE_OPENAI_CHAT_DEPLOYMENT": "", "AZURE_OPENAI_SYSTEM_PROMPT": "You are a helpful AI assistant." 
} \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-go/_.json b/src/ai/.x/templates/openai-chat-go/_.json index 63aeffdd..d71325a7 100644 --- a/src/ai/.x/templates/openai-chat-go/_.json +++ b/src/ai/.x/templates/openai-chat-go/_.json @@ -1,6 +1,6 @@ { - "_Name": "OpenAI Chat Completions", - "_Short": "openai-chat", + "_LongName": "OpenAI Chat Completions", + "_ShortName": "openai-chat", "_Language": "Go", "ClassName": "OpenAIChatCompletionsExample", "AZURE_OPENAI_ENDPOINT": "", diff --git a/src/ai/.x/templates/openai-chat-go/main.go b/src/ai/.x/templates/openai-chat-go/main.go index 302c4db1..8bcbe45c 100644 --- a/src/ai/.x/templates/openai-chat-go/main.go +++ b/src/ai/.x/templates/openai-chat-go/main.go @@ -16,29 +16,29 @@ import ( ) func main() { - azureOpenAIEndpoint := os.Getenv("AZURE_OPENAI_ENDPOINT") - if azureOpenAIEndpoint == "" { - azureOpenAIEndpoint = "<#= AZURE_OPENAI_ENDPOINT #>" + openAIEndpoint := os.Getenv("AZURE_OPENAI_ENDPOINT") + if openAIEndpoint == "" { + openAIEndpoint = "<#= AZURE_OPENAI_ENDPOINT #>" } - azureOpenAIKey := os.Getenv("AZURE_OPENAI_KEY") - if azureOpenAIKey == "" { - azureOpenAIKey = "<#= AZURE_OPENAI_KEY #>" + openAIKey := os.Getenv("AZURE_OPENAI_KEY") + if openAIKey == "" { + openAIKey = "<#= AZURE_OPENAI_KEY #>" } - deploymentName := os.Getenv("AZURE_OPENAI_CHAT_DEPLOYMENT") - if deploymentName == "" { - deploymentName = "<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>" + openAIChatDeploymentName := os.Getenv("AZURE_OPENAI_CHAT_DEPLOYMENT") + if openAIChatDeploymentName == "" { + openAIChatDeploymentName = "<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>" } - systemPrompt := os.Getenv("OPENAI_SYSTEM_PROMPT") - if systemPrompt == "" { - systemPrompt = "<#= AZURE_OPENAI_SYSTEM_PROMPT #>" + openAISystemPrompt := os.Getenv("OPENAI_SYSTEM_PROMPT") + if openAISystemPrompt == "" { + openAISystemPrompt = "<#= AZURE_OPENAI_SYSTEM_PROMPT #>" } - if azureOpenAIEndpoint == "" || azureOpenAIKey == "" || deploymentName == "" || 
systemPrompt == "" { + if openAIEndpoint == "" || openAIKey == "" || openAIChatDeploymentName == "" || openAISystemPrompt == "" { fmt.Println("Please set the environment variables.") os.Exit(1) } - chat, err := New<#= ClassName #>(systemPrompt, azureOpenAIEndpoint, azureOpenAIKey, deploymentName) + chat, err := New<#= ClassName #>(openAIEndpoint, openAIKey, openAIChatDeploymentName, openAISystemPrompt) if err != nil { log.Fatalf("ERROR: %s", err) } diff --git a/src/ai/.x/templates/openai-chat-go/openai_chat_completions_hello_world.go b/src/ai/.x/templates/openai-chat-go/openai_chat_completions_hello_world.go index 8bd57384..11c1baeb 100644 --- a/src/ai/.x/templates/openai-chat-go/openai_chat_completions_hello_world.go +++ b/src/ai/.x/templates/openai-chat-go/openai_chat_completions_hello_world.go @@ -15,22 +15,22 @@ type <#= ClassName #> struct { options *azopenai.ChatCompletionsOptions } -func New<#= ClassName #>(systemPrompt string, endpoint string, azureApiKey string, deploymentName string) (*<#= ClassName #>, error) { - keyCredential, err := azopenai.NewKeyCredential(azureApiKey) +func New<#= ClassName #>(openAIEndpoint string, openAIKey string, openAIChatDeploymentName string, openAISystemPrompt string) (*<#= ClassName #>, error) { + keyCredential, err := azopenai.NewKeyCredential(openAIKey) if err != nil { return nil, err } - client, err := azopenai.NewClientWithKeyCredential(endpoint, keyCredential, nil) + client, err := azopenai.NewClientWithKeyCredential(openAIEndpoint, keyCredential, nil) if err != nil { return nil, err } messages := []azopenai.ChatMessage{ - {Role: to.Ptr(azopenai.ChatRoleSystem), Content: to.Ptr(systemPrompt)}, + {Role: to.Ptr(azopenai.ChatRoleSystem), Content: to.Ptr(openAISystemPrompt)}, } options := &azopenai.ChatCompletionsOptions{ - Deployment: deploymentName, + Deployment: openAIChatDeploymentName, Messages: messages, } diff --git a/src/ai/.x/templates/openai-chat-java/_.json b/src/ai/.x/templates/openai-chat-java/_.json index 
df715882..68fa5a10 100644 --- a/src/ai/.x/templates/openai-chat-java/_.json +++ b/src/ai/.x/templates/openai-chat-java/_.json @@ -1,6 +1,6 @@ { - "_Name": "OpenAI Chat Completions", - "_Short": "openai-chat", + "_LongName": "OpenAI Chat Completions", + "_ShortName": "openai-chat", "_Language": "Java", "AZURE_OPENAI_ENDPOINT": "", "AZURE_OPENAI_KEY": "", diff --git a/src/ai/.x/templates/openai-chat-js/Main.js b/src/ai/.x/templates/openai-chat-js/Main.js index b26d878c..305234e7 100644 --- a/src/ai/.x/templates/openai-chat-js/Main.js +++ b/src/ai/.x/templates/openai-chat-js/Main.js @@ -15,12 +15,12 @@ const rl = readline.createInterface({ async function main() { - const endpoint = process.env["AZURE_OPENAI_ENDPOINT"] || "<#= AZURE_OPENAI_ENDPOINT #>"; - const azureApiKey = process.env["AZURE_OPENAI_KEY"] || "<#= AZURE_OPENAI_KEY #>"; - const deploymentName = process.env["AZURE_OPENAI_CHAT_DEPLOYMENT"] || "<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>" ; - const systemPrompt = process.env["AZURE_OPENAI_SYSTEM_PROMPT"] || "<#= AZURE_OPENAI_SYSTEM_PROMPT #>" ; + const openAIEndpoint = process.env["AZURE_OPENAI_ENDPOINT"] || "<#= AZURE_OPENAI_ENDPOINT #>"; + const openAIKey = process.env["AZURE_OPENAI_KEY"] || "<#= AZURE_OPENAI_KEY #>"; + const openAIChatDeploymentName = process.env["AZURE_OPENAI_CHAT_DEPLOYMENT"] || "<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>" ; + const openAISystemPrompt = process.env["AZURE_OPENAI_SYSTEM_PROMPT"] || "<#= AZURE_OPENAI_SYSTEM_PROMPT #>" ; - const chat = new <#= ClassName #>(systemPrompt, endpoint, azureApiKey, deploymentName); + const chat = new <#= ClassName #>(openAIEndpoint, openAIKey, openAIChatDeploymentName, openAISystemPrompt); while (true) { diff --git a/src/ai/.x/templates/openai-chat-js/OpenAIChatCompletionsClass.js b/src/ai/.x/templates/openai-chat-js/OpenAIChatCompletionsClass.js index 6c88c22e..00bcceda 100644 --- a/src/ai/.x/templates/openai-chat-js/OpenAIChatCompletionsClass.js +++ 
b/src/ai/.x/templates/openai-chat-js/OpenAIChatCompletionsClass.js @@ -4,23 +4,23 @@ const { OpenAIClient, AzureKeyCredential } = require("@azure/openai"); class <#= ClassName #> { - constructor(systemPrompt, endpoint, azureApiKey, deploymentName) { - this.systemPrompt = systemPrompt; - this.deploymentName = deploymentName; - this.client = new OpenAIClient(endpoint, new AzureKeyCredential(azureApiKey)); + constructor(openAIEndpoint, openAIKey, openAIChatDeploymentName, openAISystemPrompt) { + this.openAISystemPrompt = openAISystemPrompt; + this.openAIChatDeploymentName = openAIChatDeploymentName; + this.client = new OpenAIClient(openAIEndpoint, new AzureKeyCredential(openAIKey)); this.clearConversation(); } clearConversation() { this.messages = [ - { role: 'system', content: this.systemPrompt } + { role: 'system', content: this.openAISystemPrompt } ]; } async getChatCompletions(userInput) { this.messages.push({ role: 'user', content: userInput }); - const result = await this.client.getChatCompletions(this.deploymentName, this.messages); + const result = await this.client.getChatCompletions(this.openAIChatDeploymentName, this.messages); const responseContent = result.choices[0].message.content; this.messages.push({ role: 'assistant', content: responseContent }); diff --git a/src/ai/.x/templates/openai-chat-js/_.json b/src/ai/.x/templates/openai-chat-js/_.json index 67960b6f..90a38cce 100644 --- a/src/ai/.x/templates/openai-chat-js/_.json +++ b/src/ai/.x/templates/openai-chat-js/_.json @@ -1,6 +1,6 @@ { - "_Name": "OpenAI Chat Completions", - "_Short": "openai-chat", + "_LongName": "OpenAI Chat Completions", + "_ShortName": "openai-chat", "_Language": "JavaScript", "ClassName": "OpenAIChatCompletionsClass", "AZURE_OPENAI_ENDPOINT": "", diff --git a/src/ai/.x/templates/openai-chat-py/_.json b/src/ai/.x/templates/openai-chat-py/_.json index c8ea01f4..bd621133 100644 --- a/src/ai/.x/templates/openai-chat-py/_.json +++ b/src/ai/.x/templates/openai-chat-py/_.json @@ -1,6 
+1,6 @@ { - "_Name": "OpenAI Chat Completions", - "_Short": "openai-chat", + "_LongName": "OpenAI Chat Completions", + "_ShortName": "openai-chat", "_Language": "Python", "AZURE_OPENAI_ENDPOINT": "", "AZURE_OPENAI_KEY": "", diff --git a/src/ai/.x/templates/openai-chat-py/openai_chat_completions.py b/src/ai/.x/templates/openai-chat-py/openai_chat_completions.py index fb29317e..10229797 100644 --- a/src/ai/.x/templates/openai-chat-py/openai_chat_completions.py +++ b/src/ai/.x/templates/openai-chat-py/openai_chat_completions.py @@ -8,38 +8,38 @@ import os from openai import AzureOpenAI -api_key = os.getenv("AZURE_OPENAI_KEY") or "<#= AZURE_OPENAI_KEY #>" -endpoint = os.getenv("AZURE_OPENAI_ENDPOINT") or "<#= AZURE_OPENAI_ENDPOINT #>" -api_version = os.getenv("AZURE_OPENAI_API_VERSION") or "<#= AZURE_OPENAI_API_VERSION #>" -deployment_name = os.getenv("AZURE_OPENAI_CHAT_DEPLOYMENT") or "<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>" -system_prompt = os.getenv("AZURE_OPENAI_SYSTEM_PROMPT") or "<#= AZURE_OPENAI_SYSTEM_PROMPT #>" +openai_api_version = os.getenv('AZURE_OPENAI_API_VERSION', '<#= AZURE_OPENAI_API_VERSION #>') +openai_endpoint = os.getenv('AZURE_OPENAI_ENDPOINT', '<#= AZURE_OPENAI_ENDPOINT #>') +openai_key = os.getenv('AZURE_OPENAI_KEY', '<#= AZURE_OPENAI_KEY #>') +openai_chat_deployment_name = os.getenv('AZURE_OPENAI_CHAT_DEPLOYMENT', '<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>') +openai_system_prompt = os.getenv('AZURE_OPENAI_SYSTEM_PROMPT', '<#= AZURE_OPENAI_SYSTEM_PROMPT #>') client = AzureOpenAI( - api_key=api_key, - api_version=api_version, - azure_endpoint = endpoint + api_key=openai_key, + api_version=openai_api_version, + azure_endpoint = openai_endpoint ) messages=[ - {"role": "system", "content": system_prompt}, + {'role': 'system', 'content': openai_system_prompt}, ] def get_chat_completions(user_input) -> str: - messages.append({"role": "user", "content": user_input}) + messages.append({'role': 'user', 'content': user_input}) response = 
client.chat.completions.create( - model=deployment_name, + model=openai_chat_deployment_name, messages=messages, ) response_content = response.choices[0].message.content - messages.append({"role": "assistant", "content": response_content}) + messages.append({'role': 'assistant', 'content': response_content}) return response_content while True: - user_input = input("User: ") - if user_input == "" or user_input == "exit": + user_input = input('User: ') + if user_input == 'exit' or user_input == '': break response_content = get_chat_completions(user_input) diff --git a/src/ai/.x/templates/openai-chat-streaming-cs/OpenAIChatCompletionsStreamingClass.cs b/src/ai/.x/templates/openai-chat-streaming-cs/OpenAIChatCompletionsStreamingClass.cs index 8f02e6e0..fe7120a2 100644 --- a/src/ai/.x/templates/openai-chat-streaming-cs/OpenAIChatCompletionsStreamingClass.cs +++ b/src/ai/.x/templates/openai-chat-streaming-cs/OpenAIChatCompletionsStreamingClass.cs @@ -8,23 +8,24 @@ public class <#= ClassName #> { - public <#= ClassName #>(string systemPrompt, string azureApiKey, string openAIEndpoint, string openAIDeploymentName) + public <#= ClassName #>(string openAIEndpoint, string openAIKey, string openAIChatDeploymentName, string openAISystemPrompt) { - _systemPrompt = systemPrompt; + _openAISystemPrompt = openAISystemPrompt; - _client = string.IsNullOrEmpty(azureApiKey) + _client = string.IsNullOrEmpty(openAIKey) ? 
new OpenAIClient(new Uri(openAIEndpoint), new DefaultAzureCredential()) - : new OpenAIClient(new Uri(openAIEndpoint), new AzureKeyCredential(azureApiKey)); + : new OpenAIClient(new Uri(openAIEndpoint), new AzureKeyCredential(openAIKey)); _options = new ChatCompletionsOptions(); - _options.DeploymentName = openAIDeploymentName; + _options.DeploymentName = openAIChatDeploymentName; + ClearConversation(); } public void ClearConversation() { _options.Messages.Clear(); - _options.Messages.Add(new ChatRequestSystemMessage(_systemPrompt)); + _options.Messages.Add(new ChatRequestSystemMessage(_openAISystemPrompt)); } public async Task GetChatCompletionsStreamingAsync(string userPrompt, Action? callback = null) @@ -35,7 +36,6 @@ public async Task GetChatCompletionsStreamingAsync(string userPrompt, Ac var response = await _client.GetChatCompletionsStreamingAsync(_options); await foreach (var update in response.EnumerateValues()) { - var content = update.ContentUpdate; if (update.FinishReason == CompletionsFinishReason.ContentFiltered) { @@ -49,17 +49,14 @@ public async Task GetChatCompletionsStreamingAsync(string userPrompt, Ac if (string.IsNullOrEmpty(content)) continue; responseContent += content; - if (callback != null) - { - callback(update); - } + if (callback != null) callback(update); } _options.Messages.Add(new ChatRequestAssistantMessage(responseContent)); return responseContent; } - private string _systemPrompt; - private OpenAIClient _client; + private string _openAISystemPrompt; private ChatCompletionsOptions _options; + private OpenAIClient _client; } \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-streaming-cs/Program.cs b/src/ai/.x/templates/openai-chat-streaming-cs/Program.cs index 4983484c..46c8dffe 100644 --- a/src/ai/.x/templates/openai-chat-streaming-cs/Program.cs +++ b/src/ai/.x/templates/openai-chat-streaming-cs/Program.cs @@ -11,12 +11,12 @@ public class Program { public static async Task Main(string[] args) { - var 
azureOpenApiKey = Environment.GetEnvironmentVariable("AZURE_OPENAI_KEY") ?? "<#= AZURE_OPENAI_KEY #>"; - var azureOpenAIEndpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? "<#= AZURE_OPENAI_ENDPOINT #>"; - var azureOpenAIDeploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_CHAT_DEPLOYMENT") ?? "<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>"; - var systemPrompt = Environment.GetEnvironmentVariable("AZURE_OPENAI_SYSTEM_PROMPT") ?? "<#= AZURE_OPENAI_SYSTEM_PROMPT #>"; + var openAIEndpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? "<#= AZURE_OPENAI_ENDPOINT #>"; + var openAIKey = Environment.GetEnvironmentVariable("AZURE_OPENAI_KEY") ?? "<#= AZURE_OPENAI_KEY #>"; + var openAIChatDeploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_CHAT_DEPLOYMENT") ?? "<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>"; + var openAISystemPrompt = Environment.GetEnvironmentVariable("AZURE_OPENAI_SYSTEM_PROMPT") ?? "<#= AZURE_OPENAI_SYSTEM_PROMPT #>"; - var chat = new <#= ClassName #>(systemPrompt, azureOpenApiKey, azureOpenAIEndpoint, azureOpenAIDeploymentName); + var chat = new <#= ClassName #>(openAIEndpoint, openAIKey, openAIChatDeploymentName, openAISystemPrompt); while (true) { diff --git a/src/ai/.x/templates/openai-chat-streaming-cs/_.json b/src/ai/.x/templates/openai-chat-streaming-cs/_.json index fb98c135..227dbd09 100644 --- a/src/ai/.x/templates/openai-chat-streaming-cs/_.json +++ b/src/ai/.x/templates/openai-chat-streaming-cs/_.json @@ -1,10 +1,10 @@ { - "_Name": "OpenAI Chat Completions (Streaming)", - "_Short": "openai-chat-streaming", + "_LongName": "OpenAI Chat Completions (Streaming)", + "_ShortName": "openai-chat-streaming", "_Language": "C#", "ClassName": "OpenAIChatCompletionsStreamingClass", "AZURE_OPENAI_ENDPOINT": "", "AZURE_OPENAI_KEY": "", - "AZURE_OPENAI_CHAT_DEPLOYMENT": "", + "AZURE_OPENAI_CHAT_DEPLOYMENT": "", "AZURE_OPENAI_SYSTEM_PROMPT": "You are a helpful AI assistant." 
} \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-streaming-go/_.json b/src/ai/.x/templates/openai-chat-streaming-go/_.json index fc1cb1c1..8318a4b2 100644 --- a/src/ai/.x/templates/openai-chat-streaming-go/_.json +++ b/src/ai/.x/templates/openai-chat-streaming-go/_.json @@ -1,6 +1,6 @@ { - "_Name": "OpenAI Chat Completions (Streaming)", - "_Short": "openai-chat-streaming", + "_LongName": "OpenAI Chat Completions (Streaming)", + "_ShortName": "openai-chat-streaming", "_Language": "Go", "ClassName": "OpenAIChatCompletionsStreamingExample", "AZURE_OPENAI_ENDPOINT": "", diff --git a/src/ai/.x/templates/openai-chat-streaming-go/main.go b/src/ai/.x/templates/openai-chat-streaming-go/main.go index 27441124..5e6deb95 100644 --- a/src/ai/.x/templates/openai-chat-streaming-go/main.go +++ b/src/ai/.x/templates/openai-chat-streaming-go/main.go @@ -16,29 +16,29 @@ import ( ) func main() { - azureOpenAIEndpoint := os.Getenv("AZURE_OPENAI_ENDPOINT") - if azureOpenAIEndpoint == "" { - azureOpenAIEndpoint = "<#= AZURE_OPENAI_ENDPOINT #>" + openAIEndpoint := os.Getenv("AZURE_OPENAI_ENDPOINT") + if openAIEndpoint == "" { + openAIEndpoint = "<#= AZURE_OPENAI_ENDPOINT #>" } - azureOpenAIKey := os.Getenv("AZURE_OPENAI_KEY") - if azureOpenAIKey == "" { - azureOpenAIKey = "<#= AZURE_OPENAI_KEY #>" + openAIKey := os.Getenv("AZURE_OPENAI_KEY") + if openAIKey == "" { + openAIKey = "<#= AZURE_OPENAI_KEY #>" } - deploymentName := os.Getenv("AZURE_OPENAI_CHAT_DEPLOYMENT") - if deploymentName == "" { - deploymentName = "<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>" + openAIChatDeploymentName := os.Getenv("AZURE_OPENAI_CHAT_DEPLOYMENT") + if openAIChatDeploymentName == "" { + openAIChatDeploymentName = "<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>" } - systemPrompt := os.Getenv("OPENAI_SYSTEM_PROMPT") - if systemPrompt == "" { - systemPrompt = "<#= AZURE_OPENAI_SYSTEM_PROMPT #>" + openAISystemPrompt := os.Getenv("OPENAI_SYSTEM_PROMPT") + if openAISystemPrompt == "" { + openAISystemPrompt 
= "<#= AZURE_OPENAI_SYSTEM_PROMPT #>" } - if azureOpenAIEndpoint == "" || azureOpenAIKey == "" || deploymentName == "" || systemPrompt == "" { + if openAIEndpoint == "" || openAIKey == "" || openAIChatDeploymentName == "" || openAISystemPrompt == "" { fmt.Println("Please set the environment variables.") os.Exit(1) } - chat, err := New<#= ClassName #>(systemPrompt, azureOpenAIEndpoint, azureOpenAIKey, deploymentName) + chat, err := New<#= ClassName #>(openAIEndpoint, openAIKey, openAIChatDeploymentName, openAISystemPrompt) if err != nil { log.Fatalf("ERROR: %s", err) } diff --git a/src/ai/.x/templates/openai-chat-streaming-go/openai_chat_completions_streaming_hello_world.go b/src/ai/.x/templates/openai-chat-streaming-go/openai_chat_completions_streaming_hello_world.go index 29e78875..13e22361 100644 --- a/src/ai/.x/templates/openai-chat-streaming-go/openai_chat_completions_streaming_hello_world.go +++ b/src/ai/.x/templates/openai-chat-streaming-go/openai_chat_completions_streaming_hello_world.go @@ -17,22 +17,22 @@ type <#= ClassName #> struct { options *azopenai.ChatCompletionsOptions } -func New<#= ClassName #>(systemPrompt string, endpoint string, azureApiKey string, deploymentName string) (*<#= ClassName #>, error) { - keyCredential, err := azopenai.NewKeyCredential(azureApiKey) +func New<#= ClassName #>(openAIEndpoint string, openAIKey string, openAIChatDeploymentName string, openAISystemPrompt string) (*<#= ClassName #>, error) { + keyCredential, err := azopenai.NewKeyCredential(openAIKey) if err != nil { return nil, err } - client, err := azopenai.NewClientWithKeyCredential(endpoint, keyCredential, nil) + client, err := azopenai.NewClientWithKeyCredential(openAIEndpoint, keyCredential, nil) if err != nil { return nil, err } messages := []azopenai.ChatMessage{ - {Role: to.Ptr(azopenai.ChatRoleSystem), Content: to.Ptr(systemPrompt)}, + {Role: to.Ptr(azopenai.ChatRoleSystem), Content: to.Ptr(openAISystemPrompt)}, } options := &azopenai.ChatCompletionsOptions{ - 
Deployment: deploymentName, + Deployment: openAIChatDeploymentName, Messages: messages, } diff --git a/src/ai/.x/templates/openai-chat-streaming-java/_.json b/src/ai/.x/templates/openai-chat-streaming-java/_.json index 3716658b..baa10f0c 100644 --- a/src/ai/.x/templates/openai-chat-streaming-java/_.json +++ b/src/ai/.x/templates/openai-chat-streaming-java/_.json @@ -1,6 +1,6 @@ { - "_Name": "OpenAI Chat Completions (Streaming)", - "_Short": "openai-chat-streaming", + "_LongName": "OpenAI Chat Completions (Streaming)", + "_ShortName": "openai-chat-streaming", "_Language": "Java", "AZURE_OPENAI_ENDPOINT": "", "AZURE_OPENAI_KEY": "", diff --git a/src/ai/.x/templates/openai-chat-streaming-js/Main.js b/src/ai/.x/templates/openai-chat-streaming-js/Main.js index e7ff0ef6..cbe7bac3 100644 --- a/src/ai/.x/templates/openai-chat-streaming-js/Main.js +++ b/src/ai/.x/templates/openai-chat-streaming-js/Main.js @@ -15,12 +15,12 @@ const rl = readline.createInterface({ async function main() { - const endpoint = process.env["AZURE_OPENAI_ENDPOINT"] || "<#= AZURE_OPENAI_ENDPOINT #>"; - const azureApiKey = process.env["AZURE_OPENAI_KEY"] || "<#= AZURE_OPENAI_KEY #>"; - const deploymentName = process.env["AZURE_OPENAI_CHAT_DEPLOYMENT"] || "<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>" ; - const systemPrompt = process.env["AZURE_OPENAI_SYSTEM_PROMPT"] || "<#= AZURE_OPENAI_SYSTEM_PROMPT #>" ; + const openAIEndpoint = process.env["AZURE_OPENAI_ENDPOINT"] || "<#= AZURE_OPENAI_ENDPOINT #>"; + const openAIKey = process.env["AZURE_OPENAI_KEY"] || "<#= AZURE_OPENAI_KEY #>"; + const openAIChatDeploymentName = process.env["AZURE_OPENAI_CHAT_DEPLOYMENT"] || "<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>" ; + const openAISystemPrompt = process.env["AZURE_OPENAI_SYSTEM_PROMPT"] || "<#= AZURE_OPENAI_SYSTEM_PROMPT #>" ; - const chat = new <#= ClassName #>(systemPrompt, endpoint, azureApiKey, deploymentName); + const chat = new <#= ClassName #>(openAIEndpoint, openAIKey, openAIChatDeploymentName, openAISystemPrompt); 
while (true) { diff --git a/src/ai/.x/templates/openai-chat-streaming-js/OpenAIChatCompletionsStreamingClass.js b/src/ai/.x/templates/openai-chat-streaming-js/OpenAIChatCompletionsStreamingClass.js index f9c499f1..8b204001 100644 --- a/src/ai/.x/templates/openai-chat-streaming-js/OpenAIChatCompletionsStreamingClass.js +++ b/src/ai/.x/templates/openai-chat-streaming-js/OpenAIChatCompletionsStreamingClass.js @@ -4,16 +4,16 @@ const { OpenAIClient, AzureKeyCredential } = require("@azure/openai"); class <#= ClassName #> { - constructor(systemPrompt, endpoint, azureApiKey, deploymentName) { - this.systemPrompt = systemPrompt; - this.deploymentName = deploymentName; - this.client = new OpenAIClient(endpoint, new AzureKeyCredential(azureApiKey)); + constructor(openAIEndpoint, openAIKey, openAIChatDeploymentName, openAISystemPrompt) { + this.openAISystemPrompt = openAISystemPrompt; + this.openAIChatDeploymentName = openAIChatDeploymentName; + this.client = new OpenAIClient(openAIEndpoint, new AzureKeyCredential(openAIKey)); this.clearConversation(); } clearConversation() { this.messages = [ - { role: 'system', content: this.systemPrompt } + { role: 'system', content: this.openAISystemPrompt } ]; } @@ -21,7 +21,7 @@ class <#= ClassName #> { this.messages.push({ role: 'user', content: userInput }); let contentComplete = ''; - const events = await this.client.streamChatCompletions(this.deploymentName, this.messages); + const events = await this.client.streamChatCompletions(this.openAIChatDeploymentName, this.messages); for await (const event of events) { for (const choice of event.choices) { diff --git a/src/ai/.x/templates/openai-chat-streaming-js/_.json b/src/ai/.x/templates/openai-chat-streaming-js/_.json index b635a394..b588891c 100644 --- a/src/ai/.x/templates/openai-chat-streaming-js/_.json +++ b/src/ai/.x/templates/openai-chat-streaming-js/_.json @@ -1,6 +1,6 @@ { - "_Name": "OpenAI Chat Completions (Streaming)", - "_Short": "openai-chat-streaming", + "_LongName": 
"OpenAI Chat Completions (Streaming)", + "_ShortName": "openai-chat-streaming", "_Language": "JavaScript", "ClassName": "OpenAIChatCompletionsStreamingClass", "AZURE_OPENAI_ENDPOINT": "", diff --git a/src/ai/.x/templates/openai-chat-streaming-py/_.json b/src/ai/.x/templates/openai-chat-streaming-py/_.json index 9d98d550..a108fcae 100644 --- a/src/ai/.x/templates/openai-chat-streaming-py/_.json +++ b/src/ai/.x/templates/openai-chat-streaming-py/_.json @@ -1,6 +1,6 @@ { - "_Name": "OpenAI Chat Completions (Streaming)", - "_Short": "openai-chat-streaming", + "_LongName": "OpenAI Chat Completions (Streaming)", + "_ShortName": "openai-chat-streaming", "_Language": "Python", "AZURE_OPENAI_ENDPOINT": "", "AZURE_OPENAI_KEY": "", diff --git a/src/ai/.x/templates/openai-chat-streaming-py/main.py b/src/ai/.x/templates/openai-chat-streaming-py/main.py index 6f329791..4728c815 100644 --- a/src/ai/.x/templates/openai-chat-streaming-py/main.py +++ b/src/ai/.x/templates/openai-chat-streaming-py/main.py @@ -9,13 +9,13 @@ import os def main(): - azure_api_key = os.getenv('AZURE_OPENAI_KEY', '<#= AZURE_OPENAI_KEY #>') - endpoint = os.getenv('AZURE_OPENAI_ENDPOINT', '<#= AZURE_OPENAI_ENDPOINT #>') - api_version = os.getenv("AZURE_OPENAI_API_VERSION") or "<#= AZURE_OPENAI_API_VERSION #>" - deployment_name = os.getenv('AZURE_OPENAI_CHAT_DEPLOYMENT', '<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>') - system_prompt = os.getenv('AZURE_OPENAI_SYSTEM_PROMPT', '<#= AZURE_OPENAI_SYSTEM_PROMPT #>') + openai_api_version = os.getenv('AZURE_OPENAI_API_VERSION', '<#= AZURE_OPENAI_API_VERSION #>') + openai_endpoint = os.getenv('AZURE_OPENAI_ENDPOINT', '<#= AZURE_OPENAI_ENDPOINT #>') + openai_key = os.getenv('AZURE_OPENAI_KEY', '<#= AZURE_OPENAI_KEY #>') + openai_chat_deployment_name = os.getenv('AZURE_OPENAI_CHAT_DEPLOYMENT', '<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>') + openai_system_prompt = os.getenv('AZURE_OPENAI_SYSTEM_PROMPT', '<#= AZURE_OPENAI_SYSTEM_PROMPT #>') - chat = 
OpenAIChatCompletionsStreaming(system_prompt, endpoint, azure_api_key, api_version, deployment_name) + chat = OpenAIChatCompletionsStreaming(openai_api_version, openai_endpoint, openai_key, openai_chat_deployment_name, openai_system_prompt) while True: user_input = input('User: ') @@ -30,4 +30,4 @@ def main(): try: main() except Exception as e: - print(f'The sample encountered an error: {e}') \ No newline at end of file + print(f"The sample encountered an error: {e}") \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-streaming-py/openai_chat_completions_streaming.py b/src/ai/.x/templates/openai-chat-streaming-py/openai_chat_completions_streaming.py index 32c4dc11..96f48304 100644 --- a/src/ai/.x/templates/openai-chat-streaming-py/openai_chat_completions_streaming.py +++ b/src/ai/.x/templates/openai-chat-streaming-py/openai_chat_completions_streaming.py @@ -1,30 +1,27 @@ from openai import AzureOpenAI class OpenAIChatCompletionsStreaming: - def __init__(self, system_prompt, endpoint, azure_api_key, azure_api_version, deployment_name): - self.system_prompt = system_prompt - self.endpoint = endpoint - self.azure_api_key = azure_api_key - self.azure_api_version = azure_api_version - self.deployment_name = deployment_name + def __init__(self, openai_api_version, openai_endpoint, openai_key, openai_chat_deployment_name, openai_system_prompt): + self.openai_system_prompt = openai_system_prompt + self.openai_chat_deployment_name = openai_chat_deployment_name self.client = AzureOpenAI( - api_key=self.azure_api_key, - api_version=self.azure_api_version, - azure_endpoint = endpoint + api_key=openai_key, + api_version=openai_api_version, + azure_endpoint = openai_endpoint ) self.clear_conversation() def clear_conversation(self): self.messages = [ - {'role': 'system', 'content': self.system_prompt} + {'role': 'system', 'content': self.openai_system_prompt} ] def get_chat_completions(self, user_input, callback): self.messages.append({'role': 'user', 
'content': user_input}) - complete_content = "" + complete_content = '' response = self.client.chat.completions.create( - model=self.deployment_name, + model=self.openai_chat_deployment_name, messages=self.messages, stream=True) @@ -32,17 +29,16 @@ def get_chat_completions(self, user_input, callback): choice0 = chunk.choices[0] if hasattr(chunk, 'choices') and chunk.choices else None delta = choice0.delta if choice0 and hasattr(choice0, 'delta') else None - - content = delta.content if delta and hasattr(delta, 'content') else "" - if content is None: continue - - if content is not None: - callback(content) - complete_content += content + content = delta.content if delta and hasattr(delta, 'content') else '' finish_reason = choice0.finish_reason if choice0 and hasattr(choice0, 'finish_reason') else None - if finish_reason == "length": + if finish_reason == 'length': content += f"{content}\nERROR: Exceeded max token length!" - self.messages.append({"role": "assistant", "content": complete_content}) + if content is None: continue + + complete_content += content + callback(content) + + self.messages.append({'role': 'assistant', 'content': complete_content}) return complete_content diff --git a/src/ai/.x/templates/openai-chat-streaming-with-data-cs/OpenAIChatCompletionsWithDataStreamingClass._ b/src/ai/.x/templates/openai-chat-streaming-with-data-cs/OpenAIChatCompletionsWithDataStreaming.csproj._ similarity index 100% rename from src/ai/.x/templates/openai-chat-streaming-with-data-cs/OpenAIChatCompletionsWithDataStreamingClass._ rename to src/ai/.x/templates/openai-chat-streaming-with-data-cs/OpenAIChatCompletionsWithDataStreaming.csproj._ diff --git a/src/ai/.x/templates/openai-chat-streaming-with-data-cs/OpenAIChatCompletionsWithDataStreamingClass.cs b/src/ai/.x/templates/openai-chat-streaming-with-data-cs/OpenAIChatCompletionsWithDataStreamingClass.cs index 81e36c57..0ae92a34 100644 --- 
a/src/ai/.x/templates/openai-chat-streaming-with-data-cs/OpenAIChatCompletionsWithDataStreamingClass.cs +++ b/src/ai/.x/templates/openai-chat-streaming-with-data-cs/OpenAIChatCompletionsWithDataStreamingClass.cs @@ -1,7 +1,6 @@ <#@ template hostspecific="true" #> <#@ output extension=".cs" encoding="utf-8" #> <#@ parameter type="System.String" name="ClassName" #> -<#@ parameter type="System.Boolean" name="OPTION_INCLUDE_CITATIONS" #> using Azure; using Azure.AI.OpenAI; using Azure.Identity; @@ -12,10 +11,10 @@ public class <#= ClassName #> { - public <#= ClassName #>( - string systemPrompt, string openAIKey, string openAIEndpoint, string openAIDeploymentName, string searchEndpoint, string searchApiKey, string searchIndexName, string embeddingsEndpoint) + public <#= ClassName #>(string openAIEndpoint, string openAIKey, string openAIChatDeploymentName, string openAISystemPrompt, string searchEndpoint, string searchApiKey, string searchIndexName, string embeddingsEndpoint) { - _systemPrompt = systemPrompt; + _openAISystemPrompt = openAISystemPrompt; + _client = string.IsNullOrEmpty(openAIKey) ? new OpenAIClient(new Uri(openAIEndpoint), new DefaultAzureCredential()) : new OpenAIClient(new Uri(openAIEndpoint), new AzureKeyCredential(openAIKey)); @@ -25,26 +24,26 @@ public class <#= ClassName #> SearchEndpoint = new Uri(searchEndpoint), Key = searchApiKey, IndexName = searchIndexName, - QueryType = AzureCognitiveSearchQueryType.VectorSimpleHybrid, // Use VectorSimpleHybrid to get the best of both vector and keyword types. + QueryType = AzureCognitiveSearchQueryType.VectorSimpleHybrid, // Use VectorSimpleHybrid to get the best vector and keyword search query types. 
EmbeddingEndpoint = new Uri(embeddingsEndpoint), EmbeddingKey = openAIKey, }; _options = new ChatCompletionsOptions() { - DeploymentName = openAIDeploymentName, - + DeploymentName = openAIChatDeploymentName, AzureExtensionsOptions = new() { Extensions = { extensionConfig } } }; + ClearConversation(); } public void ClearConversation() { _options.Messages.Clear(); - _options.Messages.Add(new ChatRequestSystemMessage(_systemPrompt)); + _options.Messages.Add(new ChatRequestSystemMessage(_openAISystemPrompt)); } public async Task GetChatCompletionsStreamingAsync(string userPrompt, Action? callback = null) @@ -68,17 +67,14 @@ public async Task GetChatCompletionsStreamingAsync(string userPrompt, Ac if (string.IsNullOrEmpty(content)) continue; responseContent += content; - if (callback != null) - { - callback(update); - } + if (callback != null) callback(update); } _options.Messages.Add(new ChatRequestAssistantMessage(responseContent)); return responseContent; } - private string _systemPrompt; - private OpenAIClient _client; + private string _openAISystemPrompt; private ChatCompletionsOptions _options; + private OpenAIClient _client; } \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-streaming-with-data-cs/Program.cs b/src/ai/.x/templates/openai-chat-streaming-with-data-cs/Program.cs index 0a827ae7..34c93500 100644 --- a/src/ai/.x/templates/openai-chat-streaming-with-data-cs/Program.cs +++ b/src/ai/.x/templates/openai-chat-streaming-with-data-cs/Program.cs @@ -16,19 +16,20 @@ public class Program { public static async Task Main(string[] args) { - var azureOpenApiKey = Environment.GetEnvironmentVariable("AZURE_OPENAI_KEY") ?? "<#= AZURE_OPENAI_KEY #>"; - var azureOpenAIEndpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? "<#= AZURE_OPENAI_ENDPOINT #>"; - var azureOpenAIDeploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_CHAT_DEPLOYMENT") ?? 
"<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>"; - var systemPrompt = Environment.GetEnvironmentVariable("AZURE_OPENAI_SYSTEM_PROMPT") ?? "<#= AZURE_OPENAI_SYSTEM_PROMPT #>"; + var openAIEndpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? "<#= AZURE_OPENAI_ENDPOINT #>"; + var openAIKey = Environment.GetEnvironmentVariable("AZURE_OPENAI_KEY") ?? "<#= AZURE_OPENAI_KEY #>"; + var openAIChatDeploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_CHAT_DEPLOYMENT") ?? "<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>"; + var openAISystemPrompt = Environment.GetEnvironmentVariable("AZURE_OPENAI_SYSTEM_PROMPT") ?? "<#= AZURE_OPENAI_SYSTEM_PROMPT #>"; + + var openAIApiVersion = Environment.GetEnvironmentVariable("AZURE_OPENAI_API_VERSION") ?? "<#= AZURE_OPENAI_API_VERSION #>"; + var openAIEmbeddingsDeploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_EMBEDDING_DEPLOYMENT") ?? "<#= AZURE_OPENAI_EMBEDDING_DEPLOYMENT #>"; + var openAIEmbeddingsEndpoint = $"{openAIEndpoint.Trim('/')}/openai/deployments/{openAIEmbeddingsDeploymentName}/embeddings?api-version={openAIApiVersion}"; + var searchEndpoint = Environment.GetEnvironmentVariable("AZURE_AI_SEARCH_ENDPOINT") ?? "<#= AZURE_AI_SEARCH_ENDPOINT #>"; var searchApiKey = Environment.GetEnvironmentVariable("AZURE_AI_SEARCH_KEY") ?? "<#= AZURE_AI_SEARCH_KEY #>"; var searchIndexName = Environment.GetEnvironmentVariable("AZURE_AI_SEARCH_INDEX_NAME") ?? "<#= AZURE_AI_SEARCH_INDEX_NAME #>"; - var embeddingsDeployment = Environment.GetEnvironmentVariable("AZURE_OPENAI_EMBEDDING_DEPLOYMENT") ?? "<#= AZURE_OPENAI_EMBEDDING_DEPLOYMENT #>"; - var azureOpenAIApiVersion = Environment.GetEnvironmentVariable("AZURE_OPENAI_API_VERSION") ?? 
"<#= AZURE_OPENAI_API_VERSION #>"; - var embeddingsEndpoint = $"{azureOpenAIEndpoint.Trim('/')}/openai/deployments/{embeddingsDeployment}/embeddings?api-version={azureOpenAIApiVersion}"; - var chat = new <#= ClassName #>( - systemPrompt, azureOpenApiKey, azureOpenAIEndpoint, azureOpenAIDeploymentName, searchEndpoint, searchApiKey, searchIndexName, embeddingsEndpoint); + var chat = new <#= ClassName #>(openAIEndpoint, openAIKey, openAIChatDeploymentName, openAISystemPrompt, searchEndpoint, searchApiKey, searchIndexName, openAIEmbeddingsEndpoint); while (true) { diff --git a/src/ai/.x/templates/openai-chat-streaming-with-data-cs/_.json b/src/ai/.x/templates/openai-chat-streaming-with-data-cs/_.json index 2e01f5af..f767eab9 100644 --- a/src/ai/.x/templates/openai-chat-streaming-with-data-cs/_.json +++ b/src/ai/.x/templates/openai-chat-streaming-with-data-cs/_.json @@ -1,15 +1,15 @@ { - "_Name": "OpenAI Chat Completions (w/ Data + AI Search)", - "_Short": "openai-chat-streaming-with-data", - "_Language": "C#", - "ClassName": "OpenAIChatCompletionsWithDataStreamingClass", - "AZURE_OPENAI_ENDPOINT": "", - "AZURE_OPENAI_KEY": "", - "AZURE_OPENAI_CHAT_DEPLOYMENT": "", - "AZURE_OPENAI_EMBEDDING_DEPLOYMENT": "", - "AZURE_OPENAI_SYSTEM_PROMPT": "", - "AZURE_AI_SEARCH_ENDPOINT": "", - "AZURE_AI_SEARCH_KEY": "", - "AZURE_AI_SEARCH_INDEX_NAME": "", - "AZURE_OPENAI_API_VERSION": "" + "_LongName": "OpenAI Chat Completions (w/ Data + AI Search)", + "_ShortName": "openai-chat-streaming-with-data", + "_Language": "C#", + "ClassName": "OpenAIChatCompletionsWithDataStreamingClass", + "AZURE_OPENAI_API_VERSION": "", + "AZURE_OPENAI_ENDPOINT": "", + "AZURE_OPENAI_KEY": "", + "AZURE_OPENAI_CHAT_DEPLOYMENT": "", + "AZURE_OPENAI_EMBEDDING_DEPLOYMENT": "", + "AZURE_OPENAI_SYSTEM_PROMPT": "You are a helpful AI assistant.", + "AZURE_AI_SEARCH_ENDPOINT": "", + "AZURE_AI_SEARCH_KEY": "", + "AZURE_AI_SEARCH_INDEX_NAME": "" } \ No newline at end of file diff --git 
a/src/ai/.x/templates/openai-chat-streaming-with-functions-cs/OpenAIChatCompletionsFunctionsStreamingClass.cs b/src/ai/.x/templates/openai-chat-streaming-with-functions-cs/OpenAIChatCompletionsFunctionsStreamingClass.cs index a0dad85d..a804e0c5 100644 --- a/src/ai/.x/templates/openai-chat-streaming-with-functions-cs/OpenAIChatCompletionsFunctionsStreamingClass.cs +++ b/src/ai/.x/templates/openai-chat-streaming-with-functions-cs/OpenAIChatCompletionsFunctionsStreamingClass.cs @@ -8,17 +8,17 @@ public class <#= ClassName #> { - public <#= ClassName #>(string systemPrompt, string endpoint, string azureApiKey, string deploymentName, FunctionFactory factory) + public <#= ClassName #>(string openAIEndpoint, string openAIKey, string openAIChatDeploymentName, string openAISystemPrompt, FunctionFactory factory) { - _systemPrompt = systemPrompt; + _openAISystemPrompt = openAISystemPrompt; _functionFactory = factory; - _client = string.IsNullOrEmpty(azureApiKey) - ? new OpenAIClient(new Uri(endpoint), new DefaultAzureCredential()) - : new OpenAIClient(new Uri(endpoint), new AzureKeyCredential(azureApiKey)); + _client = string.IsNullOrEmpty(openAIKey) + ? 
new OpenAIClient(new Uri(openAIEndpoint), new DefaultAzureCredential()) + : new OpenAIClient(new Uri(openAIEndpoint), new AzureKeyCredential(openAIKey)); _options = new ChatCompletionsOptions(); - _options.DeploymentName = deploymentName; + _options.DeploymentName = openAIChatDeploymentName; foreach (var function in _functionFactory.GetFunctionDefinitions()) { @@ -26,14 +26,14 @@ public class <#= ClassName #> // _options.Tools.Add(new ChatCompletionsFunctionToolDefinition(function)); } - _functionCallContext = new(_functionFactory, _options.Messages); + _functionCallContext = new FunctionCallContext(_functionFactory, _options.Messages); ClearConversation(); } public void ClearConversation() { _options.Messages.Clear(); - _options.Messages.Add(new ChatRequestSystemMessage(_systemPrompt)); + _options.Messages.Add(new ChatRequestSystemMessage(_openAISystemPrompt)); } public async Task GetChatCompletionsStreamingAsync(string userPrompt, Action? callback = null) @@ -60,8 +60,8 @@ public async Task GetChatCompletionsStreamingAsync(string userPrompt, Ac if (string.IsNullOrEmpty(content)) continue; - if (callback != null) callback(update); responseContent += content; + if (callback != null) callback(update); } if (_functionCallContext.TryCallFunction() != null) @@ -75,9 +75,9 @@ public async Task GetChatCompletionsStreamingAsync(string userPrompt, Ac } } - private string _systemPrompt; + private string _openAISystemPrompt; private FunctionFactory _functionFactory; private FunctionCallContext _functionCallContext; private ChatCompletionsOptions _options; private OpenAIClient _client; -} +} \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-streaming-with-functions-cs/Program.cs b/src/ai/.x/templates/openai-chat-streaming-with-functions-cs/Program.cs index 55eda1ac..769c8e1b 100644 --- a/src/ai/.x/templates/openai-chat-streaming-with-functions-cs/Program.cs +++ b/src/ai/.x/templates/openai-chat-streaming-with-functions-cs/Program.cs @@ -11,15 +11,15 @@ 
public class Program { public static async Task Main(string[] args) { - var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? "<#= AZURE_OPENAI_ENDPOINT #>"; - var azureApiKey = Environment.GetEnvironmentVariable("AZURE_OPENAI_KEY") ?? "<#= AZURE_OPENAI_KEY #>"; - var deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_CHAT_DEPLOYMENT") ?? "<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>"; - var systemPrompt = Environment.GetEnvironmentVariable("AZURE_OPENAI_SYSTEM_PROMPT") ?? "<#= AZURE_OPENAI_SYSTEM_PROMPT #>"; + var openAIEndpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? "<#= AZURE_OPENAI_ENDPOINT #>"; + var openAIKey = Environment.GetEnvironmentVariable("AZURE_OPENAI_KEY") ?? "<#= AZURE_OPENAI_KEY #>"; + var openAIChatDeploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_CHAT_DEPLOYMENT") ?? "<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>"; + var openAISystemPrompt = Environment.GetEnvironmentVariable("AZURE_OPENAI_SYSTEM_PROMPT") ?? 
"<#= AZURE_OPENAI_SYSTEM_PROMPT #>"; var factory = new FunctionFactory(); factory.AddFunctions(typeof(OpenAIChatCompletionsCustomFunctions)); - var chat = new <#= ClassName #>(systemPrompt, endpoint, azureApiKey, deploymentName, factory); + var chat = new <#= ClassName #>(openAIEndpoint, openAIKey, openAIChatDeploymentName, openAISystemPrompt, factory); while (true) { diff --git a/src/ai/.x/templates/openai-chat-streaming-with-functions-cs/_.json b/src/ai/.x/templates/openai-chat-streaming-with-functions-cs/_.json index a0ee3b39..92faed44 100644 --- a/src/ai/.x/templates/openai-chat-streaming-with-functions-cs/_.json +++ b/src/ai/.x/templates/openai-chat-streaming-with-functions-cs/_.json @@ -1,6 +1,6 @@ { - "_Name": "OpenAI Chat Completions (w/ Functions)", - "_Short": "openai-chat-streaming-with-functions", + "_LongName": "OpenAI Chat Completions (w/ Functions)", + "_ShortName": "openai-chat-streaming-with-functions", "_Language": "C#", "ClassName": "OpenAIChatCompletionsFunctionsStreamingClass", "AZURE_OPENAI_ENDPOINT": "", diff --git a/src/ai/.x/templates/openai-chat-streaming-with-functions-go/_.json b/src/ai/.x/templates/openai-chat-streaming-with-functions-go/_.json index abd00f8a..9399b730 100644 --- a/src/ai/.x/templates/openai-chat-streaming-with-functions-go/_.json +++ b/src/ai/.x/templates/openai-chat-streaming-with-functions-go/_.json @@ -1,8 +1,8 @@ { - "_Name": "OpenAI Chat Completions (w/ Functions)", - "_Short": "openai-chat-streaming-with-functions", + "_LongName": "OpenAI Chat Completions (w/ Functions)", + "_ShortName": "openai-chat-streaming-with-functions", "_Language": "Go", - "ClassName": "OpenAIChatCompletionsStreamingExample", + "ClassName": "OpenAIChatCompletionsFunctionsStreamingExample", "AZURE_OPENAI_ENDPOINT": "", "AZURE_OPENAI_KEY": "", "AZURE_OPENAI_CHAT_DEPLOYMENT": "", diff --git a/src/ai/.x/templates/openai-chat-streaming-with-functions-go/main.go b/src/ai/.x/templates/openai-chat-streaming-with-functions-go/main.go index 
36ae263a..c20008d4 100644 --- a/src/ai/.x/templates/openai-chat-streaming-with-functions-go/main.go +++ b/src/ai/.x/templates/openai-chat-streaming-with-functions-go/main.go @@ -16,30 +16,30 @@ import ( ) func main() { - azureOpenAIEndpoint := os.Getenv("AZURE_OPENAI_ENDPOINT") - if azureOpenAIEndpoint == "" { - azureOpenAIEndpoint = "<#= AZURE_OPENAI_ENDPOINT #>" + openAIEndpoint := os.Getenv("AZURE_OPENAI_ENDPOINT") + if openAIEndpoint == "" { + openAIEndpoint = "<#= AZURE_OPENAI_ENDPOINT #>" } - azureOpenAIKey := os.Getenv("AZURE_OPENAI_KEY") - if azureOpenAIKey == "" { - azureOpenAIKey = "<#= AZURE_OPENAI_KEY #>" + openAIKey := os.Getenv("AZURE_OPENAI_KEY") + if openAIKey == "" { + openAIKey = "<#= AZURE_OPENAI_KEY #>" } - deploymentName := os.Getenv("AZURE_OPENAI_CHAT_DEPLOYMENT") - if deploymentName == "" { - deploymentName = "<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>" + openAIChatDeploymentName := os.Getenv("AZURE_OPENAI_CHAT_DEPLOYMENT") + if openAIChatDeploymentName == "" { + openAIChatDeploymentName = "<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>" } - systemPrompt := os.Getenv("OPENAI_SYSTEM_PROMPT") - if systemPrompt == "" { - systemPrompt = "<#= AZURE_OPENAI_SYSTEM_PROMPT #>" + openAISystemPrompt := os.Getenv("OPENAI_SYSTEM_PROMPT") + if openAISystemPrompt == "" { + openAISystemPrompt = "<#= AZURE_OPENAI_SYSTEM_PROMPT #>" } - if azureOpenAIEndpoint == "" || azureOpenAIKey == "" || deploymentName == "" || systemPrompt == "" { + if openAIEndpoint == "" || openAIKey == "" || openAIChatDeploymentName == "" || openAISystemPrompt == "" { fmt.Println("Please set the environment variables.") os.Exit(1) } factory := NewFunctionFactoryWithCustomFunctions() - chat, err := New<#= ClassName #>(systemPrompt, azureOpenAIEndpoint, azureOpenAIKey, deploymentName, factory) + chat, err := New<#= ClassName #>(openAIEndpoint, openAIKey, openAIChatDeploymentName, openAISystemPrompt, factory) if err != nil { log.Fatalf("ERROR: %s", err) } diff --git 
a/src/ai/.x/templates/openai-chat-streaming-with-functions-go/openai_chat_completions_functions_streaming_hello_world.go b/src/ai/.x/templates/openai-chat-streaming-with-functions-go/openai_chat_completions_functions_streaming_hello_world.go index 8e5ad832..4513f056 100644 --- a/src/ai/.x/templates/openai-chat-streaming-with-functions-go/openai_chat_completions_functions_streaming_hello_world.go +++ b/src/ai/.x/templates/openai-chat-streaming-with-functions-go/openai_chat_completions_functions_streaming_hello_world.go @@ -23,22 +23,22 @@ type <#= ClassName #> struct { functionCallContext *FunctionCallContext } -func New<#= ClassName #>(systemPrompt string, endpoint string, azureApiKey string, deploymentName string, functionFactory *FunctionFactory) (*<#= ClassName #>, error) { - keyCredential, err := azopenai.NewKeyCredential(azureApiKey) +func New<#= ClassName #>(openAIEndpoint string, openAIKey string, openAIChatDeploymentName string, openAISystemPrompt string, functionFactory *FunctionFactory) (*<#= ClassName #>, error) { + keyCredential, err := azopenai.NewKeyCredential(openAIKey) if err != nil { return nil, err } - client, err := azopenai.NewClientWithKeyCredential(endpoint, keyCredential, nil) + client, err := azopenai.NewClientWithKeyCredential(openAIEndpoint, keyCredential, nil) if err != nil { return nil, err } messages := []azopenai.ChatMessage{ - {Role: to.Ptr(azopenai.ChatRoleSystem), Content: to.Ptr(systemPrompt)}, + {Role: to.Ptr(azopenai.ChatRoleSystem), Content: to.Ptr(openAISystemPrompt)}, } options := &azopenai.ChatCompletionsOptions{ - Deployment: deploymentName, + Deployment: openAIChatDeploymentName, Messages: messages, FunctionCall: &azopenai.ChatCompletionsOptionsFunctionCall{ Value: to.Ptr("auto"), diff --git a/src/ai/.x/templates/openai-chat-streaming-with-functions-js/Main.js b/src/ai/.x/templates/openai-chat-streaming-with-functions-js/Main.js index 55a5c456..4ae24cf4 100644 --- 
a/src/ai/.x/templates/openai-chat-streaming-with-functions-js/Main.js +++ b/src/ai/.x/templates/openai-chat-streaming-with-functions-js/Main.js @@ -16,12 +16,12 @@ const rl = readline.createInterface({ async function main() { - const endpoint = process.env["AZURE_OPENAI_ENDPOINT"] || "<#= AZURE_OPENAI_ENDPOINT #>"; - const azureApiKey = process.env["AZURE_OPENAI_KEY"] || "<#= AZURE_OPENAI_KEY #>"; - const deploymentName = process.env["AZURE_OPENAI_CHAT_DEPLOYMENT"] || "<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>" ; - const systemPrompt = process.env["AZURE_OPENAI_SYSTEM_PROMPT"] || "<#= AZURE_OPENAI_SYSTEM_PROMPT #>" ; + const openAIEndpoint = process.env["AZURE_OPENAI_ENDPOINT"] || "<#= AZURE_OPENAI_ENDPOINT #>"; + const openAIKey = process.env["AZURE_OPENAI_KEY"] || "<#= AZURE_OPENAI_KEY #>"; + const openAIChatDeploymentName = process.env["AZURE_OPENAI_CHAT_DEPLOYMENT"] || "<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>" ; + const openAISystemPrompt = process.env["AZURE_OPENAI_SYSTEM_PROMPT"] || "<#= AZURE_OPENAI_SYSTEM_PROMPT #>" ; - const chat = new <#= ClassName #>(systemPrompt, endpoint, azureApiKey, deploymentName, factory); + const chat = new <#= ClassName #>(openAIEndpoint, openAIKey, openAIChatDeploymentName, openAISystemPrompt, factory); while (true) { diff --git a/src/ai/.x/templates/openai-chat-streaming-with-functions-js/OpenAIChatCompletionsFunctionsStreamingClass.js b/src/ai/.x/templates/openai-chat-streaming-with-functions-js/OpenAIChatCompletionsFunctionsStreamingClass.js index a2153256..907d6899 100644 --- a/src/ai/.x/templates/openai-chat-streaming-with-functions-js/OpenAIChatCompletionsFunctionsStreamingClass.js +++ b/src/ai/.x/templates/openai-chat-streaming-with-functions-js/OpenAIChatCompletionsFunctionsStreamingClass.js @@ -5,17 +5,17 @@ const { OpenAIClient, AzureKeyCredential } = require("@azure/openai"); const { FunctionCallContext } = require("./FunctionCallContext"); class <#= ClassName #> { - constructor(systemPrompt, endpoint, azureApiKey, 
deploymentName, functionFactory) { - this.systemPrompt = systemPrompt; - this.deploymentName = deploymentName; - this.client = new OpenAIClient(endpoint, new AzureKeyCredential(azureApiKey)); + constructor(openAIEndpoint, openAIKey, openAIChatDeploymentName, openAISystemPrompt, functionFactory) { + this.openAISystemPrompt = openAISystemPrompt; + this.openAIChatDeploymentName = openAIChatDeploymentName; + this.client = new OpenAIClient(openAIEndpoint, new AzureKeyCredential(openAIKey)); this.functionFactory = functionFactory; this.clearConversation(); } clearConversation() { this.messages = [ - { role: 'system', content: this.systemPrompt } + { role: 'system', content: this.openAISystemPrompt } ]; this.functionCallContext = new FunctionCallContext(this.functionFactory, this.messages); } @@ -25,7 +25,7 @@ class <#= ClassName #> { let contentComplete = ''; while (true) { - const events = await this.client.streamChatCompletions(this.deploymentName, this.messages, { + const events = await this.client.streamChatCompletions(this.openAIChatDeploymentName, this.messages, { functions: this.functionFactory.getFunctionSchemas(), }); diff --git a/src/ai/.x/templates/openai-chat-streaming-with-functions-js/_.json b/src/ai/.x/templates/openai-chat-streaming-with-functions-js/_.json index 9bebaa1a..1f79b39f 100644 --- a/src/ai/.x/templates/openai-chat-streaming-with-functions-js/_.json +++ b/src/ai/.x/templates/openai-chat-streaming-with-functions-js/_.json @@ -1,6 +1,6 @@ { - "_Name": "OpenAI Chat Completions (w/ Functions)", - "_Short": "openai-chat-streaming-with-functions", + "_LongName": "OpenAI Chat Completions (w/ Functions)", + "_ShortName": "openai-chat-streaming-with-functions", "_Language": "JavaScript", "ClassName": "OpenAIChatCompletionsFunctionsStreamingClass", "AZURE_OPENAI_ENDPOINT": "", diff --git a/src/ai/.x/templates/openai-chat-streaming-with-functions-py/_.json b/src/ai/.x/templates/openai-chat-streaming-with-functions-py/_.json index 45d6379b..8b60c4e2 100644 
--- a/src/ai/.x/templates/openai-chat-streaming-with-functions-py/_.json +++ b/src/ai/.x/templates/openai-chat-streaming-with-functions-py/_.json @@ -1,6 +1,6 @@ { - "_Name": "OpenAI Chat Completions (w/ Functions)", - "_Short": "openai-chat-streaming-with-functions", + "_LongName": "OpenAI Chat Completions (w/ Functions)", + "_ShortName": "openai-chat-streaming-with-functions", "_Language": "Python", "AZURE_OPENAI_ENDPOINT": "", "AZURE_OPENAI_KEY": "", diff --git a/src/ai/.x/templates/openai-chat-streaming-with-functions-py/main.py b/src/ai/.x/templates/openai-chat-streaming-with-functions-py/main.py index 7199959b..0a77a8b9 100644 --- a/src/ai/.x/templates/openai-chat-streaming-with-functions-py/main.py +++ b/src/ai/.x/templates/openai-chat-streaming-with-functions-py/main.py @@ -10,13 +10,13 @@ import os def main(): - azure_api_key = os.getenv('AZURE_OPENAI_KEY', '<#= AZURE_OPENAI_KEY #>') - endpoint = os.getenv('AZURE_OPENAI_ENDPOINT', '<#= AZURE_OPENAI_ENDPOINT #>') - api_version = os.getenv("AZURE_OPENAI_API_VERSION") or "<#= AZURE_OPENAI_API_VERSION #>" - deployment_name = os.getenv('AZURE_OPENAI_CHAT_DEPLOYMENT', '<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>') - system_prompt = os.getenv('AZURE_OPENAI_SYSTEM_PROMPT', '<#= AZURE_OPENAI_SYSTEM_PROMPT #>') + openai_api_version = os.getenv('AZURE_OPENAI_API_VERSION', '<#= AZURE_OPENAI_API_VERSION #>') + openai_endpoint = os.getenv('AZURE_OPENAI_ENDPOINT', '<#= AZURE_OPENAI_ENDPOINT #>') + openai_key = os.getenv('AZURE_OPENAI_KEY', '<#= AZURE_OPENAI_KEY #>') + openai_chat_deployment_name = os.getenv('AZURE_OPENAI_CHAT_DEPLOYMENT', '<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>') + openai_system_prompt = os.getenv('AZURE_OPENAI_SYSTEM_PROMPT', '<#= AZURE_OPENAI_SYSTEM_PROMPT #>') - chat = OpenAIChatCompletionsFunctionsStreaming(system_prompt, endpoint, azure_api_key, api_version, deployment_name, factory) + chat = OpenAIChatCompletionsFunctionsStreaming(openai_api_version, openai_endpoint, openai_key, openai_chat_deployment_name, 
openai_system_prompt, factory) while True: user_input = input('User: ') @@ -31,4 +31,4 @@ def main(): try: main() except Exception as e: - print(f'The sample encountered an error: {e}') \ No newline at end of file + print(f"The sample encountered an error: {e}") \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-streaming-with-functions-py/openai_chat_completions_functions_streaming.py b/src/ai/.x/templates/openai-chat-streaming-with-functions-py/openai_chat_completions_functions_streaming.py index ec13f85a..104a2c4e 100644 --- a/src/ai/.x/templates/openai-chat-streaming-with-functions-py/openai_chat_completions_functions_streaming.py +++ b/src/ai/.x/templates/openai-chat-streaming-with-functions-py/openai_chat_completions_functions_streaming.py @@ -2,39 +2,36 @@ from function_call_context import FunctionCallContext class OpenAIChatCompletionsFunctionsStreaming: - def __init__(self, system_prompt, endpoint, azure_api_key, azure_api_version, deployment_name, function_factory): - self.system_prompt = system_prompt - self.endpoint = endpoint - self.azure_api_key = azure_api_key - self.azure_api_version = azure_api_version - self.deployment_name = deployment_name + def __init__(self, openai_api_version, openai_endpoint, openai_key, openai_chat_deployment_name, openai_system_prompt, function_factory): + self.openai_system_prompt = openai_system_prompt + self.openai_chat_deployment_name = openai_chat_deployment_name self.function_factory = function_factory self.client = AzureOpenAI( - api_key=self.azure_api_key, - api_version=self.azure_api_version, - azure_endpoint = endpoint + api_key=openai_key, + api_version=openai_api_version, + azure_endpoint = openai_endpoint ) self.clear_conversation() def clear_conversation(self): self.messages = [ - {'role': 'system', 'content': self.system_prompt} + {'role': 'system', 'content': self.openai_system_prompt} ] self.function_call_context = FunctionCallContext(self.function_factory, self.messages) def 
get_chat_completions(self, user_input, callback): self.messages.append({'role': 'user', 'content': user_input}) - complete_content = "" + complete_content = '' functions = self.function_factory.get_function_schemas() while True: response = self.client.chat.completions.create( - model=self.deployment_name, + model=self.openai_chat_deployment_name, messages=self.messages, stream=True, functions=functions, - function_call="auto") + function_call='auto') for chunk in response: @@ -42,20 +39,20 @@ def get_chat_completions(self, user_input, callback): self.function_call_context.check_for_update(choice0) delta = choice0.delta if choice0 and hasattr(choice0, 'delta') else None - content = delta.content if delta and hasattr(delta, 'content') else "" - if content is None: continue - - if content is not None: - callback(content) - complete_content += content + content = delta.content if delta and hasattr(delta, 'content') else '' finish_reason = choice0.finish_reason if choice0 and hasattr(choice0, 'finish_reason') else None - if finish_reason == "length": + if finish_reason == 'length': content += f"{content}\nERROR: Exceeded max token length!" 
+ if content is None: continue + + complete_content += content + callback(content) + if self.function_call_context.try_call_function() is not None: self.function_call_context.clear() continue - self.messages.append({"role": "assistant", "content": complete_content}) + self.messages.append({'role': 'assistant', 'content': complete_content}) return complete_content diff --git a/src/ai/.x/templates/openai-webpage/_.json b/src/ai/.x/templates/openai-webpage/_.json index ad03afed..0cbcec13 100644 --- a/src/ai/.x/templates/openai-webpage/_.json +++ b/src/ai/.x/templates/openai-webpage/_.json @@ -1,6 +1,6 @@ { - "_Name": "OpenAI Webpage (w/ Functions)", - "_Short": "openai-webpage", + "_LongName": "OpenAI Webpage (w/ Functions)", + "_ShortName": "openai-webpage", "_Language": "JavaScript", "ClassName": "OpenAIChatCompletionsFunctionsStreamingClass", "AZURE_OPENAI_ENDPOINT": "", diff --git a/src/ai/.x/templates/openai-webpage/package.json b/src/ai/.x/templates/openai-webpage/package.json index 7d113850..89463238 100644 --- a/src/ai/.x/templates/openai-webpage/package.json +++ b/src/ai/.x/templates/openai-webpage/package.json @@ -9,7 +9,7 @@ "author": "", "license": "MIT", "dependencies": { - "@azure/openai": "1.0.0-beta.8", + "@azure/openai": "1.0.0-beta.10", "highlight.js": "^11.7.2", "marked": "^4.0.10" }, diff --git a/src/ai/.x/templates/openai-webpage/src/OpenAIChatCompletionsFunctionsStreamingClass.js b/src/ai/.x/templates/openai-webpage/src/OpenAIChatCompletionsFunctionsStreamingClass.js index 776edd0a..907d6899 100644 --- a/src/ai/.x/templates/openai-webpage/src/OpenAIChatCompletionsFunctionsStreamingClass.js +++ b/src/ai/.x/templates/openai-webpage/src/OpenAIChatCompletionsFunctionsStreamingClass.js @@ -5,17 +5,17 @@ const { OpenAIClient, AzureKeyCredential } = require("@azure/openai"); const { FunctionCallContext } = require("./FunctionCallContext"); class <#= ClassName #> { - constructor(systemPrompt, endpoint, azureApiKey, deploymentName, functionFactory) { - 
this.systemPrompt = systemPrompt; - this.deploymentName = deploymentName; - this.client = new OpenAIClient(endpoint, new AzureKeyCredential(azureApiKey)); + constructor(openAIEndpoint, openAIKey, openAIChatDeploymentName, openAISystemPrompt, functionFactory) { + this.openAISystemPrompt = openAISystemPrompt; + this.openAIChatDeploymentName = openAIChatDeploymentName; + this.client = new OpenAIClient(openAIEndpoint, new AzureKeyCredential(openAIKey)); this.functionFactory = functionFactory; this.clearConversation(); } clearConversation() { this.messages = [ - { role: 'system', content: this.systemPrompt } + { role: 'system', content: this.openAISystemPrompt } ]; this.functionCallContext = new FunctionCallContext(this.functionFactory, this.messages); } @@ -25,7 +25,7 @@ class <#= ClassName #> { let contentComplete = ''; while (true) { - const events = this.client.listChatCompletions(this.deploymentName, this.messages, { + const events = await this.client.streamChatCompletions(this.openAIChatDeploymentName, this.messages, { functions: this.functionFactory.getFunctionSchemas(), }); diff --git a/src/ai/.x/templates/openai-webpage/src/script.js b/src/ai/.x/templates/openai-webpage/src/script.js index 962bceef..dc82572b 100644 --- a/src/ai/.x/templates/openai-webpage/src/script.js +++ b/src/ai/.x/templates/openai-webpage/src/script.js @@ -15,22 +15,22 @@ let streamingChatCompletions; function streamingChatCompletionsInit() { - const endpoint = process.env.AZURE_OPENAI_ENDPOINT || "<#= AZURE_OPENAI_ENDPOINT #>"; - const azureApiKey = process.env.AZURE_OPENAI_KEY || "<#= AZURE_OPENAI_KEY #>"; - const deploymentName = process.env.AZURE_OPENAI_CHAT_DEPLOYMENT || "<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>" ; - const systemPrompt = process.env.AZURE_OPENAI_SYSTEM_PROMPT || "<#= AZURE_OPENAI_SYSTEM_PROMPT #>"; + const openAIEndpoint = process.env.AZURE_OPENAI_ENDPOINT || "<#= AZURE_OPENAI_ENDPOINT #>"; + const openAIKey = process.env.AZURE_OPENAI_KEY || "<#= AZURE_OPENAI_KEY #>"; + 
const openAIChatDeploymentName = process.env.AZURE_OPENAI_CHAT_DEPLOYMENT || "<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>" ; + const openAISystemPrompt = process.env.AZURE_OPENAI_SYSTEM_PROMPT || "<#= AZURE_OPENAI_SYSTEM_PROMPT #>"; - if (!endpoint || endpoint.startsWith('(systemPrompt, endpoint, azureApiKey, deploymentName, factory); + streamingChatCompletions = new <#= ClassName #>(openAIEndpoint, openAIKey, openAIChatDeploymentName, openAISystemPrompt, factory); } function streamingChatCompletionsClear() { diff --git a/src/ai/commands/dev_command.cs b/src/ai/commands/dev_command.cs index 0c97cea1..5292d97b 100644 --- a/src/ai/commands/dev_command.cs +++ b/src/ai/commands/dev_command.cs @@ -83,8 +83,7 @@ private void DoNewEnv() private void DoNewTemplate(string templateName, string language) { - var filesInDirAlready = FileHelpers.FindFiles(".", "*").Count() > 0; - var outputDirectory = !filesInDirAlready ? "." : templateName + ProgrammingLanguageToken.GetSuffix(language); + var outputDirectory = templateName + ProgrammingLanguageToken.GetSuffix(language); var instructions = InstructionsToken.Data().GetOrDefault(_values); var found = TemplateFactory.GenerateTemplateFiles(templateName, language, instructions, outputDirectory, _quiet, _verbose); diff --git a/src/common/details/helpers/file_helpers.cs b/src/common/details/helpers/file_helpers.cs index ab4c2117..2be6afc0 100644 --- a/src/common/details/helpers/file_helpers.cs +++ b/src/common/details/helpers/file_helpers.cs @@ -57,7 +57,9 @@ public static string NormalizePath(string outputDirectory) { var normalized = new DirectoryInfo(outputDirectory).FullName; var cwd = Directory.GetCurrentDirectory(); - return normalized.StartsWith(cwd) ? normalized.Substring(cwd.Length + 1) : normalized; + return normalized.StartsWith(cwd) && normalized.Length > cwd.Length + 1 + ? 
normalized.Substring(cwd.Length + 1) + : normalized; } } diff --git a/src/extensions/template_extension/TemplateFactory.cs b/src/extensions/template_extension/TemplateFactory.cs index 672b5b26..29f31d0d 100644 --- a/src/extensions/template_extension/TemplateFactory.cs +++ b/src/extensions/template_extension/TemplateFactory.cs @@ -168,8 +168,8 @@ private static List GetTemplateGroups() foreach (var uniqueName in uniqueNames) { var parameters = GetParameters(uniqueName); - var longName = parameters["_Name"]; - var shortName = parameters["_Short"]; + var longName = parameters["_LongName"]; + var shortName = parameters["_ShortName"]; var language = parameters["_Language"]; templates.Add(new Item() From 43e36fb1810e975b4ebc3f238228d400b97d60c1 Mon Sep 17 00:00:00 2001 From: Christopher Schraer <32145632+chschrae@users.noreply.github.com> Date: Tue, 16 Jan 2024 17:48:17 -0800 Subject: [PATCH 07/30] python streaming with data and azure search template (#146) * initial * renamed some variables * updating to use embeddings * merge conflict * fix endpoints problem (#149) * merge main for chris so he doesn't have to (after my big change yesterday) (#150) * updated names and added filtering (#147) * updated names and added filtering * make smaller width if required (ai dev new list) * only use compact form if output not redirected * Robch/2401 jan12 bugs (#148) * updated names and added filtering * make smaller width if required (ai dev new list) * only use compact form if output not redirected * fixed a bug * CR feedback and making C# templates more consistent * CR feedback on python templates, better naming symmetry * javascript template name symmetry * go template naming symmetry * match main branch python template for second half of function ... 
input pararm names, and order on ctor; also normalize naming across all json files for chat deployment name * renamed to match new names --------- Co-authored-by: Chris Schraer Co-authored-by: Rob Chambers --- src/ai/.x/templates/openai-chat-go/_.json | 2 +- src/ai/.x/templates/openai-chat-java/_.json | 2 +- src/ai/.x/templates/openai-chat-js/_.json | 2 +- src/ai/.x/templates/openai-chat-py/_.json | 2 +- .../templates/openai-chat-streaming-go/_.json | 2 +- .../openai-chat-streaming-java/_.json | 2 +- .../templates/openai-chat-streaming-js/_.json | 2 +- .../templates/openai-chat-streaming-py/_.json | 3 +- .../openai-chat-streaming-py/main.py | 5 +- .../openai_chat_completions_streaming.py | 5 +- .../openai-chat-streaming-with-data-py/_.json | 16 +++++ .../main.py | 43 +++++++++++++ ...ai_chat_completions_with_data_streaming.py | 64 +++++++++++++++++++ .../requirements.txt | 1 + .../_.json | 2 +- .../_.json | 2 +- .../_.json | 2 +- src/ai/.x/templates/openai-webpage/_.json | 2 +- 18 files changed, 144 insertions(+), 15 deletions(-) create mode 100644 src/ai/.x/templates/openai-chat-streaming-with-data-py/_.json create mode 100644 src/ai/.x/templates/openai-chat-streaming-with-data-py/main.py create mode 100644 src/ai/.x/templates/openai-chat-streaming-with-data-py/openai_chat_completions_with_data_streaming.py create mode 100644 src/ai/.x/templates/openai-chat-streaming-with-data-py/requirements.txt diff --git a/src/ai/.x/templates/openai-chat-go/_.json b/src/ai/.x/templates/openai-chat-go/_.json index d71325a7..9b771ad4 100644 --- a/src/ai/.x/templates/openai-chat-go/_.json +++ b/src/ai/.x/templates/openai-chat-go/_.json @@ -5,6 +5,6 @@ "ClassName": "OpenAIChatCompletionsExample", "AZURE_OPENAI_ENDPOINT": "", "AZURE_OPENAI_KEY": "", - "AZURE_OPENAI_CHAT_DEPLOYMENT": "", + "AZURE_OPENAI_CHAT_DEPLOYMENT": "", "AZURE_OPENAI_SYSTEM_PROMPT": "You are a helpful AI assistant." 
} \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-java/_.json b/src/ai/.x/templates/openai-chat-java/_.json index 68fa5a10..27a24b45 100644 --- a/src/ai/.x/templates/openai-chat-java/_.json +++ b/src/ai/.x/templates/openai-chat-java/_.json @@ -4,6 +4,6 @@ "_Language": "Java", "AZURE_OPENAI_ENDPOINT": "", "AZURE_OPENAI_KEY": "", - "AZURE_OPENAI_CHAT_DEPLOYMENT": "", + "AZURE_OPENAI_CHAT_DEPLOYMENT": "", "AZURE_OPENAI_SYSTEM_PROMPT": "You are a helpful AI assistant." } \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-js/_.json b/src/ai/.x/templates/openai-chat-js/_.json index 90a38cce..8a1f1e93 100644 --- a/src/ai/.x/templates/openai-chat-js/_.json +++ b/src/ai/.x/templates/openai-chat-js/_.json @@ -5,6 +5,6 @@ "ClassName": "OpenAIChatCompletionsClass", "AZURE_OPENAI_ENDPOINT": "", "AZURE_OPENAI_KEY": "", - "AZURE_OPENAI_CHAT_DEPLOYMENT": "", + "AZURE_OPENAI_CHAT_DEPLOYMENT": "", "AZURE_OPENAI_SYSTEM_PROMPT": "You are a helpful AI assistant." } \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-py/_.json b/src/ai/.x/templates/openai-chat-py/_.json index bd621133..a90bb783 100644 --- a/src/ai/.x/templates/openai-chat-py/_.json +++ b/src/ai/.x/templates/openai-chat-py/_.json @@ -5,6 +5,6 @@ "AZURE_OPENAI_ENDPOINT": "", "AZURE_OPENAI_KEY": "", "AZURE_OPENAI_API_VERSION": "2023-12-01-preview", - "AZURE_OPENAI_CHAT_DEPLOYMENT": "", + "AZURE_OPENAI_CHAT_DEPLOYMENT": "", "AZURE_OPENAI_SYSTEM_PROMPT": "You are a helpful AI assistant." 
} \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-streaming-go/_.json b/src/ai/.x/templates/openai-chat-streaming-go/_.json index 8318a4b2..d3b112ec 100644 --- a/src/ai/.x/templates/openai-chat-streaming-go/_.json +++ b/src/ai/.x/templates/openai-chat-streaming-go/_.json @@ -5,6 +5,6 @@ "ClassName": "OpenAIChatCompletionsStreamingExample", "AZURE_OPENAI_ENDPOINT": "", "AZURE_OPENAI_KEY": "", - "AZURE_OPENAI_CHAT_DEPLOYMENT": "", + "AZURE_OPENAI_CHAT_DEPLOYMENT": "", "AZURE_OPENAI_SYSTEM_PROMPT": "You are a helpful AI assistant." } \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-streaming-java/_.json b/src/ai/.x/templates/openai-chat-streaming-java/_.json index baa10f0c..137c0bfe 100644 --- a/src/ai/.x/templates/openai-chat-streaming-java/_.json +++ b/src/ai/.x/templates/openai-chat-streaming-java/_.json @@ -4,6 +4,6 @@ "_Language": "Java", "AZURE_OPENAI_ENDPOINT": "", "AZURE_OPENAI_KEY": "", - "AZURE_OPENAI_CHAT_DEPLOYMENT": "", + "AZURE_OPENAI_CHAT_DEPLOYMENT": "", "AZURE_OPENAI_SYSTEM_PROMPT": "You are a helpful AI assistant." } \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-streaming-js/_.json b/src/ai/.x/templates/openai-chat-streaming-js/_.json index b588891c..59568312 100644 --- a/src/ai/.x/templates/openai-chat-streaming-js/_.json +++ b/src/ai/.x/templates/openai-chat-streaming-js/_.json @@ -5,6 +5,6 @@ "ClassName": "OpenAIChatCompletionsStreamingClass", "AZURE_OPENAI_ENDPOINT": "", "AZURE_OPENAI_KEY": "", - "AZURE_OPENAI_CHAT_DEPLOYMENT": "", + "AZURE_OPENAI_CHAT_DEPLOYMENT": "", "AZURE_OPENAI_SYSTEM_PROMPT": "You are a helpful AI assistant." 
} \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-streaming-py/_.json b/src/ai/.x/templates/openai-chat-streaming-py/_.json index a108fcae..6a043ccc 100644 --- a/src/ai/.x/templates/openai-chat-streaming-py/_.json +++ b/src/ai/.x/templates/openai-chat-streaming-py/_.json @@ -2,9 +2,10 @@ "_LongName": "OpenAI Chat Completions (Streaming)", "_ShortName": "openai-chat-streaming", "_Language": "Python", + "ClassName": "OpenAIChatCompletionsStreaming", "AZURE_OPENAI_ENDPOINT": "", "AZURE_OPENAI_KEY": "", "AZURE_OPENAI_API_VERSION": "2023-12-01-preview", - "AZURE_OPENAI_CHAT_DEPLOYMENT": "", + "AZURE_OPENAI_CHAT_DEPLOYMENT": "", "AZURE_OPENAI_SYSTEM_PROMPT": "You are a helpful AI assistant." } \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-streaming-py/main.py b/src/ai/.x/templates/openai-chat-streaming-py/main.py index 4728c815..68ec2c93 100644 --- a/src/ai/.x/templates/openai-chat-streaming-py/main.py +++ b/src/ai/.x/templates/openai-chat-streaming-py/main.py @@ -1,11 +1,12 @@ <#@ template hostspecific="true" #> <#@ output extension=".py" encoding="utf-8" #> +<#@ parameter type="System.String" name="ClassName" #> <#@ parameter type="System.String" name="AZURE_OPENAI_ENDPOINT" #> <#@ parameter type="System.String" name="AZURE_OPENAI_KEY" #> <#@ parameter type="System.String" name="AZURE_OPENAI_API_VERSION" #> <#@ parameter type="System.String" name="AZURE_OPENAI_CHAT_DEPLOYMENT" #> <#@ parameter type="System.String" name="AZURE_OPENAI_SYSTEM_PROMPT" #> -from openai_chat_completions_streaming import OpenAIChatCompletionsStreaming +from openai_chat_completions_streaming import <#= ClassName #> import os def main(): @@ -15,7 +16,7 @@ def main(): openai_chat_deployment_name = os.getenv('AZURE_OPENAI_CHAT_DEPLOYMENT', '<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>') openai_system_prompt = os.getenv('AZURE_OPENAI_SYSTEM_PROMPT', '<#= AZURE_OPENAI_SYSTEM_PROMPT #>') - chat = OpenAIChatCompletionsStreaming(openai_api_version, 
openai_endpoint, openai_key, openai_chat_deployment_name, openai_system_prompt) + chat = <#= ClassName #>(openai_api_version, openai_endpoint, openai_key, openai_chat_deployment_name, openai_system_prompt) while True: user_input = input('User: ') diff --git a/src/ai/.x/templates/openai-chat-streaming-py/openai_chat_completions_streaming.py b/src/ai/.x/templates/openai-chat-streaming-py/openai_chat_completions_streaming.py index 96f48304..b6b47f85 100644 --- a/src/ai/.x/templates/openai-chat-streaming-py/openai_chat_completions_streaming.py +++ b/src/ai/.x/templates/openai-chat-streaming-py/openai_chat_completions_streaming.py @@ -1,6 +1,9 @@ +<#@ template hostspecific="true" #> +<#@ output extension=".py" encoding="utf-8" #> +<#@ parameter type="System.String" name="ClassName" #> from openai import AzureOpenAI -class OpenAIChatCompletionsStreaming: +class <#= ClassName #>: def __init__(self, openai_api_version, openai_endpoint, openai_key, openai_chat_deployment_name, openai_system_prompt): self.openai_system_prompt = openai_system_prompt self.openai_chat_deployment_name = openai_chat_deployment_name diff --git a/src/ai/.x/templates/openai-chat-streaming-with-data-py/_.json b/src/ai/.x/templates/openai-chat-streaming-with-data-py/_.json new file mode 100644 index 00000000..e4b7b894 --- /dev/null +++ b/src/ai/.x/templates/openai-chat-streaming-with-data-py/_.json @@ -0,0 +1,16 @@ +{ + "_LongName": "OpenAI Chat Completions (w/ Data + AI Search)", + "_ShortName": "openai-chat-streaming-with-data", + "_Language": "Python", + "ClassName": "OpenAIChatCompletionsStreamingWithDataAISearch", + "AZURE_OPENAI_ENDPOINT": "", + "AZURE_OPENAI_KEY": "", + "AZURE_OPENAI_API_VERSION": "2023-12-01-preview", + "AZURE_OPENAI_CHAT_DEPLOYMENT": "", + "AZURE_OPENAI_EMBEDDING_DEPLOYMENT": "", + "AZURE_OPENAI_SYSTEM_PROMPT": "You are a helpful AI assistant.", + "AZURE_AI_SEARCH_ENDPOINT": "", + "AZURE_AI_SEARCH_KEY": "", + "AZURE_AI_SEARCH_INDEX_NAME": "", + "OPENAI_API_VERSION": "" +} 
diff --git a/src/ai/.x/templates/openai-chat-streaming-with-data-py/main.py b/src/ai/.x/templates/openai-chat-streaming-with-data-py/main.py new file mode 100644 index 00000000..632f4a3b --- /dev/null +++ b/src/ai/.x/templates/openai-chat-streaming-with-data-py/main.py @@ -0,0 +1,43 @@ +<#@ template hostspecific="true" #> +<#@ output extension=".py" encoding="utf-8" #> +<#@ parameter type="System.String" name="ClassName" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_ENDPOINT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_KEY" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_API_VERSION" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_CHAT_DEPLOYMENT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_SYSTEM_PROMPT" #> +<#@ parameter type="System.String" name="AZURE_AI_SEARCH_ENDPOINT" #> +<#@ parameter type="System.String" name="AZURE_AI_SEARCH_KEY" #> +<#@ parameter type="System.String" name="AZURE_AI_SEARCH_INDEX_NAME" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_EMBEDDING_DEPLOYMENT" #> +from openai_chat_completions_with_data_streaming import <#= ClassName #> +import os + +def main(): + openai_api_version = os.getenv('AZURE_OPENAI_API_VERSION', '<#= AZURE_OPENAI_API_VERSION #>') + openai_endpoint = os.getenv('AZURE_OPENAI_ENDPOINT', '<#= AZURE_OPENAI_ENDPOINT #>') + openai_key = os.getenv('AZURE_OPENAI_KEY', '<#= AZURE_OPENAI_KEY #>') + openai_chat_deployment_name = os.getenv('AZURE_OPENAI_CHAT_DEPLOYMENT', '<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>') + openai_system_prompt = os.getenv('AZURE_OPENAI_SYSTEM_PROMPT', '<#= AZURE_OPENAI_SYSTEM_PROMPT #>') + search_endpoint =os.getenv('AZURE_AI_SEARCH_ENDPOINT', '<#= AZURE_AI_SEARCH_ENDPOINT #>') + search_api_key = os.getenv('AZURE_AI_SEARCH_KEY', '<#= AZURE_AI_SEARCH_KEY #>') + search_index_name = os.getenv('AZURE_AI_SEARCH_INDEX_NAME', '<#= AZURE_AI_SEARCH_INDEX_NAME #>') + openai_embeddings_deployment_name = os.getenv('AZURE_OPENAI_EMBEDDING_DEPLOYMENT', 
'<#= AZURE_OPENAI_EMBEDDING_DEPLOYMENT #>') + openai_embeddings_endpoint = f"{openai_endpoint.rstrip('/')}/openai/deployments/{openai_embeddings_deployment_name}/embeddings?api-version={openai_api_version}"; + + chat = <#= ClassName #>(openai_api_version, openai_endpoint, openai_key, openai_chat_deployment_name, openai_system_prompt, search_endpoint, search_api_key, search_index_name, openai_embeddings_endpoint) + + while True: + user_input = input('User: ') + if user_input == 'exit' or user_input == '': + break + + print("\nAssistant: ", end="") + response = chat.get_chat_completions(user_input, lambda content: print(content, end="")) + print("\n") + +if __name__ == '__main__': + try: + main() + except Exception as e: + print(f"The sample encountered an error: {e}") \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-streaming-with-data-py/openai_chat_completions_with_data_streaming.py b/src/ai/.x/templates/openai-chat-streaming-with-data-py/openai_chat_completions_with_data_streaming.py new file mode 100644 index 00000000..f4e98c9c --- /dev/null +++ b/src/ai/.x/templates/openai-chat-streaming-with-data-py/openai_chat_completions_with_data_streaming.py @@ -0,0 +1,64 @@ +<#@ template hostspecific="true" #> +<#@ output extension=".py" encoding="utf-8" #> +<#@ parameter type="System.String" name="ClassName" #> +from openai import AzureOpenAI + +class <#= ClassName #>: + def __init__(self, openai_api_version, openai_endpoint, openai_key, openai_chat_deployment_name, openai_system_prompt, search_endpoint, search_api_key, search_index_name, openai_embeddings_endpoint): + self.openai_system_prompt = openai_system_prompt + self.openai_chat_deployment_name = openai_chat_deployment_name + self.client = AzureOpenAI( + api_key=openai_key, + api_version=openai_api_version, + base_url = f"{openai_endpoint.rstrip('/')}/openai/deployments/{openai_chat_deployment_name}/extensions" + ) + self.extra_body={ + "dataSources": [ + { + "type": "AzureCognitiveSearch", 
+ "parameters": { + "endpoint": search_endpoint, + "key": search_api_key, + "indexName": search_index_name, + "embeddingEndpoint": openai_embeddings_endpoint, + "embeddingKey": openai_key, + "queryType": "vectorSimpleHybrid" + } + } + ] + } + + self.clear_conversation() + + def clear_conversation(self): + self.messages = [ + {'role': 'system', 'content': self.openai_system_prompt} + ] + + def get_chat_completions(self, user_input, callback): + self.messages.append({'role': 'user', 'content': user_input}) + + complete_content = '' + response = self.client.chat.completions.create( + model=self.openai_chat_deployment_name, + messages=self.messages, + extra_body=self.extra_body, + stream=True) + + for chunk in response: + + choice0 = chunk.choices[0] if hasattr(chunk, 'choices') and chunk.choices else None + delta = choice0.delta if choice0 and hasattr(choice0, 'delta') else None + content = delta.content if delta and hasattr(delta, 'content') else '' + + finish_reason = choice0.finish_reason if choice0 and hasattr(choice0, 'finish_reason') else None + if finish_reason == 'length': + content += f"{content}\nERROR: Exceeded max token length!" 
+ + if content is None: continue + + complete_content += content + callback(content) + + self.messages.append({'role': 'assistant', 'content': complete_content}) + return complete_content diff --git a/src/ai/.x/templates/openai-chat-streaming-with-data-py/requirements.txt b/src/ai/.x/templates/openai-chat-streaming-with-data-py/requirements.txt new file mode 100644 index 00000000..7a06be70 --- /dev/null +++ b/src/ai/.x/templates/openai-chat-streaming-with-data-py/requirements.txt @@ -0,0 +1 @@ +openai==1.0.0 diff --git a/src/ai/.x/templates/openai-chat-streaming-with-functions-cs/_.json b/src/ai/.x/templates/openai-chat-streaming-with-functions-cs/_.json index 92faed44..46e4cd04 100644 --- a/src/ai/.x/templates/openai-chat-streaming-with-functions-cs/_.json +++ b/src/ai/.x/templates/openai-chat-streaming-with-functions-cs/_.json @@ -5,6 +5,6 @@ "ClassName": "OpenAIChatCompletionsFunctionsStreamingClass", "AZURE_OPENAI_ENDPOINT": "", "AZURE_OPENAI_KEY": "", - "AZURE_OPENAI_CHAT_DEPLOYMENT": "", + "AZURE_OPENAI_CHAT_DEPLOYMENT": "", "AZURE_OPENAI_SYSTEM_PROMPT": "You are a helpful AI assistant." } \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-streaming-with-functions-js/_.json b/src/ai/.x/templates/openai-chat-streaming-with-functions-js/_.json index 1f79b39f..92a27fd3 100644 --- a/src/ai/.x/templates/openai-chat-streaming-with-functions-js/_.json +++ b/src/ai/.x/templates/openai-chat-streaming-with-functions-js/_.json @@ -5,6 +5,6 @@ "ClassName": "OpenAIChatCompletionsFunctionsStreamingClass", "AZURE_OPENAI_ENDPOINT": "", "AZURE_OPENAI_KEY": "", - "AZURE_OPENAI_CHAT_DEPLOYMENT": "", + "AZURE_OPENAI_CHAT_DEPLOYMENT": "", "AZURE_OPENAI_SYSTEM_PROMPT": "You are a helpful AI assistant." 
} \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-streaming-with-functions-py/_.json b/src/ai/.x/templates/openai-chat-streaming-with-functions-py/_.json index 8b60c4e2..dc8b30d6 100644 --- a/src/ai/.x/templates/openai-chat-streaming-with-functions-py/_.json +++ b/src/ai/.x/templates/openai-chat-streaming-with-functions-py/_.json @@ -5,6 +5,6 @@ "AZURE_OPENAI_ENDPOINT": "", "AZURE_OPENAI_KEY": "", "AZURE_OPENAI_API_VERSION": "2023-12-01-preview", - "AZURE_OPENAI_CHAT_DEPLOYMENT": "", + "AZURE_OPENAI_CHAT_DEPLOYMENT": "", "AZURE_OPENAI_SYSTEM_PROMPT": "You are a helpful AI assistant." } \ No newline at end of file diff --git a/src/ai/.x/templates/openai-webpage/_.json b/src/ai/.x/templates/openai-webpage/_.json index 0cbcec13..9cdb4b6f 100644 --- a/src/ai/.x/templates/openai-webpage/_.json +++ b/src/ai/.x/templates/openai-webpage/_.json @@ -5,6 +5,6 @@ "ClassName": "OpenAIChatCompletionsFunctionsStreamingClass", "AZURE_OPENAI_ENDPOINT": "", "AZURE_OPENAI_KEY": "", - "AZURE_OPENAI_CHAT_DEPLOYMENT": "", + "AZURE_OPENAI_CHAT_DEPLOYMENT": "", "AZURE_OPENAI_SYSTEM_PROMPT": "You are a helpful AI assistant." 
} \ No newline at end of file From a217d8c870b536ef2a0d413d32908a1f8e721435 Mon Sep 17 00:00:00 2001 From: Rob Chambers Date: Tue, 16 Jan 2024 20:05:33 -0800 Subject: [PATCH 08/30] =?UTF-8?q?separated=20templates=20for=20one=20with?= =?UTF-8?q?=20and=20one=20without=20functions=20for=20openai=E2=80=A6=20(#?= =?UTF-8?q?151)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * separated templates for one with and one without functions for openai-webpage * added openai-webpage for typescript * added typescript version of openai-webpage-with-functions --- .../.env | 0 .../README.md | 0 src/ai/.x/templates/openai-webpage-js/_.json | 10 + .../ai.png | Bin .../index.html | 0 .../package.json | 0 .../OpenAIChatCompletionsStreamingClass.js | 47 +++ .../templates/openai-webpage-js/src/script.js | 287 ++++++++++++++ .../style.css | 0 .../webpack.config.js | 0 src/ai/.x/templates/openai-webpage-ts/.env | 10 + .../.x/templates/openai-webpage-ts/README.md | 35 ++ src/ai/.x/templates/openai-webpage-ts/_.json | 10 + src/ai/.x/templates/openai-webpage-ts/ai.png | Bin 0 -> 46281 bytes .../.x/templates/openai-webpage-ts/index.html | 62 +++ .../templates/openai-webpage-ts/package.json | 25 ++ .../OpenAIChatCompletionsStreamingClass.ts | 50 +++ .../templates/openai-webpage-ts/src/script.ts | 298 ++++++++++++++ .../.x/templates/openai-webpage-ts/style.css | 367 ++++++++++++++++++ .../templates/openai-webpage-ts/tsconfig.json | 16 + .../openai-webpage-ts/types/marked.d.ts | 1 + .../openai-webpage-ts/webpack.config.js | 32 ++ .../openai-webpage-with-functions-ts/.env | 10 + .../README.md | 35 ++ .../openai-webpage-with-functions-ts/_.json | 10 + .../openai-webpage-with-functions-ts/ai.png | Bin 0 -> 46281 bytes .../index.html | 62 +++ .../package.json | 25 ++ .../src/FunctionCallContext.ts | 53 +++ .../src/FunctionFactory.ts | 24 ++ .../OpenAIChatCompletionsCustomFunctions.ts | 60 +++ ...IChatCompletionsFunctionsStreamingClass.ts | 67 ++++ 
.../src/script.ts | 300 ++++++++++++++ .../style.css | 367 ++++++++++++++++++ .../tsconfig.json | 16 + .../types/marked.d.ts | 1 + .../webpack.config.js | 32 ++ .../openai-webpage-with-functions/.env | 10 + .../openai-webpage-with-functions/README.md | 35 ++ .../_.json | 2 +- .../openai-webpage-with-functions/ai.png | Bin 0 -> 46281 bytes .../openai-webpage-with-functions/index.html | 62 +++ .../package.json | 22 ++ .../src/FunctionCallContext.js | 0 .../src/FunctionFactory.js | 0 .../OpenAIChatCompletionsCustomFunctions.js | 0 ...IChatCompletionsFunctionsStreamingClass.js | 0 .../src/script.js | 0 .../openai-webpage-with-functions/style.css | 367 ++++++++++++++++++ .../webpack.config.js | 20 + .../openai-webpage/.vscode/launch.json | 11 - .../openai-webpage/.vscode/tasks.json | 17 - .../tokens/programming_language_token.cs | 7 +- 53 files changed, 2834 insertions(+), 31 deletions(-) rename src/ai/.x/templates/{openai-webpage => openai-webpage-js}/.env (100%) rename src/ai/.x/templates/{openai-webpage => openai-webpage-js}/README.md (100%) create mode 100644 src/ai/.x/templates/openai-webpage-js/_.json rename src/ai/.x/templates/{openai-webpage => openai-webpage-js}/ai.png (100%) rename src/ai/.x/templates/{openai-webpage => openai-webpage-js}/index.html (100%) rename src/ai/.x/templates/{openai-webpage => openai-webpage-js}/package.json (100%) create mode 100644 src/ai/.x/templates/openai-webpage-js/src/OpenAIChatCompletionsStreamingClass.js create mode 100644 src/ai/.x/templates/openai-webpage-js/src/script.js rename src/ai/.x/templates/{openai-webpage => openai-webpage-js}/style.css (100%) rename src/ai/.x/templates/{openai-webpage => openai-webpage-js}/webpack.config.js (100%) create mode 100644 src/ai/.x/templates/openai-webpage-ts/.env create mode 100644 src/ai/.x/templates/openai-webpage-ts/README.md create mode 100644 src/ai/.x/templates/openai-webpage-ts/_.json create mode 100644 src/ai/.x/templates/openai-webpage-ts/ai.png create mode 100644 
src/ai/.x/templates/openai-webpage-ts/index.html create mode 100644 src/ai/.x/templates/openai-webpage-ts/package.json create mode 100644 src/ai/.x/templates/openai-webpage-ts/src/OpenAIChatCompletionsStreamingClass.ts create mode 100644 src/ai/.x/templates/openai-webpage-ts/src/script.ts create mode 100644 src/ai/.x/templates/openai-webpage-ts/style.css create mode 100644 src/ai/.x/templates/openai-webpage-ts/tsconfig.json create mode 100644 src/ai/.x/templates/openai-webpage-ts/types/marked.d.ts create mode 100644 src/ai/.x/templates/openai-webpage-ts/webpack.config.js create mode 100644 src/ai/.x/templates/openai-webpage-with-functions-ts/.env create mode 100644 src/ai/.x/templates/openai-webpage-with-functions-ts/README.md create mode 100644 src/ai/.x/templates/openai-webpage-with-functions-ts/_.json create mode 100644 src/ai/.x/templates/openai-webpage-with-functions-ts/ai.png create mode 100644 src/ai/.x/templates/openai-webpage-with-functions-ts/index.html create mode 100644 src/ai/.x/templates/openai-webpage-with-functions-ts/package.json create mode 100644 src/ai/.x/templates/openai-webpage-with-functions-ts/src/FunctionCallContext.ts create mode 100644 src/ai/.x/templates/openai-webpage-with-functions-ts/src/FunctionFactory.ts create mode 100644 src/ai/.x/templates/openai-webpage-with-functions-ts/src/OpenAIChatCompletionsCustomFunctions.ts create mode 100644 src/ai/.x/templates/openai-webpage-with-functions-ts/src/OpenAIChatCompletionsFunctionsStreamingClass.ts create mode 100644 src/ai/.x/templates/openai-webpage-with-functions-ts/src/script.ts create mode 100644 src/ai/.x/templates/openai-webpage-with-functions-ts/style.css create mode 100644 src/ai/.x/templates/openai-webpage-with-functions-ts/tsconfig.json create mode 100644 src/ai/.x/templates/openai-webpage-with-functions-ts/types/marked.d.ts create mode 100644 src/ai/.x/templates/openai-webpage-with-functions-ts/webpack.config.js create mode 100644 
src/ai/.x/templates/openai-webpage-with-functions/.env create mode 100644 src/ai/.x/templates/openai-webpage-with-functions/README.md rename src/ai/.x/templates/{openai-webpage => openai-webpage-with-functions}/_.json (89%) create mode 100644 src/ai/.x/templates/openai-webpage-with-functions/ai.png create mode 100644 src/ai/.x/templates/openai-webpage-with-functions/index.html create mode 100644 src/ai/.x/templates/openai-webpage-with-functions/package.json rename src/ai/.x/templates/{openai-webpage => openai-webpage-with-functions}/src/FunctionCallContext.js (100%) rename src/ai/.x/templates/{openai-webpage => openai-webpage-with-functions}/src/FunctionFactory.js (100%) rename src/ai/.x/templates/{openai-webpage => openai-webpage-with-functions}/src/OpenAIChatCompletionsCustomFunctions.js (100%) rename src/ai/.x/templates/{openai-webpage => openai-webpage-with-functions}/src/OpenAIChatCompletionsFunctionsStreamingClass.js (100%) rename src/ai/.x/templates/{openai-webpage => openai-webpage-with-functions}/src/script.js (100%) create mode 100644 src/ai/.x/templates/openai-webpage-with-functions/style.css create mode 100644 src/ai/.x/templates/openai-webpage-with-functions/webpack.config.js delete mode 100644 src/ai/.x/templates/openai-webpage/.vscode/launch.json delete mode 100644 src/ai/.x/templates/openai-webpage/.vscode/tasks.json diff --git a/src/ai/.x/templates/openai-webpage/.env b/src/ai/.x/templates/openai-webpage-js/.env similarity index 100% rename from src/ai/.x/templates/openai-webpage/.env rename to src/ai/.x/templates/openai-webpage-js/.env diff --git a/src/ai/.x/templates/openai-webpage/README.md b/src/ai/.x/templates/openai-webpage-js/README.md similarity index 100% rename from src/ai/.x/templates/openai-webpage/README.md rename to src/ai/.x/templates/openai-webpage-js/README.md diff --git a/src/ai/.x/templates/openai-webpage-js/_.json b/src/ai/.x/templates/openai-webpage-js/_.json new file mode 100644 index 00000000..93bdd911 --- /dev/null +++ 
b/src/ai/.x/templates/openai-webpage-js/_.json @@ -0,0 +1,10 @@ +{ + "_LongName": "OpenAI Webpage", + "_ShortName": "openai-webpage", + "_Language": "JavaScript", + "ClassName": "OpenAIChatCompletionsStreamingClass", + "AZURE_OPENAI_ENDPOINT": "", + "AZURE_OPENAI_KEY": "", + "AZURE_OPENAI_CHAT_DEPLOYMENT": "", + "AZURE_OPENAI_SYSTEM_PROMPT": "You are a helpful AI assistant." +} \ No newline at end of file diff --git a/src/ai/.x/templates/openai-webpage/ai.png b/src/ai/.x/templates/openai-webpage-js/ai.png similarity index 100% rename from src/ai/.x/templates/openai-webpage/ai.png rename to src/ai/.x/templates/openai-webpage-js/ai.png diff --git a/src/ai/.x/templates/openai-webpage/index.html b/src/ai/.x/templates/openai-webpage-js/index.html similarity index 100% rename from src/ai/.x/templates/openai-webpage/index.html rename to src/ai/.x/templates/openai-webpage-js/index.html diff --git a/src/ai/.x/templates/openai-webpage/package.json b/src/ai/.x/templates/openai-webpage-js/package.json similarity index 100% rename from src/ai/.x/templates/openai-webpage/package.json rename to src/ai/.x/templates/openai-webpage-js/package.json diff --git a/src/ai/.x/templates/openai-webpage-js/src/OpenAIChatCompletionsStreamingClass.js b/src/ai/.x/templates/openai-webpage-js/src/OpenAIChatCompletionsStreamingClass.js new file mode 100644 index 00000000..8b204001 --- /dev/null +++ b/src/ai/.x/templates/openai-webpage-js/src/OpenAIChatCompletionsStreamingClass.js @@ -0,0 +1,47 @@ +<#@ template hostspecific="true" #> +<#@ output extension=".js" encoding="utf-8" #> +<#@ parameter type="System.String" name="ClassName" #> +const { OpenAIClient, AzureKeyCredential } = require("@azure/openai"); + +class <#= ClassName #> { + constructor(openAIEndpoint, openAIKey, openAIChatDeploymentName, openAISystemPrompt) { + this.openAISystemPrompt = openAISystemPrompt; + this.openAIChatDeploymentName = openAIChatDeploymentName; + this.client = new OpenAIClient(openAIEndpoint, new 
AzureKeyCredential(openAIKey)); + this.clearConversation(); + } + + clearConversation() { + this.messages = [ + { role: 'system', content: this.openAISystemPrompt } + ]; + } + + async getChatCompletions(userInput, callback) { + this.messages.push({ role: 'user', content: userInput }); + + let contentComplete = ''; + const events = await this.client.streamChatCompletions(this.openAIChatDeploymentName, this.messages); + + for await (const event of events) { + for (const choice of event.choices) { + + let content = choice.delta?.content; + if (choice.finishReason === 'length') { + content = `${content}\nERROR: Exceeded token limit!`; + } + + if (content != null) { + callback(content); + await new Promise(r => setTimeout(r, 50)); // delay to simulate real-time output, word by word + contentComplete += content; + } + } + } + + this.messages.push({ role: 'assistant', content: contentComplete }); + return contentComplete; + } +} + +exports.<#= ClassName #> = <#= ClassName #>; \ No newline at end of file diff --git a/src/ai/.x/templates/openai-webpage-js/src/script.js b/src/ai/.x/templates/openai-webpage-js/src/script.js new file mode 100644 index 00000000..07a5c993 --- /dev/null +++ b/src/ai/.x/templates/openai-webpage-js/src/script.js @@ -0,0 +1,287 @@ +<#@ template hostspecific="true" #> +<#@ output extension=".js" encoding="utf-8" #> +<#@ parameter type="System.String" name="ClassName" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_ENDPOINT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_KEY" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_CHAT_DEPLOYMENT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_SYSTEM_PROMPT" #> +const marked = require("marked"); +const hljs = require("highlight.js"); + +const { <#= ClassName #> } = require('./OpenAIChatCompletionsStreamingClass'); +let streamingChatCompletions; + +function streamingChatCompletionsInit() { + + const openAIEndpoint = process.env.AZURE_OPENAI_ENDPOINT || "<#= 
AZURE_OPENAI_ENDPOINT #>"; + const openAIKey = process.env.AZURE_OPENAI_KEY || "<#= AZURE_OPENAI_KEY #>"; + const openAIChatDeploymentName = process.env.AZURE_OPENAI_CHAT_DEPLOYMENT || "<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>" ; + const openAISystemPrompt = process.env.AZURE_OPENAI_SYSTEM_PROMPT || "<#= AZURE_OPENAI_SYSTEM_PROMPT #>"; + + if (!openAIEndpoint || openAIEndpoint.startsWith('(openAIEndpoint, openAIKey, openAIChatDeploymentName, openAISystemPrompt); +} + +function streamingChatCompletionsClear() { + streamingChatCompletions.clearConversation(); +} + +async function streamingChatCompletionsProcessInput(userInput) { + const blackVerticalRectangle = '\u25AE'; // Black vertical rectangle ('▮') to simulate an insertion point + + let newMessage = chatPanelAppendMessage('computer', blackVerticalRectangle); + let completeResponse = ""; + + let computerResponse = await streamingChatCompletions.getChatCompletions(userInput, function (response) { + let atBottomBeforeUpdate = chatPanelIsScrollAtBottom(); + + completeResponse += response; + let withEnding = `${completeResponse}${blackVerticalRectangle}`; + let asHtml = markdownToHtml(withEnding); + + if (asHtml !== undefined) { + newMessage.innerHTML = asHtml; + + if (atBottomBeforeUpdate) { + chatPanelScrollToBottom(); + } + } + }); + + newMessage.innerHTML = markdownToHtml(computerResponse) || computerResponse.replace(/\n/g, '
'); + chatPanel.scrollTop = chatPanel.scrollHeight; +} + +function chatPanelGetElement() { + return document.getElementById("chatPanel"); +} + +function chatPanelAppendMessage(sender, message) { + logoHide(); + + let messageContent = document.createElement("p"); + messageContent.className = "message-content"; + messageContent.innerHTML = message; + + let messageAuthor = document.createElement("p"); + messageAuthor.className = "message-author"; + messageAuthor.innerHTML = sender == "user" ? "You" : "Assistant"; + + let divContainingBoth = document.createElement("div"); + divContainingBoth.className = sender === "user" ? "user" : "computer"; + divContainingBoth.appendChild(messageAuthor); + divContainingBoth.appendChild(messageContent); + + let chatPanel = chatPanelGetElement(); + chatPanel.appendChild(divContainingBoth); + chatPanelScrollToBottom(); + + return messageContent; +} + +function chatPanelIsScrollAtBottom() { + let chatPanel = chatPanelGetElement(); + let atBottom = Math.abs(chatPanel.scrollHeight - chatPanel.clientHeight - chatPanel.scrollTop) < 1; + return atBottom; +} + +function chatPanelScrollToBottom() { + let chatPanel = chatPanelGetElement(); + chatPanel.scrollTop = chatPanel.scrollHeight; +} + +function chatPanelClear() { + let chatPanel = chatPanelGetElement(); + chatPanel.innerHTML = ''; +} + +function logoGetElement() { + return document.getElementById("logo"); +} + +function logoShow() { + let logo = logoGetElement(); + logo.style.display = "block"; +} + +function logoHide() { + let logo = logoGetElement(); + logo.style.display = "none"; +} + +function markdownInit() { + marked.setOptions({ + highlight: function (code, lang) { + let hl = lang === undefined || lang === '' + ? hljs.highlightAuto(code).value + : hljs.highlight(lang, code).value; + return `
${hl}
`; + } + }); +} + +function markdownToHtml(markdownText) { + try { + return marked.parse(markdownText); + } + catch (error) { + return undefined; + } +} + +function themeInit() { + let currentTheme = localStorage.getItem('theme'); + if (currentTheme === 'dark') { + themeSetDark(); + } + else if (currentTheme === 'light') { + themeSetLight(); + } + toggleThemeButtonInit(); +} + +function themeIsLight() { + return document.body.classList.contains("light-theme"); +} + +function themeIsDark() { + return !themeIsLight(); +} + +function toggleTheme() { + if (themeIsLight()) { + themeSetDark(); + } else { + themeSetLight(); + } +} + +function themeSetLight() { + if (!themeIsLight()) { + document.body.classList.add("light-theme"); + localStorage.setItem('theme', 'light'); + + let iconElement = toggleThemeButtonGetElement().children[0]; + iconElement.classList.remove("fa-toggle-on"); + iconElement.classList.add("fa-toggle-off"); + } +} + +function themeSetDark() { + if (!themeIsDark()) { + document.body.classList.remove("light-theme"); + localStorage.setItem('theme', 'dark'); + + let iconElement = toggleThemeButtonGetElement().children[0]; + iconElement.classList.remove("fa-toggle-off"); + iconElement.classList.add("fa-toggle-on"); + } +} + +function toggleThemeButtonGetElement() { + return document.getElementById("toggleThemeButton"); +} + +function toggleThemeButtonInit() { + let buttonElement = toggleThemeButtonGetElement(); + buttonElement.addEventListener("click", toggleTheme); + buttonElement.addEventListener('keydown', toggleThemeButtonHandleKeyDown()); +} + +function toggleThemeButtonHandleKeyDown() { + return function (event) { + if (event.code === 'Enter' || event.code === 'Space') { + toggleTheme(); + } + }; +} + +function userInputTextAreaGetElement() { + return document.getElementById("userInput"); +} + +function userInputTextAreaInit() { + let inputElement = userInputTextAreaGetElement(); + inputElement.addEventListener("keydown", 
userInputTextAreaHandleKeyDown()); + inputElement.addEventListener("input", userInputTextAreaUpdateHeight); +} + +function userInputTextAreaFocus() { + let inputElement = userInputTextAreaGetElement(); + inputElement.focus(); +} + +function userInputTextAreaClear() { + userInputTextAreaGetElement().value = ''; + userInputTextAreaUpdateHeight(); +} + +function userInputTextAreaUpdateHeight() { + let inputElement = userInputTextAreaGetElement(); + inputElement.style.height = 'auto'; + inputElement.style.height = (userInput.scrollHeight) + 'px'; +} + +function userInputTextAreaHandleKeyDown() { + return function (event) { + if (event.key === "Enter") { + if (!event.shiftKey) { + event.preventDefault(); + sendMessage(); + } + } + }; +} + +function varsInit() { + document.addEventListener('DOMContentLoaded', varsUpdateHeightsAndWidths); + window.addEventListener('resize', varsUpdateHeightsAndWidths); +} + +function varsUpdateHeightsAndWidths() { + let headerHeight = document.querySelector('#header').offsetHeight; + let userInputHeight = document.querySelector('#userInputPanel').offsetHeight; + document.documentElement.style.setProperty('--header-height', headerHeight + 'px'); + document.documentElement.style.setProperty('--input-height', userInputHeight + 'px'); +} + +function newChat() { + chatPanelClear(); + logoShow(); + userInputTextAreaFocus(); + streamingChatCompletionsClear(); +} + +function sendMessage() { + let inputElement = userInputTextAreaGetElement(); + let inputValue = inputElement.value; + + let notEmpty = inputValue.trim() !== ''; + if (notEmpty) { + let html = markdownToHtml(inputValue) || inputValue.replace(/\n/g, '
'); + chatPanelAppendMessage('user', html); + userInputTextAreaClear(); + varsUpdateHeightsAndWidths(); + streamingChatCompletionsProcessInput(inputValue); + } +} + +themeInit(); +markdownInit(); +userInputTextAreaInit(); +varsInit(); +streamingChatCompletionsInit(); +userInputTextAreaFocus(); + +window.sendMessage = sendMessage; +window.toggleTheme = toggleTheme; +window.newChat = newChat; diff --git a/src/ai/.x/templates/openai-webpage/style.css b/src/ai/.x/templates/openai-webpage-js/style.css similarity index 100% rename from src/ai/.x/templates/openai-webpage/style.css rename to src/ai/.x/templates/openai-webpage-js/style.css diff --git a/src/ai/.x/templates/openai-webpage/webpack.config.js b/src/ai/.x/templates/openai-webpage-js/webpack.config.js similarity index 100% rename from src/ai/.x/templates/openai-webpage/webpack.config.js rename to src/ai/.x/templates/openai-webpage-js/webpack.config.js diff --git a/src/ai/.x/templates/openai-webpage-ts/.env b/src/ai/.x/templates/openai-webpage-ts/.env new file mode 100644 index 00000000..bd323058 --- /dev/null +++ b/src/ai/.x/templates/openai-webpage-ts/.env @@ -0,0 +1,10 @@ +<#@ template hostspecific="true" #> +<#@ output extension=".env" encoding="utf-8" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_ENDPOINT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_KEY" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_CHAT_DEPLOYMENT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_SYSTEM_PROMPT" #> +AZURE_OPENAI_CHAT_DEPLOYMENT=<#= AZURE_OPENAI_CHAT_DEPLOYMENT #> +AZURE_OPENAI_KEY=<#= AZURE_OPENAI_KEY #> +AZURE_OPENAI_ENDPOINT=<#= AZURE_OPENAI_ENDPOINT #> +AZURE_OPENAI_SYSTEM_PROMPT=<#= AZURE_OPENAI_SYSTEM_PROMPT #> diff --git a/src/ai/.x/templates/openai-webpage-ts/README.md b/src/ai/.x/templates/openai-webpage-ts/README.md new file mode 100644 index 00000000..8fee923d --- /dev/null +++ b/src/ai/.x/templates/openai-webpage-ts/README.md @@ -0,0 +1,35 @@ +# `ai` chat website + 
+This is a simple website chat interface that uses OpenAI's API to generate text responses to user input. + +User input is typed into a text box and added to the conversation as a message inside a chat panel. The panel scrolls up and the computer responds with streaming text output into another message in the chat panel. There is a left nav that has a "new chat" button and has a spot for future expansion w/ a list of historical chats. + +## Setup + +To build the website, run the following commands: + +```bash +npm install +npx webpack +``` + +To run the website, launch `index.html` in your browser. + +These setup steps are also represented in tasks.json and launch.json, so that you can build and run the website from within VS Code. + +## Project structure + +| Category | File | Description +| --- | --- | --- +| **SOURCE CODE** | ai.png | Logo/icon for the website. +| | index.html | HTML file with controls and layout. +| | style.css | CSS file with layout and styling. +| | src/script.js | Main JS file with HTML to JS interactions. +| | src/ChatCompletionsStreaming.js | Main JS file with JS to OpenAI interactions. +| | | +| **VS CODE** | .vscode/tasks.json | VS Code tasks to build and run the website. +| | .vscode/launch.json | VS Code launch configuration to run the website. +| | | +| **BUILD + PACKAGING** | .env | Contains the API keys, endpoints, etc. +| | package.json | Contains the dependencies. +| | webpack.config.js | The webpack config file. 
\ No newline at end of file diff --git a/src/ai/.x/templates/openai-webpage-ts/_.json b/src/ai/.x/templates/openai-webpage-ts/_.json new file mode 100644 index 00000000..c022f699 --- /dev/null +++ b/src/ai/.x/templates/openai-webpage-ts/_.json @@ -0,0 +1,10 @@ +{ + "_LongName": "OpenAI Webpage", + "_ShortName": "openai-webpage", + "_Language": "TypeScript", + "ClassName": "OpenAIChatCompletionsStreamingClass", + "AZURE_OPENAI_ENDPOINT": "", + "AZURE_OPENAI_KEY": "", + "AZURE_OPENAI_CHAT_DEPLOYMENT": "", + "AZURE_OPENAI_SYSTEM_PROMPT": "You are a helpful AI assistant." +} \ No newline at end of file diff --git a/src/ai/.x/templates/openai-webpage-ts/ai.png b/src/ai/.x/templates/openai-webpage-ts/ai.png new file mode 100644 index 0000000000000000000000000000000000000000..4ba344c95ca7f9b1da183b2b3c959f6c4b723ee5 GIT binary patch literal 46281 zcmV)TK(W7xP)PyA07*naRCr$Oy$Q5rSy?9fohgPpX5P%3V`bI|RTKpRQV0kX&+k>fK2t>8 z%tY;8>#dHpaz);V6KB}_+yDOl*}?yQ{qGU@|LF)g=X=R7A}e+a~$NZgTetEAV=;Na8Tqf3a0@498T`!Md~_v z<~V#FM~>xDK<*V@?zx2{e;2vyfE&<%*A4yt?CSjdL#OsGrmK5)&77O< zd&9yH{Lq726lvraMd2icQ>2LlFUedlLE#l{k>ze)j-m^AIf~qIdGP`Tih_^MKluPI zcnf?S84JUjR;?r92Uz4TbAv zZc${sbXF7|ih_sAg*fc|xdTT=OzuZ6nd`{&Paev_lM~vPjH%uOP{2Mdr^kNEUlop< zA$NGU^0_=Z_+!U$T)78x4h}nq%yk^kDT>_TnRz^rIWD}c*Yl%ZuX}E$=RLS@|MLC2 zcg)_o_WdBS!&0V z7ejS!^S6Z!k)$ZEuETTjdwUI;r`qL3R^-*ntgl1+EcSSn8+_@r+wYOJe_#8qu}GSq z>q6q(dS*UC6nTQ8NZ>dge5V7??P0W)px5ybIKyOdE;{ zC|;Ru#vko?9pD`B;P3w#9{AKhVQw?WLg)g61Rd8y*AI}%kP<8vm_)=2?_4yw ztMB#W2zsaR(8+r844YnvvmFnomzHt#@L_!6$}8~I9lMY_J*19j?$?E%@O}Z$%sPeR zB+yx)5IhyY^qxv!5Q?%;ToLm;wgk6X1SPnvu&sU0V>yhnSmi$KJEeu5s|2O_9R+py z5+UANG$nA2u*hrYs3S@+Ez+6$h;ANoANsk-w{p4JWv9o>=kmRaEOp?!E<7)S<0i=S z46?1J9U$}JxijeaE|TF2dffqh=S04LfAP;Reev}-ecRVBer`LEkAXWZiWhqsKJIf< zyn6$8{K`iW-SrThtqe=OIRuG=EXm>eo=DgtiDkx8(g)d-c*+8W%N^FN^}G-X9f*_z z5~sj0P)XcNNiB3Q=9j*iBzbT*N9rn|1p>Jv=O7?BM+A1tRRf-1ubUeQBLL*Y^S;cn z{NCd(3@!=>Uhbloae*cnc7_=GF1Flx+`Ib#K6mXkxc~6wSo7yF^nE0Hd*~(l90FL` 
zfihXJ7L;`$TUJm9w3X0S3JQl30%jOYQGr~h-wdMNE$l+#=RB_?(s&%)g#ovnd>$vi zt$|xOoPb-UZ++(}uQvhQx+A!kOX#jH}=BW8XIS3)2AJ0C!OouS9{53Jp0i!aw}t zhp>3>I%ZcB6q!onA}!!}0sKyYEKAT4Ntz?`9HAO~a65j5>mVx}0a-T%I;0?mVhnwy zC>>6Yk+K|~PVSV)5WJBG0gub`ZgJV}b633Qy<7{tM!0l#0H`8ciIvND1 z3PPpp`d$aOU5MpP=EqV&rseBk5|fAv$ph+StBEDT)C zW*$7pMiUlfhewx#H9pk(S?$m8cte$Z+NxY zmWij5H_M<^^nzgU9lw%{Mi5lA21rtldR9KCf~(^0oVNyO>$JomLdM%jVAVBi^v2K@ z*-_MKz*3=;8I$P#P&e_lLAK;8F3ZUk|C3h zj?>}Rk0i{nd#Q`z=yqK9h~CG3{ina-6>Wf5;0}ud5r7AW_}icV0QyJQu&`C2SNP}@ z9%oN~>o~$udGB7}VZ_zu@o%GL^SE4?FK?3zQp^|pKL=i^0=FVc-W-vi$kr~W7Ot#P zP`50KS}Sq+TUVr~j1`YmRb05aE(+z~shcDANohD9dasn!yu(iJiIks>^&#&J6#%ph^16&YUM8#hs!t<~J9DVPHv3~2l=nWloGFQMu{AwIfgwKqX0-K(RI=jziVF~|m`DM8C zvP*H_zCAeOcj3l`NK@`$RN7>*nGr2{b18vq61@gmOIt)CG{%OyG`32(%#8(i3zT*e z32+sTZEH$X(VYaY#`&%CUsrg!5IIlf#VS*a6~^UGUAw>bd1d)F6lr@x4^Hl@@+mi% z%Wf-zM#-2iV6*$G(9GpoDbQqjdj;Hh6Z6?BUj5R?yy3O~e)q50vn#lV#Z$S@pZdsO z|J|8C`3H1Q4-pg{nPl>4zK84UqCjQ@;m#h4%Xj_hwcqmGpLSmO$S>AcOYx7V_u=lZ zKIQ&@zWqlYy7i7{bk`#6pWlUal!&w~JP&c4BH;ghAnO-c>m=x1x$iTNdis-oI{1z! 
zezI}C&#W)4-SN*aKJodxek*Mb7={&JTMS%ZK2ceAn5?{Vi(8r zjpVAQKjFiNpM2eWo##C2?)o`5e&rM2d)t5g^!s*3!9qXw;DtL@hV%Y%*0@=H$Oj@^qo=Koyc)E)h)Wt}5NFS-Esqz`;4rYS@vvGt4|G~5G`l%bf z{?S)D$K~8|;jLmXPK4Ke@;|-ruXh|DU`IsPf!cSAj*HFBb?lj6z*aZIiTMQAfA>qC zHuG)Q|9$;oCjCGC?SFdptsndI4+Lk1bF+yLpIdp*5mQfxw&fth!?Pni`PJY2I{!6K z`OWj4@8Mtg^B;TQFF*GSI|c=Icr(~sTNm!VxU>s#9AZ5kBH7i&Xu1EHr@rpHzsY&p z^5(R27Pp(+x3lwy3M>8{3=&3T$8%(|jj=?Qgy-yUn(q0EPq7J)2I3KJ(S>6l^mGEQP176n(~ zFa9Rk1=MuP!HbIsbKO})XNOqu&f!UqkACkL-}Ka1SKtndD^LBGPkiORKl)UE->`#z z?0`PSI3w=?I{g4i5#iiijwik0<&Sq>_V|Xh-VT(-$G-B^fB4|XKDjt>I^HmY?*{NZ zns^Fq%ocdu8(#9Q&MO}CN9O~!{l56UFTMOTANc6U_9g*(v5W4^EY6%=L4RQe>)iya z%iUwo`q-PVxX|<6{`wF6ZFt-9r|pP4=;l7cq>vrO^MO1`;pHCA%vr57X_>3ZLtt3r5dgVN_(vu2Z{goNz zy=|_vWUH zFmkNUW>|d6)xUbhyS}|a+Ajn|_rK-$-}~_2ANle5K>;@cW_q(2q#2T{mX1C4cYo-D zfoQLF{N_LW^|OEd&p)s`bm1mGT$=g%K2COHJoX1(`t0D9k84nyZ5Q7^Jv8&#w|(&3 z%!9+;Vle~9pGBO~OO_*Ws7K^j_xd7T;nlWVKc;RrvG3r7dh=Xz-?9KX5~M7oCN-s5%$3T$DOBQu4jC7LHv=Ri z%sZQS%p>ss{MyZr`8Eaj7tj3mCx7ynzT=X04|7AnFMJGX;R}3tVTxfV!`VF@Jo$~U zy2knDEAF}wK-Z5c{_f2DCvN`vvj^f{$KA}(>&;-`I#_zvb-%yw?Js)OMW64}Ck}n) z=3hO2Fbu%h58n?kj7K=Nv%ph7^|q#Hx`314`Xe9sxc7xqFJ9R6;dt}vlIAKzioh)} zNuxlo=b<+wxAN& zG!fbu3iTu#ud17u?34cN&C3^i zz6XB!4}W;;Hw%V- zSny4`HaI@Z$BP*{!Ul8P*OwAgg0b&PQK|#HG0C z`yK}I7Mz}oD2g!a^bkbHix2W$&R`UU46DOn z!N;B6dhwD^zq1*gbP*`}%*)o-o4%e!YIb(dBcV&wFYIesHRsU}#AOId@i)Y?V!mvwn_k{ILC zv^C4`#ey*&jHMQh>Xp^v_Y{RDsH;XJ8cLnHmVTx_g}RMettYCq@txL=HlW)U5BYn7 z%^%$cPJ5Za%M+@A2s;QNN|mJG>qr7_^CUdxNFlvKkEs{_vvWKGo(ndH3uB)Z57Y|7l8Y3p7m3s zy&Fz%F6$uoJe&z~?0WWNe*4mQzU(_M7K}XpvoC&ovg>T_JE0?1jzKcO+Mb|z%3uDc zi(Y{D{>uORj+GDp{ZCyI&BN);BBSFajX_JgZ@`qfJ_ha#Ry#Xz@2+Kh?(${ayL&fQ zf+d8M7O7f75*qId7ePi?aAJnYJUXfp{XFT3mo;-DBYhZTGXdN+hm1;21$7%}RkWUt zqK#&?w1pI_X^E-8Uoo00vd_5DN~dBa+4UaV_G(=WEmLAWrlb{nqPjq3(qxE+>}s_g zq|^@t)K%<@KEEoK zG~3baLWz@L#Tj9=zn?w+2fzIZ&P%TN^2LI&_~YBY;dAf($mb4>0(24&u>)-OeJnin zk)JyJzE?c!V$T=<**#DG^7}vf+5X7^`b7_&P6tKb#Zh-1&v@%Q1Ls+kp03!oqw~HiSz*l!H;0uR#;PyRxaddGX)`J;Q 
z!pSBQCxDaD3rdd~!_P-Z{RD}GV>{?1T?0Us%qD(u47BClmy=$()a`fHI#GX8V`Qlm z$d-9f=|P=mH-o0s?o++2htd9S%(Pdt>Zk;6j(gkVx~*xjc3+d6PW6Q?^iIN{a|sP$ zjF8Vb6(LZX^~^Wzx#_v@zR+_P|LypJzkAOIP8`_mpqKerOH%m#F1#d_ zpspa-z^uXvkc-!87OUME+`nr9NA@h^^9K&&fw@^#c9=oIoiF#%Nmc1f>ByvDF*=OL zc!|6aZZA^MqjYauQF!;I>vE#RwvwEF+&b9XZg3mLW|iAEhn}Q;3vkRy88FvaJZ+Du z1jnf(tzk83arH@de0j=RJh?3OiJr{b9n=HVG#UuL6VRIjPr#j`<2;D>{Nz&=+@E~S zyS}>T=-`qaBM%9Qz2_rM>6&H~U?Mi|d`A~+J3Q=u-s9hN`5Rt*!QeUftAF+5U;g9& z{@(ptKKh%C-SE-r_OXQw4|YQ=J^wL(e9g_TdfA17@#N2b{N@M$_MhL}Kzk@+P$Eleld-@N5*A31Kt~#%isVEBXp6~v(FNSxYxNc_TqU-gLbbN#<pKvBJ_G#>-BNx(h~mu@D;dg=MJoNJ%p|gH|Y=*1^mFH@E7hq^)o7dI4*F(6clYSKddBQ`hG}E|fa$YSSW#CWJ2(8WHD+h5!beJ&5<- z{Ee5%9sKpTzw7?}C*n&N2b4S}b|OyU_#P5wO?4f_d5jG=La}=u2cGz-UqA4cZ~LL` z09h1;d-7-h=)Dhq=1V{3p4q~|-U8CCNW!Vm=^%1}q*GusA7cKx!=JwT#&3P0^L6_e zY&-RJ;>{oX&2yiA&c!aE5U}dJj;cxnex9q$p$`Qty#9$Y=NzrPDx8yoQGJ@p)kvE;0~QSdC@`YdU$Yl z3AgUO9Jekn@>Cgts(S=S=a4l2ESW3n$ zFQp`GDwNiA)_%@%TQ`ALb*D@HSd%kS0!dudr?+KaDY z88Sa?T9lekHl}kFnb49Vrnl2U>Zb7A6L{ZSZcuQ4;#KcCw&z56+2Vk*#+P6oa!9&m zM{WwIA0P~e2)aHt(+KHIfQ`Plan&=g|1W!<@~Dq=Zn*5$az+06eb?Xrg|B?;*(3M9 z-aj|k+1X67h%T7?kR*xh3NdnoX@*YM$E@#RJxFn4C!*yiJ^I5}UH{k*_V+Cuu|SXh z^4{x?fBBAAKlG1BerU%!7JFw#=tc$nZV${<9MYigy71!&v%ZhODX{LwSl`>jLtb?1 z2`_l&&+L8FH6MqcZsDBgU}v!7_@_U2<5&Ouvu`@&%)uE@dik<=$=zvA@;V+e0#s7d z7z@q#ZPz4Hm?+1>#d@An_X1M>?Kgr(cMf>^z53=!tSFn&MULR}NB z$Y@E)ldN9icF!2;UmBLmfG1Mh?Gu~Q`H^~Mz`9P!bn%umM zfoXwUA}%M;5_@lLIx?A_R7nlZj3SfnS-U@LS}c=l?MMKX)AzB~%Ms5oHrT~rFp&H1%*y3+~7osJ}fAGnCJR6=1L&l4Ci)?C0U6HlsAAaMhUJ-K^fKR$Kf z5x8T=UaWR{7qvT$FT`+%!IM3==m;*FAw>{^@5NmM8z4*S!DOp1apBo1=TY z-@_=4(Irldwk#W`Zs@)&^Kwm`5HZ0)|mWZk{9P^^sNt-#(Ll3A!32jVsWjuW4)ofTb+U5hZ-P^hj>lHA0Bw{7cnD$?A>nmL?px<&yYao({eJ8*1!?kW7z zTfR~4+@Jog_dl?F$I9Ww*h3gFchM6?18D-Ec3V!Tcw#jZhT}{rq`5a5_|mqwT4#bC zd>J6o7HsIG4Iwbe2ofiu76u-o-YbnFsJ2_KZnbp^H?+wQr89z(NOx=kSrU~^I4U_O zQ{f^{f6HBk&%&rePGl#iG^R_0Sv!zpGtU!l&Fk>I+~!?MQ$Dw=_euqF=r7`UuoqvN z-;ck$6QrumVU%6eNM5TFTIY>Ab81!)OG#{Yjs!{3lUSt>){_|8_iPp9x}@jM 
zG3ov+*z5P-=G&6gK;gmJiL?v`anpBB;Fo{qhP{G?PkrzEPVBy8<-k%BAaptT#uci* zpr%8ZzS*e!CFK-lW5(x4^IMn9VBB>dnQJuvSKLc>+#kTGSJG)l=PymAhvV?dvG0u zZEJ_d##G@0R6lfW8S9MM!7R9$9BjXDF9KpD$usz*jv*tJNi0+Ja1!tRsi!Z?nEl0T z-+gM&of~`i#WY!IY8XX5kwMukE7`;zb6aI{+}fsJbK|6=Hz5ZxWfqN)nf&xlrY9B zW*sN?xx~cB#YuavEIQo=eaWWRE+Fq18dp@we$|{Jd_b*S{;= z2*BHEdUXLH!l!`mcHl*thnW>Idd@@m`8PjJ!TqPNeaAz~cdqU~5cic6GIfg5+okrY z4{Rl~Ot?(8N@fCzH_W_5p)BU&5{YSY>{Q}acO^NC1fhV%#CTD%np{VfzSiAL_-n4A zE&3y8G7`jso{}Z;ARtsFt^&6lE;~e}7)m*Y$_!k_S8&1dxNfQm%@nsRFA)clZj6A` z#O^Yb9L!YWVu(lBaQiqmw-;YpIEc^fx&jZ*??sYmtV)3ptjR?ELl#e`hcq`;UrfoW zhT>+1s~t-+<^gQQPgkRC>0B!^u>_CiYX(%tWZ}@6R7D|L&fTzAx1_d!vBse%(UUru z?e~0Y%eh-VoR(r;4-iuvwxu_B*TD6KmT+7%#OUB8-uL!z+%H)C%kO>1Lp$$W+rKyI z!7Y4|+!F3cpS{V?6An)4Xg({m&?ULo7KgFqpsEnnkf4%$OOKse5qOS1C4NpoYL>h5 zY@7bn+^!_O$ONj}ZITjD_XAA^CctHtg&6Uy1`d94KGw%DP?CS&d=5c-R_XMP|4V#i^8xxa)#e;1;_MT`MD9fmdd zpsvY+M+7?4?v%bYoggJhTXtd-TUudw%$=@Z@mVbx1Q4wx1zAY60*R@z8t-=EuUL>; z4xH-Z{6cjjtr4d}#nKMDSlWNr?fT5hvahz@W@%GNg%!{;)0v`<15aQ}XL4 z1h%aJMG*BEU{orRQme=;C_Q)So;CGI_?e|Yd}vYpjHK3uq1pLbt4Hru2Gi&fU85VQ z(rS*cm1*N=OmOaIIsB9oMyq5c41*OPGj|f@$@V1)`f)7x>U%SIsJnoF=Squ3QWn0nj~v;R*8|8vWn9bHZ#7d zoiWvgs`@2{QpzGRQGX^(pl#_8El9!urrWFAbx}%%*Jj^SXqoY)xKg;VK#|O#(%D*N zE6FILAp@yc@$%S3?t6$Ni|S##@2%ggrojLFGk^Av!}aKTR{2R;U!>3gsyxk*`&y=q z#b_Kx37TzR4EGZKrlE?}HgAH<&E(U&r^=*}07jVax?S4(BFt43`op+t z3FX!A6nOkxZNzHL(BeImwpOysZ-kcUvaw~|sElyu=xCiaBt9)8rb?EW{zyMWom4Jv z3juKkBw3YNA_m}jh9F~&4G*d3VRaN^Wi!Gs3oyz&9NK>gyvob3q2?Z9pC+um>c z-8yCL=VN8x$^~i`k?2-cnKsX(Q%vlzje04ZXO3G}62w`#z(LySV6)>Q4Nl{oZ+XEX z8LPMb@~y?0)I;L*C7akUT=dXIk!DDJ0)>-D6QF{-u4HynayAC6sMNL^cWD7I9zHwh zjNz3vDb&P8EQ({b-HX}H*pCT4F@=O_q|xT z>wXlQBkbF`2hVxj<8fskqaUsS!3a?j!uR@ePHTP?;F>Mnjt}3ZhNr0~SbFzt@V;FV zwW2_6cf7SVTkc!8qUfSmi>xK1W3{XanQjaa4`tZ;KsMVTsilTf8>t!`$w}217}{r znX$|nv(!`i;nlF0b+ovR5msV}$eI;;HQ&vcbj;mLH6bII#*_hLiLr`?POxhCB86)q zl(s&HrH*3Fv5|8&aMWC<3MExqQ&(UbW-6&5rSeb|;4$87%CQ**QU_Wetn{F9vBwGe zovy^?6sd=kM^E6Me>?(j7-E+n;QGZ5zWee;>>izlyJB9Hl$fD;i 
z#a#mksMyltl6PcR(!!oxc#+t2S-{xVN1P;~OAxiJ6tY-lle10|tNg827WhI&YE|W&l-Nq%1hQUtO|p*le9t~t> z{NwGw`Vg~ujBA|%Ua>33)0P7Cl1=nj_$Ou2NEVebW!}tgz#S^(9C)#@?ig@qj05+z zE}=`uStPq3!ELGCMEcRvi^goloV%Lqf0_iYOX~U`eyYkz^=(SeTUCvFY z4`QULW{yaGMY2Xqb&Pj5BY-9Gm>iT|QkERG79|E=X<~9A%1&h&DJAtbD6P`~+TJe| z-0g)gQoIK4`gz+D-wtDG$1ZKg%_5=~1#XilAv&6)<4{z#7+tNu%l8#Nyzxy{x+3Ea~FE-SlM^|bx~YO$#3h?SG$PK!!{HiVH) zJt;+tQBW@e-1YmO2JT6~ws$0k8M^iJ9JI#7=Qi=ct@mKoS4!(hntfBPY3s;_>PMK zcU?f$rMDe3@!lGGs>AU4dIX&X z_Y~jpc<_ExHZ+Q{>jN})q4xK73)O>E86~z7mZ@dfY~dhY9pJ=~<9PV!gJS>gr4ivG#Xa301NEZ3WEKU5Im~CFjI{K$_V}D^}N$1z0*M)U(zu&Thw}4eoaITqD`knA#drxAIQjc7ULRNui1|JPegO#+CtPgdJ)C*@$qTCk??vnl)1(nyI1nyGE*`2jUE8DbZ zDyGXXj1o#uD-10Wa3^@?+rC##fp7S|yY9blv~=l~k=!gWZ^6yl5yI_qZi_k*CAe*` znh9LHUGb=Wqt)Ow?c5fO)#TJN!<)iLOjvEYFdpjJy7E|^j>(I&;_9u7)=nouT{)dF zESWrn>tyJRe1w}BPMuuE!}lLYe(oXoMT#`@u^bKYlI2-EZ@G(WlXc7v&q(PE?FXa| zG)7yBNmB~YB*uxJcEyGp>sW=qDhfK33M{J_$hQqXLsaDEzyI18r2* zd`;r^w9I^MQd>EY;DxlrD3rXcfcq`4z0`vH*nOj=%eI_FjjGF4aKq>shtthh%_{{!b@pTtHDuTqZZR5lRjlfn8c}7GHwrU zFY_d@DfYG`D9IZI2nRXNoZG^guO3JA;4yRxCVe>Q#}O`%hj{h1hw&7w;*w+yULKmD zFWP%;u^AHsV`9fOhTJII1^o(IVobQW;pJmMx&Ux1s9Fm71j9i6RFlT34Qw7i3EYjH z!)zC2Po)+eJipX7N6rnnkB=$l-ySR z5l0MZiI$V)E2ynfYe8v)VbVj_=>u1CRXU)x%3IyZd>vDLrn zMr4RlSK_-zTQOG8tsp$Rg5=>d7_F}&%(oaKE}^meqb?)*-UegM-_g90hHA0k#Kn(ju##i4|To zS=T9Mz`myRlEqsQ!ZT+PuWVp+c7V;*a|rV_-vQu8P<2@2@(D8)kg$+FC=m zNYU|fgjs})-2(c3Bq1AWG2&3;X$XE*ENS<)p|q+KO?yp~-1gb0{&0fyZ~sPwK`k^v zzi-ZyO@6lct%Ka=qgrrZoYXcq$Yccy;>>uqJ*eb<_v^2);68ruVAr8dN5O5KI0|lc zzq3}J20H1BxsC!JDN?VWZMVYe_3$PQXxY@ZmR;Sn(KZ#ok~+6+q?Rph2kuFeP{05B zbEunt{P~m;kupSZSQI9imx+xia0e06b7zpQ46t!}gss5_2H{!c>6Ro{&$$IUXCA^+ z7iaJv9&r#?Bty&&H!vRFcV$;~wmn^OJX65MTJ)gB9J z|7#*ICyuKv4Pk!Rmf(^k-KttOX}s&0Y2eJX5_GBiP~rA~Q~eOn{khjExNrKMJ0G}b zxa;tiyF=rt8Bin@mgopJk|CEq0>)aRiM$sA?pi$OMDW{fzmzaGKW|L&GXQI^46!QM zkjU`>oOXC-dy%yl=622_dET0}C6}%Mv{%!FH-{|Oz^g1A0cX!4UK?P2CBfF#7B)6c zYk%=9LeF>L4F=c~ZQ=#{m+>n4bbA>xiNWg&W!Sgb8ZXfS+@o^kiPDuyl0 
z3sKq*+|w#Bm4jqr94e~O-hpbK(N;xN@gUMLOT4?EURW4725`}Qbsr9ARX+R@kvNw8q| zH#GnXu^Z!s+*5sbwWrSt#&i2j z3Db|-;J)#-A-sNu{kU z_c1XsCF<@%C3hKOTJK6wOQWhhZOug>A-VM)jpjba2GT6}HXW&stH9mZj!SUs_q1jZ z+$`#oj8a7FA>yqe;^7G4AjW7d!N$s2c)%nhDe4XMr$JswlkUV5`8E51=k# z+ksPK<0|Q4O~6e9+xT3X&J4EP*%(ZlCD|-}tnQEDwwmcwzFJnbwm`8?J^kD|5*6Gc zn4L_lCC`8F8Rrh97^{1`AF$J>i zAe-Q+s)M_21sPui7Y5RbW7~nfu-~=8txKKuZq{?5y|q9*$`Edi1l;K$MluR9+}yxu za}&{UfFh;9V67KTSL+oayx|F4zv$t{M_-NIrys&4y*{F?6{KAzRkQbE0zdQ6N&C{$ zC1kfXCx_4Uwr#+@om7-DsDXPDJ*Y2^iF0ZTk4yk|o5omhSLeAnaO)*n!qN8Xq|zce>PXs{X)yNZYW;O6>ObSb=bI62vrcTf6qSwc|CG^CxrG+iP!6~OVyX;&|; zBzJ@MY!j7OEl|w_p*fD#0^|?Uk|;&I6(fsc0XN?X>cIx3_Xd(UR3+F}Iw}&(!ok95 z9m|8$_?{;|29JwF?9GPKc7%DBorD2ZWVTE%qa2^r^fDIQl#HPBgL@*?+eVobIy$v3 zikX24-OlXqWLK**j)}=E6^^C@yNRy~@6->Q;HJo7Y)J|3H+<;Gv18Gm%j)2!uU`2O zp}jr0$C?hAtG7cQ+n|+I&ZLq?h0_XE^8ISmD}Pa1%{idaavG7?;TX$w8b|mlVC_+3OU^E;ExRW@B zRB$EVOcV4veH2M75`8WoVqbO^ulc$w@w8-weWMMN>X&MqH=X`cW2I_NdOlh*jgPm- zcajuuE{f^aut`E?nxL`QY4^k#zy4%huDmB=M>a3JXtn`&8Km?zfV(|V(@cYHaPxtx zDuYeM@qZ<_>-RY|(}(VGDoxZKvI1F>>%UPZMfd5%vEZiFg!F_`JPsoyX@r2?07hGI z{XoXRW!0)8L~Hzu$7(y+_04E3FepEJSo3NKlKEngVi-3and8 zq?YxB+R{2mMzO2atZQuZHiUCRUvl|%E2^)#Md_LNfMqB zQ|yjb@bW!z`lBL0iyA8QP9JSmB(+juVh3M_7)V=f!u%5W90x$a6l zX}E>GNyz3ZQrW7AMp7fs)(&q=-R&qs1LRYx2UxIO2)L&S9!pQ#_i{dv*M(zatm}?F zL)FENDEE1omPQ)pKo&zG60i&{BMme3vKYPL79RIMy1xC#>n_8d)zet;yKqAG4C3_? 
zjRk5U`+WSm@qKOQnzTU1CFo>t=J^3Uy?jF(KU0OFxxxDP1h)hON=p_NBuGV}e8GEu zSVLalc*|{f-xn@FVj?Xu;6`RbUXA;mb@t?BrR#QkCr%sO3W9d+!eHjS?vn;Y2Kn>>CKZeWs}1JquKMi&uHRLbk@m5ZXh`FjRuY6zQh97Gnn`fW@ocs`7vTl( zexqjMyz#@g-Fe?=`RZxlCM~g&TM8VNw8Vn@e89FPX~!$)bAhyDt{W0$g5kd{Vd{Kr zy!~@4gw<4awZ9R{C1Yf0)RYbeWprB$^p%ocfm;fQ8gQp^QG%OFWEQg8R4Uezgy3cz zNs=%)$O==L13!wezw4m8dIHb8Y%iX_G>c2(A^d2B88$Sg^q!E3bH1&= z4f3W}QM2kNO4KQ3^QP{k5-Hd;_f5LD@$0wdZGxxMmiQ14rI}auXljD{mfP;QFI>Jx zvfrg#LQSE(Ajv%)+j68wlid> z@_gXF?Y?mN+BUf9p=6rbbcOQ#;I?fc>O8eBIWHF6?d|VEz^!9#yr^|>PYAVa0JpZ; z=nkY!lI$_Ocg9YL^vGE4H5Y3M6+}vIN@RAn6$g*-Z}RL^X^2o0N_bX#r9)?3IxNRA zTa*)0Y=2lFNizh!9+HrCQ3g2VZsL*AI{iHrDF6T<07*naQ~<6(QNM0{%r!VLSVJ!h z;pYsVvI)bc5{TTpmS`4bT#Ei{fRcTdY8lsQLtAk+lKEc&+%k9DP^h*pH3{7H`R7zh zUQ%AQJLEH-nF;ZNcipIUOK!U5wj;-)J&)eRg3uE2_}F-E`je%cnz5FM)54kprx&f! zrt{k;4fA`3-cM41N{KVYmRbi`d#kO>UMeb<8dVfYkFaGKbb)aawJ44RLz#xEQWxBS zTwJ_19Ds?b{98H~aPgQFv$Rm~#)^CeH{VH3QmN%_!A%=I<4{OXhWSVu%hH_4{4$@h zC+uXws!9R3-VkmcVMnrseZwKXacL)B_Q*@I=iJ@c7(E;1&WbXi<8L5K`Je+E8j ze32vZBB|-YU@I2SFT_7V4`-DcJ;Hh0w;l+pdT-r#dB^4?mFnFf!jXd zBzjVRxbb9!CI_ttw@(^)X`DI%+>{A4%~kr7N-(RqFiAi>leWc$Y0p8)EsdNa18z!k z;oM41@;DU5izMOar&o1HM*px3AaUS*5ME0k>V8!nuF$_1Y6>n&h_N))L0sgS%}>8!Iq5 z#ZyTG{6xuZo@x>|oCNM^yTp0HZMqB#Z<|nNxh&Kv+c{Io4EL5atsJQoE>c^o^thqm zodRxE9F=QZa1+er>QR~?&KO&gqDV5h0`43!r7aa4rlvV;-;$y;8emWE;p_SizVpgu z912%4M|x7Q-wqc;Alo(he2;E@lkduI4Wah=Mknftv`ZZ}7ci+{6TschTpAnamItpr zmvwNTZ!V|L;Wz^jp(1=s=Y!8O72HR|) zmeeG-a&T2Bd5lB4lEm_V_N%8vx15|UwFRIoufYmxtlP#OnRL-dA-Zvb!)b^Y?Ont- z9omJ?*<)A=GFf2SNXpS>QR1G~{__NJ=%w;3otSMI*{Dl*=4r2K<5wQP4Y=(w9P@hQ zs2y{@w*#lz3t+pyCxM&Ciz)D3uh%NCq$NjN3MHcg3GU2Pd9{+;+PSsqDhEZuT?d`q zknL?VQ5w|pKfXqA*JDqx%Kc5c#=Hn^p2NSsP#FY<1JJ5Lqh1U%0Z(E_9G=QYNMG==w?#n=^&a7_~8 zC0Fjl6S`a2m2H5oHZCYWO1IlZOb;kM%BJ6BCck&#rJ5E;s&X;l9-|2p)%{S{aqoWg7tqmw3*_9o9N!bvSAQrt&A)TW*~^vzm*?Va(w$;0P~$Db7I zU&lmyel8B&d=@nrkd`#S{fn>D;`29t=t#p_B7rE33Y6@3qfHymUH4FG-IDDkx4mH7 z&0`;;?EPLZ-B1sZv5R5cJla_D6O~2T_S&+hmR&CZ4%BQSS^k^~dR;hZYx<| 
z1%N?h3UFbrtmrSsB%1#WElFJ?!A&p4gdNT_pvv-TOG{E(G1!WbH<>0VD7901FcT!? z--gz30a!<>9|O9_i+O3;*oJEt>;ZMMa7 zqKc*jWQMUem1RO~PO~06a#36)aIy?L;{gtBoWX0Ja5bJ#Y+%Q56&+^xNbPE))5udQ zxb=BM4QCd*GCcv|eANuj2X0Yt>m{ID=3x~c)oT*Co4BbKlLVHU=i5$l-*GftzNSt~ z2yVuKTX5S1Vb(3NcJ8+1w&0fcS^!gf#n#=#(^~x5bB$GwYUNv6pw@Yu{Y>bMlrE?Z zUR&vv`?T%vQUR@IQ&V!Y(}9DOF7SXc5~)~67>Vb4J+Veew=uDKE7y+WRKP9$967Np z^F?aG;3xxf^<=8@7Ykh>xKk8agd~qdiDW5b#*Gw70YA;*g_Pb(hf+635}2Tz0_Y<{ zcjXMOpKlNGyoiM_0eFZSRmJ~6xpb+edb<-QmMywr( zsUT)qgtUkd7Ae9ama+wiaw**7#~GX?se#)FT;@=^E_!i-L-8h_+Xr5H`CcpyA4YGq ziN4ztr8jlhA-a$@CB6rANwL_ko*jJ0x_DMIda>ZP9J?uPYr>@sZdnbrox?PN8UxiN zcRuBY)M;#T?gqGT`p}U(j*gbEVS+Hh!HHsmum!iZb4y8qT0FNjQYN8qibnaX+%8%n zsrJyyjol1nvfsB8bxV#u!CqdM6m`W_KcBqi+6AcFHYGO~vFWkGKAV|`Ad3;NpTZ4K zc{GanEc!tVAp?A{~Y$bKZDQ91k7WM{jKfS0x*G<)bdeQ{54E z4Oh^YZWsnLV+^1iPNbQ$EtK@3T}yJBw&mqgVwDeiLhM6c&r!W zyAChm8Ye_&xQW1Z5cys4b~@e=Zpu0+u9ydHAA=6382l_ zO-%Q;;1*M0sI+7|a5INeskC)jUpTl+$y*ci%84fr-ub{?O8!?a<5%DF4D@v|^yg!kJz!?)X>k zpTl$ZEnv@Z4KrCN9UthL4}9GX+{8mC=!sRv*8dnEJ0V!Q6HfpdKTpS!I$E52%C0c& ze07Q51by>9%ALbBS!Lz2x`(pl+&6vb$ZbbQ%Mx31G2mwIA5DSV(ULLFZ4Rzz#@7b! zQt}&cw`q%=knuVhwZZ3hW^?h_Q=K`9^z0QtCymi7)-Z>M@cy?xRlq%q2qYu2R~54u zI6l7XXYa*Bqh$fNEPDg)IxUfVGawa*PEDbdNJAmTw3cuMq>=!f@SPUMEJI2&W0q)Q z0>PapQeIj_KNlr?=VTqYj*lo$FrRMWk?{&%e8pbeu;61UTthL~gvT;?y{@Fg<)I_F zm@MAj4o__YZ+mcC$vsW-Poq53z+En+8V|42%WC&kI;pfI8*{%m6w32SZre7*N^S{v zkSeb&aGOa@vx=R2aRgo0jebO;NEuxY?`SP-F@3C2Tv= z=+w_*^ouq0vj_1DZ~F%9bSb%0bfGzqo->aDfbV+iF+4QfSt^nQw|FXZE#F%@ciOas z6sDlMt_mfUMOs6scuI|LhR_-gFXlg%31;aqf}Fgce7uOI2RwJbAe#|~Qj#W^@e6q2 zNnAe%eCOevcuYRP(r6t;93ks3B4$I@Vf{{8|cism|uMe-+o{g z|8Bv@qnsi9txcrC95S~f3&<0Tf$lg~VoJGf#?&`O6zgM)R4sK6PVQ#9xl5S@8cy6(C7iCYJ@zB=h&G@xG zkM-?Er5n3EXksVJ8Q5l`tk~)0-LjOPoUw>exQWdl1XzKa-j82>$1~Z44>N{$Q?cZ0wE$lC z*8A{qxQs9~b!Xqke9mT zbsiXIz%eXiPQ~y4OFfKcc{0TaJESZT#%VK zcQj6#-I9Z3!GN0+%t$%b1=&E~U&Hsm<}p|dfE`T89hiG^fvo~~*Z*}t*5Vx{EzuGW z>XDR5;=3u>L%>>a^IgDd3JWQ4)!8GG+<;rTHUVzItzf4mC6Qf+J7_9pklABiYRd>! 
z?7>eW?8!FpXlE1O_lW(tA{n4RWN2)L{$d|zS6498>&P59l0B|Mfm!&h39jSC?*jN~ zS7P_2KBN;P^;vC}g-o^SYx^|`+;z%Qm*zCUO3$X@ zzG=4{FDzNRX`3Ky+RX_h8L4NuFTq!K409q0x|iNFsV@zd1UxR?$gI4?rlUN!iNF@n zlX_?X6v53PBL2&yr8MooZ2!%uImU?0ipyPh5&9zO?v4?URaoP%Ua&3|L zG!rrjffAn*oZT9vo{W}R%h=&7j}fFRxH4SFYY*+m^&MbuK7^MJ;q<%Mh=%BRO0gUY z6zRRgaU#HLnLFTkQ$jObfv*&qM`6Rb8Bc0sw*%uY6x`Dk(QzxyG;4`63D%334cdbH z-9KPMUTd8DA|0ic6eq_Lk; ziuB;6v|j+3Kav;;f?KbxZdwJkN)=7Mmnj$NukGzL7>e7gaLcSOu5{Z9gk*)s^a3Bz z<|aCMijEr~4+AM7M^B&hbIDR^BVKCpHWP=JBCRuuq{N6&`(h9v!CQjaq^H%u-H_ZS z^i^pIEhPlM+Q2oiirk&MD=jGnJhso^Wh8l$j-L%&nnrkD*T+i_?#AWG3cBG263>Ct z4Mb6u5LgK}pczy{Yo)rct0q#J7t$6J`pey%MU?q+C(sfV5LQu~VkbA4u!ezgmeQs( zPH#uKQai}XKx}b!r08v0`&hu(uKNpj5PL>ER ze93VPa*ygJy6C`jdI%%!2`NQ%ib&$x$q;0sjKnPimTKVESQ8r>t29L7!G(sHcyI%9 z;op3Y7PzT|T7c_Lz#uMKU39@P$%Nynk~I;sHb2EF==Pj+ZKcEieDATCxg2Z zqE&(n$4MG)DFZ|s+?AdCUrBP;x%M>El7O2NBCRm=u~d3%@RHDN;gppvR^qQE9JQ;` zn6!k^ab=1ar5Gg*fA1la=sr$3HGf`D3%Prw1roNd^$*GINk-s|L}5)u>IVp%zHHs2 zwGD~Q?sgWC<_;21l{{7_Z-ZN&*C-;QP#Qb8CNt1xUC|DLTfV;_a8pWG(%VXSCI-i{ z`*EF9N|@cwNkq}=r7>pWO+5eL4*Z9m9UR;^iy7BJoD9Lp0TMY%ZVeXG41XzdZ}hF4 zfm)J$tGfcnO6QG@O*ziz2X|c;6Cq$xJgwr_SiH-4B6(eMTh7hXHo)ERP=1Zzwg8@# zhge^2^?r~NbxJ-b14e1U8790riR6vk8l~`3X-N^zEnYl~!KC!&r&7qBC4jjDNULdXzbs=h(jMyt&mSNfMwp-J%c7uEP7?=FQ6TXP@$lt? zRP5sQ?Nuc>Hv%f+N;_!7)|e8bNK)!*mphf*1ity51+7SKBgspJvITByaUsA-dl>8` zaklJ!meUjIpxg5?jMi{D2Ke4XOL%(U!;bY;%s44xH&&~M#Ch|XHTa*>U$cJHprAV| zU)r08O5-o6-5__zW>14k}Nqx!$VS9%BUq^C_O_b4awd=Rb znbl)IN-j*miKT$R?Fcs&X-a@IhS>m=N?q;|@dxQPW?c3MV~- z67Nz3vX!#col0l5$2+3gQ&}XPpm&hg0^3KOR)R>3VL8j0nRa?bv;`kSbld_7BLKS{ zY=i?iUJr=+aP#gMxW!hTk(;t&W!=PQ=!c;%47UH5fjd>j(wHD+Q1b*{PN~i72jbGt zIw@wNHGF-(h3|X(wRpt(3KqjPxE@Wt4BaJtGnN)Hsi|4e$(VDgN+#Pls+9N9F*zLx zL*5OzRj6qA!M`=QMc-n`?j&%}hIr9?UZ>gbKm6g_kKD7h^ymRQ2(vUmQlRT}MDY@7 zRx<@^R3IxM*&c+t(x-DJ^Ih42w^9s93phr;?~A>ajtokLzTd-OFu?5WtmYBMTbSuF z_<0opRn$OQ=#GvMg(=RQiE(B%!7vVxcpVAsACd~tf$q$RiHuaC z6J#j79EnX@5CxQ4pivHGo91pXLXfSYldq!ZX9!2Mw|gi$KEkwz9QzSP0X)|)?b4F? 
zYE6MusKrys+0b|OTw2qg0fUv^R+3xJZB2wmnWW{UG#gT>EHq;$mP%D7JD5FmV%;?o zkK^kijRu%?63nIRc=BQo-+gEq`_@ij9wBDj0`V|HZ)O&oQH0$0;ROMbXoQ|b(Qx;p zE@Ns#=4=_L=D(CXr&K1ZGGzFWy?cZ-~IJkT3<%=VM#Nq9D1x< zQiA)9zjgbU@7-K_%+~_$aqpxq<92jj)k~)4W+k{OfmwJx1!g*dw6&u%g8zza2rnC9 z*J1~ce#9X>`YH!kUjfW>!la;&X$n^Ib+!72IdIQ0;MO}L+1+pL`{PHy{T5k5)gSIi4>wYq&YgV z7N@78>32GC!$|alFyF#qjPM-?=JCv3JuGdUL$I}ho&7$-FhtP}5IZ@dyg-+~i{jGz z$DLFZ%iM#Xu@eOQ(W^pUf60AM@miLPU`jw$3F0a4_X^|+ZZXCg52cJdv)>8scfU?e zfo*UTZynkPj3kpLqQ;41B{$vg^%BOF^loaoRcNby{JLVGDlP}L-<^R^4L#b#tQTUb z8{vj0UWw~2_i$(l;zY6QpyL57E&pm(< z4k7V&W5^7Yu7@PwQqIrh1g4jXv}SCHgKj>6J3NjLzW<4sXQ(U3g#w=DJRjI}f$w|E z12{EY6d(%d*nG&+Yk|?ZQ+v-*kv2++GlR9todzOw+$!--*3-Y?eW~>R7Xey{` z-66P1!7>AGFIMG{#Wg)YKxd>pBIy9Z@C+X7Z{T_R=5fRPEOrkzvCDNa+FD0&`dWJz?gC-j*H?O>d76(lF(AVU;G~30H%g<^xWUQpFOK%xl z$0)S4f*Ff5!F_gagm3w|?^jy#BOkuwi+6AB{5mVS|EvePsbP@u4h!;NV zDtz!}#TQJ`UXwSY&WXnQ`o5-2uMuZAbCo zU{@(krEMHJexflOmWD{#wYtgANM;)nYq!_Z{#^sNN_Z=+RpMK4(}O7si`1lG`8D-q zx+KFaGf||3BK$%|wWGM!5All2F2$1(FSIdL1cGOP%0S=6=gi#kQI;BNn}#D)QPWlA28P83>VvfnMZd8=YA zdA9-g`6RbwNt@#H7b3Y;acnl|tN>jz>eJC(`0%{~Gl_t=qV{MEhdnFbCE-ATt_?ibhY5n&9@cZP+yfaMDfh-gjR@Ss;-mVM1d}63KwaHYV7> z7%^3Ge2CB8eOYHJcZ> zJIe3M0+WdCX|(8W#O0L5TvNo|u8b_D*AX_Jj1U%^g>)Ro#0?TL6UD% zunJikA(v8GW;*qzGAfl(S(#ui88sogG$XtS+)cjSo8g;5NZe zp(U%M2M=!ER63Lz53LmSFn9KiJjVfB=bMDY` zhv4?PP&I+T!?Yhw@rRkp4Ea_fh5l*M6KgpeoyPv|7>~X%#>-xC1m~?NNZHgfoC0^8 z2EO(EA-;F>Jvh0;*)S>{%#hy3S_fA=@&KOs#8o`}kODj!Ptb1De*kui7?T$8AHFrg zzyIr7ktK&QVRyScIxad6USe2G%P4!r5Ut7W_}quC#QvBxiCes^XpF`MvThr%t-x&$ zNtmcuZqgH4KIk zaM6}7lj%6G0JRB5E4rv@#opjne9WiU4di=8<9u3T;O4Qk+&;J^6i9H3$2ps!<%0R5 zZ6V7<0l@4XZ?snH)#8H&@I^nsM{)oFAOJ~3K~&&g2!6(uWCS3)NfONRwrJb2p1GS0 z(VpFnjinJ@{DLcR`NM%5ZUFxBul@nYPUgs4hcHeKVkWU@4N0acn{@k_jqk*T2MWCK zX_w%6k6*?<5(o*iUxV4M!_Puy!(!#4A zc?ljmz7MPUF1p>eIQlX7LJ8xtZoMdl-%<7>>tCyD5sO zi%GtOIO^);ftcwQtfv8A!+*-XCEF=Zn>w1NX;No2^*F0NVR@`Xzgz_-GntYJ zj>cFoOROE=#fvUFg6HoW<6+4F#c-gbx8oF9nM)xS4PT4#{~TyBvP706vr5(eRe%h% 
zOT;#Myrnu$pfO)qrGIzwbyZkJk27t@kh-YUCU=rl#OGh~uU_jUjK6U0k8a;N@|a<~ zE`>t|Zf=3%D3lRe^x(Ks6Kw`CrJ}tiBUlJfZhYWL^E@(b%$2t)SR~6KR>4xg;y@J% zI_)JVU^CM4PPvd4T1i*tr85JUthO=A$B0r|l^NRE5LaB-#Xo!1LE+16_?4!_R!bbg z*L~ziI9?oP$-eMyf?IC2b7Yu#?G&f6KD`~EC%9vVMP zml50?6Gz2STJEY&`=BG0-h4OKZIb z$A8HrU0|Gz5Oq3IyiYJKrHRP=E@gM(s<-?TT(VzV#H4v4ig22e3?I7zKh8E$Fbd?r zEjx;Zpnghlvp`Cd3iIGrAUkVkAU3VRzyYlQZXDfNYWux542KDyI&N;@rv9LT+bp9> z0GCLj63Jj251Eefb7_f}Uv@Dr8vYm?`B1C=WHgEvQ&|k28A@X0OyKsd`I*2i)FTk% zsg!4fWDXab&&4H>=atw^**~>3!AmP}f8p9|ZreHXSOYgLH&@EpfSbJ;{)ec(w+@*F zN|Z)RBr+k@XLNMBD)8e%PP$>ukfa6jY^Y!;$;p@G+R}E8n03W*gz@!ZF+2)IeC=v41Sz&roVow)I24^!r+d0yoM!j}bn4%{2FV-L!5 zF9Nqblz^6S8x*I@B#;BkZNTNV84andd}X<7t8IUyqbc9J4jzJAsEjKemVllBmOD{~ zc(RTCD6!EW;mRZHc|-8sUtm@f|*HuL=?A?Wi4c+80kp*E?JryCq3jjX$oVqGt9=jSdL4qMJMp$ zryjtIuG|no9Y$TqS%-0o_kZma{{F_1)SQs_ON2z|(gO$gv0Y{;og%Cxf~Yp-5*w}o zm+@!?x6zF{Dxx4X2h%x5W~m)WD^QAaS^pPWZtLvfj%VClXbFYwnL!mv2-SNOqY~3$ zWOy(`IUb-LcX6b@h2OpC2%fSOV|{!AUCyYVI>VDrbe!XiwMTL$J)h?lT z;FiRhRc?CqCslsaX+2h4+jchdjU>>)&%#h84TaK@mwx|*8J>qBtM$Px!`%Xn&JD@EK%5of0)n7~@~jckf1jfZ&C;~s&F zhj(Hl>q}p)6lNvb$I2>132lNpiSs?e25t)lE(SMgp!yzjRfH9RtsP09j_7CBpoFvw z65QgOcW`oPjF)|)vfM|1c-!{jigPcr+-C!Bv(!GgeZFnL*1x+EpDZ{r_s%9s}g6Ud9eO;H{?l=la2T5f$htPEtRjI||dx!EhJ zrpYpmPOrsTtg1TDnid>Av8V~}QmUeN$bW85Hf=e3>ay~E7RAjlZ7x8FD{G8}djRIkp zo`trtF%{jgzqu%iZ?WYzdeJPw5iPf<8Zbwp(KlM|-!NM8mMU~^6N`gs4ixsqlG zk9Km^;vboO9=k;tzGD%TySM63mKekl-v7mIeD7F+QNfyT1-H(+(Wk>^iFT>ck_Q5A zwcuLv`ar-f*30AG25!4g75DZmw}t&0;J0eR0Nkt~G;mi{g%W$0f>!DFr_@8v$;a6Y z`@757y!RO1@u)+1ba4v%S`&1nA=ycBq}oH7B z8R0j~p?vF?uKVGw+lMb3$arjRMCFz$+mItv1UJ9JP~alb8y;sqMxCK*axh6VGfrA$ zSYGa7Hrf?E>2=mL?A(Q?J>dd8|LKQtu9ldi>&hEh`6u9Wz^-f0{5?WJPT+S$3{~q- zblC&L1o+Tb2Kd(XePk4fv|4T#3N&zci__Rw9K+{7@boj&5*G@r#Ogh8TUf+Gc;-Or z^fWEktU*fNZk;(c2GI3fR?(6m`BRMx)Ofc6z2@Rg{S$jHje1bXM{^9*n7$2bNi@Tn zMG-b9J9tud60d&j`M7j?4|*f!5jK%?W=}ChhjMJJ{auvra2DKZNPKIp_g9~sS?;i* zSmq2#B;L`IietO_cPW)%WSi7hyRk&tqwI*~e)&~zQqKMVE4Zb)4@gD>tR`cuCEIw> 
zvoFKXUk)i&q8JV>Z$6%3shfzD7`b-@maDS7)SzoOVdMfsJ*UW;NjC{?zO=N(z#sxX z@TFb+x0{B@Ip>-?`0ISOzqq%uBTwV@douPnHeZ*x9_ zfmh?-eAtV@tsVH9g zIrsoKPxANe~>K$D75NIL|?cjt@AIY#YCJesWvh_vI~o=avk))TwLdyIK-= zv&%@d0@r4@-;d??!Ofek-QX21sk-(TTU={OumYByi{4bFuzq64dNmuo+)86D zzOG!~l&fmX%^iVtD9qeOh}knD7EsFx`5MU>Px_+y{ZB7CfS+qkv5%celU$-IS;mqE z8-*3@oZ*B8@8=K`YRPS9H|-kzmrq3&shcz%>VeaNmV!5S>3f#@MW1+MWw|9SS+vej*5e!+4x^un_da(OEpgyx!!2#aI*O-T`YalPMBI*vJj#7jBE2r?^kL8$To;{c^St>*XyH=7%HjyPxM4n6vj3L$CeO048b14}x`ctR*D=zMghA6iG;F6aQu0u*Uz#9$VSw#K#h0blsW zH?cEZL00TTHfd`uFB1xGOEUXR@puBcH#>!2yYeA;;nR;`DNzTLh4*X^d;eGO!FO*N zBb%+D0Cn(O;gqx7e7GGK3M9A>{^T_jo)2qFUi;n!;HKr~uEN?97w6s!+(t{Pt=goI zJ;%~_6t!%@Ii0eLnN&ivkjgB#sbSgBR?+4*xd>UH6JtDQ;(%j2v$xoyp-uLAn;~#Ei+I|I7&vJ_;6n+>9En)iofr48~ z7f4+h=XQ=A!L5-D*D3BYZ7R=POB9gS$Z>mQEJJV?F1coL`U=$MP>R2y1zVxz*6VRm z76*D6TqPwDZfy3fL>i}vc1F0UHNz$2lX&G77vX~4+i_U5;iw~%&Vk!3bEwZ97F1Qz zc8)Vj35JwEu+B*7uMU}2aO<==uiL%?w-+s0$?(#TzEP_J-^Nhj?xD*#`(0$T3YKf~ zf;k9Di-~098&4j~q{d3cDcoKv65<8wePYt!+@kR}!g^~9Pr0OrH@xtCtaAK?^es^Y zGsac~{O{k|#b5s2z1WQoV>buVZdd5YWV9>RRKHXgkT*vrBg}y1;$FP;>1%k|l?Tyd z+*|AHF>C>U_T}UF?lI1MToRrp>FDaFX;)5nwk@GR)^jsM$&y#wB6;KJn7)n~%Ko|| z{W>dN!L8MhR`E9vZr5XI&YyR zdFHNau++jT-QVbZOeC!xryb1tBaz?ejr%w!@8ikK1zz=t!&p6aw@^swUzB^O)Xx`2 zRdA~z(;`Ui$)tFuSZps!qTtovs~(OymkMs}%M@>*dRAPFx)^x15vEZKTTH)yO#_|0b@#u~v(s~vORVhhIseC``};qR``F=-t{NuhP#k}BypEgdd&#URbz^;y3Cq;m zu1;I15<7&4Bc*nXrP?insmIp~)P8RnOw$}R@Y}VhtO~7Ot}+{1bgr#Wq$G(OEkbTS z8)5&rk4t75p0}}q=bXQZ?&+IwuoGiC;;z|}NESsp+r2h&p2HPE706H~tpCv7fbOI% z&n;`QkJdG8xRMO{j1J5i2+-$Z)koTaqmp%kL$ThhV&wp<2&Y$uc-cq(#|qp(xOsTs z@_xKeg5k{4u@i^jmejZt%@7k*1B^;Vl9-b|}DQnH?NX$wHWEdr8btVE~r!Y6Lx z*PpQuo1*rhBf0aqjnfhE;V;~ZYwztKOV%-VRc>otrrW2OOiQ%Wt`yN{oCwBlkrpEX z8M>1@@ye&|$1ATq7pr<_MCGbjomr&d=6|yI_yFd>O<_qTlY}|J z{oC)o33u)6ulx?)Bk7?Gz-`GI1E)MPHYAgNSZi6MAj;y`F6~{#wVQ=iypNjMtdlrG z3xl-%Uf>p5O`}lcgtzLR&>^P~)hq&s2V``LxfP}}vt(Wtr%A9h>H{HxzoaKG~l*M0w{!RF)61a6AKivsT%z}?`~7Q*s4 
ziEr7fy6K|8YP5~#K7Io)dqxLGs70a?eoT`H$7jHwf9)8q{c(hyVg_K%#!H@@;+4-mq+RqbTC)2S=km|4z7OBNaSRkID5%RV zPN^17N_7ZzO9b4v;jrvA!re9K*x zEy(P51F~Q3XHgQpfjX%{o1ts!p*w&8!`jt?IC$M~TsxBXdM-F_l8m%u)IMRyjZnoY z(y879T1Ntu216dqWCz{tZCo0)@YLlNUVhFJ4h>JB8_$qW$6ygpi&pgX2<`5Q?2?p< zuJ{Xmj)&x>EbVhT~QQ9PFIN%b&4?=RaW?Ytjl&tCV0n zj_`rMIF7%+ag1`-(>S+Ol`#axBn-7nBJ_$KtQNQ8(;s}gw3(5XXe|bnBegF44ew?5 zdkt<|%U1Hb2;55Z`2R@a;T1pw%dH@EJEA$FEGF>M>?ZDS;5J%W`40gtYNqJfd#Po> z6IKL*g=BORspG;?DwG6c;ZB0F+&ju5&(J9|Yz!y3c({XCoU@K!*yy0$zX!{m0)y!Q zow$weSVf-)ZQbp;(-VUe>VlUsI+@NMVw)N%hL(CbJ}r9Sru6HqHo3N666@%W4iaE% zAN!3 zSZ*nhVuEM~7p#u(FMsa>1^09!_1bi9CRmlWun&Ij+qi4Hhdg-*2GbUj_7YAFPJv{) zm8R(A9i-C~N!&(@o!ir0B;#A~{@?#6c;sQAEfkAeTZ)rEaf06e7hCwwO?|0LR%eua zSyg9gt*AKFEw->y-j2^+^%Qj|2}tu_BY-==>q$!n`y`95qMt5p?CQBia9$#tP82`r zEv*!(D)2fh6||Yv8O&ytpsVSxonedny7#lReN{c^$+QWl=FU+T7^`IH9Jr&r5b>iv zWs#{K+hvFoXpII~-`U0!lNo;bqJwx`I>h>P8+p!}%LGg5I>uQk`X@97y-WrFhiNKJ z<}Im7(clFClLk1UAGMN{0G#)x+`4$Bty^|Sqb1WQ#@6bt2X2p+gy4>x2s!1{ z7J=I(Fg^02-x}xMt8hxeO-(=+>-4c6ox*$m;88fg%Pw=i69q}Q+SkxWj1w8~m4AN= zuDPj?qP>CB{UMe%R#Bv5jCLuWvyA0V5B>f&vTO^FdenJ%_LI)Xm5)~CD$4mri~vOt za9?HM7BPETYkK=Cd{3*Lh$zn%R*Kv4nGgPK!QEKw76qAWLa0Mta(uwc%>s)<>PMy) zfZMMgGz+ghU1<%!&ya~jDIB>dgq9?nj>alMHer*Jrfw{EJ`$N^y5gJtZ9H?mi{HBF zAT~}N!wQ=j@(fYb!I-iWDH}*iEK#Swh%juWV#%#}Lv|;Zon)Mv4F}RuFTmCs6Z<>C z9_qdepJ({6VBAb6B5$wk;B_Bz_1r$^{%OHoh5F9W8Sh6+IJc!!4zN_5#;;!aC_Mj3 zsT06)4q&9ZfSCxz19xo$-~R90*qzMqlD}~_ETFkf2XuM@yf9@ zF(50va_>hOHrAJL^oTTn2=!7fZs67?<_NPSLYA_tXRMOa?F6lCjE7J9c;lfBJTaZ( zU@}0I?MMVf!hUfq#zOeJ1SDdiBI9HVYKo^EH02r}WNa(}Ekmmaa3OG>!|jCeCCbE> zH}ZLwEVH04;m`^JcYmdiSAX;s=gc{j4;0*zlnAm>zUB7yxkFkap{s)Gqa!Sp1DwCV z#P7f6QXKqANh*OvaEd;P3qKN{RGe~6!Qli37H%f<`5F z5aFX=KY_nJx{W+$hLYfBTN}Zdh&WJOklCNYa(N3r{lRAoU7}#I=;P@<$2KUZc70QE z%Hp@yW>&?lT_(4xB{@5AOXXgJiyJMmriZ|CyHKuK=re&^xGtwi$oLX=HECUufScv8 zMAwA9Qi)82$T)|-m4KU_7F_X8PqDtUg{MwO_^ro29OsQrVr{yEIAv#*jx`a1QCbyF zHj0$m65JyHKkGQF%NR8&3g;n_gKHAUG!d+}#Om$U(JYZCEX|IQa|EI2y#u4=o$L)) 
zy;_qv@BYe7-#fas_H&GLlLfWnmO2u`uQe3tM@x9(w3_){hXNZ8q&09lS%;8!O9#6X zb!oB7W7J2d*uoW;F5`JWe+Z8~m*c3l(@~`*Mb<}=P<%g@xHc{Fhzem;g`H9xJw}$UAVVtU%ar}kBc&D-w(9YD za^cBzIm$M0m+|h0(}jMYfI);PwXbE0_jpTABm_q)G(^S7?sTKffpn=3MnQbasU z0B(zO^9J6ZL)jR&5HND1B{HCu3a{A=og|Ub(|P}PEXUh;%4HYhc~3cj^VWeS5ya65 zL)FkE%?n!3EtxSVj5%bcne^op@E`xL@8IDNc_^Ou?EO-o%rE{!oAR+Q?BLti50EV_ z3Ai~ZNp>8Go3bm1^UO}8Q{09>|G;x(2cS>7;?6Uc!5qMwKX4@MXm)P{SaRr1!-F5Itt?A@h;9Cf_u*5hca$1LP1~{`v zIW0m0lGa>@uF=|Q&>wf3?%E=LNe=M8YPk)CTKZW ziq`qmG{TssYsOgnjHw=(lcpA4sgM>#*|dhbUsI*0sW}rNj#}E{7Z(_}PvUK#c-3J6 z_domU&3|+4*4nc|aO>pi+H#+bLs?m9f0oFcyQ%G5w7?wyOvi!A0^Xnm?3c{Yp|o*1 z#*-g=F&=dZ@aRi{9_20Exm=RiS^paJ_v`@w_1ihVd(Exr?+)>tpMM;F@wx2zQ|%JI zZaf1%`$gc!yEE(*ncPHCV6xae!%H&;mUgky-opR%M~}t1ow}_}hW%0_oZ$Y`+pslR z6;(LVDRCLUYHFKF5j$!ji>DD8X*!Mu-kN*6Itss@!V~Z@N3#wm261rvOqHaBrRVj1 z>z+%7n?;>8yn!7sLAoEsH5m1XG13AWL~Zj_)eJ0AB>N=tJ$ZOQIoX>}>1b@>RI3l)V@ z6<_P_($Zw7ENhM39Qj=oXhjO{cYO3shXgBsa`ml$eeJ2`XAjd2S*;A^&~nF8&-lQ= zZCj_+CojOYQdZ}10nIZq%$gb%r+P7~Co$GIMh6KxEZ`ZPLN^_u*B)VgCC1@{t2lUI z1D#e|(!6ET!e}(XaJP>;f4qfbcaD+AtB9A@kmos;q71$E4C8zYaXdvk=RBN!*v=1O z72Znj-4VaJ-+R>^&4imM!uDP?y^k zJefigm8Z;d`@pVgNS*xgsfo4kRhD`YxW%eEm0i>Oc9DlFnsNqk>)c9RJQgLPzo8Xl7wURD857f8zEt0huffyil$6sYf0wkh#U*kz2y>$q$Becr`ZTg z{7W`N+U=oaa%Y@L_OP>g5clnDqucIk*9p6*C8Wl5F)0O;5IH)?Fdc1UHHom4b}$*G zm`o-}S4tVJkx|%~gH0oyqHFPF_Mpn`ufVMw*0{H%6KDiof-R&G4;0+C2<&xMeJJDf zY=EmQhMKeT*wkqb(uxXabvujHn?xl>GI5#8UmQom%%!IK$y#Z>g%8fyC>*2LN-)V! 
zVgGm=FFEfV>>G__(M+QhImPH`U6VFS;)=U4!^}c`NCQbPoFak?C3B&rgt8CrY8Dgy zz!Ig5Oe`cbO}a#IXRQ->&&Pj9!Tm>Hy5R@co?g9t+};!(WtKaoXH3P=pTeQs%W@mI z858YLVwhtqCM70}ueMSoDJS!0qC_gQ9H$Y|ltWJ4Y;=(%NHLmZSX)^}k&gwmNo!fg zJ`ZL)SXu4@qYUl1hZ)#YnV`qkDCEdG5ysToriCy^Qc7@jG&Vzo!qJOdoF+;kv-^aB zO4heb<33^zC>3cpJ}B2Y!rg}t$LW@vP6Oq69^rqZ{$w zP>QdQOAoB#VQer?UHcauKas7|=PdU}qAj@$2~c{ZVLpkD(U)?g(^<=7f=NP0MEG8< zHdFdCP*xOZwQw$tOH5iP@h?99dkXG%eewDqT(`Zi0=E=3O-qE}ey|RuKm5rakkOJf zjxd>yRP;S)_OYm{pLN-NrWsUxj6fao}gs-8qL~l*o9hutrPf zx%xb~4Mg^OjS4X&S7I1PO2zV+^-)4Ucw8oFd2(zms8VqAHL6F*sSVssIc|iR<-UHW+(sCc&TDyGGIZ(|-XDE>1=74+U$eMna_IW9J)d%M* z37M4^3l)R{{l8hXL~rsGQ5uPttHt!x=@z+p5y2Gs0cn(EKbbN?{>=EOY@^Wxopy@Z zm;+Ebfon->&WE#;V&!9c)2y#fbR1xtlfK$*^mA@$jBhgpNKR)Xp>;H&f?HF>VmS{{V`ZJ2ABfE=;oSW|H3>hu=M_>OCYgYKdZG-)nvsuAB{v=8;WmqW5h1iPwY^TtPz@xq z&NYHA&9HPPcZGW{?^VJ^)vWDT=4nBIbkhLw0p0t)dIw zjF&1L=&Jm5H#1S5z*2kjJJo13Gm=`HQn#IjbjC87=3x4Zh{wj9|6Ym|raIPKs3M?G zuAF1c3BzSBNgbBWvRsw^(S#wy8OJJ)q(goqO>x1#HJsDxp_^rhru3Ou*~zKQZf=fi zX5h$~mP-=pr%}Cg{jQFRG@>I-g>QGP&7bFqTN0D+mLY8^CV@z$|BYe%U#y#o3HnSbDrVZ`BsoEZ#Bw4S#a7dH?nq?dQd`!|sa-V|s=A z7I$~xH*ouod&wR{kEVpR7gaaCJI2>=)Fh2-B$jqH1vk&f@?T1(kydcG1mpOO`mrgb zh;=c{I3Q{Z>v4fY?Jf?sd)Q=@i`JZzhhu8F)7jISZpl*+*eo=ppJM%>wfU+*ZUG4q z!1s(}EhJ&ir;bvm!Y_CEZW&?Rx)1OE?CZ{zC;HYeU;m@q`x`1gPX~_Mk_T>X3N^T` zHRx=?t$^NZCcGc>T?Fo`hQj@EFMux+{+tuPzFq)%Ntx8?QUI7z- zw#HaLE!TT1J$BHak#gPl`=v%o1()Zxgrgb>x?Dd zOqJIXhmqkth9JdxtSqt_N&N)M!%Z!$-uVsOwlJjnTe5g9p-gS2_R|yilh3{0>6Uol z<{1^-Oo`^Q`h0LRHL7&lwr?%`*o&6bH@mis4G=G|xc=X(&Wr`!JU)m>P4)u54~8>= z+qZ}YZlfFK$Qiipy=z%8Z%SLEO3x)rJX+#kkE)z%<(+StYw$OBifTHVrR-I9q%L{2 zh6zs}T`l`OxJlgdSW5)DSqT{fU+UbWiTE9w%wb%c($HdvW~@WeRD5TaW1|@3NUw{H zc2}lXq!UJR=|CH!0S=Xvn> zbi<+*KIgReZh%&5u4~I|w5fit`tNF~T_bN$|1m$|w%(x{i_D3vPe2lfSz(xt2+ijyg zndn$?8c_B!G5*cI7$#~=OqJF-%Y?;jA(9a?U2CLHJHhuzz9H%&EL?)|T3G{J?x)bv(|DNvdqo(E}t zUfae6R11bm$t?@EZ2x=;^5%{Za`O;;Mo&VJhv&7$>}P7tdK-7v#jtl_l-wP6d%}3P z-nu#z*f_Vh%aHcW1KV~1<=1*Wnt9adCo2INB4vV$bFvr`RGI0d7-O07>>|RUIKrAt 
z8P+VLS?D;TkEQWx@@fM(iH1FG$xu4Wt=wC`YDdz3wrG|*q!OjG$ci|d&H+!Tjc(vR zfwz9j>6XxP-?qCKEm3I)qb0uOHZbZ#I(u*{5b98#eK7xgd-Lb!qOr!=pBCJ67T7FE z$(?}q)KLu=Y7OhU{qqD}I=+Rs#j^5}hrM^4+XqAw;5GOwOBz__3SdF?VYn!KN7HxY?)UXoJlVK( zZC!jT8A@2xj^N((r2gETz@U~V++nJP;y8QAp+gSetTa>yRV{zBNus`X)t*Unt;Zjv z)D6@G4e3d1>V(~yR4*uAG*$-^IS!Ol9Gd1>X62;J5>jzg8JXvT4Q7>D8^We}r4ah1 z_CgmGC#m`IY(M7BQ_1y+<^Yc+UDkw{_Qb2@=9h>u$&lV0)9x1D{PEXo_xszvalQB{b`CD_BSE$7*R`~J9ht>W5@!7FEO(x-6ogy8oVX;t#-(J$Kp8&qXZ z`a!|n&RH*AV92R)lEq>T2`88-07dzUaqH;>OS2pYXF2v4IrdQ(#zjpee<Stdy9!mfNK5o@Y3BUogWAQypTE8YE*XpH0BEFCCf1$Uk!?``1^ zKKXhlr}mYjKRiaoDaxraS|Y_>oE0M)PO?G>Zbk^^X^B?9g-IGe|rQSvXj#Nu9r@25k^VmK3a>JNC-2Vm}5E5u|A#Rz%<8N zIYqaihKq&-<>Nb}XIHBU8;yoYt%`-Z>yJ|z6YX!eOt%5svX-^Qm)a8(pK$`p!sl5u z#RL?KvS_)Nw(;gqyiURW_OD*&(GqS=1UDm|6AC>mEc& z?3M%%*WNdLeQ5Ekou-n7vz;U4%1v<97Ba|STJj{g4}NCW!02BaYeVeI2YJP@z3Xc% zGMmTfCyb;Vq8TUmdnOh5&A57_7DW;(re0MbEvINrM_8Fou|LnTF`Hp!mZ2?O@=Vav zd9)#@&f+?d8rfqpeA#UH{EqBgVv8Y^5-4I1%z)Q5tshke1ML zhv0680-fb1xLK{o6}pI)_?EM>P}M_j?mvD=&3~9-H`-nRUu3~$y?aA9<-9@0bMNkZ z$FCm*xb0!cwFa*bdq)?=y?x7Uaq9Y>)DKyX3&3snr~2GMhmL{UcCtnBZCjvH32n(F z>G;*UYSx>u6t+|3Sjt9Nn`YRUW>}fBd4qo=xRqbCz|L<2s~NL?4(X=1f+$zvr)l-t z8vB-8v-EW=yE>n<@KS3+%Wc-1u=gxCcM5Xuw|(NZMoX@}_SoRyqne!i>}iRD%q-A+ z1)qI5c`zP5ksB)&i#6xk%i^91+`d(|??cWXE}Fe9w~PBaZ{DXn{-U&$cUZ9HM^D1V z=UeW|3cF~?{LT`TvR08umoYUOWYLfX8B~40#ig|Tn>1<3&dlvxQqNlXILjn<-J4FZ zF&Sasbc~hRSS&WnRaN3it2K2;cCBHKewcG8(hM$8FNISL>#cr))Nad%Z)S#hEU(dO zycsQ#`{`vmld5rnw8}+G@*LUHDZKqtuQOWmk4KLU4n1Z*b?!3;4rTq&=aW0Sbq6$JkDX&La4&L1{q1{EVVC}W zo|eeX59!$+8OoYsxQ82X{JEVjD0MZXb5jjDu^ zFkXiOStBEUvY5Y&*DP~Zd^E%&D#xZn1bqt_oBY(9LHY-pTYJWA&3 zDLX`WdM4oJ=Yj`hferh(r8N6ms$P7t=v5dt*Y%h zaBM@1KcBI*!A7>an%CoJB+U}npUF)(@kMj0RcZ|kO-HVN|N8g3e;c=Na%OYH{iKdq zG$-=TXJL^KQJIpmezlPjAC?3&o1h_aY&)JcAnH7|q=tl5fNZd16Txye!T#YG>*Fyt z#yPq(7Px9WTO3DGigLz?;XZM*kjI?hJV84jVIM&~9ASNw zVPBr3EooRy)ac`?yU^txxDR1b68>0?lW}>9x@MjnYdc zFXxDJ(h`C@TiU`Oe*Co;tL6UcbvN8G*t}E=r}hH3(h>@V*D*~`J1?+s^Ok&Z$!Zcl 
zEN2LSB`=vm0ojuJAWLOeg0mB33BFk;RTV%QYI$plbVY@%sBcXRNa~C zrU$vlq$1+CxWT9{0VA_wq&824=rj)OivKmn&7=(#p4bOjU}ZAJ%6N>m;Sek1304T~ z8T%YX^}4d$0&b_t$hYq>TI?05X-eFzv)c7$?MSUTA{N=pK5KZdZZos%4S_Akh`^G5 zjz!>p%O_v^P>pk6ef^Ec`uoLlTb#R$I18nwCDw^UnX3l_?(pFn2z z?b>wLqXH_m{WR*qzlnyUj6I>_&Yf0AVu{MC{9TXFB_qi;!YMk8VfQE4*UzwTFvjvk zBO_w5CD)-63bK}{z)Ecps<#TbwerswAn#Z{Y`2*oT0cg`V3u4qNx#8gl1Va}z`Ye&hqa0X8E znk`Y~Pr~v6?AQA)0>2MP9~AXDY75>3YtXakGGi5pg3mP5>3@#8W2fezV#zs&QXEB1tdW-JIaKGG&x?kIliXMkMIVX0dnltkrszf!ERV)m z-{KBL8LEkrk$CfBG)NVBTz_o63e4fTDz*GbZa8A27N(I|tB9MuU0w6wI^?TI!c zu%0B33iMla3dh(t9$>9Mz}jwxwE^h}$AYjza4OyJoIob?CJGnob8|X*UNvqkz^ZFZ z1e6TpQS8!%O9iY|vw=QAE(}lCpyJsEehVr3ud{n)^hGI;8Ygz+vyzqI&U)K;>!)6? zwB#LEAHDV1;NV5ptRQ-K)cSB4%0=Mj?|9N$hOCC2|FG-si)Px_03D{w>(}~j$|8VU z+P`UeLvZ^UGqapOD{zbD47y9s7Th7ts;?=Gdxt+eaCqYNy8G#UpdHipF{g&gMq7eA zbC#P@XcVDLX6WQ2te-xGef3iP#0LL>s>&6`{Mz#RF!UdOSao-Qmiuf$9#;9)ud9V}nwGD5t<`lbu$1<> zYJ#JK#y>M(#2e@1;FdJ!>&P z;(#)uq2np2gk7Rd2{fwc_II#;;y$cTCc>TDBC?}yD92-yzF6>y;HD8LxV8B}Kc_`t zEFfqJ9I?(e_RzrVV&pzXWb%R+;WDimduAetPk5|iYNi;XSmEhLGsRG&Zjt}nZ zjvX5uINzrw5t|i6YpiZosN&pmpIynh1NUA44{3yNH3c;O2Me7uFD<{wcFPS2GIwVJ zw||Wx4XQ(o&GLZ?{Cj{~i{|T_J?FT(IC!`?9w@kdx>PMXsi|wQ^PlF=t4|0+Xd3yyY`*c$m?WJMI`BIFFWF`7~2b+JT{* z+iNp(oO=LD(GYBGqCUqtj@2~GSrHDCal64`y zJjZH(g7s4atnLi3IvFArC0|kvb>>KM1nf3{z(b>iCQWtK@#`>irm|=@9-Pi;t)=MG z523KG_D2>oyXro$+Uu;TDeq|(Ks+2J03?FDWDDX&dPx$B0X>%DEBttyyu5NkYN zp*h;rJaM%pq$Le-zvJrbj@>cbJYT`>H1cAKyF1HWft%@f4L#|@W)ep`aTZza@G=)! 
zIBi+0E!|#)N_7vk1En}DeoaObtn4pz4MIIum$8kJn>f-;mS znHbef=R38WB9wufp+>nKt`g6p8CrU(fAPO8q9NMb5uH?Ly~C!4YDco2vS}#i5ux(X*t}3Vw~i`~5`x<`2rmG4O-q`^>U)EHF}QeR z`Bk@LEeZ?PRfBSKO^nk`I}3Tdo-ec~^XY%NR(78Fi3BgsZGZO{WuxlPTjXIAK>hxK zbVOJyv`2f|TyiHUFspZx+Hc`CXt%qK4b(akCS=md5S`r}ENyK|bY$NoN0&oRc|5by zl?B$Mj9G5;Y5cl-1GZ)f?S+fME#XNAZn=*?1-LZ=V}?~1l+GL`J)>T`#N z7#_H=X}QU{HG{dVEWt4h2;dV#-} zhF!N))GkffX687z3zu2}l-(yknWOLB^l|KZYRlae^|4ruIiQ)0ZK1UF0&pK29k_4^ zqa`XVDEiVegk*};sen?gn%=2dZeK)Y5y0zkU{FJ0wH(f?-iy=Phmvt4|mRsMaCYgM#a_-uiRGNQw;hvnDygsnp7Ddqktv&^^IKF%IL2^nB zg3hVoXQeuyszfo6XJgDMV3|wSar3?{EFC|E)xj=$F++V*WV0z+ak~oZHA+ey;G`2s z261sl4O`FDRvTTB;xH}<2T!{@(hBD7o*K^ct**wym2S>gbJ{btW(K~upHuKb&tutU z7A3UYtP0%0TR-upOXc+M{@-r8{if};3&!bwZ7UNAEeQ;19f{1Op$I({idIp01uiBE zzSn@K>H~&pX7}(dlvY`efEM_`)ra5oV9Z+9rpGL((Vh7id}9#?Y4Uov(Dw;Lgw<-QxtT-%!0QR2T3ABI@@1tLHGwO2d9f?iB>aCNuP*gHq7i`?0a1#i4NEktP|B0Dq6 zF+?-0ZtdX6T_>=5atFQ11TD^yNg=~kSx+wp%FzY2HNF;~niS#nE~ltX^5C7FBrBGBU!6T=s7R}knSN^r@A~BLD7gRV z>(}3QQ-AZqETKO46baL|Y&)V}pj;sb5D#1lSIFv?Pi$;^cW zZM8axRn)}2zXMe*5m8eY-3gqiJ2-tU!i13n_Buud)}~XepV&g@z7y!3-oknTtWuYI zmSdPOgho5xawV&7OR1Uzn&8%F?|8AZnqo0S`r-3;_xH+@2jCV;vmlIoCX4MuU*qT0 z2;?MV73*XlLcg_zcYN|q50^Xmp8w<4zrAi}^C_cb6~$zPZW>`W;jEf2#!SPrm8l$S z+s~{g+X1VD32YY7oGrM;I)&}-3&3rb%eLP+tEL;)+>Kduqe2bC@R%^(T)$V-lJ5m> z%fi&>b&Dhr_1T-27zx)aA$-<`*?a(WlZ zc!1?{CZh{UV>n-uOt}>)EvcFZ7Jz$EGDkSL-*Mwm7dA*&nM!xwDsCP6T7elqlf`Zv z>J-vsJi0{r6x?t7#2b{BeE2`z`Hg?Lcl9O1bPeTXf*wb#O(uxa4u)}8!$h16#=$+a zmTM5zX49PGsXc9FABeKDgOyx4P-dYQ>`lro=i|DIKt4}Xn!M%yz+G9DFa+pXP<7t) z$oY1;x}Vgp&#vDVr@TJM#8$K)i_9>rJsp35IX0;PlwC%tU6`%`S=~^4XYYFS7|Kp zOfo4-h?U0c=RLG}+K#pg*&x{h9PFWT*#!dng8pZ|YPeD1Gqj(&GH*^hXZp_i11 z@(H52jbYpt>nyyR9Lq^#ahsJ4S)zJ!{z_{&QVYOsmnjebEWqs_?*mA`pFctK{r4{F zYTnGn(H!H@4+PwDUbjP8l&7L4p3aCK*Zcry18%{H9LNb|njk4S#ee{g3oK72Sia{3 z+V`BmdVh%Z8K)p{q@8x{%u?2)>hRS{C`WvC=bTl*R-SATr8;tW#_OyZ7!TgI_5dnE%&ovyR90*wo_aSwZNDRY#n(Ek}y_qIKI3%(I9^WV4r-fi#J zyH{D<`BmVbKU_d_O#=bfUNphM2Ww?{gAAk(^2YHMKXQ5{uH7zd(rC7s(XhCYfG9;U zXL&S4_v987#=>u5me3IrXNc>z 
zvZk3)&^HRBjPiILHC0vQUW>%>@}|2gRzchAV%-%x1UG&AC`Es1yZpn?|Mo$-rGIl% z`P6^?xA*+l6VU-AocT!gPtGUg0L`>37MgPfWqygYF_Cd=W>U77DpN-Uxa7lGfV<|G z^=)Z7jr_fxuX&8m$D7;RVsJN)(Q}31ZY)wSc@n_s%Wvum67Jq95B%{rZa#i8Jr^0;Xt!hJEUKYez0;9M9k^-DXRHV2Sf7YB zzN%*tTE0cwxWBUf2d-I1T`cp3PsYva=s~!ZEwhIt-YsQQIP7x z>RY<=N~U>^)ii%0!aNNL>t-gD7)*s(l#+C$KM|i>{P^wv+0CKk8@}!9x88YAbm-iZ z0}lLYVWpd5GI2U@oXpCu9Cl1|(rlT^G5ks;nJ{1W8G<|HP6nd-QkxmMd287Wjh!vP ze-_~OuO(azp~G4o>w#N-7mWC;ERm~{t1P$MQ4HMGgB2ZbSB5HEwN_OOry17U9jxvQ z(YoVqY~FD~q#0U5BVG2Cveaxhi`YpMOZSwGb+Nkbu-^J?z^!z~EU?GFJWE{zGpIdr ztb9IC5gzB(U==oOf}5>nX&PfP-bInj5HGjz_+Px@PoMMtC%m`f{a^ag^o{@NyLbF+ zcEE2>60~~DQd`0<4pI_oH1mt#&XFRIbQU`;I<0s8wRh}NAAHk-@ls8Js%hEvxVboW zm{Ii5D}WE89(#k^RVU1wjz);FT0|k;a2GodZYOcvm?*5-M4>Q2nw=3Se@Bq}A<-Gv zE9o3R(eQKP>pmsRiwp;%1k1-xAi3pEZ1zW3m9csg#EIr1nAqTRYu^+3TlnW2QU4;@IW%VCw@V%aAo=xliXy>)qdNaCW*U(DZ*d1)4 zvk_ww53zh;{jRrt`nMhyT^daqnTmh@e;)hp&G!|LpLP#mn`0wd9T@~fIRLgGaqG$0 z#1L~taw=O`OG+gD2GJ&X=RaGWEa8nezG*kFrYlWwn??E=1iXC6=SSGm^G&_Xgu7|ejmFR+{WN#{>k^GrqKmN@yp?Py*TR|6bts@uUkdM%l{IPflXGoa5zheAUY@d+m8&=Wz{ifAik*imU(c zAAN7wJ{+II60&4TCtK0Mblh?hYvM*qP9*(GE3kLW@=T#Rxv8{d{#69MjB^E0dpN2- z=zI%aZU4cJVc<3?iMocQxg34VJ@4##QUzg?MX<H-Q}{x@?aOUn{LOxyH4YPh*8dfX@-`QrX#qyZ;+MpGfKSKlUt}ht(6{E`r4ME zG&*R;M!_v{1X*kbYAcE|j_l(9uIjUTpD;8mzLwmb#*O#E?K_|q_(c>`m0h#e2&cRE z;E_+i+w$N%)NzIW?qPo@VjZmj|=0_RO}1uB{FHys5bQ)M0Z zUS#pq^5|a{ac+qS1ly8XJgZglXWH0moOAlzn>T>tz-61BA8GQV5I(r)t-EJE_n;-! 
z%?@ZtsM9Tah(U~d5x6abTXS+3PBX{W(Gvo7Ez6PIcoTZ}p1}Um3@a?4$|qvs+l-9l zIc6NCffyZ*-6J3gHqkk88iST~cUEMPL#bTfe4-lM`q}LSZrG_H%Hz!eZ6qN5FQ(7k zT|OhYed?q*P=|Lqo1vd>W98B5&F}ia?>|1eG}_q{-2B7G{-0C-;oEoTzp@>zN+~6W z*{kqMiA;j#IXWyD;+V9kI|tAv_?qj-{$`+J0?5EG|9T+d&*98?|7>#8t4vo-hs<&d zcdb@ky*(}h_acxs7p!}DjYr^FS}&fipmm>X0r&P#5rz`y+RSF9SZz^om844|KWCGP z)MxMe(aku_N1bSvl(!z56du{PnHd<+H{`PtpUFx+NdvjiDD#y=LiOi+d9DmYQNG zWo!D|`g2_h>HpoVq=8#X;3XR-xu>e=-=`)1qOpyv+R!Z9DRRBd^u2z@#o)H<@b7~< zbn~CbuB-mnD<<+RyDc91H=rxwR_Qkg%VKtPMAHW>f+}>7k(*c3LxOfb!|3D|k~{9k zh4-AofdUv8nRMN(yD`y>qQq%Sl5UKSvKJP-as7~sfGsZ%9QLXfzp~)=y!8y>(y4r12Lx$L#P!_H%=uC<%1gaP!&+&hx!-g z)ZA8`fvDM)t0k^JmV(*`&s>tkUEm_{`J(n!(!(hu`5+d{T5;mN$8L)VfV+PAe4^0q zYM-qRnJP~FqhboWU_$L_`I&Ip^6 z4819LP?vNu9_TuE32uH#=ZpymIl+Ias+iF<)geY}+zlQ45I9w}CwX_!*XSFeaEwS* zH(vFuYK@3IpQ;N;sgx9SF<;JL29If#n59#U%Mr?UiD>`y#2f$Q)xUb)m79OJ$Vl%A zZaesk-Fn&M=3Efn}b z|24oWkaguW|Me{gEw`kL8r;_}(6Z0m*dZ3oz_jD!0lPVhzP0vWUpu7p^h_LbM_<@| zu9lLa)!N6IP*F*Ss87+GEmjW;vEtdKl&d(GbUS ziLJ>t9&yDbKYYoXe&sg~KCOHGB4EpB-Rlqj@BikO@)6%Wde=v8J2m;mVYJ$rB|S_z za8-oOgHtT%q%jcs;I{L#CF)F|Hj7{H49&PWBROhixTti#i~Za9Y|he#7Iv>tVd%6j z`k9|xn*_X6c|b$z*ICrT+qb0tfDe??3MzAE=Q7L9dqXv;&a^~(r;pCD<5<4?1P-6( z0H!ITHg{8-i|!>jytXX4JCy;dfg^~-L1ol{=q`4DpF|- zC+mDu4M{Qt$2xP}!Q^)(OF&AFu*PRMy(a+KH1EJpz#7e(K zD$eO?)Z<2EJ@I(yE+Ta-%c!JcW5j%P?B0s`0x zcc?UwGeIpcEDEuyMwYZ15)re!tlB9{&WF<#s!;{87;$w2boEz=PRckc+!i8b3kq)5 z^hE?IugPsef{G%amSsLeXI8Y^JAzO3?od_Jej3cvP;;9ehz(G z>r}LV#Z|1Y$0=rsrK_46Wo!qduGq z>@K|TGX0^&a~Zf}I=nivRZ0UQ%8Sh6yqqR^|MDk4_OD+4+84j4_r&Pf{Q>)|E%#n0 zy79)cbLT#^T1|0%HfuIM2>pG6;gyM*x8R~i-tRBWP~P6I zcP0L8hmRh;u*$YNTkl##a?E!DJOSX({#)63d)A$`$IS_^XEf6H-F*Ve9Vx7~+0?TE@p6ra@J zv!56^4&Y$pEedy!i%qG#mCrMsxPM0|FS*d&cXuDKyT@~|_jhNHp6K-a9>j + + + + + + + + Chat Interface + + + + +
+ +
+ +
+ +
+ +
+ +
+ + + + +
+
+ + + +
+ +
+
+
+ + +
+ + +
+ +
+ +
+ + + + + + + \ No newline at end of file diff --git a/src/ai/.x/templates/openai-webpage-ts/package.json b/src/ai/.x/templates/openai-webpage-ts/package.json new file mode 100644 index 00000000..7e60f6a1 --- /dev/null +++ b/src/ai/.x/templates/openai-webpage-ts/package.json @@ -0,0 +1,25 @@ +{ + "name": "chat-interface", + "version": "1.0.0", + "description": "Chat Interface with OpenAI", + "main": "script.ts", + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "author": "", + "license": "MIT", + "dependencies": { + "@azure/openai": "1.0.0-beta.10", + "highlight.js": "^11.7.2", + "marked": "^4.0.10" + }, + "keywords": [], + "devDependencies": { + "@types/node": "^20.11.1", + "dotenv-webpack": "^7.0.3", + "ts-loader": "^9.5.1", + "typescript": "^5.3.3", + "webpack": "^5.89.0", + "webpack-cli": "^5.1.4" + } +} diff --git a/src/ai/.x/templates/openai-webpage-ts/src/OpenAIChatCompletionsStreamingClass.ts b/src/ai/.x/templates/openai-webpage-ts/src/OpenAIChatCompletionsStreamingClass.ts new file mode 100644 index 00000000..aca48988 --- /dev/null +++ b/src/ai/.x/templates/openai-webpage-ts/src/OpenAIChatCompletionsStreamingClass.ts @@ -0,0 +1,50 @@ +<#@ template hostspecific="true" #> +<#@ output extension=".ts" encoding="utf-8" #> +<#@ parameter type="System.String" name="ClassName" #> +import { OpenAIClient, AzureKeyCredential, ChatRequestMessage } from "@azure/openai"; + +export class <#= ClassName #> { + private openAISystemPrompt: string; + private openAIChatDeploymentName: string; + private client: OpenAIClient; + private messages: ChatRequestMessage[] = []; + + constructor(openAIEndpoint: string, openAIKey: string, openAIChatDeploymentName: string, openAISystemPrompt: string) { + this.openAISystemPrompt = openAISystemPrompt; + this.openAIChatDeploymentName = openAIChatDeploymentName; + this.client = new OpenAIClient(openAIEndpoint, new AzureKeyCredential(openAIKey)); + this.clearConversation(); + } + + clearConversation(): void { + 
this.messages = [ + { role: 'system', content: this.openAISystemPrompt } + ]; + } + + async getChatCompletions(userInput: string, callback: (content: string) => void): Promise { + this.messages.push({ role: 'user', content: userInput }); + + let contentComplete = ''; + const events = await this.client.streamChatCompletions(this.openAIChatDeploymentName, this.messages); + + for await (const event of events) { + for (const choice of event.choices) { + + let content = choice.delta?.content; + if (choice.finishReason === 'length') { + content = `${content}\nERROR: Exceeded token limit!`; + } + + if (content != null) { + callback(content); + await new Promise(r => setTimeout(r, 50)); // delay to simulate real-time output, word by word + contentComplete += content; + } + } + } + + this.messages.push({ role: 'assistant', content: contentComplete }); + return contentComplete; + } +} \ No newline at end of file diff --git a/src/ai/.x/templates/openai-webpage-ts/src/script.ts b/src/ai/.x/templates/openai-webpage-ts/src/script.ts new file mode 100644 index 00000000..4ac8b45c --- /dev/null +++ b/src/ai/.x/templates/openai-webpage-ts/src/script.ts @@ -0,0 +1,298 @@ +<#@ template hostspecific="true" #> +<#@ output extension=".ts" encoding="utf-8" #> +<#@ parameter type="System.String" name="ClassName" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_ENDPOINT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_KEY" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_CHAT_DEPLOYMENT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_SYSTEM_PROMPT" #> +import { marked } from "marked" +import hljs from "highlight.js"; + +import { <#= ClassName #> } from './OpenAIChatCompletionsStreamingClass'; +let streamingChatCompletions: <#= ClassName #> | undefined; + +function streamingChatCompletionsInit(): void { + + const openAIEndpoint = process.env.AZURE_OPENAI_ENDPOINT || "<#= AZURE_OPENAI_ENDPOINT #>"; + const openAIKey = process.env.AZURE_OPENAI_KEY || "<#= 
AZURE_OPENAI_KEY #>"; + const openAIChatDeploymentName = process.env.AZURE_OPENAI_CHAT_DEPLOYMENT || "<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>" ; + const openAISystemPrompt = process.env.AZURE_OPENAI_SYSTEM_PROMPT || "<#= AZURE_OPENAI_SYSTEM_PROMPT #>"; + + if (!openAIEndpoint || openAIEndpoint.startsWith('(openAIEndpoint, openAIKey, openAIChatDeploymentName, openAISystemPrompt); +} + +function streamingChatCompletionsClear(): void { + streamingChatCompletions!.clearConversation(); +} + +async function streamingChatCompletionsProcessInput(userInput: string): Promise { + const blackVerticalRectangle = '\u25AE'; // Black vertical rectangle ('▮') to simulate an insertion point + + let newMessage = chatPanelAppendMessage('computer', blackVerticalRectangle); + let completeResponse = ""; + + let computerResponse = await streamingChatCompletions!.getChatCompletions(userInput, function (response: string) { + let atBottomBeforeUpdate = chatPanelIsScrollAtBottom(); + + completeResponse += response; + let withEnding = `${completeResponse}${blackVerticalRectangle}`; + let asHtml = markdownToHtml(withEnding); + + if (asHtml !== undefined) { + newMessage.innerHTML = asHtml; + + if (atBottomBeforeUpdate) { + chatPanelScrollToBottom(); + } + } + }); + + newMessage.innerHTML = markdownToHtml(computerResponse) || computerResponse.replace(/\n/g, '
'); + chatPanelScrollToBottom(); +} + +function chatPanelGetElement(): HTMLElement | null { + return document.getElementById("chatPanel"); +} + +function chatPanelAppendMessage(sender: any, message: string) { + logoHide(); + + let messageContent = document.createElement("p"); + messageContent.className = "message-content"; + messageContent.innerHTML = message; + + let messageAuthor = document.createElement("p"); + messageAuthor.className = "message-author"; + messageAuthor.innerHTML = sender == "user" ? "You" : "Assistant"; + + let divContainingBoth = document.createElement("div"); + divContainingBoth.className = sender === "user" ? "user" : "computer"; + divContainingBoth.appendChild(messageAuthor); + divContainingBoth.appendChild(messageContent); + + let chatPanel = chatPanelGetElement(); + chatPanel?.appendChild(divContainingBoth); + chatPanelScrollToBottom(); + + return messageContent; +} + +function chatPanelIsScrollAtBottom(): boolean { + let chatPanel = chatPanelGetElement(); + let atBottom = chatPanel + ? Math.abs(chatPanel.scrollHeight - chatPanel.clientHeight - chatPanel.scrollTop) < 1 + : true; + return atBottom; +} + +function chatPanelScrollToBottom() { + let chatPanel = chatPanelGetElement(); + if (chatPanel) { + chatPanel.scrollTop = chatPanel.scrollHeight; + } +} + +function chatPanelClear() { + let chatPanel = chatPanelGetElement(); + if (chatPanel) { + chatPanel.innerHTML = ''; + } +} + +function logoGetElement() { + return document.getElementById("logo"); +} + +function logoShow() { + let logo = logoGetElement(); + if (logo) { + logo.style.display = "block"; + } +} + +function logoHide() { + let logo = logoGetElement(); + if (logo) { + logo.style.display = "none"; + } +} + +function markdownInit() { + marked.setOptions({ + highlight: (code: string, lang: string) => { + let hl = lang === undefined || lang === '' + ? hljs.highlightAuto(code).value + : hljs.highlight(lang, code).value; + return `
${hl}
`; + } + }); +} + +function markdownToHtml(markdownText: string) { + try { + return marked.parse(markdownText); + } + catch (error) { + return undefined; + } +} + +function themeInit() { + let currentTheme = localStorage.getItem('theme'); + if (currentTheme === 'dark') { + themeSetDark(); + } + else if (currentTheme === 'light') { + themeSetLight(); + } + toggleThemeButtonInit(); +} + +function themeIsLight() { + return document.body.classList.contains("light-theme"); +} + +function themeIsDark() { + return !themeIsLight(); +} + +function toggleTheme() { + if (themeIsLight()) { + themeSetDark(); + } else { + themeSetLight(); + } +} + +function themeSetLight() { + if (!themeIsLight()) { + document.body.classList.add("light-theme"); + localStorage.setItem('theme', 'light'); + + let iconElement = toggleThemeButtonGetElement()!.children[0]; + iconElement.classList.remove("fa-toggle-on"); + iconElement.classList.add("fa-toggle-off"); + } +} + +function themeSetDark() { + if (!themeIsDark()) { + document.body.classList.remove("light-theme"); + localStorage.setItem('theme', 'dark'); + + let iconElement = toggleThemeButtonGetElement()!.children[0]; + iconElement.classList.remove("fa-toggle-off"); + iconElement.classList.add("fa-toggle-on"); + } +} + +function toggleThemeButtonGetElement() { + return document.getElementById("toggleThemeButton"); +} + +function toggleThemeButtonInit() { + let buttonElement = toggleThemeButtonGetElement(); + buttonElement!.addEventListener("click", toggleTheme); + buttonElement!.addEventListener('keydown', toggleThemeButtonHandleKeyDown()); +} + +function toggleThemeButtonHandleKeyDown() { + return function (event: KeyboardEvent) { + if (event.code === 'Enter' || event.code === 'Space') { + toggleTheme(); + } + }; +} + +function userInputTextAreaGetElement() : HTMLTextAreaElement | null { + return document.getElementById("userInput") as HTMLTextAreaElement | null; +} + +function userInputTextAreaInit() { + let inputElement = 
userInputTextAreaGetElement(); + inputElement!.addEventListener("keydown", userInputTextAreaHandleKeyDown()); + inputElement!.addEventListener("input", userInputTextAreaUpdateHeight); +} + +function userInputTextAreaFocus() { + let inputElement = userInputTextAreaGetElement(); + inputElement!.focus(); +} + +function userInputTextAreaClear() { + userInputTextAreaGetElement()!.value = ''; + userInputTextAreaUpdateHeight(); +} + +function userInputTextAreaUpdateHeight() { + let userInput = userInputTextAreaGetElement()!; + let inputElement = userInputTextAreaGetElement(); + inputElement!.style.height = 'auto'; + inputElement!.style.height = (userInput.scrollHeight) + 'px'; +} + +function userInputTextAreaHandleKeyDown() { + return function (event: KeyboardEvent) { + if (event.key === "Enter") { + if (!event.shiftKey) { + event.preventDefault(); + sendMessage(); + } + } + }; +} + +function varsInit() { + document.addEventListener('DOMContentLoaded', varsUpdateHeightsAndWidths); + window.addEventListener('resize', varsUpdateHeightsAndWidths); +} + +function varsUpdateHeightsAndWidths() { + let headerHeight = (document.querySelector('#header') as HTMLElement).offsetHeight; + let userInputHeight = (document.querySelector('#userInputPanel') as HTMLElement).offsetHeight; + document.documentElement.style.setProperty('--header-height', headerHeight + 'px'); + document.documentElement.style.setProperty('--input-height', userInputHeight + 'px'); +} + +function newChat() { + chatPanelClear(); + logoShow(); + userInputTextAreaFocus(); + streamingChatCompletionsClear(); +} + +function sendMessage() { + let inputElement = userInputTextAreaGetElement(); + let inputValue = inputElement!.value; + + let notEmpty = inputValue.trim() !== ''; + if (notEmpty) { + let html = markdownToHtml(inputValue) || inputValue.replace(/\n/g, '
'); + chatPanelAppendMessage('user', html); + userInputTextAreaClear(); + varsUpdateHeightsAndWidths(); + streamingChatCompletionsProcessInput(inputValue); + } +} + +themeInit(); +markdownInit(); +userInputTextAreaInit(); +varsInit(); +streamingChatCompletionsInit(); +userInputTextAreaFocus(); + +(window as any).sendMessage = sendMessage; +(window as any).toggleTheme = toggleTheme; +(window as any).newChat = newChat; diff --git a/src/ai/.x/templates/openai-webpage-ts/style.css b/src/ai/.x/templates/openai-webpage-ts/style.css new file mode 100644 index 00000000..c3fbe4ba --- /dev/null +++ b/src/ai/.x/templates/openai-webpage-ts/style.css @@ -0,0 +1,367 @@ +:root { + --header-height: 0px; + --input-height: 0px; + --send-button-width: 36px; + --left-side-width: 250px; + --right-side-width: 0px; + --right-side-max-width: 768px; + --max-textarea-height: 200px; + --logo-size: 0.75in; + --logo-icon-size: 1.5em; + --border-radius: 10px; +} + +body { + background-color: #111; + color: #f2f2f2; + font-size: medium; + font-family: system-ui; + height: 100vh; + margin: 0px; + overflow: hidden; + max-height: 100vh; +} + +#header { + color: #222; +} + +body.light-theme #header { + color: #f2f2f2; +} + +#logo { + display: block; + margin-left: auto; + margin-right: auto; + margin-top: calc((100vh - var(--header-height) - var(--input-height) - 80px - var(--logo-size)) / 100 * 33); + filter: grayscale(50%); + width: var(--logo-size); + height: var(--logo-size); +} + +#logoIcon { + margin-bottom: calc(var(--logo-icon-size) / 4); + margin-right: calc(var(--logo-icon-size) / 4); + filter: grayscale(50%); + width: var(--logo-icon-size); + height: var(--logo-icon-size); +} + +#leftSide { + background-color: #000; + color: #f2f2f2; + width: var(--left-side-width); + max-width: var(--left-side-width); + height: 100vh; + max-height: 100vh; + overflow-y: auto; +} + +#newChatButton { + border: none; + cursor: pointer; + border-radius: var(--border-radius); + /* background-co lor: #557CB4; 
*/ + width: calc(var(--left-side-width) - 16px); + margin-top: 16px; + margin-left: auto; + margin-right: auto; +} + +#rightSide { + width: 100%; + margin-left: auto; + margin-right: auto; + max-width: var(--right-side-max-width); +} + +#rightSideInside { + margin-left: auto; + margin-right: auto; + max-width: var(--right-side-max-width); +} + +#toggleThemeButton { + position: fixed; + top: 10px; + right: 0px; + cursor: pointer; + color: #fff; +} + +#chatPanel { + height: 100%; + max-height: calc(100vh - var(--header-height) - var(--input-height) - 32px); + overflow-y: auto; +} + +#sendButton { + border: none; + cursor: pointer; + font-size: 1em; + border-radius: var(--border-radius); + background-color: #557CB4; + width: var(--send-button-width); + padding: 0px; +} + +#userInputPanel { + display: flex; + max-width: 768px; +} + +#userInput { + margin-right: 15px; + width: 100%; + max-height: var(--max-textarea-height); + border-radius: var(--border-radius); + border-width: 2px; +} + +textarea { + resize: none; + background-color: #111; + color: #f2f2f2; +} + +body.light-theme textarea { + background-color: #fff; + color: #111; +} + +textarea.w3-border { + border-color: #333 !important; +} + +body.light-theme textarea.w3-border { + border-color: #ddd !important; +} + +textarea.w3-border:focus-visible { + border-color: #555 !important; + outline: none; +} + +body.light-theme textarea.w3-border:focus-visible { + border-color: #bbb !important; + outline: none; +} + +.user { + color: #d8d8d8; + background-color: #111; + border-radius: var(--border-radius); +} + +.computer { + color: #d8d8d8; + background-color: #111; + border-radius: var(--border-radius); +} + +div.user { + margin-bottom: 8px; + margin-right: 0px; + text-align: left; +} + +div.computer { + margin-bottom: 8px; + margin-right: 0px; + text-align: left; +} + +.message-author { + font-weight: bold; + padding-top: calc(var(--border-radius) / 2); + padding-left: var(--border-radius); + padding-right: 
var(--border-radius); +} + +p.message-author, p.message-author p { + margin: 0px; +} + +.message-content { + padding-left: var(--border-radius); + padding-bottom: calc(var(--border-radius) / 2); + padding-right: var(--border-radius); +} + +p.message-content, p.message-content p { + margin-top: 0px; + margin-left: 0px; + margin-right: 0px; +} + +.light-theme { + background-color: #fff; +} + +body.light-theme #toggleThemeButton { + color: #888; +} + +body.light-theme .user { + background-color: #fdfdfd; + color: #111; +} + +body.light-theme .computer { + background-color: #fdfdfd; + color: #111; +} + +#userInput::-webkit-scrollbar { + display: none; +} +#userInput { + -ms-overflow-style: none; + scrollbar-width: none; +} + +::-webkit-scrollbar { + height: 1rem; + width: .5rem; + background-color: #111; +} + +body.light-theme ::-webkit-scrollbar { + background-color: #fdfdfd; +} + +::-webkit-scrollbar:horizontal { + height: .5rem; + width: 1rem +} + +::-webkit-scrollbar:vertical { + height: .5rem; + width: 1rem +} + +::-webkit-scrollbar-track { + background-color: transparent; + border-radius: 9999px; +} + +::-webkit-scrollbar-thumb { + background-color: #0a0a0a; + border-color: rgba(255,255,255,var(--tw-border-opacity)); + border-radius: 9999px; + border-width: 1px; +} + +body.light-theme ::-webkit-scrollbar-thumb { + background-color: #fafafa; +} + +::-webkit-scrollbar-thumb:hover { + background-color: rgba(217,217,227,var(--tw-bg-opacity)) +} + + +.hljs { + margin: 0px; + padding: 16px; + padding-right: 0px; + border-radius: var(--border-radius); + overflow-x: auto; + max-width: 90vw; +} + +/* + +Atom One Dark by Daniel Gamage +Original One Dark Syntax theme from https://github.com/atom/one-dark-syntax + +base: #282c34 +mono-1: #abb2bf +mono-2: #818896 +mono-3: #5c6370 +hue-1: #56b6c2 +hue-2: #61aeee +hue-3: #c678dd +hue-4: #98c379 +hue-5: #e06c75 +hue-5-2: #be5046 +hue-6: #d19a66 +hue-6-2: #e6c07b + +*/ + +.hljs { + color: #abb2bf; + background: #282c34; + } + + 
.hljs-comment, + .hljs-quote { + color: #5c6370; + font-style: italic; + } + + .hljs-doctag, + .hljs-keyword, + .hljs-formula { + color: #c678dd; + } + + .hljs-section, + .hljs-name, + .hljs-selector-tag, + .hljs-deletion, + .hljs-subst { + color: #e06c75; + } + + .hljs-literal { + color: #56b6c2; + } + + .hljs-string, + .hljs-regexp, + .hljs-addition, + .hljs-attribute, + .hljs-meta .hljs-string { + color: #98c379; + } + + .hljs-attr, + .hljs-variable, + .hljs-template-variable, + .hljs-type, + .hljs-selector-class, + .hljs-selector-attr, + .hljs-selector-pseudo, + .hljs-number { + color: #d19a66; + } + + .hljs-symbol, + .hljs-bullet, + .hljs-link, + .hljs-meta, + .hljs-selector-id, + .hljs-title { + color: #61aeee; + } + + .hljs-built_in, + .hljs-title.class_, + .hljs-class .hljs-title { + color: #e6c07b; + } + + .hljs-emphasis { + font-style: italic; + } + + .hljs-strong { + font-weight: bold; + } + + .hljs-link { + text-decoration: underline; + } \ No newline at end of file diff --git a/src/ai/.x/templates/openai-webpage-ts/tsconfig.json b/src/ai/.x/templates/openai-webpage-ts/tsconfig.json new file mode 100644 index 00000000..464e3ae2 --- /dev/null +++ b/src/ai/.x/templates/openai-webpage-ts/tsconfig.json @@ -0,0 +1,16 @@ +{ + "compilerOptions": { + "moduleResolution": "node", + "esModuleInterop": true, + "outDir": "./dist/", + "sourceMap": true, + "strict": true, + "module": "es6", + "target": "es5", + "allowJs": true, + "typeRoots": ["./node_modules/@types", "./types"] + }, + "include": [ + "./src/**/*" + ] +} diff --git a/src/ai/.x/templates/openai-webpage-ts/types/marked.d.ts b/src/ai/.x/templates/openai-webpage-ts/types/marked.d.ts new file mode 100644 index 00000000..6c35e540 --- /dev/null +++ b/src/ai/.x/templates/openai-webpage-ts/types/marked.d.ts @@ -0,0 +1 @@ +declare module 'marked'; diff --git a/src/ai/.x/templates/openai-webpage-ts/webpack.config.js b/src/ai/.x/templates/openai-webpage-ts/webpack.config.js new file mode 100644 index 
00000000..82f4687f --- /dev/null +++ b/src/ai/.x/templates/openai-webpage-ts/webpack.config.js @@ -0,0 +1,32 @@ +const path = require('path'); +const webpack = require('webpack'); +const Dotenv = require('dotenv-webpack'); + +module.exports = { + entry: './src/script.ts', + output: { + filename: 'main.js', + path: path.resolve(__dirname, 'dist'), + }, + plugins: [ + new Dotenv(), + new webpack.DefinePlugin({ + 'process.env.ENDPOINT': JSON.stringify(process.env.ENDPOINT), + 'process.env.AZURE_API_KEY': JSON.stringify(process.env.AZURE_API_KEY), + 'process.env.DEPLOYMENT_NAME': JSON.stringify(process.env.DEPLOYMENT_NAME), + 'process.env.SYSTEM_PROMPT': JSON.stringify(process.env.SYSTEM_PROMPT), + }), + ], + resolve: { + extensions: [ '.tsx', '.ts', '.js' ], + }, + module: { + rules: [ + { + test: /\.tsx?$/, + use: 'ts-loader', + exclude: /node_modules/, + }, + ], + }, +}; diff --git a/src/ai/.x/templates/openai-webpage-with-functions-ts/.env b/src/ai/.x/templates/openai-webpage-with-functions-ts/.env new file mode 100644 index 00000000..bd323058 --- /dev/null +++ b/src/ai/.x/templates/openai-webpage-with-functions-ts/.env @@ -0,0 +1,10 @@ +<#@ template hostspecific="true" #> +<#@ output extension=".env" encoding="utf-8" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_ENDPOINT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_KEY" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_CHAT_DEPLOYMENT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_SYSTEM_PROMPT" #> +AZURE_OPENAI_CHAT_DEPLOYMENT=<#= AZURE_OPENAI_CHAT_DEPLOYMENT #> +AZURE_OPENAI_KEY=<#= AZURE_OPENAI_KEY #> +AZURE_OPENAI_ENDPOINT=<#= AZURE_OPENAI_ENDPOINT #> +AZURE_OPENAI_SYSTEM_PROMPT=<#= AZURE_OPENAI_SYSTEM_PROMPT #> diff --git a/src/ai/.x/templates/openai-webpage-with-functions-ts/README.md b/src/ai/.x/templates/openai-webpage-with-functions-ts/README.md new file mode 100644 index 00000000..8fee923d --- /dev/null +++ 
b/src/ai/.x/templates/openai-webpage-with-functions-ts/README.md @@ -0,0 +1,35 @@ +# `ai` chat website + +This is a simple website chat interface that uses OpenAI's API to generate text responses to user input. + +User input is typed into a text box and added to the conversation as a message inside a chat panel. The panel scrolls up and the computer responds with streaming text output into another message in the chat panel. There is a left nav that has a "new chat" button and has a spot for future expansion w/ a list of historical chats. + +## Setup + +To build the website, run the following commands: + +```bash +npm install +npx webpack +``` + +To run the website, launch `index.html` in your browser. + +These setup steps are also represented in tasks.json and launch.json, so that you can build and run the website from within VS Code. + +## Project structure + +| Category | File | Description +| --- | --- | --- +| **SOURCE CODE** | ai.png | Logo/icon for the website. +| | index.html | HTML file with controls and layout. +| | style.css | CSS file with layout and styling. +| | src/script.js | Main JS file with HTML to JS interactions. +| | src/ChatCompletionsStreaming.js | Main JS file with JS to OpenAI interactions. +| | | +| **VS CODE** | .vscode/tasks.json | VS Code tasks to build and run the website. +| | .vscode/launch.json | VS Code launch configuration to run the website. +| | | +| **BUILD + PACKAGING** | .env | Contains the API keys, endpoints, etc. +| | package.json | Contains the dependencies. +| | webpack.config.js | The webpack config file. 
\ No newline at end of file diff --git a/src/ai/.x/templates/openai-webpage-with-functions-ts/_.json b/src/ai/.x/templates/openai-webpage-with-functions-ts/_.json new file mode 100644 index 00000000..8b9f1e93 --- /dev/null +++ b/src/ai/.x/templates/openai-webpage-with-functions-ts/_.json @@ -0,0 +1,10 @@ +{ + "_LongName": "OpenAI Webpage (w/ Functions)", + "_ShortName": "openai-webpage-with-functions", + "_Language": "TypeScript", + "ClassName": "OpenAIChatCompletionsFunctionsStreamingClass", + "AZURE_OPENAI_ENDPOINT": "", + "AZURE_OPENAI_KEY": "", + "AZURE_OPENAI_CHAT_DEPLOYMENT": "", + "AZURE_OPENAI_SYSTEM_PROMPT": "You are a helpful AI assistant." +} \ No newline at end of file diff --git a/src/ai/.x/templates/openai-webpage-with-functions-ts/ai.png b/src/ai/.x/templates/openai-webpage-with-functions-ts/ai.png new file mode 100644 index 0000000000000000000000000000000000000000..4ba344c95ca7f9b1da183b2b3c959f6c4b723ee5 GIT binary patch literal 46281 zcmV)TK(W7xP)PyA07*naRCr$Oy$Q5rSy?9fohgPpX5P%3V`bI|RTKpRQV0kX&+k>fK2t>8 z%tY;8>#dHpaz);V6KB}_+yDOl*}?yQ{qGU@|LF)g=X=R7A}e+a~$NZgTetEAV=;Na8Tqf3a0@498T`!Md~_v z<~V#FM~>xDK<*V@?zx2{e;2vyfE&<%*A4yt?CSjdL#OsGrmK5)&77O< zd&9yH{Lq726lvraMd2icQ>2LlFUedlLE#l{k>ze)j-m^AIf~qIdGP`Tih_^MKluPI zcnf?S84JUjR;?r92Uz4TbAv zZc${sbXF7|ih_sAg*fc|xdTT=OzuZ6nd`{&Paev_lM~vPjH%uOP{2Mdr^kNEUlop< zA$NGU^0_=Z_+!U$T)78x4h}nq%yk^kDT>_TnRz^rIWD}c*Yl%ZuX}E$=RLS@|MLC2 zcg)_o_WdBS!&0V z7ejS!^S6Z!k)$ZEuETTjdwUI;r`qL3R^-*ntgl1+EcSSn8+_@r+wYOJe_#8qu}GSq z>q6q(dS*UC6nTQ8NZ>dge5V7??P0W)px5ybIKyOdE;{ zC|;Ru#vko?9pD`B;P3w#9{AKhVQw?WLg)g61Rd8y*AI}%kP<8vm_)=2?_4yw ztMB#W2zsaR(8+r844YnvvmFnomzHt#@L_!6$}8~I9lMY_J*19j?$?E%@O}Z$%sPeR zB+yx)5IhyY^qxv!5Q?%;ToLm;wgk6X1SPnvu&sU0V>yhnSmi$KJEeu5s|2O_9R+py z5+UANG$nA2u*hrYs3S@+Ez+6$h;ANoANsk-w{p4JWv9o>=kmRaEOp?!E<7)S<0i=S z46?1J9U$}JxijeaE|TF2dffqh=S04LfAP;Reev}-ecRVBer`LEkAXWZiWhqsKJIf< zyn6$8{K`iW-SrThtqe=OIRuG=EXm>eo=DgtiDkx8(g)d-c*+8W%N^FN^}G-X9f*_z z5~sj0P)XcNNiB3Q=9j*iBzbT*N9rn|1p>Jv=O7?BM+A1tRRf-1ubUeQBLL*Y^S;cn 
z{NCd(3@!=>Uhbloae*cnc7_=GF1Flx+`Ib#K6mXkxc~6wSo7yF^nE0Hd*~(l90FL` zfihXJ7L;`$TUJm9w3X0S3JQl30%jOYQGr~h-wdMNE$l+#=RB_?(s&%)g#ovnd>$vi zt$|xOoPb-UZ++(}uQvhQx+A!kOX#jH}=BW8XIS3)2AJ0C!OouS9{53Jp0i!aw}t zhp>3>I%ZcB6q!onA}!!}0sKyYEKAT4Ntz?`9HAO~a65j5>mVx}0a-T%I;0?mVhnwy zC>>6Yk+K|~PVSV)5WJBG0gub`ZgJV}b633Qy<7{tM!0l#0H`8ciIvND1 z3PPpp`d$aOU5MpP=EqV&rseBk5|fAv$ph+StBEDT)C zW*$7pMiUlfhewx#H9pk(S?$m8cte$Z+NxY zmWij5H_M<^^nzgU9lw%{Mi5lA21rtldR9KCf~(^0oVNyO>$JomLdM%jVAVBi^v2K@ z*-_MKz*3=;8I$P#P&e_lLAK;8F3ZUk|C3h zj?>}Rk0i{nd#Q`z=yqK9h~CG3{ina-6>Wf5;0}ud5r7AW_}icV0QyJQu&`C2SNP}@ z9%oN~>o~$udGB7}VZ_zu@o%GL^SE4?FK?3zQp^|pKL=i^0=FVc-W-vi$kr~W7Ot#P zP`50KS}Sq+TUVr~j1`YmRb05aE(+z~shcDANohD9dasn!yu(iJiIks>^&#&J6#%ph^16&YUM8#hs!t<~J9DVPHv3~2l=nWloGFQMu{AwIfgwKqX0-K(RI=jziVF~|m`DM8C zvP*H_zCAeOcj3l`NK@`$RN7>*nGr2{b18vq61@gmOIt)CG{%OyG`32(%#8(i3zT*e z32+sTZEH$X(VYaY#`&%CUsrg!5IIlf#VS*a6~^UGUAw>bd1d)F6lr@x4^Hl@@+mi% z%Wf-zM#-2iV6*$G(9GpoDbQqjdj;Hh6Z6?BUj5R?yy3O~e)q50vn#lV#Z$S@pZdsO z|J|8C`3H1Q4-pg{nPl>4zK84UqCjQ@;m#h4%Xj_hwcqmGpLSmO$S>AcOYx7V_u=lZ zKIQ&@zWqlYy7i7{bk`#6pWlUal!&w~JP&c4BH;ghAnO-c>m=x1x$iTNdis-oI{1z! 
zezI}C&#W)4-SN*aKJodxek*Mb7={&JTMS%ZK2ceAn5?{Vi(8r zjpVAQKjFiNpM2eWo##C2?)o`5e&rM2d)t5g^!s*3!9qXw;DtL@hV%Y%*0@=H$Oj@^qo=Koyc)E)h)Wt}5NFS-Esqz`;4rYS@vvGt4|G~5G`l%bf z{?S)D$K~8|;jLmXPK4Ke@;|-ruXh|DU`IsPf!cSAj*HFBb?lj6z*aZIiTMQAfA>qC zHuG)Q|9$;oCjCGC?SFdptsndI4+Lk1bF+yLpIdp*5mQfxw&fth!?Pni`PJY2I{!6K z`OWj4@8Mtg^B;TQFF*GSI|c=Icr(~sTNm!VxU>s#9AZ5kBH7i&Xu1EHr@rpHzsY&p z^5(R27Pp(+x3lwy3M>8{3=&3T$8%(|jj=?Qgy-yUn(q0EPq7J)2I3KJ(S>6l^mGEQP176n(~ zFa9Rk1=MuP!HbIsbKO})XNOqu&f!UqkACkL-}Ka1SKtndD^LBGPkiORKl)UE->`#z z?0`PSI3w=?I{g4i5#iiijwik0<&Sq>_V|Xh-VT(-$G-B^fB4|XKDjt>I^HmY?*{NZ zns^Fq%ocdu8(#9Q&MO}CN9O~!{l56UFTMOTANc6U_9g*(v5W4^EY6%=L4RQe>)iya z%iUwo`q-PVxX|<6{`wF6ZFt-9r|pP4=;l7cq>vrO^MO1`;pHCA%vr57X_>3ZLtt3r5dgVN_(vu2Z{goNz zy=|_vWUH zFmkNUW>|d6)xUbhyS}|a+Ajn|_rK-$-}~_2ANle5K>;@cW_q(2q#2T{mX1C4cYo-D zfoQLF{N_LW^|OEd&p)s`bm1mGT$=g%K2COHJoX1(`t0D9k84nyZ5Q7^Jv8&#w|(&3 z%!9+;Vle~9pGBO~OO_*Ws7K^j_xd7T;nlWVKc;RrvG3r7dh=Xz-?9KX5~M7oCN-s5%$3T$DOBQu4jC7LHv=Ri z%sZQS%p>ss{MyZr`8Eaj7tj3mCx7ynzT=X04|7AnFMJGX;R}3tVTxfV!`VF@Jo$~U zy2knDEAF}wK-Z5c{_f2DCvN`vvj^f{$KA}(>&;-`I#_zvb-%yw?Js)OMW64}Ck}n) z=3hO2Fbu%h58n?kj7K=Nv%ph7^|q#Hx`314`Xe9sxc7xqFJ9R6;dt}vlIAKzioh)} zNuxlo=b<+wxAN& zG!fbu3iTu#ud17u?34cN&C3^i zz6XB!4}W;;Hw%V- zSny4`HaI@Z$BP*{!Ul8P*OwAgg0b&PQK|#HG0C z`yK}I7Mz}oD2g!a^bkbHix2W$&R`UU46DOn z!N;B6dhwD^zq1*gbP*`}%*)o-o4%e!YIb(dBcV&wFYIesHRsU}#AOId@i)Y?V!mvwn_k{ILC zv^C4`#ey*&jHMQh>Xp^v_Y{RDsH;XJ8cLnHmVTx_g}RMettYCq@txL=HlW)U5BYn7 z%^%$cPJ5Za%M+@A2s;QNN|mJG>qr7_^CUdxNFlvKkEs{_vvWKGo(ndH3uB)Z57Y|7l8Y3p7m3s zy&Fz%F6$uoJe&z~?0WWNe*4mQzU(_M7K}XpvoC&ovg>T_JE0?1jzKcO+Mb|z%3uDc zi(Y{D{>uORj+GDp{ZCyI&BN);BBSFajX_JgZ@`qfJ_ha#Ry#Xz@2+Kh?(${ayL&fQ zf+d8M7O7f75*qId7ePi?aAJnYJUXfp{XFT3mo;-DBYhZTGXdN+hm1;21$7%}RkWUt zqK#&?w1pI_X^E-8Uoo00vd_5DN~dBa+4UaV_G(=WEmLAWrlb{nqPjq3(qxE+>}s_g zq|^@t)K%<@KEEoK zG~3baLWz@L#Tj9=zn?w+2fzIZ&P%TN^2LI&_~YBY;dAf($mb4>0(24&u>)-OeJnin zk)JyJzE?c!V$T=<**#DG^7}vf+5X7^`b7_&P6tKb#Zh-1&v@%Q1Ls+kp03!oqw~HiSz*l!H;0uR#;PyRxaddGX)`J;Q 
z!pSBQCxDaD3rdd~!_P-Z{RD}GV>{?1T?0Us%qD(u47BClmy=$()a`fHI#GX8V`Qlm z$d-9f=|P=mH-o0s?o++2htd9S%(Pdt>Zk;6j(gkVx~*xjc3+d6PW6Q?^iIN{a|sP$ zjF8Vb6(LZX^~^Wzx#_v@zR+_P|LypJzkAOIP8`_mpqKerOH%m#F1#d_ zpspa-z^uXvkc-!87OUME+`nr9NA@h^^9K&&fw@^#c9=oIoiF#%Nmc1f>ByvDF*=OL zc!|6aZZA^MqjYauQF!;I>vE#RwvwEF+&b9XZg3mLW|iAEhn}Q;3vkRy88FvaJZ+Du z1jnf(tzk83arH@de0j=RJh?3OiJr{b9n=HVG#UuL6VRIjPr#j`<2;D>{Nz&=+@E~S zyS}>T=-`qaBM%9Qz2_rM>6&H~U?Mi|d`A~+J3Q=u-s9hN`5Rt*!QeUftAF+5U;g9& z{@(ptKKh%C-SE-r_OXQw4|YQ=J^wL(e9g_TdfA17@#N2b{N@M$_MhL}Kzk@+P$Eleld-@N5*A31Kt~#%isVEBXp6~v(FNSxYxNc_TqU-gLbbN#<pKvBJ_G#>-BNx(h~mu@D;dg=MJoNJ%p|gH|Y=*1^mFH@E7hq^)o7dI4*F(6clYSKddBQ`hG}E|fa$YSSW#CWJ2(8WHD+h5!beJ&5<- z{Ee5%9sKpTzw7?}C*n&N2b4S}b|OyU_#P5wO?4f_d5jG=La}=u2cGz-UqA4cZ~LL` z09h1;d-7-h=)Dhq=1V{3p4q~|-U8CCNW!Vm=^%1}q*GusA7cKx!=JwT#&3P0^L6_e zY&-RJ;>{oX&2yiA&c!aE5U}dJj;cxnex9q$p$`Qty#9$Y=NzrPDx8yoQGJ@p)kvE;0~QSdC@`YdU$Yl z3AgUO9Jekn@>Cgts(S=S=a4l2ESW3n$ zFQp`GDwNiA)_%@%TQ`ALb*D@HSd%kS0!dudr?+KaDY z88Sa?T9lekHl}kFnb49Vrnl2U>Zb7A6L{ZSZcuQ4;#KcCw&z56+2Vk*#+P6oa!9&m zM{WwIA0P~e2)aHt(+KHIfQ`Plan&=g|1W!<@~Dq=Zn*5$az+06eb?Xrg|B?;*(3M9 z-aj|k+1X67h%T7?kR*xh3NdnoX@*YM$E@#RJxFn4C!*yiJ^I5}UH{k*_V+Cuu|SXh z^4{x?fBBAAKlG1BerU%!7JFw#=tc$nZV${<9MYigy71!&v%ZhODX{LwSl`>jLtb?1 z2`_l&&+L8FH6MqcZsDBgU}v!7_@_U2<5&Ouvu`@&%)uE@dik<=$=zvA@;V+e0#s7d z7z@q#ZPz4Hm?+1>#d@An_X1M>?Kgr(cMf>^z53=!tSFn&MULR}NB z$Y@E)ldN9icF!2;UmBLmfG1Mh?Gu~Q`H^~Mz`9P!bn%umM zfoXwUA}%M;5_@lLIx?A_R7nlZj3SfnS-U@LS}c=l?MMKX)AzB~%Ms5oHrT~rFp&H1%*y3+~7osJ}fAGnCJR6=1L&l4Ci)?C0U6HlsAAaMhUJ-K^fKR$Kf z5x8T=UaWR{7qvT$FT`+%!IM3==m;*FAw>{^@5NmM8z4*S!DOp1apBo1=TY z-@_=4(Irldwk#W`Zs@)&^Kwm`5HZ0)|mWZk{9P^^sNt-#(Ll3A!32jVsWjuW4)ofTb+U5hZ-P^hj>lHA0Bw{7cnD$?A>nmL?px<&yYao({eJ8*1!?kW7z zTfR~4+@Jog_dl?F$I9Ww*h3gFchM6?18D-Ec3V!Tcw#jZhT}{rq`5a5_|mqwT4#bC zd>J6o7HsIG4Iwbe2ofiu76u-o-YbnFsJ2_KZnbp^H?+wQr89z(NOx=kSrU~^I4U_O zQ{f^{f6HBk&%&rePGl#iG^R_0Sv!zpGtU!l&Fk>I+~!?MQ$Dw=_euqF=r7`UuoqvN z-;ck$6QrumVU%6eNM5TFTIY>Ab81!)OG#{Yjs!{3lUSt>){_|8_iPp9x}@jM 
zG3ov+*z5P-=G&6gK;gmJiL?v`anpBB;Fo{qhP{G?PkrzEPVBy8<-k%BAaptT#uci* zpr%8ZzS*e!CFK-lW5(x4^IMn9VBB>dnQJuvSKLc>+#kTGSJG)l=PymAhvV?dvG0u zZEJ_d##G@0R6lfW8S9MM!7R9$9BjXDF9KpD$usz*jv*tJNi0+Ja1!tRsi!Z?nEl0T z-+gM&of~`i#WY!IY8XX5kwMukE7`;zb6aI{+}fsJbK|6=Hz5ZxWfqN)nf&xlrY9B zW*sN?xx~cB#YuavEIQo=eaWWRE+Fq18dp@we$|{Jd_b*S{;= z2*BHEdUXLH!l!`mcHl*thnW>Idd@@m`8PjJ!TqPNeaAz~cdqU~5cic6GIfg5+okrY z4{Rl~Ot?(8N@fCzH_W_5p)BU&5{YSY>{Q}acO^NC1fhV%#CTD%np{VfzSiAL_-n4A zE&3y8G7`jso{}Z;ARtsFt^&6lE;~e}7)m*Y$_!k_S8&1dxNfQm%@nsRFA)clZj6A` z#O^Yb9L!YWVu(lBaQiqmw-;YpIEc^fx&jZ*??sYmtV)3ptjR?ELl#e`hcq`;UrfoW zhT>+1s~t-+<^gQQPgkRC>0B!^u>_CiYX(%tWZ}@6R7D|L&fTzAx1_d!vBse%(UUru z?e~0Y%eh-VoR(r;4-iuvwxu_B*TD6KmT+7%#OUB8-uL!z+%H)C%kO>1Lp$$W+rKyI z!7Y4|+!F3cpS{V?6An)4Xg({m&?ULo7KgFqpsEnnkf4%$OOKse5qOS1C4NpoYL>h5 zY@7bn+^!_O$ONj}ZITjD_XAA^CctHtg&6Uy1`d94KGw%DP?CS&d=5c-R_XMP|4V#i^8xxa)#e;1;_MT`MD9fmdd zpsvY+M+7?4?v%bYoggJhTXtd-TUudw%$=@Z@mVbx1Q4wx1zAY60*R@z8t-=EuUL>; z4xH-Z{6cjjtr4d}#nKMDSlWNr?fT5hvahz@W@%GNg%!{;)0v`<15aQ}XL4 z1h%aJMG*BEU{orRQme=;C_Q)So;CGI_?e|Yd}vYpjHK3uq1pLbt4Hru2Gi&fU85VQ z(rS*cm1*N=OmOaIIsB9oMyq5c41*OPGj|f@$@V1)`f)7x>U%SIsJnoF=Squ3QWn0nj~v;R*8|8vWn9bHZ#7d zoiWvgs`@2{QpzGRQGX^(pl#_8El9!urrWFAbx}%%*Jj^SXqoY)xKg;VK#|O#(%D*N zE6FILAp@yc@$%S3?t6$Ni|S##@2%ggrojLFGk^Av!}aKTR{2R;U!>3gsyxk*`&y=q z#b_Kx37TzR4EGZKrlE?}HgAH<&E(U&r^=*}07jVax?S4(BFt43`op+t z3FX!A6nOkxZNzHL(BeImwpOysZ-kcUvaw~|sElyu=xCiaBt9)8rb?EW{zyMWom4Jv z3juKkBw3YNA_m}jh9F~&4G*d3VRaN^Wi!Gs3oyz&9NK>gyvob3q2?Z9pC+um>c z-8yCL=VN8x$^~i`k?2-cnKsX(Q%vlzje04ZXO3G}62w`#z(LySV6)>Q4Nl{oZ+XEX z8LPMb@~y?0)I;L*C7akUT=dXIk!DDJ0)>-D6QF{-u4HynayAC6sMNL^cWD7I9zHwh zjNz3vDb&P8EQ({b-HX}H*pCT4F@=O_q|xT z>wXlQBkbF`2hVxj<8fskqaUsS!3a?j!uR@ePHTP?;F>Mnjt}3ZhNr0~SbFzt@V;FV zwW2_6cf7SVTkc!8qUfSmi>xK1W3{XanQjaa4`tZ;KsMVTsilTf8>t!`$w}217}{r znX$|nv(!`i;nlF0b+ovR5msV}$eI;;HQ&vcbj;mLH6bII#*_hLiLr`?POxhCB86)q zl(s&HrH*3Fv5|8&aMWC<3MExqQ&(UbW-6&5rSeb|;4$87%CQ**QU_Wetn{F9vBwGe zovy^?6sd=kM^E6Me>?(j7-E+n;QGZ5zWee;>>izlyJB9Hl$fD;i 
z#a#mksMyltl6PcR(!!oxc#+t2S-{xVN1P;~OAxiJ6tY-lle10|tNg827WhI&YE|W&l-Nq%1hQUtO|p*le9t~t> z{NwGw`Vg~ujBA|%Ua>33)0P7Cl1=nj_$Ou2NEVebW!}tgz#S^(9C)#@?ig@qj05+z zE}=`uStPq3!ELGCMEcRvi^goloV%Lqf0_iYOX~U`eyYkz^=(SeTUCvFY z4`QULW{yaGMY2Xqb&Pj5BY-9Gm>iT|QkERG79|E=X<~9A%1&h&DJAtbD6P`~+TJe| z-0g)gQoIK4`gz+D-wtDG$1ZKg%_5=~1#XilAv&6)<4{z#7+tNu%l8#Nyzxy{x+3Ea~FE-SlM^|bx~YO$#3h?SG$PK!!{HiVH) zJt;+tQBW@e-1YmO2JT6~ws$0k8M^iJ9JI#7=Qi=ct@mKoS4!(hntfBPY3s;_>PMK zcU?f$rMDe3@!lGGs>AU4dIX&X z_Y~jpc<_ExHZ+Q{>jN})q4xK73)O>E86~z7mZ@dfY~dhY9pJ=~<9PV!gJS>gr4ivG#Xa301NEZ3WEKU5Im~CFjI{K$_V}D^}N$1z0*M)U(zu&Thw}4eoaITqD`knA#drxAIQjc7ULRNui1|JPegO#+CtPgdJ)C*@$qTCk??vnl)1(nyI1nyGE*`2jUE8DbZ zDyGXXj1o#uD-10Wa3^@?+rC##fp7S|yY9blv~=l~k=!gWZ^6yl5yI_qZi_k*CAe*` znh9LHUGb=Wqt)Ow?c5fO)#TJN!<)iLOjvEYFdpjJy7E|^j>(I&;_9u7)=nouT{)dF zESWrn>tyJRe1w}BPMuuE!}lLYe(oXoMT#`@u^bKYlI2-EZ@G(WlXc7v&q(PE?FXa| zG)7yBNmB~YB*uxJcEyGp>sW=qDhfK33M{J_$hQqXLsaDEzyI18r2* zd`;r^w9I^MQd>EY;DxlrD3rXcfcq`4z0`vH*nOj=%eI_FjjGF4aKq>shtthh%_{{!b@pTtHDuTqZZR5lRjlfn8c}7GHwrU zFY_d@DfYG`D9IZI2nRXNoZG^guO3JA;4yRxCVe>Q#}O`%hj{h1hw&7w;*w+yULKmD zFWP%;u^AHsV`9fOhTJII1^o(IVobQW;pJmMx&Ux1s9Fm71j9i6RFlT34Qw7i3EYjH z!)zC2Po)+eJipX7N6rnnkB=$l-ySR z5l0MZiI$V)E2ynfYe8v)VbVj_=>u1CRXU)x%3IyZd>vDLrn zMr4RlSK_-zTQOG8tsp$Rg5=>d7_F}&%(oaKE}^meqb?)*-UegM-_g90hHA0k#Kn(ju##i4|To zS=T9Mz`myRlEqsQ!ZT+PuWVp+c7V;*a|rV_-vQu8P<2@2@(D8)kg$+FC=m zNYU|fgjs})-2(c3Bq1AWG2&3;X$XE*ENS<)p|q+KO?yp~-1gb0{&0fyZ~sPwK`k^v zzi-ZyO@6lct%Ka=qgrrZoYXcq$Yccy;>>uqJ*eb<_v^2);68ruVAr8dN5O5KI0|lc zzq3}J20H1BxsC!JDN?VWZMVYe_3$PQXxY@ZmR;Sn(KZ#ok~+6+q?Rph2kuFeP{05B zbEunt{P~m;kupSZSQI9imx+xia0e06b7zpQ46t!}gss5_2H{!c>6Ro{&$$IUXCA^+ z7iaJv9&r#?Bty&&H!vRFcV$;~wmn^OJX65MTJ)gB9J z|7#*ICyuKv4Pk!Rmf(^k-KttOX}s&0Y2eJX5_GBiP~rA~Q~eOn{khjExNrKMJ0G}b zxa;tiyF=rt8Bin@mgopJk|CEq0>)aRiM$sA?pi$OMDW{fzmzaGKW|L&GXQI^46!QM zkjU`>oOXC-dy%yl=622_dET0}C6}%Mv{%!FH-{|Oz^g1A0cX!4UK?P2CBfF#7B)6c zYk%=9LeF>L4F=c~ZQ=#{m+>n4bbA>xiNWg&W!Sgb8ZXfS+@o^kiPDuyl0 
z3sKq*+|w#Bm4jqr94e~O-hpbK(N;xN@gUMLOT4?EURW4725`}Qbsr9ARX+R@kvNw8q| zH#GnXu^Z!s+*5sbwWrSt#&i2j z3Db|-;J)#-A-sNu{kU z_c1XsCF<@%C3hKOTJK6wOQWhhZOug>A-VM)jpjba2GT6}HXW&stH9mZj!SUs_q1jZ z+$`#oj8a7FA>yqe;^7G4AjW7d!N$s2c)%nhDe4XMr$JswlkUV5`8E51=k# z+ksPK<0|Q4O~6e9+xT3X&J4EP*%(ZlCD|-}tnQEDwwmcwzFJnbwm`8?J^kD|5*6Gc zn4L_lCC`8F8Rrh97^{1`AF$J>i zAe-Q+s)M_21sPui7Y5RbW7~nfu-~=8txKKuZq{?5y|q9*$`Edi1l;K$MluR9+}yxu za}&{UfFh;9V67KTSL+oayx|F4zv$t{M_-NIrys&4y*{F?6{KAzRkQbE0zdQ6N&C{$ zC1kfXCx_4Uwr#+@om7-DsDXPDJ*Y2^iF0ZTk4yk|o5omhSLeAnaO)*n!qN8Xq|zce>PXs{X)yNZYW;O6>ObSb=bI62vrcTf6qSwc|CG^CxrG+iP!6~OVyX;&|; zBzJ@MY!j7OEl|w_p*fD#0^|?Uk|;&I6(fsc0XN?X>cIx3_Xd(UR3+F}Iw}&(!ok95 z9m|8$_?{;|29JwF?9GPKc7%DBorD2ZWVTE%qa2^r^fDIQl#HPBgL@*?+eVobIy$v3 zikX24-OlXqWLK**j)}=E6^^C@yNRy~@6->Q;HJo7Y)J|3H+<;Gv18Gm%j)2!uU`2O zp}jr0$C?hAtG7cQ+n|+I&ZLq?h0_XE^8ISmD}Pa1%{idaavG7?;TX$w8b|mlVC_+3OU^E;ExRW@B zRB$EVOcV4veH2M75`8WoVqbO^ulc$w@w8-weWMMN>X&MqH=X`cW2I_NdOlh*jgPm- zcajuuE{f^aut`E?nxL`QY4^k#zy4%huDmB=M>a3JXtn`&8Km?zfV(|V(@cYHaPxtx zDuYeM@qZ<_>-RY|(}(VGDoxZKvI1F>>%UPZMfd5%vEZiFg!F_`JPsoyX@r2?07hGI z{XoXRW!0)8L~Hzu$7(y+_04E3FepEJSo3NKlKEngVi-3and8 zq?YxB+R{2mMzO2atZQuZHiUCRUvl|%E2^)#Md_LNfMqB zQ|yjb@bW!z`lBL0iyA8QP9JSmB(+juVh3M_7)V=f!u%5W90x$a6l zX}E>GNyz3ZQrW7AMp7fs)(&q=-R&qs1LRYx2UxIO2)L&S9!pQ#_i{dv*M(zatm}?F zL)FENDEE1omPQ)pKo&zG60i&{BMme3vKYPL79RIMy1xC#>n_8d)zet;yKqAG4C3_? 
zjRk5U`+WSm@qKOQnzTU1CFo>t=J^3Uy?jF(KU0OFxxxDP1h)hON=p_NBuGV}e8GEu zSVLalc*|{f-xn@FVj?Xu;6`RbUXA;mb@t?BrR#QkCr%sO3W9d+!eHjS?vn;Y2Kn>>CKZeWs}1JquKMi&uHRLbk@m5ZXh`FjRuY6zQh97Gnn`fW@ocs`7vTl( zexqjMyz#@g-Fe?=`RZxlCM~g&TM8VNw8Vn@e89FPX~!$)bAhyDt{W0$g5kd{Vd{Kr zy!~@4gw<4awZ9R{C1Yf0)RYbeWprB$^p%ocfm;fQ8gQp^QG%OFWEQg8R4Uezgy3cz zNs=%)$O==L13!wezw4m8dIHb8Y%iX_G>c2(A^d2B88$Sg^q!E3bH1&= z4f3W}QM2kNO4KQ3^QP{k5-Hd;_f5LD@$0wdZGxxMmiQ14rI}auXljD{mfP;QFI>Jx zvfrg#LQSE(Ajv%)+j68wlid> z@_gXF?Y?mN+BUf9p=6rbbcOQ#;I?fc>O8eBIWHF6?d|VEz^!9#yr^|>PYAVa0JpZ; z=nkY!lI$_Ocg9YL^vGE4H5Y3M6+}vIN@RAn6$g*-Z}RL^X^2o0N_bX#r9)?3IxNRA zTa*)0Y=2lFNizh!9+HrCQ3g2VZsL*AI{iHrDF6T<07*naQ~<6(QNM0{%r!VLSVJ!h z;pYsVvI)bc5{TTpmS`4bT#Ei{fRcTdY8lsQLtAk+lKEc&+%k9DP^h*pH3{7H`R7zh zUQ%AQJLEH-nF;ZNcipIUOK!U5wj;-)J&)eRg3uE2_}F-E`je%cnz5FM)54kprx&f! zrt{k;4fA`3-cM41N{KVYmRbi`d#kO>UMeb<8dVfYkFaGKbb)aawJ44RLz#xEQWxBS zTwJ_19Ds?b{98H~aPgQFv$Rm~#)^CeH{VH3QmN%_!A%=I<4{OXhWSVu%hH_4{4$@h zC+uXws!9R3-VkmcVMnrseZwKXacL)B_Q*@I=iJ@c7(E;1&WbXi<8L5K`Je+E8j ze32vZBB|-YU@I2SFT_7V4`-DcJ;Hh0w;l+pdT-r#dB^4?mFnFf!jXd zBzjVRxbb9!CI_ttw@(^)X`DI%+>{A4%~kr7N-(RqFiAi>leWc$Y0p8)EsdNa18z!k z;oM41@;DU5izMOar&o1HM*px3AaUS*5ME0k>V8!nuF$_1Y6>n&h_N))L0sgS%}>8!Iq5 z#ZyTG{6xuZo@x>|oCNM^yTp0HZMqB#Z<|nNxh&Kv+c{Io4EL5atsJQoE>c^o^thqm zodRxE9F=QZa1+er>QR~?&KO&gqDV5h0`43!r7aa4rlvV;-;$y;8emWE;p_SizVpgu z912%4M|x7Q-wqc;Alo(he2;E@lkduI4Wah=Mknftv`ZZ}7ci+{6TschTpAnamItpr zmvwNTZ!V|L;Wz^jp(1=s=Y!8O72HR|) zmeeG-a&T2Bd5lB4lEm_V_N%8vx15|UwFRIoufYmxtlP#OnRL-dA-Zvb!)b^Y?Ont- z9omJ?*<)A=GFf2SNXpS>QR1G~{__NJ=%w;3otSMI*{Dl*=4r2K<5wQP4Y=(w9P@hQ zs2y{@w*#lz3t+pyCxM&Ciz)D3uh%NCq$NjN3MHcg3GU2Pd9{+;+PSsqDhEZuT?d`q zknL?VQ5w|pKfXqA*JDqx%Kc5c#=Hn^p2NSsP#FY<1JJ5Lqh1U%0Z(E_9G=QYNMG==w?#n=^&a7_~8 zC0Fjl6S`a2m2H5oHZCYWO1IlZOb;kM%BJ6BCck&#rJ5E;s&X;l9-|2p)%{S{aqoWg7tqmw3*_9o9N!bvSAQrt&A)TW*~^vzm*?Va(w$;0P~$Db7I zU&lmyel8B&d=@nrkd`#S{fn>D;`29t=t#p_B7rE33Y6@3qfHymUH4FG-IDDkx4mH7 z&0`;;?EPLZ-B1sZv5R5cJla_D6O~2T_S&+hmR&CZ4%BQSS^k^~dR;hZYx<| 
z1%N?h3UFbrtmrSsB%1#WElFJ?!A&p4gdNT_pvv-TOG{E(G1!WbH<>0VD7901FcT!? z--gz30a!<>9|O9_i+O3;*oJEt>;ZMMa7 zqKc*jWQMUem1RO~PO~06a#36)aIy?L;{gtBoWX0Ja5bJ#Y+%Q56&+^xNbPE))5udQ zxb=BM4QCd*GCcv|eANuj2X0Yt>m{ID=3x~c)oT*Co4BbKlLVHU=i5$l-*GftzNSt~ z2yVuKTX5S1Vb(3NcJ8+1w&0fcS^!gf#n#=#(^~x5bB$GwYUNv6pw@Yu{Y>bMlrE?Z zUR&vv`?T%vQUR@IQ&V!Y(}9DOF7SXc5~)~67>Vb4J+Veew=uDKE7y+WRKP9$967Np z^F?aG;3xxf^<=8@7Ykh>xKk8agd~qdiDW5b#*Gw70YA;*g_Pb(hf+635}2Tz0_Y<{ zcjXMOpKlNGyoiM_0eFZSRmJ~6xpb+edb<-QmMywr( zsUT)qgtUkd7Ae9ama+wiaw**7#~GX?se#)FT;@=^E_!i-L-8h_+Xr5H`CcpyA4YGq ziN4ztr8jlhA-a$@CB6rANwL_ko*jJ0x_DMIda>ZP9J?uPYr>@sZdnbrox?PN8UxiN zcRuBY)M;#T?gqGT`p}U(j*gbEVS+Hh!HHsmum!iZb4y8qT0FNjQYN8qibnaX+%8%n zsrJyyjol1nvfsB8bxV#u!CqdM6m`W_KcBqi+6AcFHYGO~vFWkGKAV|`Ad3;NpTZ4K zc{GanEc!tVAp?A{~Y$bKZDQ91k7WM{jKfS0x*G<)bdeQ{54E z4Oh^YZWsnLV+^1iPNbQ$EtK@3T}yJBw&mqgVwDeiLhM6c&r!W zyAChm8Ye_&xQW1Z5cys4b~@e=Zpu0+u9ydHAA=6382l_ zO-%Q;;1*M0sI+7|a5INeskC)jUpTl+$y*ci%84fr-ub{?O8!?a<5%DF4D@v|^yg!kJz!?)X>k zpTl$ZEnv@Z4KrCN9UthL4}9GX+{8mC=!sRv*8dnEJ0V!Q6HfpdKTpS!I$E52%C0c& ze07Q51by>9%ALbBS!Lz2x`(pl+&6vb$ZbbQ%Mx31G2mwIA5DSV(ULLFZ4Rzz#@7b! zQt}&cw`q%=knuVhwZZ3hW^?h_Q=K`9^z0QtCymi7)-Z>M@cy?xRlq%q2qYu2R~54u zI6l7XXYa*Bqh$fNEPDg)IxUfVGawa*PEDbdNJAmTw3cuMq>=!f@SPUMEJI2&W0q)Q z0>PapQeIj_KNlr?=VTqYj*lo$FrRMWk?{&%e8pbeu;61UTthL~gvT;?y{@Fg<)I_F zm@MAj4o__YZ+mcC$vsW-Poq53z+En+8V|42%WC&kI;pfI8*{%m6w32SZre7*N^S{v zkSeb&aGOa@vx=R2aRgo0jebO;NEuxY?`SP-F@3C2Tv= z=+w_*^ouq0vj_1DZ~F%9bSb%0bfGzqo->aDfbV+iF+4QfSt^nQw|FXZE#F%@ciOas z6sDlMt_mfUMOs6scuI|LhR_-gFXlg%31;aqf}Fgce7uOI2RwJbAe#|~Qj#W^@e6q2 zNnAe%eCOevcuYRP(r6t;93ks3B4$I@Vf{{8|cism|uMe-+o{g z|8Bv@qnsi9txcrC95S~f3&<0Tf$lg~VoJGf#?&`O6zgM)R4sK6PVQ#9xl5S@8cy6(C7iCYJ@zB=h&G@xG zkM-?Er5n3EXksVJ8Q5l`tk~)0-LjOPoUw>exQWdl1XzKa-j82>$1~Z44>N{$Q?cZ0wE$lC z*8A{qxQs9~b!Xqke9mT zbsiXIz%eXiPQ~y4OFfKcc{0TaJESZT#%VK zcQj6#-I9Z3!GN0+%t$%b1=&E~U&Hsm<}p|dfE`T89hiG^fvo~~*Z*}t*5Vx{EzuGW z>XDR5;=3u>L%>>a^IgDd3JWQ4)!8GG+<;rTHUVzItzf4mC6Qf+J7_9pklABiYRd>! 
z?7>eW?8!FpXlE1O_lW(tA{n4RWN2)L{$d|zS6498>&P59l0B|Mfm!&h39jSC?*jN~ zS7P_2KBN;P^;vC}g-o^SYx^|`+;z%Qm*zCUO3$X@ zzG=4{FDzNRX`3Ky+RX_h8L4NuFTq!K409q0x|iNFsV@zd1UxR?$gI4?rlUN!iNF@n zlX_?X6v53PBL2&yr8MooZ2!%uImU?0ipyPh5&9zO?v4?URaoP%Ua&3|L zG!rrjffAn*oZT9vo{W}R%h=&7j}fFRxH4SFYY*+m^&MbuK7^MJ;q<%Mh=%BRO0gUY z6zRRgaU#HLnLFTkQ$jObfv*&qM`6Rb8Bc0sw*%uY6x`Dk(QzxyG;4`63D%334cdbH z-9KPMUTd8DA|0ic6eq_Lk; ziuB;6v|j+3Kav;;f?KbxZdwJkN)=7Mmnj$NukGzL7>e7gaLcSOu5{Z9gk*)s^a3Bz z<|aCMijEr~4+AM7M^B&hbIDR^BVKCpHWP=JBCRuuq{N6&`(h9v!CQjaq^H%u-H_ZS z^i^pIEhPlM+Q2oiirk&MD=jGnJhso^Wh8l$j-L%&nnrkD*T+i_?#AWG3cBG263>Ct z4Mb6u5LgK}pczy{Yo)rct0q#J7t$6J`pey%MU?q+C(sfV5LQu~VkbA4u!ezgmeQs( zPH#uKQai}XKx}b!r08v0`&hu(uKNpj5PL>ER ze93VPa*ygJy6C`jdI%%!2`NQ%ib&$x$q;0sjKnPimTKVESQ8r>t29L7!G(sHcyI%9 z;op3Y7PzT|T7c_Lz#uMKU39@P$%Nynk~I;sHb2EF==Pj+ZKcEieDATCxg2Z zqE&(n$4MG)DFZ|s+?AdCUrBP;x%M>El7O2NBCRm=u~d3%@RHDN;gppvR^qQE9JQ;` zn6!k^ab=1ar5Gg*fA1la=sr$3HGf`D3%Prw1roNd^$*GINk-s|L}5)u>IVp%zHHs2 zwGD~Q?sgWC<_;21l{{7_Z-ZN&*C-;QP#Qb8CNt1xUC|DLTfV;_a8pWG(%VXSCI-i{ z`*EF9N|@cwNkq}=r7>pWO+5eL4*Z9m9UR;^iy7BJoD9Lp0TMY%ZVeXG41XzdZ}hF4 zfm)J$tGfcnO6QG@O*ziz2X|c;6Cq$xJgwr_SiH-4B6(eMTh7hXHo)ERP=1Zzwg8@# zhge^2^?r~NbxJ-b14e1U8790riR6vk8l~`3X-N^zEnYl~!KC!&r&7qBC4jjDNULdXzbs=h(jMyt&mSNfMwp-J%c7uEP7?=FQ6TXP@$lt? zRP5sQ?Nuc>Hv%f+N;_!7)|e8bNK)!*mphf*1ity51+7SKBgspJvITByaUsA-dl>8` zaklJ!meUjIpxg5?jMi{D2Ke4XOL%(U!;bY;%s44xH&&~M#Ch|XHTa*>U$cJHprAV| zU)r08O5-o6-5__zW>14k}Nqx!$VS9%BUq^C_O_b4awd=Rb znbl)IN-j*miKT$R?Fcs&X-a@IhS>m=N?q;|@dxQPW?c3MV~- z67Nz3vX!#col0l5$2+3gQ&}XPpm&hg0^3KOR)R>3VL8j0nRa?bv;`kSbld_7BLKS{ zY=i?iUJr=+aP#gMxW!hTk(;t&W!=PQ=!c;%47UH5fjd>j(wHD+Q1b*{PN~i72jbGt zIw@wNHGF-(h3|X(wRpt(3KqjPxE@Wt4BaJtGnN)Hsi|4e$(VDgN+#Pls+9N9F*zLx zL*5OzRj6qA!M`=QMc-n`?j&%}hIr9?UZ>gbKm6g_kKD7h^ymRQ2(vUmQlRT}MDY@7 zRx<@^R3IxM*&c+t(x-DJ^Ih42w^9s93phr;?~A>ajtokLzTd-OFu?5WtmYBMTbSuF z_<0opRn$OQ=#GvMg(=RQiE(B%!7vVxcpVAsACd~tf$q$RiHuaC z6J#j79EnX@5CxQ4pivHGo91pXLXfSYldq!ZX9!2Mw|gi$KEkwz9QzSP0X)|)?b4F? 
zYE6MusKrys+0b|OTw2qg0fUv^R+3xJZB2wmnWW{UG#gT>EHq;$mP%D7JD5FmV%;?o zkK^kijRu%?63nIRc=BQo-+gEq`_@ij9wBDj0`V|HZ)O&oQH0$0;ROMbXoQ|b(Qx;p zE@Ns#=4=_L=D(CXr&K1ZGGzFWy?cZ-~IJkT3<%=VM#Nq9D1x< zQiA)9zjgbU@7-K_%+~_$aqpxq<92jj)k~)4W+k{OfmwJx1!g*dw6&u%g8zza2rnC9 z*J1~ce#9X>`YH!kUjfW>!la;&X$n^Ib+!72IdIQ0;MO}L+1+pL`{PHy{T5k5)gSIi4>wYq&YgV z7N@78>32GC!$|alFyF#qjPM-?=JCv3JuGdUL$I}ho&7$-FhtP}5IZ@dyg-+~i{jGz z$DLFZ%iM#Xu@eOQ(W^pUf60AM@miLPU`jw$3F0a4_X^|+ZZXCg52cJdv)>8scfU?e zfo*UTZynkPj3kpLqQ;41B{$vg^%BOF^loaoRcNby{JLVGDlP}L-<^R^4L#b#tQTUb z8{vj0UWw~2_i$(l;zY6QpyL57E&pm(< z4k7V&W5^7Yu7@PwQqIrh1g4jXv}SCHgKj>6J3NjLzW<4sXQ(U3g#w=DJRjI}f$w|E z12{EY6d(%d*nG&+Yk|?ZQ+v-*kv2++GlR9todzOw+$!--*3-Y?eW~>R7Xey{` z-66P1!7>AGFIMG{#Wg)YKxd>pBIy9Z@C+X7Z{T_R=5fRPEOrkzvCDNa+FD0&`dWJz?gC-j*H?O>d76(lF(AVU;G~30H%g<^xWUQpFOK%xl z$0)S4f*Ff5!F_gagm3w|?^jy#BOkuwi+6AB{5mVS|EvePsbP@u4h!;NV zDtz!}#TQJ`UXwSY&WXnQ`o5-2uMuZAbCo zU{@(krEMHJexflOmWD{#wYtgANM;)nYq!_Z{#^sNN_Z=+RpMK4(}O7si`1lG`8D-q zx+KFaGf||3BK$%|wWGM!5All2F2$1(FSIdL1cGOP%0S=6=gi#kQI;BNn}#D)QPWlA28P83>VvfnMZd8=YA zdA9-g`6RbwNt@#H7b3Y;acnl|tN>jz>eJC(`0%{~Gl_t=qV{MEhdnFbCE-ATt_?ibhY5n&9@cZP+yfaMDfh-gjR@Ss;-mVM1d}63KwaHYV7> z7%^3Ge2CB8eOYHJcZ> zJIe3M0+WdCX|(8W#O0L5TvNo|u8b_D*AX_Jj1U%^g>)Ro#0?TL6UD% zunJikA(v8GW;*qzGAfl(S(#ui88sogG$XtS+)cjSo8g;5NZe zp(U%M2M=!ER63Lz53LmSFn9KiJjVfB=bMDY` zhv4?PP&I+T!?Yhw@rRkp4Ea_fh5l*M6KgpeoyPv|7>~X%#>-xC1m~?NNZHgfoC0^8 z2EO(EA-;F>Jvh0;*)S>{%#hy3S_fA=@&KOs#8o`}kODj!Ptb1De*kui7?T$8AHFrg zzyIr7ktK&QVRyScIxad6USe2G%P4!r5Ut7W_}quC#QvBxiCes^XpF`MvThr%t-x&$ zNtmcuZqgH4KIk zaM6}7lj%6G0JRB5E4rv@#opjne9WiU4di=8<9u3T;O4Qk+&;J^6i9H3$2ps!<%0R5 zZ6V7<0l@4XZ?snH)#8H&@I^nsM{)oFAOJ~3K~&&g2!6(uWCS3)NfONRwrJb2p1GS0 z(VpFnjinJ@{DLcR`NM%5ZUFxBul@nYPUgs4hcHeKVkWU@4N0acn{@k_jqk*T2MWCK zX_w%6k6*?<5(o*iUxV4M!_Puy!(!#4A zc?ljmz7MPUF1p>eIQlX7LJ8xtZoMdl-%<7>>tCyD5sO zi%GtOIO^);ftcwQtfv8A!+*-XCEF=Zn>w1NX;No2^*F0NVR@`Xzgz_-GntYJ zj>cFoOROE=#fvUFg6HoW<6+4F#c-gbx8oF9nM)xS4PT4#{~TyBvP706vr5(eRe%h% 
zOT;#Myrnu$pfO)qrGIzwbyZkJk27t@kh-YUCU=rl#OGh~uU_jUjK6U0k8a;N@|a<~ zE`>t|Zf=3%D3lRe^x(Ks6Kw`CrJ}tiBUlJfZhYWL^E@(b%$2t)SR~6KR>4xg;y@J% zI_)JVU^CM4PPvd4T1i*tr85JUthO=A$B0r|l^NRE5LaB-#Xo!1LE+16_?4!_R!bbg z*L~ziI9?oP$-eMyf?IC2b7Yu#?G&f6KD`~EC%9vVMP zml50?6Gz2STJEY&`=BG0-h4OKZIb z$A8HrU0|Gz5Oq3IyiYJKrHRP=E@gM(s<-?TT(VzV#H4v4ig22e3?I7zKh8E$Fbd?r zEjx;Zpnghlvp`Cd3iIGrAUkVkAU3VRzyYlQZXDfNYWux542KDyI&N;@rv9LT+bp9> z0GCLj63Jj251Eefb7_f}Uv@Dr8vYm?`B1C=WHgEvQ&|k28A@X0OyKsd`I*2i)FTk% zsg!4fWDXab&&4H>=atw^**~>3!AmP}f8p9|ZreHXSOYgLH&@EpfSbJ;{)ec(w+@*F zN|Z)RBr+k@XLNMBD)8e%PP$>ukfa6jY^Y!;$;p@G+R}E8n03W*gz@!ZF+2)IeC=v41Sz&roVow)I24^!r+d0yoM!j}bn4%{2FV-L!5 zF9Nqblz^6S8x*I@B#;BkZNTNV84andd}X<7t8IUyqbc9J4jzJAsEjKemVllBmOD{~ zc(RTCD6!EW;mRZHc|-8sUtm@f|*HuL=?A?Wi4c+80kp*E?JryCq3jjX$oVqGt9=jSdL4qMJMp$ zryjtIuG|no9Y$TqS%-0o_kZma{{F_1)SQs_ON2z|(gO$gv0Y{;og%Cxf~Yp-5*w}o zm+@!?x6zF{Dxx4X2h%x5W~m)WD^QAaS^pPWZtLvfj%VClXbFYwnL!mv2-SNOqY~3$ zWOy(`IUb-LcX6b@h2OpC2%fSOV|{!AUCyYVI>VDrbe!XiwMTL$J)h?lT z;FiRhRc?CqCslsaX+2h4+jchdjU>>)&%#h84TaK@mwx|*8J>qBtM$Px!`%Xn&JD@EK%5of0)n7~@~jckf1jfZ&C;~s&F zhj(Hl>q}p)6lNvb$I2>132lNpiSs?e25t)lE(SMgp!yzjRfH9RtsP09j_7CBpoFvw z65QgOcW`oPjF)|)vfM|1c-!{jigPcr+-C!Bv(!GgeZFnL*1x+EpDZ{r_s%9s}g6Ud9eO;H{?l=la2T5f$htPEtRjI||dx!EhJ zrpYpmPOrsTtg1TDnid>Av8V~}QmUeN$bW85Hf=e3>ay~E7RAjlZ7x8FD{G8}djRIkp zo`trtF%{jgzqu%iZ?WYzdeJPw5iPf<8Zbwp(KlM|-!NM8mMU~^6N`gs4ixsqlG zk9Km^;vboO9=k;tzGD%TySM63mKekl-v7mIeD7F+QNfyT1-H(+(Wk>^iFT>ck_Q5A zwcuLv`ar-f*30AG25!4g75DZmw}t&0;J0eR0Nkt~G;mi{g%W$0f>!DFr_@8v$;a6Y z`@757y!RO1@u)+1ba4v%S`&1nA=ycBq}oH7B z8R0j~p?vF?uKVGw+lMb3$arjRMCFz$+mItv1UJ9JP~alb8y;sqMxCK*axh6VGfrA$ zSYGa7Hrf?E>2=mL?A(Q?J>dd8|LKQtu9ldi>&hEh`6u9Wz^-f0{5?WJPT+S$3{~q- zblC&L1o+Tb2Kd(XePk4fv|4T#3N&zci__Rw9K+{7@boj&5*G@r#Ogh8TUf+Gc;-Or z^fWEktU*fNZk;(c2GI3fR?(6m`BRMx)Ofc6z2@Rg{S$jHje1bXM{^9*n7$2bNi@Tn zMG-b9J9tud60d&j`M7j?4|*f!5jK%?W=}ChhjMJJ{auvra2DKZNPKIp_g9~sS?;i* zSmq2#B;L`IietO_cPW)%WSi7hyRk&tqwI*~e)&~zQqKMVE4Zb)4@gD>tR`cuCEIw> 
zvoFKXUk)i&q8JV>Z$6%3shfzD7`b-@maDS7)SzoOVdMfsJ*UW;NjC{?zO=N(z#sxX z@TFb+x0{B@Ip>-?`0ISOzqq%uBTwV@douPnHeZ*x9_ zfmh?-eAtV@tsVH9g zIrsoKPxANe~>K$D75NIL|?cjt@AIY#YCJesWvh_vI~o=avk))TwLdyIK-= zv&%@d0@r4@-;d??!Ofek-QX21sk-(TTU={OumYByi{4bFuzq64dNmuo+)86D zzOG!~l&fmX%^iVtD9qeOh}knD7EsFx`5MU>Px_+y{ZB7CfS+qkv5%celU$-IS;mqE z8-*3@oZ*B8@8=K`YRPS9H|-kzmrq3&shcz%>VeaNmV!5S>3f#@MW1+MWw|9SS+vej*5e!+4x^un_da(OEpgyx!!2#aI*O-T`YalPMBI*vJj#7jBE2r?^kL8$To;{c^St>*XyH=7%HjyPxM4n6vj3L$CeO048b14}x`ctR*D=zMghA6iG;F6aQu0u*Uz#9$VSw#K#h0blsW zH?cEZL00TTHfd`uFB1xGOEUXR@puBcH#>!2yYeA;;nR;`DNzTLh4*X^d;eGO!FO*N zBb%+D0Cn(O;gqx7e7GGK3M9A>{^T_jo)2qFUi;n!;HKr~uEN?97w6s!+(t{Pt=goI zJ;%~_6t!%@Ii0eLnN&ivkjgB#sbSgBR?+4*xd>UH6JtDQ;(%j2v$xoyp-uLAn;~#Ei+I|I7&vJ_;6n+>9En)iofr48~ z7f4+h=XQ=A!L5-D*D3BYZ7R=POB9gS$Z>mQEJJV?F1coL`U=$MP>R2y1zVxz*6VRm z76*D6TqPwDZfy3fL>i}vc1F0UHNz$2lX&G77vX~4+i_U5;iw~%&Vk!3bEwZ97F1Qz zc8)Vj35JwEu+B*7uMU}2aO<==uiL%?w-+s0$?(#TzEP_J-^Nhj?xD*#`(0$T3YKf~ zf;k9Di-~098&4j~q{d3cDcoKv65<8wePYt!+@kR}!g^~9Pr0OrH@xtCtaAK?^es^Y zGsac~{O{k|#b5s2z1WQoV>buVZdd5YWV9>RRKHXgkT*vrBg}y1;$FP;>1%k|l?Tyd z+*|AHF>C>U_T}UF?lI1MToRrp>FDaFX;)5nwk@GR)^jsM$&y#wB6;KJn7)n~%Ko|| z{W>dN!L8MhR`E9vZr5XI&YyR zdFHNau++jT-QVbZOeC!xryb1tBaz?ejr%w!@8ikK1zz=t!&p6aw@^swUzB^O)Xx`2 zRdA~z(;`Ui$)tFuSZps!qTtovs~(OymkMs}%M@>*dRAPFx)^x15vEZKTTH)yO#_|0b@#u~v(s~vORVhhIseC``};qR``F=-t{NuhP#k}BypEgdd&#URbz^;y3Cq;m zu1;I15<7&4Bc*nXrP?insmIp~)P8RnOw$}R@Y}VhtO~7Ot}+{1bgr#Wq$G(OEkbTS z8)5&rk4t75p0}}q=bXQZ?&+IwuoGiC;;z|}NESsp+r2h&p2HPE706H~tpCv7fbOI% z&n;`QkJdG8xRMO{j1J5i2+-$Z)koTaqmp%kL$ThhV&wp<2&Y$uc-cq(#|qp(xOsTs z@_xKeg5k{4u@i^jmejZt%@7k*1B^;Vl9-b|}DQnH?NX$wHWEdr8btVE~r!Y6Lx z*PpQuo1*rhBf0aqjnfhE;V;~ZYwztKOV%-VRc>otrrW2OOiQ%Wt`yN{oCwBlkrpEX z8M>1@@ye&|$1ATq7pr<_MCGbjomr&d=6|yI_yFd>O<_qTlY}|J z{oC)o33u)6ulx?)Bk7?Gz-`GI1E)MPHYAgNSZi6MAj;y`F6~{#wVQ=iypNjMtdlrG z3xl-%Uf>p5O`}lcgtzLR&>^P~)hq&s2V``LxfP}}vt(Wtr%A9h>H{HxzoaKG~l*M0w{!RF)61a6AKivsT%z}?`~7Q*s4 
ziEr7fy6K|8YP5~#K7Io)dqxLGs70a?eoT`H$7jHwf9)8q{c(hyVg_K%#!H@@;+4-mq+RqbTC)2S=km|4z7OBNaSRkID5%RV zPN^17N_7ZzO9b4v;jrvA!re9K*x zEy(P51F~Q3XHgQpfjX%{o1ts!p*w&8!`jt?IC$M~TsxBXdM-F_l8m%u)IMRyjZnoY z(y879T1Ntu216dqWCz{tZCo0)@YLlNUVhFJ4h>JB8_$qW$6ygpi&pgX2<`5Q?2?p< zuJ{Xmj)&x>EbVhT~QQ9PFIN%b&4?=RaW?Ytjl&tCV0n zj_`rMIF7%+ag1`-(>S+Ol`#axBn-7nBJ_$KtQNQ8(;s}gw3(5XXe|bnBegF44ew?5 zdkt<|%U1Hb2;55Z`2R@a;T1pw%dH@EJEA$FEGF>M>?ZDS;5J%W`40gtYNqJfd#Po> z6IKL*g=BORspG;?DwG6c;ZB0F+&ju5&(J9|Yz!y3c({XCoU@K!*yy0$zX!{m0)y!Q zow$weSVf-)ZQbp;(-VUe>VlUsI+@NMVw)N%hL(CbJ}r9Sru6HqHo3N666@%W4iaE% zAN!3 zSZ*nhVuEM~7p#u(FMsa>1^09!_1bi9CRmlWun&Ij+qi4Hhdg-*2GbUj_7YAFPJv{) zm8R(A9i-C~N!&(@o!ir0B;#A~{@?#6c;sQAEfkAeTZ)rEaf06e7hCwwO?|0LR%eua zSyg9gt*AKFEw->y-j2^+^%Qj|2}tu_BY-==>q$!n`y`95qMt5p?CQBia9$#tP82`r zEv*!(D)2fh6||Yv8O&ytpsVSxonedny7#lReN{c^$+QWl=FU+T7^`IH9Jr&r5b>iv zWs#{K+hvFoXpII~-`U0!lNo;bqJwx`I>h>P8+p!}%LGg5I>uQk`X@97y-WrFhiNKJ z<}Im7(clFClLk1UAGMN{0G#)x+`4$Bty^|Sqb1WQ#@6bt2X2p+gy4>x2s!1{ z7J=I(Fg^02-x}xMt8hxeO-(=+>-4c6ox*$m;88fg%Pw=i69q}Q+SkxWj1w8~m4AN= zuDPj?qP>CB{UMe%R#Bv5jCLuWvyA0V5B>f&vTO^FdenJ%_LI)Xm5)~CD$4mri~vOt za9?HM7BPETYkK=Cd{3*Lh$zn%R*Kv4nGgPK!QEKw76qAWLa0Mta(uwc%>s)<>PMy) zfZMMgGz+ghU1<%!&ya~jDIB>dgq9?nj>alMHer*Jrfw{EJ`$N^y5gJtZ9H?mi{HBF zAT~}N!wQ=j@(fYb!I-iWDH}*iEK#Swh%juWV#%#}Lv|;Zon)Mv4F}RuFTmCs6Z<>C z9_qdepJ({6VBAb6B5$wk;B_Bz_1r$^{%OHoh5F9W8Sh6+IJc!!4zN_5#;;!aC_Mj3 zsT06)4q&9ZfSCxz19xo$-~R90*qzMqlD}~_ETFkf2XuM@yf9@ zF(50va_>hOHrAJL^oTTn2=!7fZs67?<_NPSLYA_tXRMOa?F6lCjE7J9c;lfBJTaZ( zU@}0I?MMVf!hUfq#zOeJ1SDdiBI9HVYKo^EH02r}WNa(}Ekmmaa3OG>!|jCeCCbE> zH}ZLwEVH04;m`^JcYmdiSAX;s=gc{j4;0*zlnAm>zUB7yxkFkap{s)Gqa!Sp1DwCV z#P7f6QXKqANh*OvaEd;P3qKN{RGe~6!Qli37H%f<`5F z5aFX=KY_nJx{W+$hLYfBTN}Zdh&WJOklCNYa(N3r{lRAoU7}#I=;P@<$2KUZc70QE z%Hp@yW>&?lT_(4xB{@5AOXXgJiyJMmriZ|CyHKuK=re&^xGtwi$oLX=HECUufScv8 zMAwA9Qi)82$T)|-m4KU_7F_X8PqDtUg{MwO_^ro29OsQrVr{yEIAv#*jx`a1QCbyF zHj0$m65JyHKkGQF%NR8&3g;n_gKHAUG!d+}#Om$U(JYZCEX|IQa|EI2y#u4=o$L)) 
zy;_qv@BYe7-#fas_H&GLlLfWnmO2u`uQe3tM@x9(w3_){hXNZ8q&09lS%;8!O9#6X zb!oB7W7J2d*uoW;F5`JWe+Z8~m*c3l(@~`*Mb<}=P<%g@xHc{Fhzem;g`H9xJw}$UAVVtU%ar}kBc&D-w(9YD za^cBzIm$M0m+|h0(}jMYfI);PwXbE0_jpTABm_q)G(^S7?sTKffpn=3MnQbasU z0B(zO^9J6ZL)jR&5HND1B{HCu3a{A=og|Ub(|P}PEXUh;%4HYhc~3cj^VWeS5ya65 zL)FkE%?n!3EtxSVj5%bcne^op@E`xL@8IDNc_^Ou?EO-o%rE{!oAR+Q?BLti50EV_ z3Ai~ZNp>8Go3bm1^UO}8Q{09>|G;x(2cS>7;?6Uc!5qMwKX4@MXm)P{SaRr1!-F5Itt?A@h;9Cf_u*5hca$1LP1~{`v zIW0m0lGa>@uF=|Q&>wf3?%E=LNe=M8YPk)CTKZW ziq`qmG{TssYsOgnjHw=(lcpA4sgM>#*|dhbUsI*0sW}rNj#}E{7Z(_}PvUK#c-3J6 z_domU&3|+4*4nc|aO>pi+H#+bLs?m9f0oFcyQ%G5w7?wyOvi!A0^Xnm?3c{Yp|o*1 z#*-g=F&=dZ@aRi{9_20Exm=RiS^paJ_v`@w_1ihVd(Exr?+)>tpMM;F@wx2zQ|%JI zZaf1%`$gc!yEE(*ncPHCV6xae!%H&;mUgky-opR%M~}t1ow}_}hW%0_oZ$Y`+pslR z6;(LVDRCLUYHFKF5j$!ji>DD8X*!Mu-kN*6Itss@!V~Z@N3#wm261rvOqHaBrRVj1 z>z+%7n?;>8yn!7sLAoEsH5m1XG13AWL~Zj_)eJ0AB>N=tJ$ZOQIoX>}>1b@>RI3l)V@ z6<_P_($Zw7ENhM39Qj=oXhjO{cYO3shXgBsa`ml$eeJ2`XAjd2S*;A^&~nF8&-lQ= zZCj_+CojOYQdZ}10nIZq%$gb%r+P7~Co$GIMh6KxEZ`ZPLN^_u*B)VgCC1@{t2lUI z1D#e|(!6ET!e}(XaJP>;f4qfbcaD+AtB9A@kmos;q71$E4C8zYaXdvk=RBN!*v=1O z72Znj-4VaJ-+R>^&4imM!uDP?y^k zJefigm8Z;d`@pVgNS*xgsfo4kRhD`YxW%eEm0i>Oc9DlFnsNqk>)c9RJQgLPzo8Xl7wURD857f8zEt0huffyil$6sYf0wkh#U*kz2y>$q$Becr`ZTg z{7W`N+U=oaa%Y@L_OP>g5clnDqucIk*9p6*C8Wl5F)0O;5IH)?Fdc1UHHom4b}$*G zm`o-}S4tVJkx|%~gH0oyqHFPF_Mpn`ufVMw*0{H%6KDiof-R&G4;0+C2<&xMeJJDf zY=EmQhMKeT*wkqb(uxXabvujHn?xl>GI5#8UmQom%%!IK$y#Z>g%8fyC>*2LN-)V! 
zVgGm=FFEfV>>G__(M+QhImPH`U6VFS;)=U4!^}c`NCQbPoFak?C3B&rgt8CrY8Dgy zz!Ig5Oe`cbO}a#IXRQ->&&Pj9!Tm>Hy5R@co?g9t+};!(WtKaoXH3P=pTeQs%W@mI z858YLVwhtqCM70}ueMSoDJS!0qC_gQ9H$Y|ltWJ4Y;=(%NHLmZSX)^}k&gwmNo!fg zJ`ZL)SXu4@qYUl1hZ)#YnV`qkDCEdG5ysToriCy^Qc7@jG&Vzo!qJOdoF+;kv-^aB zO4heb<33^zC>3cpJ}B2Y!rg}t$LW@vP6Oq69^rqZ{$w zP>QdQOAoB#VQer?UHcauKas7|=PdU}qAj@$2~c{ZVLpkD(U)?g(^<=7f=NP0MEG8< zHdFdCP*xOZwQw$tOH5iP@h?99dkXG%eewDqT(`Zi0=E=3O-qE}ey|RuKm5rakkOJf zjxd>yRP;S)_OYm{pLN-NrWsUxj6fao}gs-8qL~l*o9hutrPf zx%xb~4Mg^OjS4X&S7I1PO2zV+^-)4Ucw8oFd2(zms8VqAHL6F*sSVssIc|iR<-UHW+(sCc&TDyGGIZ(|-XDE>1=74+U$eMna_IW9J)d%M* z37M4^3l)R{{l8hXL~rsGQ5uPttHt!x=@z+p5y2Gs0cn(EKbbN?{>=EOY@^Wxopy@Z zm;+Ebfon->&WE#;V&!9c)2y#fbR1xtlfK$*^mA@$jBhgpNKR)Xp>;H&f?HF>VmS{{V`ZJ2ABfE=;oSW|H3>hu=M_>OCYgYKdZG-)nvsuAB{v=8;WmqW5h1iPwY^TtPz@xq z&NYHA&9HPPcZGW{?^VJ^)vWDT=4nBIbkhLw0p0t)dIw zjF&1L=&Jm5H#1S5z*2kjJJo13Gm=`HQn#IjbjC87=3x4Zh{wj9|6Ym|raIPKs3M?G zuAF1c3BzSBNgbBWvRsw^(S#wy8OJJ)q(goqO>x1#HJsDxp_^rhru3Ou*~zKQZf=fi zX5h$~mP-=pr%}Cg{jQFRG@>I-g>QGP&7bFqTN0D+mLY8^CV@z$|BYe%U#y#o3HnSbDrVZ`BsoEZ#Bw4S#a7dH?nq?dQd`!|sa-V|s=A z7I$~xH*ouod&wR{kEVpR7gaaCJI2>=)Fh2-B$jqH1vk&f@?T1(kydcG1mpOO`mrgb zh;=c{I3Q{Z>v4fY?Jf?sd)Q=@i`JZzhhu8F)7jISZpl*+*eo=ppJM%>wfU+*ZUG4q z!1s(}EhJ&ir;bvm!Y_CEZW&?Rx)1OE?CZ{zC;HYeU;m@q`x`1gPX~_Mk_T>X3N^T` zHRx=?t$^NZCcGc>T?Fo`hQj@EFMux+{+tuPzFq)%Ntx8?QUI7z- zw#HaLE!TT1J$BHak#gPl`=v%o1()Zxgrgb>x?Dd zOqJIXhmqkth9JdxtSqt_N&N)M!%Z!$-uVsOwlJjnTe5g9p-gS2_R|yilh3{0>6Uol z<{1^-Oo`^Q`h0LRHL7&lwr?%`*o&6bH@mis4G=G|xc=X(&Wr`!JU)m>P4)u54~8>= z+qZ}YZlfFK$Qiipy=z%8Z%SLEO3x)rJX+#kkE)z%<(+StYw$OBifTHVrR-I9q%L{2 zh6zs}T`l`OxJlgdSW5)DSqT{fU+UbWiTE9w%wb%c($HdvW~@WeRD5TaW1|@3NUw{H zc2}lXq!UJR=|CH!0S=Xvn> zbi<+*KIgReZh%&5u4~I|w5fit`tNF~T_bN$|1m$|w%(x{i_D3vPe2lfSz(xt2+ijyg zndn$?8c_B!G5*cI7$#~=OqJF-%Y?;jA(9a?U2CLHJHhuzz9H%&EL?)|T3G{J?x)bv(|DNvdqo(E}t zUfae6R11bm$t?@EZ2x=;^5%{Za`O;;Mo&VJhv&7$>}P7tdK-7v#jtl_l-wP6d%}3P z-nu#z*f_Vh%aHcW1KV~1<=1*Wnt9adCo2INB4vV$bFvr`RGI0d7-O07>>|RUIKrAt 
z8P+VLS?D;TkEQWx@@fM(iH1FG$xu4Wt=wC`YDdz3wrG|*q!OjG$ci|d&H+!Tjc(vR zfwz9j>6XxP-?qCKEm3I)qb0uOHZbZ#I(u*{5b98#eK7xgd-Lb!qOr!=pBCJ67T7FE z$(?}q)KLu=Y7OhU{qqD}I=+Rs#j^5}hrM^4+XqAw;5GOwOBz__3SdF?VYn!KN7HxY?)UXoJlVK( zZC!jT8A@2xj^N((r2gETz@U~V++nJP;y8QAp+gSetTa>yRV{zBNus`X)t*Unt;Zjv z)D6@G4e3d1>V(~yR4*uAG*$-^IS!Ol9Gd1>X62;J5>jzg8JXvT4Q7>D8^We}r4ah1 z_CgmGC#m`IY(M7BQ_1y+<^Yc+UDkw{_Qb2@=9h>u$&lV0)9x1D{PEXo_xszvalQB{b`CD_BSE$7*R`~J9ht>W5@!7FEO(x-6ogy8oVX;t#-(J$Kp8&qXZ z`a!|n&RH*AV92R)lEq>T2`88-07dzUaqH;>OS2pYXF2v4IrdQ(#zjpee<Stdy9!mfNK5o@Y3BUogWAQypTE8YE*XpH0BEFCCf1$Uk!?``1^ zKKXhlr}mYjKRiaoDaxraS|Y_>oE0M)PO?G>Zbk^^X^B?9g-IGe|rQSvXj#Nu9r@25k^VmK3a>JNC-2Vm}5E5u|A#Rz%<8N zIYqaihKq&-<>Nb}XIHBU8;yoYt%`-Z>yJ|z6YX!eOt%5svX-^Qm)a8(pK$`p!sl5u z#RL?KvS_)Nw(;gqyiURW_OD*&(GqS=1UDm|6AC>mEc& z?3M%%*WNdLeQ5Ekou-n7vz;U4%1v<97Ba|STJj{g4}NCW!02BaYeVeI2YJP@z3Xc% zGMmTfCyb;Vq8TUmdnOh5&A57_7DW;(re0MbEvINrM_8Fou|LnTF`Hp!mZ2?O@=Vav zd9)#@&f+?d8rfqpeA#UH{EqBgVv8Y^5-4I1%z)Q5tshke1ML zhv0680-fb1xLK{o6}pI)_?EM>P}M_j?mvD=&3~9-H`-nRUu3~$y?aA9<-9@0bMNkZ z$FCm*xb0!cwFa*bdq)?=y?x7Uaq9Y>)DKyX3&3snr~2GMhmL{UcCtnBZCjvH32n(F z>G;*UYSx>u6t+|3Sjt9Nn`YRUW>}fBd4qo=xRqbCz|L<2s~NL?4(X=1f+$zvr)l-t z8vB-8v-EW=yE>n<@KS3+%Wc-1u=gxCcM5Xuw|(NZMoX@}_SoRyqne!i>}iRD%q-A+ z1)qI5c`zP5ksB)&i#6xk%i^91+`d(|??cWXE}Fe9w~PBaZ{DXn{-U&$cUZ9HM^D1V z=UeW|3cF~?{LT`TvR08umoYUOWYLfX8B~40#ig|Tn>1<3&dlvxQqNlXILjn<-J4FZ zF&Sasbc~hRSS&WnRaN3it2K2;cCBHKewcG8(hM$8FNISL>#cr))Nad%Z)S#hEU(dO zycsQ#`{`vmld5rnw8}+G@*LUHDZKqtuQOWmk4KLU4n1Z*b?!3;4rTq&=aW0Sbq6$JkDX&La4&L1{q1{EVVC}W zo|eeX59!$+8OoYsxQ82X{JEVjD0MZXb5jjDu^ zFkXiOStBEUvY5Y&*DP~Zd^E%&D#xZn1bqt_oBY(9LHY-pTYJWA&3 zDLX`WdM4oJ=Yj`hferh(r8N6ms$P7t=v5dt*Y%h zaBM@1KcBI*!A7>an%CoJB+U}npUF)(@kMj0RcZ|kO-HVN|N8g3e;c=Na%OYH{iKdq zG$-=TXJL^KQJIpmezlPjAC?3&o1h_aY&)JcAnH7|q=tl5fNZd16Txye!T#YG>*Fyt z#yPq(7Px9WTO3DGigLz?;XZM*kjI?hJV84jVIM&~9ASNw zVPBr3EooRy)ac`?yU^txxDR1b68>0?lW}>9x@MjnYdc zFXxDJ(h`C@TiU`Oe*Co;tL6UcbvN8G*t}E=r}hH3(h>@V*D*~`J1?+s^Ok&Z$!Zcl 
zEN2LSB`=vm0ojuJAWLOeg0mB33BFk;RTV%QYI$plbVY@%sBcXRNa~C zrU$vlq$1+CxWT9{0VA_wq&824=rj)OivKmn&7=(#p4bOjU}ZAJ%6N>m;Sek1304T~ z8T%YX^}4d$0&b_t$hYq>TI?05X-eFzv)c7$?MSUTA{N=pK5KZdZZos%4S_Akh`^G5 zjz!>p%O_v^P>pk6ef^Ec`uoLlTb#R$I18nwCDw^UnX3l_?(pFn2z z?b>wLqXH_m{WR*qzlnyUj6I>_&Yf0AVu{MC{9TXFB_qi;!YMk8VfQE4*UzwTFvjvk zBO_w5CD)-63bK}{z)Ecps<#TbwerswAn#Z{Y`2*oT0cg`V3u4qNx#8gl1Va}z`Ye&hqa0X8E znk`Y~Pr~v6?AQA)0>2MP9~AXDY75>3YtXakGGi5pg3mP5>3@#8W2fezV#zs&QXEB1tdW-JIaKGG&x?kIliXMkMIVX0dnltkrszf!ERV)m z-{KBL8LEkrk$CfBG)NVBTz_o63e4fTDz*GbZa8A27N(I|tB9MuU0w6wI^?TI!c zu%0B33iMla3dh(t9$>9Mz}jwxwE^h}$AYjza4OyJoIob?CJGnob8|X*UNvqkz^ZFZ z1e6TpQS8!%O9iY|vw=QAE(}lCpyJsEehVr3ud{n)^hGI;8Ygz+vyzqI&U)K;>!)6? zwB#LEAHDV1;NV5ptRQ-K)cSB4%0=Mj?|9N$hOCC2|FG-si)Px_03D{w>(}~j$|8VU z+P`UeLvZ^UGqapOD{zbD47y9s7Th7ts;?=Gdxt+eaCqYNy8G#UpdHipF{g&gMq7eA zbC#P@XcVDLX6WQ2te-xGef3iP#0LL>s>&6`{Mz#RF!UdOSao-Qmiuf$9#;9)ud9V}nwGD5t<`lbu$1<> zYJ#JK#y>M(#2e@1;FdJ!>&P z;(#)uq2np2gk7Rd2{fwc_II#;;y$cTCc>TDBC?}yD92-yzF6>y;HD8LxV8B}Kc_`t zEFfqJ9I?(e_RzrVV&pzXWb%R+;WDimduAetPk5|iYNi;XSmEhLGsRG&Zjt}nZ zjvX5uINzrw5t|i6YpiZosN&pmpIynh1NUA44{3yNH3c;O2Me7uFD<{wcFPS2GIwVJ zw||Wx4XQ(o&GLZ?{Cj{~i{|T_J?FT(IC!`?9w@kdx>PMXsi|wQ^PlF=t4|0+Xd3yyY`*c$m?WJMI`BIFFWF`7~2b+JT{* z+iNp(oO=LD(GYBGqCUqtj@2~GSrHDCal64`y zJjZH(g7s4atnLi3IvFArC0|kvb>>KM1nf3{z(b>iCQWtK@#`>irm|=@9-Pi;t)=MG z523KG_D2>oyXro$+Uu;TDeq|(Ks+2J03?FDWDDX&dPx$B0X>%DEBttyyu5NkYN zp*h;rJaM%pq$Le-zvJrbj@>cbJYT`>H1cAKyF1HWft%@f4L#|@W)ep`aTZza@G=)! 
zIBi+0E!|#)N_7vk1En}DeoaObtn4pz4MIIum$8kJn>f-;mS znHbef=R38WB9wufp+>nKt`g6p8CrU(fAPO8q9NMb5uH?Ly~C!4YDco2vS}#i5ux(X*t}3Vw~i`~5`x<`2rmG4O-q`^>U)EHF}QeR z`Bk@LEeZ?PRfBSKO^nk`I}3Tdo-ec~^XY%NR(78Fi3BgsZGZO{WuxlPTjXIAK>hxK zbVOJyv`2f|TyiHUFspZx+Hc`CXt%qK4b(akCS=md5S`r}ENyK|bY$NoN0&oRc|5by zl?B$Mj9G5;Y5cl-1GZ)f?S+fME#XNAZn=*?1-LZ=V}?~1l+GL`J)>T`#N z7#_H=X}QU{HG{dVEWt4h2;dV#-} zhF!N))GkffX687z3zu2}l-(yknWOLB^l|KZYRlae^|4ruIiQ)0ZK1UF0&pK29k_4^ zqa`XVDEiVegk*};sen?gn%=2dZeK)Y5y0zkU{FJ0wH(f?-iy=Phmvt4|mRsMaCYgM#a_-uiRGNQw;hvnDygsnp7Ddqktv&^^IKF%IL2^nB zg3hVoXQeuyszfo6XJgDMV3|wSar3?{EFC|E)xj=$F++V*WV0z+ak~oZHA+ey;G`2s z261sl4O`FDRvTTB;xH}<2T!{@(hBD7o*K^ct**wym2S>gbJ{btW(K~upHuKb&tutU z7A3UYtP0%0TR-upOXc+M{@-r8{if};3&!bwZ7UNAEeQ;19f{1Op$I({idIp01uiBE zzSn@K>H~&pX7}(dlvY`efEM_`)ra5oV9Z+9rpGL((Vh7id}9#?Y4Uov(Dw;Lgw<-QxtT-%!0QR2T3ABI@@1tLHGwO2d9f?iB>aCNuP*gHq7i`?0a1#i4NEktP|B0Dq6 zF+?-0ZtdX6T_>=5atFQ11TD^yNg=~kSx+wp%FzY2HNF;~niS#nE~ltX^5C7FBrBGBU!6T=s7R}knSN^r@A~BLD7gRV z>(}3QQ-AZqETKO46baL|Y&)V}pj;sb5D#1lSIFv?Pi$;^cW zZM8axRn)}2zXMe*5m8eY-3gqiJ2-tU!i13n_Buud)}~XepV&g@z7y!3-oknTtWuYI zmSdPOgho5xawV&7OR1Uzn&8%F?|8AZnqo0S`r-3;_xH+@2jCV;vmlIoCX4MuU*qT0 z2;?MV73*XlLcg_zcYN|q50^Xmp8w<4zrAi}^C_cb6~$zPZW>`W;jEf2#!SPrm8l$S z+s~{g+X1VD32YY7oGrM;I)&}-3&3rb%eLP+tEL;)+>Kduqe2bC@R%^(T)$V-lJ5m> z%fi&>b&Dhr_1T-27zx)aA$-<`*?a(WlZ zc!1?{CZh{UV>n-uOt}>)EvcFZ7Jz$EGDkSL-*Mwm7dA*&nM!xwDsCP6T7elqlf`Zv z>J-vsJi0{r6x?t7#2b{BeE2`z`Hg?Lcl9O1bPeTXf*wb#O(uxa4u)}8!$h16#=$+a zmTM5zX49PGsXc9FABeKDgOyx4P-dYQ>`lro=i|DIKt4}Xn!M%yz+G9DFa+pXP<7t) z$oY1;x}Vgp&#vDVr@TJM#8$K)i_9>rJsp35IX0;PlwC%tU6`%`S=~^4XYYFS7|Kp zOfo4-h?U0c=RLG}+K#pg*&x{h9PFWT*#!dng8pZ|YPeD1Gqj(&GH*^hXZp_i11 z@(H52jbYpt>nyyR9Lq^#ahsJ4S)zJ!{z_{&QVYOsmnjebEWqs_?*mA`pFctK{r4{F zYTnGn(H!H@4+PwDUbjP8l&7L4p3aCK*Zcry18%{H9LNb|njk4S#ee{g3oK72Sia{3 z+V`BmdVh%Z8K)p{q@8x{%u?2)>hRS{C`WvC=bTl*R-SATr8;tW#_OyZ7!TgI_5dnE%&ovyR90*wo_aSwZNDRY#n(Ek}y_qIKI3%(I9^WV4r-fi#J zyH{D<`BmVbKU_d_O#=bfUNphM2Ww?{gAAk(^2YHMKXQ5{uH7zd(rC7s(XhCYfG9;U zXL&S4_v987#=>u5me3IrXNc>z 
zvZk3)&^HRBjPiILHC0vQUW>%>@}|2gRzchAV%-%x1UG&AC`Es1yZpn?|Mo$-rGIl% z`P6^?xA*+l6VU-AocT!gPtGUg0L`>37MgPfWqygYF_Cd=W>U77DpN-Uxa7lGfV<|G z^=)Z7jr_fxuX&8m$D7;RVsJN)(Q}31ZY)wSc@n_s%Wvum67Jq95B%{rZa#i8Jr^0;Xt!hJEUKYez0;9M9k^-DXRHV2Sf7YB zzN%*tTE0cwxWBUf2d-I1T`cp3PsYva=s~!ZEwhIt-YsQQIP7x z>RY<=N~U>^)ii%0!aNNL>t-gD7)*s(l#+C$KM|i>{P^wv+0CKk8@}!9x88YAbm-iZ z0}lLYVWpd5GI2U@oXpCu9Cl1|(rlT^G5ks;nJ{1W8G<|HP6nd-QkxmMd287Wjh!vP ze-_~OuO(azp~G4o>w#N-7mWC;ERm~{t1P$MQ4HMGgB2ZbSB5HEwN_OOry17U9jxvQ z(YoVqY~FD~q#0U5BVG2Cveaxhi`YpMOZSwGb+Nkbu-^J?z^!z~EU?GFJWE{zGpIdr ztb9IC5gzB(U==oOf}5>nX&PfP-bInj5HGjz_+Px@PoMMtC%m`f{a^ag^o{@NyLbF+ zcEE2>60~~DQd`0<4pI_oH1mt#&XFRIbQU`;I<0s8wRh}NAAHk-@ls8Js%hEvxVboW zm{Ii5D}WE89(#k^RVU1wjz);FT0|k;a2GodZYOcvm?*5-M4>Q2nw=3Se@Bq}A<-Gv zE9o3R(eQKP>pmsRiwp;%1k1-xAi3pEZ1zW3m9csg#EIr1nAqTRYu^+3TlnW2QU4;@IW%VCw@V%aAo=xliXy>)qdNaCW*U(DZ*d1)4 zvk_ww53zh;{jRrt`nMhyT^daqnTmh@e;)hp&G!|LpLP#mn`0wd9T@~fIRLgGaqG$0 z#1L~taw=O`OG+gD2GJ&X=RaGWEa8nezG*kFrYlWwn??E=1iXC6=SSGm^G&_Xgu7|ejmFR+{WN#{>k^GrqKmN@yp?Py*TR|6bts@uUkdM%l{IPflXGoa5zheAUY@d+m8&=Wz{ifAik*imU(c zAAN7wJ{+II60&4TCtK0Mblh?hYvM*qP9*(GE3kLW@=T#Rxv8{d{#69MjB^E0dpN2- z=zI%aZU4cJVc<3?iMocQxg34VJ@4##QUzg?MX<H-Q}{x@?aOUn{LOxyH4YPh*8dfX@-`QrX#qyZ;+MpGfKSKlUt}ht(6{E`r4ME zG&*R;M!_v{1X*kbYAcE|j_l(9uIjUTpD;8mzLwmb#*O#E?K_|q_(c>`m0h#e2&cRE z;E_+i+w$N%)NzIW?qPo@VjZmj|=0_RO}1uB{FHys5bQ)M0Z zUS#pq^5|a{ac+qS1ly8XJgZglXWH0moOAlzn>T>tz-61BA8GQV5I(r)t-EJE_n;-! 
z%?@ZtsM9Tah(U~d5x6abTXS+3PBX{W(Gvo7Ez6PIcoTZ}p1}Um3@a?4$|qvs+l-9l zIc6NCffyZ*-6J3gHqkk88iST~cUEMPL#bTfe4-lM`q}LSZrG_H%Hz!eZ6qN5FQ(7k zT|OhYed?q*P=|Lqo1vd>W98B5&F}ia?>|1eG}_q{-2B7G{-0C-;oEoTzp@>zN+~6W z*{kqMiA;j#IXWyD;+V9kI|tAv_?qj-{$`+J0?5EG|9T+d&*98?|7>#8t4vo-hs<&d zcdb@ky*(}h_acxs7p!}DjYr^FS}&fipmm>X0r&P#5rz`y+RSF9SZz^om844|KWCGP z)MxMe(aku_N1bSvl(!z56du{PnHd<+H{`PtpUFx+NdvjiDD#y=LiOi+d9DmYQNG zWo!D|`g2_h>HpoVq=8#X;3XR-xu>e=-=`)1qOpyv+R!Z9DRRBd^u2z@#o)H<@b7~< zbn~CbuB-mnD<<+RyDc91H=rxwR_Qkg%VKtPMAHW>f+}>7k(*c3LxOfb!|3D|k~{9k zh4-AofdUv8nRMN(yD`y>qQq%Sl5UKSvKJP-as7~sfGsZ%9QLXfzp~)=y!8y>(y4r12Lx$L#P!_H%=uC<%1gaP!&+&hx!-g z)ZA8`fvDM)t0k^JmV(*`&s>tkUEm_{`J(n!(!(hu`5+d{T5;mN$8L)VfV+PAe4^0q zYM-qRnJP~FqhboWU_$L_`I&Ip^6 z4819LP?vNu9_TuE32uH#=ZpymIl+Ias+iF<)geY}+zlQ45I9w}CwX_!*XSFeaEwS* zH(vFuYK@3IpQ;N;sgx9SF<;JL29If#n59#U%Mr?UiD>`y#2f$Q)xUb)m79OJ$Vl%A zZaesk-Fn&M=3Efn}b z|24oWkaguW|Me{gEw`kL8r;_}(6Z0m*dZ3oz_jD!0lPVhzP0vWUpu7p^h_LbM_<@| zu9lLa)!N6IP*F*Ss87+GEmjW;vEtdKl&d(GbUS ziLJ>t9&yDbKYYoXe&sg~KCOHGB4EpB-Rlqj@BikO@)6%Wde=v8J2m;mVYJ$rB|S_z za8-oOgHtT%q%jcs;I{L#CF)F|Hj7{H49&PWBROhixTti#i~Za9Y|he#7Iv>tVd%6j z`k9|xn*_X6c|b$z*ICrT+qb0tfDe??3MzAE=Q7L9dqXv;&a^~(r;pCD<5<4?1P-6( z0H!ITHg{8-i|!>jytXX4JCy;dfg^~-L1ol{=q`4DpF|- zC+mDu4M{Qt$2xP}!Q^)(OF&AFu*PRMy(a+KH1EJpz#7e(K zD$eO?)Z<2EJ@I(yE+Ta-%c!JcW5j%P?B0s`0x zcc?UwGeIpcEDEuyMwYZ15)re!tlB9{&WF<#s!;{87;$w2boEz=PRckc+!i8b3kq)5 z^hE?IugPsef{G%amSsLeXI8Y^JAzO3?od_Jej3cvP;;9ehz(G z>r}LV#Z|1Y$0=rsrK_46Wo!qduGq z>@K|TGX0^&a~Zf}I=nivRZ0UQ%8Sh6yqqR^|MDk4_OD+4+84j4_r&Pf{Q>)|E%#n0 zy79)cbLT#^T1|0%HfuIM2>pG6;gyM*x8R~i-tRBWP~P6I zcP0L8hmRh;u*$YNTkl##a?E!DJOSX({#)63d)A$`$IS_^XEf6H-F*Ve9Vx7~+0?TE@p6ra@J zv!56^4&Y$pEedy!i%qG#mCrMsxPM0|FS*d&cXuDKyT@~|_jhNHp6K-a9>j + + + + + + + + Chat Interface + + + + +
+ +
+ +
+ +
+ +
+ +
+ + + + +
+
+ + + +
+ +
+
+
+ + +
+ + +
+ +
+ +
+ + + + + + + \ No newline at end of file diff --git a/src/ai/.x/templates/openai-webpage-with-functions-ts/package.json b/src/ai/.x/templates/openai-webpage-with-functions-ts/package.json new file mode 100644 index 00000000..7e60f6a1 --- /dev/null +++ b/src/ai/.x/templates/openai-webpage-with-functions-ts/package.json @@ -0,0 +1,25 @@ +{ + "name": "chat-interface", + "version": "1.0.0", + "description": "Chat Interface with OpenAI", + "main": "script.ts", + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "author": "", + "license": "MIT", + "dependencies": { + "@azure/openai": "1.0.0-beta.10", + "highlight.js": "^11.7.2", + "marked": "^4.0.10" + }, + "keywords": [], + "devDependencies": { + "@types/node": "^20.11.1", + "dotenv-webpack": "^7.0.3", + "ts-loader": "^9.5.1", + "typescript": "^5.3.3", + "webpack": "^5.89.0", + "webpack-cli": "^5.1.4" + } +} diff --git a/src/ai/.x/templates/openai-webpage-with-functions-ts/src/FunctionCallContext.ts b/src/ai/.x/templates/openai-webpage-with-functions-ts/src/FunctionCallContext.ts new file mode 100644 index 00000000..7cef905b --- /dev/null +++ b/src/ai/.x/templates/openai-webpage-with-functions-ts/src/FunctionCallContext.ts @@ -0,0 +1,53 @@ +import { ChatChoice, ChatRequestMessage } from "@azure/openai"; +import { FunctionFactory } from "./FunctionFactory"; + +export class FunctionCallContext { + private function_factory: FunctionFactory; + private messages: ChatRequestMessage[]; + private function_name: string; + private function_arguments: string; + + constructor(function_factory: FunctionFactory, messages: ChatRequestMessage[]) { + this.function_factory = function_factory; + this.messages = messages; + this.function_name = ""; + this.function_arguments = ""; + } + + checkForUpdate(choice: ChatChoice): boolean { + let updated = false; + + const name = choice.delta?.functionCall?.name; + if (name !== undefined) { + this.function_name = name; + updated = true; + } + + const args = 
choice.delta?.functionCall?.arguments; + if (args !== undefined) { + this.function_arguments = `${this.function_arguments}${args}`; + updated = true; + } + + return updated; + } + + tryCallFunction(): string | undefined { + let result = this.function_factory.tryCallFunction(this.function_name, this.function_arguments); + if (result === undefined) { + return undefined; + } + + console.log(`assistant-function: ${this.function_name}(${this.function_arguments}) => ${result}`); + + this.messages.push({ role: 'assistant', content: '', functionCall: { name: this.function_name, arguments: this.function_arguments } }); + this.messages.push({ role: 'function', content: result, name: this.function_name }); + + return result; + } + + clear(): void { + this.function_name = ""; + this.function_arguments = ""; + } +} diff --git a/src/ai/.x/templates/openai-webpage-with-functions-ts/src/FunctionFactory.ts b/src/ai/.x/templates/openai-webpage-with-functions-ts/src/FunctionFactory.ts new file mode 100644 index 00000000..b2807021 --- /dev/null +++ b/src/ai/.x/templates/openai-webpage-with-functions-ts/src/FunctionFactory.ts @@ -0,0 +1,24 @@ +export class FunctionFactory { + private functions: { [key: string]: { schema: any, function: any } }; + + constructor() { + this.functions = {}; + } + + addFunction(schema: any, fun: any): void { + this.functions[schema.name] = { schema: schema, function: fun }; + } + + getFunctionSchemas(): any[] { + return Object.values(this.functions).map(value => value.schema); + } + + tryCallFunction(function_name: string, function_arguments: string) { + const function_info = this.functions[function_name]; + if (function_info === undefined) { + return undefined; + } + + return function_info.function(function_arguments); + } +} diff --git a/src/ai/.x/templates/openai-webpage-with-functions-ts/src/OpenAIChatCompletionsCustomFunctions.ts b/src/ai/.x/templates/openai-webpage-with-functions-ts/src/OpenAIChatCompletionsCustomFunctions.ts new file mode 100644 
index 00000000..273b75dc --- /dev/null +++ b/src/ai/.x/templates/openai-webpage-with-functions-ts/src/OpenAIChatCompletionsCustomFunctions.ts @@ -0,0 +1,60 @@ +import { FunctionFactory } from './FunctionFactory'; +export let factory = new FunctionFactory(); + +function getCurrentWeather(function_arguments: string): string { + const location = JSON.parse(function_arguments).location; + return `The weather in ${location} is 72 degrees and sunny.`; + }; + +const getCurrentWeatherSchema = { + name: "get_current_weather", + description: "Get the current weather in a given location", + parameters: { + type: "object", + properties: { + location: { + type: "string", + description: "The city and state, e.g. San Francisco, CA", + }, + unit: { + type: "string", + enum: ["celsius", "fahrenheit"], + }, + }, + required: ["location"], + }, +}; + +factory.addFunction(getCurrentWeatherSchema, getCurrentWeather); + +function getCurrentDate(): string { + const date = new Date(); + return `${date.getFullYear()}-${date.getMonth() + 1}-${date.getDate()}`; +} + +const getCurrentDateSchema = { + name: "get_current_date", + description: "Get the current date", + parameters: { + type: "object", + properties: {}, + }, +}; + +factory.addFunction(getCurrentDateSchema, getCurrentDate); + +function getCurrentTime(): string { + const date = new Date(); + return `${date.getHours()}:${date.getMinutes()}:${date.getSeconds()}`; +} + +const getCurrentTimeSchema = { + name: "get_current_time", + description: "Get the current time", + parameters: { + type: "object", + properties: {}, + }, +}; + +factory.addFunction(getCurrentTimeSchema, getCurrentTime); \ No newline at end of file diff --git a/src/ai/.x/templates/openai-webpage-with-functions-ts/src/OpenAIChatCompletionsFunctionsStreamingClass.ts b/src/ai/.x/templates/openai-webpage-with-functions-ts/src/OpenAIChatCompletionsFunctionsStreamingClass.ts new file mode 100644 index 00000000..4e0324d3 --- /dev/null +++ 
b/src/ai/.x/templates/openai-webpage-with-functions-ts/src/OpenAIChatCompletionsFunctionsStreamingClass.ts @@ -0,0 +1,67 @@ +<#@ template hostspecific="true" #> +<#@ output extension=".ts" encoding="utf-8" #> +<#@ parameter type="System.String" name="ClassName" #> +import { OpenAIClient, AzureKeyCredential, ChatRequestMessage } from "@azure/openai"; +import { FunctionCallContext } from "./FunctionCallContext" +import { FunctionFactory } from "./FunctionFactory" + +export class <#= ClassName #> { + private openAISystemPrompt: string; + private openAIChatDeploymentName: string; + private client: OpenAIClient; + private messages: ChatRequestMessage[] = []; + private functionCallContext: FunctionCallContext | undefined; + private functionFactory: FunctionFactory; + + constructor(openAIEndpoint: string, openAIKey: string, openAIChatDeploymentName: string, openAISystemPrompt: string, functionFactory: FunctionFactory) { + this.openAISystemPrompt = openAISystemPrompt; + this.openAIChatDeploymentName = openAIChatDeploymentName; + this.client = new OpenAIClient(openAIEndpoint, new AzureKeyCredential(openAIKey)); + this.functionFactory = functionFactory; + this.clearConversation(); + } + + clearConversation(): void { + this.messages = [ + { role: 'system', content: this.openAISystemPrompt } + ]; + this.functionCallContext = new FunctionCallContext(this.functionFactory, this.messages); + } + + async getChatCompletions(userInput: string, callback: (content: string) => void): Promise { + this.messages.push({ role: 'user', content: userInput }); + + let contentComplete = ''; + while (true) { + const events = await this.client.streamChatCompletions(this.openAIChatDeploymentName, this.messages, { + functions: this.functionFactory.getFunctionSchemas(), + }); + + for await (const event of events) { + for (const choice of event.choices) { + + this.functionCallContext!.checkForUpdate(choice); + + let content = choice.delta?.content; + if (choice.finishReason === 'length') { + content = 
`${content}\nERROR: Exceeded token limit!`; + } + + if (content != null) { + callback(content); + await new Promise(r => setTimeout(r, 50)); // delay to simulate real-time output, word by word + contentComplete += content; + } + } + } + + if (this.functionCallContext!.tryCallFunction() !== undefined) { + this.functionCallContext!.clear(); + continue; + } + + this.messages.push({ role: 'assistant', content: contentComplete }); + return contentComplete; + } + } +} \ No newline at end of file diff --git a/src/ai/.x/templates/openai-webpage-with-functions-ts/src/script.ts b/src/ai/.x/templates/openai-webpage-with-functions-ts/src/script.ts new file mode 100644 index 00000000..0ba30e6e --- /dev/null +++ b/src/ai/.x/templates/openai-webpage-with-functions-ts/src/script.ts @@ -0,0 +1,300 @@ +<#@ template hostspecific="true" #> +<#@ output extension=".ts" encoding="utf-8" #> +<#@ parameter type="System.String" name="ClassName" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_ENDPOINT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_KEY" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_CHAT_DEPLOYMENT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_SYSTEM_PROMPT" #> +import { marked } from "marked" +import hljs from "highlight.js"; + +import { factory } from './OpenAIChatCompletionsCustomFunctions'; + +import { <#= ClassName #> } from './OpenAIChatCompletionsFunctionsStreamingClass'; +let streamingChatCompletions: <#= ClassName #> | undefined; + +function streamingChatCompletionsInit(): void { + + const openAIEndpoint = process.env.AZURE_OPENAI_ENDPOINT || "<#= AZURE_OPENAI_ENDPOINT #>"; + const openAIKey = process.env.AZURE_OPENAI_KEY || "<#= AZURE_OPENAI_KEY #>"; + const openAIChatDeploymentName = process.env.AZURE_OPENAI_CHAT_DEPLOYMENT || "<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>" ; + const openAISystemPrompt = process.env.AZURE_OPENAI_SYSTEM_PROMPT || "<#= AZURE_OPENAI_SYSTEM_PROMPT #>"; + + if (!openAIEndpoint || 
openAIEndpoint.startsWith('(openAIEndpoint, openAIKey, openAIChatDeploymentName, openAISystemPrompt, factory); +} + +function streamingChatCompletionsClear(): void { + streamingChatCompletions!.clearConversation(); +} + +async function streamingChatCompletionsProcessInput(userInput: string): Promise { + const blackVerticalRectangle = '\u25AE'; // Black vertical rectangle ('▮') to simulate an insertion point + + let newMessage = chatPanelAppendMessage('computer', blackVerticalRectangle); + let completeResponse = ""; + + let computerResponse = await streamingChatCompletions!.getChatCompletions(userInput, function (response: string) { + let atBottomBeforeUpdate = chatPanelIsScrollAtBottom(); + + completeResponse += response; + let withEnding = `${completeResponse}${blackVerticalRectangle}`; + let asHtml = markdownToHtml(withEnding); + + if (asHtml !== undefined) { + newMessage.innerHTML = asHtml; + + if (atBottomBeforeUpdate) { + chatPanelScrollToBottom(); + } + } + }); + + newMessage.innerHTML = markdownToHtml(computerResponse) || computerResponse.replace(/\n/g, '
'); + chatPanelScrollToBottom(); +} + +function chatPanelGetElement(): HTMLElement | null { + return document.getElementById("chatPanel"); +} + +function chatPanelAppendMessage(sender: any, message: string) { + logoHide(); + + let messageContent = document.createElement("p"); + messageContent.className = "message-content"; + messageContent.innerHTML = message; + + let messageAuthor = document.createElement("p"); + messageAuthor.className = "message-author"; + messageAuthor.innerHTML = sender == "user" ? "You" : "Assistant"; + + let divContainingBoth = document.createElement("div"); + divContainingBoth.className = sender === "user" ? "user" : "computer"; + divContainingBoth.appendChild(messageAuthor); + divContainingBoth.appendChild(messageContent); + + let chatPanel = chatPanelGetElement(); + chatPanel?.appendChild(divContainingBoth); + chatPanelScrollToBottom(); + + return messageContent; +} + +function chatPanelIsScrollAtBottom(): boolean { + let chatPanel = chatPanelGetElement(); + let atBottom = chatPanel + ? Math.abs(chatPanel.scrollHeight - chatPanel.clientHeight - chatPanel.scrollTop) < 1 + : true; + return atBottom; +} + +function chatPanelScrollToBottom() { + let chatPanel = chatPanelGetElement(); + if (chatPanel) { + chatPanel.scrollTop = chatPanel.scrollHeight; + } +} + +function chatPanelClear() { + let chatPanel = chatPanelGetElement(); + if (chatPanel) { + chatPanel.innerHTML = ''; + } +} + +function logoGetElement() { + return document.getElementById("logo"); +} + +function logoShow() { + let logo = logoGetElement(); + if (logo) { + logo.style.display = "block"; + } +} + +function logoHide() { + let logo = logoGetElement(); + if (logo) { + logo.style.display = "none"; + } +} + +function markdownInit() { + marked.setOptions({ + highlight: (code: string, lang: string) => { + let hl = lang === undefined || lang === '' + ? hljs.highlightAuto(code).value + : hljs.highlight(lang, code).value; + return `
${hl}
`; + } + }); +} + +function markdownToHtml(markdownText: string) { + try { + return marked.parse(markdownText); + } + catch (error) { + return undefined; + } +} + +function themeInit() { + let currentTheme = localStorage.getItem('theme'); + if (currentTheme === 'dark') { + themeSetDark(); + } + else if (currentTheme === 'light') { + themeSetLight(); + } + toggleThemeButtonInit(); +} + +function themeIsLight() { + return document.body.classList.contains("light-theme"); +} + +function themeIsDark() { + return !themeIsLight(); +} + +function toggleTheme() { + if (themeIsLight()) { + themeSetDark(); + } else { + themeSetLight(); + } +} + +function themeSetLight() { + if (!themeIsLight()) { + document.body.classList.add("light-theme"); + localStorage.setItem('theme', 'light'); + + let iconElement = toggleThemeButtonGetElement()!.children[0]; + iconElement.classList.remove("fa-toggle-on"); + iconElement.classList.add("fa-toggle-off"); + } +} + +function themeSetDark() { + if (!themeIsDark()) { + document.body.classList.remove("light-theme"); + localStorage.setItem('theme', 'dark'); + + let iconElement = toggleThemeButtonGetElement()!.children[0]; + iconElement.classList.remove("fa-toggle-off"); + iconElement.classList.add("fa-toggle-on"); + } +} + +function toggleThemeButtonGetElement() { + return document.getElementById("toggleThemeButton"); +} + +function toggleThemeButtonInit() { + let buttonElement = toggleThemeButtonGetElement(); + buttonElement!.addEventListener("click", toggleTheme); + buttonElement!.addEventListener('keydown', toggleThemeButtonHandleKeyDown()); +} + +function toggleThemeButtonHandleKeyDown() { + return function (event: KeyboardEvent) { + if (event.code === 'Enter' || event.code === 'Space') { + toggleTheme(); + } + }; +} + +function userInputTextAreaGetElement() : HTMLTextAreaElement | null { + return document.getElementById("userInput") as HTMLTextAreaElement | null; +} + +function userInputTextAreaInit() { + let inputElement = 
userInputTextAreaGetElement(); + inputElement!.addEventListener("keydown", userInputTextAreaHandleKeyDown()); + inputElement!.addEventListener("input", userInputTextAreaUpdateHeight); +} + +function userInputTextAreaFocus() { + let inputElement = userInputTextAreaGetElement(); + inputElement!.focus(); +} + +function userInputTextAreaClear() { + userInputTextAreaGetElement()!.value = ''; + userInputTextAreaUpdateHeight(); +} + +function userInputTextAreaUpdateHeight() { + let userInput = userInputTextAreaGetElement()!; + let inputElement = userInputTextAreaGetElement(); + inputElement!.style.height = 'auto'; + inputElement!.style.height = (userInput.scrollHeight) + 'px'; +} + +function userInputTextAreaHandleKeyDown() { + return function (event: KeyboardEvent) { + if (event.key === "Enter") { + if (!event.shiftKey) { + event.preventDefault(); + sendMessage(); + } + } + }; +} + +function varsInit() { + document.addEventListener('DOMContentLoaded', varsUpdateHeightsAndWidths); + window.addEventListener('resize', varsUpdateHeightsAndWidths); +} + +function varsUpdateHeightsAndWidths() { + let headerHeight = (document.querySelector('#header') as HTMLElement).offsetHeight; + let userInputHeight = (document.querySelector('#userInputPanel') as HTMLElement).offsetHeight; + document.documentElement.style.setProperty('--header-height', headerHeight + 'px'); + document.documentElement.style.setProperty('--input-height', userInputHeight + 'px'); +} + +function newChat() { + chatPanelClear(); + logoShow(); + userInputTextAreaFocus(); + streamingChatCompletionsClear(); +} + +function sendMessage() { + let inputElement = userInputTextAreaGetElement(); + let inputValue = inputElement!.value; + + let notEmpty = inputValue.trim() !== ''; + if (notEmpty) { + let html = markdownToHtml(inputValue) || inputValue.replace(/\n/g, '
'); + chatPanelAppendMessage('user', html); + userInputTextAreaClear(); + varsUpdateHeightsAndWidths(); + streamingChatCompletionsProcessInput(inputValue); + } +} + +themeInit(); +markdownInit(); +userInputTextAreaInit(); +varsInit(); +streamingChatCompletionsInit(); +userInputTextAreaFocus(); + +(window as any).sendMessage = sendMessage; +(window as any).toggleTheme = toggleTheme; +(window as any).newChat = newChat; diff --git a/src/ai/.x/templates/openai-webpage-with-functions-ts/style.css b/src/ai/.x/templates/openai-webpage-with-functions-ts/style.css new file mode 100644 index 00000000..2b1dd145 --- /dev/null +++ b/src/ai/.x/templates/openai-webpage-with-functions-ts/style.css @@ -0,0 +1,367 @@ +:root { + --header-height: 0px; + --input-height: 0px; + --send-button-width: 36px; + --left-side-width: 250px; + --right-side-width: 0px; + --right-side-max-width: 768px; + --max-textarea-height: 200px; + --logo-size: 0.75in; + --logo-icon-size: 1.5em; + --border-radius: 10px; +} + +body { + background-color: #111; + color: #f2f2f2; + font-size: medium; + font-family: system-ui; + height: 100vh; + margin: 0px; + overflow: hidden; + max-height: 100vh; +} + +#header { + color: #222; +} + +body.light-theme #header { + color: #f2f2f2; +} + +#logo { + display: block; + margin-left: auto; + margin-right: auto; + margin-top: calc((100vh - var(--header-height) - var(--input-height) - 80px - var(--logo-size)) / 100 * 33); + filter: grayscale(50%); + width: var(--logo-size); + height: var(--logo-size); +} + +#logoIcon { + margin-bottom: calc(var(--logo-icon-size) / 4); + margin-right: calc(var(--logo-icon-size) / 4); + filter: grayscale(50%); + width: var(--logo-icon-size); + height: var(--logo-icon-size); +} + +#leftSide { + background-color: #000; + color: #f2f2f2; + width: var(--left-side-width); + max-width: var(--left-side-width); + height: 100vh; + max-height: 100vh; + overflow-y: auto; +} + +#newChatButton { + border: none; + cursor: pointer; + border-radius: 
var(--border-radius); + /* background-co lor: #557CB4; */ + width: calc(var(--left-side-width) - 16px); + margin-top: 16px; + margin-left: auto; + margin-right: auto; +} + +#rightSide { + width: 100%; + margin-left: auto; + margin-right: auto; + max-width: var(--right-side-max-width); +} + +#rightSideInside { + margin-left: auto; + margin-right: auto; + max-width: var(--right-side-max-width); +} + +#toggleThemeButton { + position: fixed; + top: 10px; + right: 0px; + cursor: pointer; + color: #fff; +} + +#chatPanel { + height: 100%; + max-height: calc(100vh - var(--header-height) - var(--input-height) - 32px); + overflow-y: auto; +} + +#sendButton { + border: none; + cursor: pointer; + font-size: 1em; + border-radius: var(--border-radius); + background-color: #557CB4; + width: var(--send-button-width); + padding: 0px; +} + +#userInputPanel { + display: flex; + max-width: 768px; +} + +#userInput { + margin-right: 15px; + width: 100%; + max-height: var(--max-textarea-height); + border-radius: var(--border-radius); + border-width: 2px; +} + +textarea { + resize: none; + background-color: #111; + color: #f2f2f2; +} + +body.light-theme textarea { + background-color: #fff; + color: #111; +} + +textarea.w3-border { + border-color: #333 !important; +} + +body.light-theme textarea.w3-border { + border-color: #ddd !important; +} + +textarea.w3-border:focus-visible { + border-color: #555 !important; + outline: none; +} + +body.light-theme textarea.w3-border:focus-visible { + border-color: #bbb !important; + outline: none; +} + +.user { + color: #d8d8d8; + background-color: #111; + border-radius: var(--border-radius); +} + +.computer { + color: #d8d8d8; + background-color: #111; + border-radius: var(--border-radius); +} + +div.user { + margin-bottom: 8px; + margin-right: 0px; + text-align: left; +} + +div.computer { + margin-bottom: 8px; + margin-right: 0px; + text-align: left; +} + +.message-author { + font-weight: bold; + padding-top: calc(var(--border-radius) / 2); + 
padding-left: var(--border-radius); + padding-right: var(--border-radius); +} + +p.message-author, p.message-author p { + margin: 0px; +} + +.message-content { + padding-left: var(--border-radius); + padding-bottom: calc(var(--border-radius) / 2); + padding-right: var(--border-radius); +} + +p.message-content, p.message-content p { + margin-top: 0px; + margin-left: 0px; + margin-right: 0px; +} + +.light-theme { + background-color: #fff; +} + +body.light-theme #toggleThemeButton { + color: #888; +} + +body.light-theme .user { + background-color: #fdfdfd; + color: #111; +} + +body.light-theme .computer { + background-color: #fdfdfd; + color: #111; +} + +#userInput::-webkit-scrollbar { + display: none; +} +#userInput { + -ms-overflow-style: none; + scrollbar-width: none; +} + +::-webkit-scrollbar { + height: 1rem; + width: .5rem; + background-color: #111; +} + +body.light-theme ::-webkit-scrollbar { + background-color: #fdfdfd; +} + +::-webkit-scrollbar:horizontal { + height: .5rem; + width: 1rem +} + +::-webkit-scrollbar:vertical { + height: .5rem; + width: 1rem +} + +::-webkit-scrollbar-track { + background-color: transparent; + border-radius: 9999px; +} + +::-webkit-scrollbar-thumb { + background-color: #0a0a0a; + border-color: rgba(255,255,255,var(--tw-border-opacity)); + border-radius: 9999px; + border-width: 1px; +} + +body.light-theme ::-webkit-scrollbar-thumb { + background-color: #fafafa; +} + +::-webkit-scrollbar-thumb:hover { + background-color: rgba(217,217,227,var(--tw-bg-opacity)) +} + + +.hljs { + margin: 0px; + padding: 16px; + padding-right: 0px; + border-radius: var(--border-radius); + overflow-x: auto; + max-width: 90vw; +} + +/* + +Atom One Dark by Daniel Gamage +Original One Dark Syntax theme from https://github.com/atom/one-dark-syntax + +base: #282c34 +mono-1: #abb2bf +mono-2: #818896 +mono-3: #5c6370 +hue-1: #56b6c2 +hue-2: #61aeee +hue-3: #c678dd +hue-4: #98c379 +hue-5: #e06c75 +hue-5-2: #be5046 +hue-6: #d19a66 +hue-6-2: #e6c07b + +*/ + +.hljs 
{ + color: #abb2bf; + background: #282c34; + } + + .hljs-comment, + .hljs-quote { + color: #5c6370; + font-style: italic; + } + + .hljs-doctag, + .hljs-keyword, + .hljs-formula { + color: #c678dd; + } + + .hljs-section, + .hljs-name, + .hljs-selector-tag, + .hljs-deletion, + .hljs-subst { + color: #e06c75; + } + + .hljs-literal { + color: #56b6c2; + } + + .hljs-string, + .hljs-regexp, + .hljs-addition, + .hljs-attribute, + .hljs-meta .hljs-string { + color: #98c379; + } + + .hljs-attr, + .hljs-variable, + .hljs-template-variable, + .hljs-type, + .hljs-selector-class, + .hljs-selector-attr, + .hljs-selector-pseudo, + .hljs-number { + color: #d19a66; + } + + .hljs-symbol, + .hljs-bullet, + .hljs-link, + .hljs-meta, + .hljs-selector-id, + .hljs-title { + color: #61aeee; + } + + .hljs-built_in, + .hljs-title.class_, + .hljs-class .hljs-title { + color: #e6c07b; + } + + .hljs-emphasis { + font-style: italic; + } + + .hljs-strong { + font-weight: bold; + } + + .hljs-link { + text-decoration: underline; + } diff --git a/src/ai/.x/templates/openai-webpage-with-functions-ts/tsconfig.json b/src/ai/.x/templates/openai-webpage-with-functions-ts/tsconfig.json new file mode 100644 index 00000000..464e3ae2 --- /dev/null +++ b/src/ai/.x/templates/openai-webpage-with-functions-ts/tsconfig.json @@ -0,0 +1,16 @@ +{ + "compilerOptions": { + "moduleResolution": "node", + "esModuleInterop": true, + "outDir": "./dist/", + "sourceMap": true, + "strict": true, + "module": "es6", + "target": "es5", + "allowJs": true, + "typeRoots": ["./node_modules/@types", "./types"] + }, + "include": [ + "./src/**/*" + ] +} diff --git a/src/ai/.x/templates/openai-webpage-with-functions-ts/types/marked.d.ts b/src/ai/.x/templates/openai-webpage-with-functions-ts/types/marked.d.ts new file mode 100644 index 00000000..6c35e540 --- /dev/null +++ b/src/ai/.x/templates/openai-webpage-with-functions-ts/types/marked.d.ts @@ -0,0 +1 @@ +declare module 'marked'; diff --git 
a/src/ai/.x/templates/openai-webpage-with-functions-ts/webpack.config.js b/src/ai/.x/templates/openai-webpage-with-functions-ts/webpack.config.js new file mode 100644 index 00000000..82f4687f --- /dev/null +++ b/src/ai/.x/templates/openai-webpage-with-functions-ts/webpack.config.js @@ -0,0 +1,32 @@ +const path = require('path'); +const webpack = require('webpack'); +const Dotenv = require('dotenv-webpack'); + +module.exports = { + entry: './src/script.ts', + output: { + filename: 'main.js', + path: path.resolve(__dirname, 'dist'), + }, + plugins: [ + new Dotenv(), + new webpack.DefinePlugin({ + 'process.env.ENDPOINT': JSON.stringify(process.env.ENDPOINT), + 'process.env.AZURE_API_KEY': JSON.stringify(process.env.AZURE_API_KEY), + 'process.env.DEPLOYMENT_NAME': JSON.stringify(process.env.DEPLOYMENT_NAME), + 'process.env.SYSTEM_PROMPT': JSON.stringify(process.env.SYSTEM_PROMPT), + }), + ], + resolve: { + extensions: [ '.tsx', '.ts', '.js' ], + }, + module: { + rules: [ + { + test: /\.tsx?$/, + use: 'ts-loader', + exclude: /node_modules/, + }, + ], + }, +}; diff --git a/src/ai/.x/templates/openai-webpage-with-functions/.env b/src/ai/.x/templates/openai-webpage-with-functions/.env new file mode 100644 index 00000000..bd323058 --- /dev/null +++ b/src/ai/.x/templates/openai-webpage-with-functions/.env @@ -0,0 +1,10 @@ +<#@ template hostspecific="true" #> +<#@ output extension=".env" encoding="utf-8" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_ENDPOINT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_KEY" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_CHAT_DEPLOYMENT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_SYSTEM_PROMPT" #> +AZURE_OPENAI_CHAT_DEPLOYMENT=<#= AZURE_OPENAI_CHAT_DEPLOYMENT #> +AZURE_OPENAI_KEY=<#= AZURE_OPENAI_KEY #> +AZURE_OPENAI_ENDPOINT=<#= AZURE_OPENAI_ENDPOINT #> +AZURE_OPENAI_SYSTEM_PROMPT=<#= AZURE_OPENAI_SYSTEM_PROMPT #> diff --git a/src/ai/.x/templates/openai-webpage-with-functions/README.md 
b/src/ai/.x/templates/openai-webpage-with-functions/README.md new file mode 100644 index 00000000..8fee923d --- /dev/null +++ b/src/ai/.x/templates/openai-webpage-with-functions/README.md @@ -0,0 +1,35 @@ +# `ai` chat website + +This is a simple website chat interface that uses OpenAI's API to generate text responses to user input. + +User input is typed into a text box and added to the conversation as a message inside a chat panel. The panel scrolls up and the computer responds with streaming text output into another message in the chat panel. There is a left nav that has a "new chat" button and has a spot for future expansion w/ a list of historical chats. + +## Setup + +To build the website, run the following commands: + +```bash +npm install +npx webpack +``` + +To run the website, launch `index.html` in your browser. + +These setup steps are also represented in tasks.json and launch.json, so that you can build and run the website from within VS Code. + +## Project structure + +| Category | File | Description +| --- | --- | --- +| **SOURCE CODE** | ai.png | Logo/icon for the website. +| | index.html | HTML file with controls and layout. +| | style.css | CSS file with layout and styling. +| | src/script.js | Main JS file with HTML to JS interactions. +| | src/ChatCompletionsStreaming.js | Main JS file with JS to OpenAI interactions. +| | | +| **VS CODE** | .vscode/tasks.json | VS Code tasks to build and run the website. +| | .vscode/launch.json | VS Code launch configuration to run the website. +| | | +| **BUILD + PACKAGING** | .env | Contains the API keys, endpoints, etc. +| | package.json | Contains the dependencies. +| | webpack.config.js | The webpack config file. 
\ No newline at end of file diff --git a/src/ai/.x/templates/openai-webpage/_.json b/src/ai/.x/templates/openai-webpage-with-functions/_.json similarity index 89% rename from src/ai/.x/templates/openai-webpage/_.json rename to src/ai/.x/templates/openai-webpage-with-functions/_.json index 9cdb4b6f..b5280310 100644 --- a/src/ai/.x/templates/openai-webpage/_.json +++ b/src/ai/.x/templates/openai-webpage-with-functions/_.json @@ -1,6 +1,6 @@ { "_LongName": "OpenAI Webpage (w/ Functions)", - "_ShortName": "openai-webpage", + "_ShortName": "openai-webpage-with-functions", "_Language": "JavaScript", "ClassName": "OpenAIChatCompletionsFunctionsStreamingClass", "AZURE_OPENAI_ENDPOINT": "", diff --git a/src/ai/.x/templates/openai-webpage-with-functions/ai.png b/src/ai/.x/templates/openai-webpage-with-functions/ai.png new file mode 100644 index 0000000000000000000000000000000000000000..4ba344c95ca7f9b1da183b2b3c959f6c4b723ee5 GIT binary patch literal 46281 zcmV)TK(W7xP)PyA07*naRCr$Oy$Q5rSy?9fohgPpX5P%3V`bI|RTKpRQV0kX&+k>fK2t>8 z%tY;8>#dHpaz);V6KB}_+yDOl*}?yQ{qGU@|LF)g=X=R7A}e+a~$NZgTetEAV=;Na8Tqf3a0@498T`!Md~_v z<~V#FM~>xDK<*V@?zx2{e;2vyfE&<%*A4yt?CSjdL#OsGrmK5)&77O< zd&9yH{Lq726lvraMd2icQ>2LlFUedlLE#l{k>ze)j-m^AIf~qIdGP`Tih_^MKluPI zcnf?S84JUjR;?r92Uz4TbAv zZc${sbXF7|ih_sAg*fc|xdTT=OzuZ6nd`{&Paev_lM~vPjH%uOP{2Mdr^kNEUlop< zA$NGU^0_=Z_+!U$T)78x4h}nq%yk^kDT>_TnRz^rIWD}c*Yl%ZuX}E$=RLS@|MLC2 zcg)_o_WdBS!&0V z7ejS!^S6Z!k)$ZEuETTjdwUI;r`qL3R^-*ntgl1+EcSSn8+_@r+wYOJe_#8qu}GSq z>q6q(dS*UC6nTQ8NZ>dge5V7??P0W)px5ybIKyOdE;{ zC|;Ru#vko?9pD`B;P3w#9{AKhVQw?WLg)g61Rd8y*AI}%kP<8vm_)=2?_4yw ztMB#W2zsaR(8+r844YnvvmFnomzHt#@L_!6$}8~I9lMY_J*19j?$?E%@O}Z$%sPeR zB+yx)5IhyY^qxv!5Q?%;ToLm;wgk6X1SPnvu&sU0V>yhnSmi$KJEeu5s|2O_9R+py z5+UANG$nA2u*hrYs3S@+Ez+6$h;ANoANsk-w{p4JWv9o>=kmRaEOp?!E<7)S<0i=S z46?1J9U$}JxijeaE|TF2dffqh=S04LfAP;Reev}-ecRVBer`LEkAXWZiWhqsKJIf< zyn6$8{K`iW-SrThtqe=OIRuG=EXm>eo=DgtiDkx8(g)d-c*+8W%N^FN^}G-X9f*_z z5~sj0P)XcNNiB3Q=9j*iBzbT*N9rn|1p>Jv=O7?BM+A1tRRf-1ubUeQBLL*Y^S;cn 
z{NCd(3@!=>Uhbloae*cnc7_=GF1Flx+`Ib#K6mXkxc~6wSo7yF^nE0Hd*~(l90FL` zfihXJ7L;`$TUJm9w3X0S3JQl30%jOYQGr~h-wdMNE$l+#=RB_?(s&%)g#ovnd>$vi zt$|xOoPb-UZ++(}uQvhQx+A!kOX#jH}=BW8XIS3)2AJ0C!OouS9{53Jp0i!aw}t zhp>3>I%ZcB6q!onA}!!}0sKyYEKAT4Ntz?`9HAO~a65j5>mVx}0a-T%I;0?mVhnwy zC>>6Yk+K|~PVSV)5WJBG0gub`ZgJV}b633Qy<7{tM!0l#0H`8ciIvND1 z3PPpp`d$aOU5MpP=EqV&rseBk5|fAv$ph+StBEDT)C zW*$7pMiUlfhewx#H9pk(S?$m8cte$Z+NxY zmWij5H_M<^^nzgU9lw%{Mi5lA21rtldR9KCf~(^0oVNyO>$JomLdM%jVAVBi^v2K@ z*-_MKz*3=;8I$P#P&e_lLAK;8F3ZUk|C3h zj?>}Rk0i{nd#Q`z=yqK9h~CG3{ina-6>Wf5;0}ud5r7AW_}icV0QyJQu&`C2SNP}@ z9%oN~>o~$udGB7}VZ_zu@o%GL^SE4?FK?3zQp^|pKL=i^0=FVc-W-vi$kr~W7Ot#P zP`50KS}Sq+TUVr~j1`YmRb05aE(+z~shcDANohD9dasn!yu(iJiIks>^&#&J6#%ph^16&YUM8#hs!t<~J9DVPHv3~2l=nWloGFQMu{AwIfgwKqX0-K(RI=jziVF~|m`DM8C zvP*H_zCAeOcj3l`NK@`$RN7>*nGr2{b18vq61@gmOIt)CG{%OyG`32(%#8(i3zT*e z32+sTZEH$X(VYaY#`&%CUsrg!5IIlf#VS*a6~^UGUAw>bd1d)F6lr@x4^Hl@@+mi% z%Wf-zM#-2iV6*$G(9GpoDbQqjdj;Hh6Z6?BUj5R?yy3O~e)q50vn#lV#Z$S@pZdsO z|J|8C`3H1Q4-pg{nPl>4zK84UqCjQ@;m#h4%Xj_hwcqmGpLSmO$S>AcOYx7V_u=lZ zKIQ&@zWqlYy7i7{bk`#6pWlUal!&w~JP&c4BH;ghAnO-c>m=x1x$iTNdis-oI{1z! 
zezI}C&#W)4-SN*aKJodxek*Mb7={&JTMS%ZK2ceAn5?{Vi(8r zjpVAQKjFiNpM2eWo##C2?)o`5e&rM2d)t5g^!s*3!9qXw;DtL@hV%Y%*0@=H$Oj@^qo=Koyc)E)h)Wt}5NFS-Esqz`;4rYS@vvGt4|G~5G`l%bf z{?S)D$K~8|;jLmXPK4Ke@;|-ruXh|DU`IsPf!cSAj*HFBb?lj6z*aZIiTMQAfA>qC zHuG)Q|9$;oCjCGC?SFdptsndI4+Lk1bF+yLpIdp*5mQfxw&fth!?Pni`PJY2I{!6K z`OWj4@8Mtg^B;TQFF*GSI|c=Icr(~sTNm!VxU>s#9AZ5kBH7i&Xu1EHr@rpHzsY&p z^5(R27Pp(+x3lwy3M>8{3=&3T$8%(|jj=?Qgy-yUn(q0EPq7J)2I3KJ(S>6l^mGEQP176n(~ zFa9Rk1=MuP!HbIsbKO})XNOqu&f!UqkACkL-}Ka1SKtndD^LBGPkiORKl)UE->`#z z?0`PSI3w=?I{g4i5#iiijwik0<&Sq>_V|Xh-VT(-$G-B^fB4|XKDjt>I^HmY?*{NZ zns^Fq%ocdu8(#9Q&MO}CN9O~!{l56UFTMOTANc6U_9g*(v5W4^EY6%=L4RQe>)iya z%iUwo`q-PVxX|<6{`wF6ZFt-9r|pP4=;l7cq>vrO^MO1`;pHCA%vr57X_>3ZLtt3r5dgVN_(vu2Z{goNz zy=|_vWUH zFmkNUW>|d6)xUbhyS}|a+Ajn|_rK-$-}~_2ANle5K>;@cW_q(2q#2T{mX1C4cYo-D zfoQLF{N_LW^|OEd&p)s`bm1mGT$=g%K2COHJoX1(`t0D9k84nyZ5Q7^Jv8&#w|(&3 z%!9+;Vle~9pGBO~OO_*Ws7K^j_xd7T;nlWVKc;RrvG3r7dh=Xz-?9KX5~M7oCN-s5%$3T$DOBQu4jC7LHv=Ri z%sZQS%p>ss{MyZr`8Eaj7tj3mCx7ynzT=X04|7AnFMJGX;R}3tVTxfV!`VF@Jo$~U zy2knDEAF}wK-Z5c{_f2DCvN`vvj^f{$KA}(>&;-`I#_zvb-%yw?Js)OMW64}Ck}n) z=3hO2Fbu%h58n?kj7K=Nv%ph7^|q#Hx`314`Xe9sxc7xqFJ9R6;dt}vlIAKzioh)} zNuxlo=b<+wxAN& zG!fbu3iTu#ud17u?34cN&C3^i zz6XB!4}W;;Hw%V- zSny4`HaI@Z$BP*{!Ul8P*OwAgg0b&PQK|#HG0C z`yK}I7Mz}oD2g!a^bkbHix2W$&R`UU46DOn z!N;B6dhwD^zq1*gbP*`}%*)o-o4%e!YIb(dBcV&wFYIesHRsU}#AOId@i)Y?V!mvwn_k{ILC zv^C4`#ey*&jHMQh>Xp^v_Y{RDsH;XJ8cLnHmVTx_g}RMettYCq@txL=HlW)U5BYn7 z%^%$cPJ5Za%M+@A2s;QNN|mJG>qr7_^CUdxNFlvKkEs{_vvWKGo(ndH3uB)Z57Y|7l8Y3p7m3s zy&Fz%F6$uoJe&z~?0WWNe*4mQzU(_M7K}XpvoC&ovg>T_JE0?1jzKcO+Mb|z%3uDc zi(Y{D{>uORj+GDp{ZCyI&BN);BBSFajX_JgZ@`qfJ_ha#Ry#Xz@2+Kh?(${ayL&fQ zf+d8M7O7f75*qId7ePi?aAJnYJUXfp{XFT3mo;-DBYhZTGXdN+hm1;21$7%}RkWUt zqK#&?w1pI_X^E-8Uoo00vd_5DN~dBa+4UaV_G(=WEmLAWrlb{nqPjq3(qxE+>}s_g zq|^@t)K%<@KEEoK zG~3baLWz@L#Tj9=zn?w+2fzIZ&P%TN^2LI&_~YBY;dAf($mb4>0(24&u>)-OeJnin zk)JyJzE?c!V$T=<**#DG^7}vf+5X7^`b7_&P6tKb#Zh-1&v@%Q1Ls+kp03!oqw~HiSz*l!H;0uR#;PyRxaddGX)`J;Q 
z!pSBQCxDaD3rdd~!_P-Z{RD}GV>{?1T?0Us%qD(u47BClmy=$()a`fHI#GX8V`Qlm z$d-9f=|P=mH-o0s?o++2htd9S%(Pdt>Zk;6j(gkVx~*xjc3+d6PW6Q?^iIN{a|sP$ zjF8Vb6(LZX^~^Wzx#_v@zR+_P|LypJzkAOIP8`_mpqKerOH%m#F1#d_ zpspa-z^uXvkc-!87OUME+`nr9NA@h^^9K&&fw@^#c9=oIoiF#%Nmc1f>ByvDF*=OL zc!|6aZZA^MqjYauQF!;I>vE#RwvwEF+&b9XZg3mLW|iAEhn}Q;3vkRy88FvaJZ+Du z1jnf(tzk83arH@de0j=RJh?3OiJr{b9n=HVG#UuL6VRIjPr#j`<2;D>{Nz&=+@E~S zyS}>T=-`qaBM%9Qz2_rM>6&H~U?Mi|d`A~+J3Q=u-s9hN`5Rt*!QeUftAF+5U;g9& z{@(ptKKh%C-SE-r_OXQw4|YQ=J^wL(e9g_TdfA17@#N2b{N@M$_MhL}Kzk@+P$Eleld-@N5*A31Kt~#%isVEBXp6~v(FNSxYxNc_TqU-gLbbN#<pKvBJ_G#>-BNx(h~mu@D;dg=MJoNJ%p|gH|Y=*1^mFH@E7hq^)o7dI4*F(6clYSKddBQ`hG}E|fa$YSSW#CWJ2(8WHD+h5!beJ&5<- z{Ee5%9sKpTzw7?}C*n&N2b4S}b|OyU_#P5wO?4f_d5jG=La}=u2cGz-UqA4cZ~LL` z09h1;d-7-h=)Dhq=1V{3p4q~|-U8CCNW!Vm=^%1}q*GusA7cKx!=JwT#&3P0^L6_e zY&-RJ;>{oX&2yiA&c!aE5U}dJj;cxnex9q$p$`Qty#9$Y=NzrPDx8yoQGJ@p)kvE;0~QSdC@`YdU$Yl z3AgUO9Jekn@>Cgts(S=S=a4l2ESW3n$ zFQp`GDwNiA)_%@%TQ`ALb*D@HSd%kS0!dudr?+KaDY z88Sa?T9lekHl}kFnb49Vrnl2U>Zb7A6L{ZSZcuQ4;#KcCw&z56+2Vk*#+P6oa!9&m zM{WwIA0P~e2)aHt(+KHIfQ`Plan&=g|1W!<@~Dq=Zn*5$az+06eb?Xrg|B?;*(3M9 z-aj|k+1X67h%T7?kR*xh3NdnoX@*YM$E@#RJxFn4C!*yiJ^I5}UH{k*_V+Cuu|SXh z^4{x?fBBAAKlG1BerU%!7JFw#=tc$nZV${<9MYigy71!&v%ZhODX{LwSl`>jLtb?1 z2`_l&&+L8FH6MqcZsDBgU}v!7_@_U2<5&Ouvu`@&%)uE@dik<=$=zvA@;V+e0#s7d z7z@q#ZPz4Hm?+1>#d@An_X1M>?Kgr(cMf>^z53=!tSFn&MULR}NB z$Y@E)ldN9icF!2;UmBLmfG1Mh?Gu~Q`H^~Mz`9P!bn%umM zfoXwUA}%M;5_@lLIx?A_R7nlZj3SfnS-U@LS}c=l?MMKX)AzB~%Ms5oHrT~rFp&H1%*y3+~7osJ}fAGnCJR6=1L&l4Ci)?C0U6HlsAAaMhUJ-K^fKR$Kf z5x8T=UaWR{7qvT$FT`+%!IM3==m;*FAw>{^@5NmM8z4*S!DOp1apBo1=TY z-@_=4(Irldwk#W`Zs@)&^Kwm`5HZ0)|mWZk{9P^^sNt-#(Ll3A!32jVsWjuW4)ofTb+U5hZ-P^hj>lHA0Bw{7cnD$?A>nmL?px<&yYao({eJ8*1!?kW7z zTfR~4+@Jog_dl?F$I9Ww*h3gFchM6?18D-Ec3V!Tcw#jZhT}{rq`5a5_|mqwT4#bC zd>J6o7HsIG4Iwbe2ofiu76u-o-YbnFsJ2_KZnbp^H?+wQr89z(NOx=kSrU~^I4U_O zQ{f^{f6HBk&%&rePGl#iG^R_0Sv!zpGtU!l&Fk>I+~!?MQ$Dw=_euqF=r7`UuoqvN z-;ck$6QrumVU%6eNM5TFTIY>Ab81!)OG#{Yjs!{3lUSt>){_|8_iPp9x}@jM 
zG3ov+*z5P-=G&6gK;gmJiL?v`anpBB;Fo{qhP{G?PkrzEPVBy8<-k%BAaptT#uci* zpr%8ZzS*e!CFK-lW5(x4^IMn9VBB>dnQJuvSKLc>+#kTGSJG)l=PymAhvV?dvG0u zZEJ_d##G@0R6lfW8S9MM!7R9$9BjXDF9KpD$usz*jv*tJNi0+Ja1!tRsi!Z?nEl0T z-+gM&of~`i#WY!IY8XX5kwMukE7`;zb6aI{+}fsJbK|6=Hz5ZxWfqN)nf&xlrY9B zW*sN?xx~cB#YuavEIQo=eaWWRE+Fq18dp@we$|{Jd_b*S{;= z2*BHEdUXLH!l!`mcHl*thnW>Idd@@m`8PjJ!TqPNeaAz~cdqU~5cic6GIfg5+okrY z4{Rl~Ot?(8N@fCzH_W_5p)BU&5{YSY>{Q}acO^NC1fhV%#CTD%np{VfzSiAL_-n4A zE&3y8G7`jso{}Z;ARtsFt^&6lE;~e}7)m*Y$_!k_S8&1dxNfQm%@nsRFA)clZj6A` z#O^Yb9L!YWVu(lBaQiqmw-;YpIEc^fx&jZ*??sYmtV)3ptjR?ELl#e`hcq`;UrfoW zhT>+1s~t-+<^gQQPgkRC>0B!^u>_CiYX(%tWZ}@6R7D|L&fTzAx1_d!vBse%(UUru z?e~0Y%eh-VoR(r;4-iuvwxu_B*TD6KmT+7%#OUB8-uL!z+%H)C%kO>1Lp$$W+rKyI z!7Y4|+!F3cpS{V?6An)4Xg({m&?ULo7KgFqpsEnnkf4%$OOKse5qOS1C4NpoYL>h5 zY@7bn+^!_O$ONj}ZITjD_XAA^CctHtg&6Uy1`d94KGw%DP?CS&d=5c-R_XMP|4V#i^8xxa)#e;1;_MT`MD9fmdd zpsvY+M+7?4?v%bYoggJhTXtd-TUudw%$=@Z@mVbx1Q4wx1zAY60*R@z8t-=EuUL>; z4xH-Z{6cjjtr4d}#nKMDSlWNr?fT5hvahz@W@%GNg%!{;)0v`<15aQ}XL4 z1h%aJMG*BEU{orRQme=;C_Q)So;CGI_?e|Yd}vYpjHK3uq1pLbt4Hru2Gi&fU85VQ z(rS*cm1*N=OmOaIIsB9oMyq5c41*OPGj|f@$@V1)`f)7x>U%SIsJnoF=Squ3QWn0nj~v;R*8|8vWn9bHZ#7d zoiWvgs`@2{QpzGRQGX^(pl#_8El9!urrWFAbx}%%*Jj^SXqoY)xKg;VK#|O#(%D*N zE6FILAp@yc@$%S3?t6$Ni|S##@2%ggrojLFGk^Av!}aKTR{2R;U!>3gsyxk*`&y=q z#b_Kx37TzR4EGZKrlE?}HgAH<&E(U&r^=*}07jVax?S4(BFt43`op+t z3FX!A6nOkxZNzHL(BeImwpOysZ-kcUvaw~|sElyu=xCiaBt9)8rb?EW{zyMWom4Jv z3juKkBw3YNA_m}jh9F~&4G*d3VRaN^Wi!Gs3oyz&9NK>gyvob3q2?Z9pC+um>c z-8yCL=VN8x$^~i`k?2-cnKsX(Q%vlzje04ZXO3G}62w`#z(LySV6)>Q4Nl{oZ+XEX z8LPMb@~y?0)I;L*C7akUT=dXIk!DDJ0)>-D6QF{-u4HynayAC6sMNL^cWD7I9zHwh zjNz3vDb&P8EQ({b-HX}H*pCT4F@=O_q|xT z>wXlQBkbF`2hVxj<8fskqaUsS!3a?j!uR@ePHTP?;F>Mnjt}3ZhNr0~SbFzt@V;FV zwW2_6cf7SVTkc!8qUfSmi>xK1W3{XanQjaa4`tZ;KsMVTsilTf8>t!`$w}217}{r znX$|nv(!`i;nlF0b+ovR5msV}$eI;;HQ&vcbj;mLH6bII#*_hLiLr`?POxhCB86)q zl(s&HrH*3Fv5|8&aMWC<3MExqQ&(UbW-6&5rSeb|;4$87%CQ**QU_Wetn{F9vBwGe zovy^?6sd=kM^E6Me>?(j7-E+n;QGZ5zWee;>>izlyJB9Hl$fD;i 
z#a#mksMyltl6PcR(!!oxc#+t2S-{xVN1P;~OAxiJ6tY-lle10|tNg827WhI&YE|W&l-Nq%1hQUtO|p*le9t~t> z{NwGw`Vg~ujBA|%Ua>33)0P7Cl1=nj_$Ou2NEVebW!}tgz#S^(9C)#@?ig@qj05+z zE}=`uStPq3!ELGCMEcRvi^goloV%Lqf0_iYOX~U`eyYkz^=(SeTUCvFY z4`QULW{yaGMY2Xqb&Pj5BY-9Gm>iT|QkERG79|E=X<~9A%1&h&DJAtbD6P`~+TJe| z-0g)gQoIK4`gz+D-wtDG$1ZKg%_5=~1#XilAv&6)<4{z#7+tNu%l8#Nyzxy{x+3Ea~FE-SlM^|bx~YO$#3h?SG$PK!!{HiVH) zJt;+tQBW@e-1YmO2JT6~ws$0k8M^iJ9JI#7=Qi=ct@mKoS4!(hntfBPY3s;_>PMK zcU?f$rMDe3@!lGGs>AU4dIX&X z_Y~jpc<_ExHZ+Q{>jN})q4xK73)O>E86~z7mZ@dfY~dhY9pJ=~<9PV!gJS>gr4ivG#Xa301NEZ3WEKU5Im~CFjI{K$_V}D^}N$1z0*M)U(zu&Thw}4eoaITqD`knA#drxAIQjc7ULRNui1|JPegO#+CtPgdJ)C*@$qTCk??vnl)1(nyI1nyGE*`2jUE8DbZ zDyGXXj1o#uD-10Wa3^@?+rC##fp7S|yY9blv~=l~k=!gWZ^6yl5yI_qZi_k*CAe*` znh9LHUGb=Wqt)Ow?c5fO)#TJN!<)iLOjvEYFdpjJy7E|^j>(I&;_9u7)=nouT{)dF zESWrn>tyJRe1w}BPMuuE!}lLYe(oXoMT#`@u^bKYlI2-EZ@G(WlXc7v&q(PE?FXa| zG)7yBNmB~YB*uxJcEyGp>sW=qDhfK33M{J_$hQqXLsaDEzyI18r2* zd`;r^w9I^MQd>EY;DxlrD3rXcfcq`4z0`vH*nOj=%eI_FjjGF4aKq>shtthh%_{{!b@pTtHDuTqZZR5lRjlfn8c}7GHwrU zFY_d@DfYG`D9IZI2nRXNoZG^guO3JA;4yRxCVe>Q#}O`%hj{h1hw&7w;*w+yULKmD zFWP%;u^AHsV`9fOhTJII1^o(IVobQW;pJmMx&Ux1s9Fm71j9i6RFlT34Qw7i3EYjH z!)zC2Po)+eJipX7N6rnnkB=$l-ySR z5l0MZiI$V)E2ynfYe8v)VbVj_=>u1CRXU)x%3IyZd>vDLrn zMr4RlSK_-zTQOG8tsp$Rg5=>d7_F}&%(oaKE}^meqb?)*-UegM-_g90hHA0k#Kn(ju##i4|To zS=T9Mz`myRlEqsQ!ZT+PuWVp+c7V;*a|rV_-vQu8P<2@2@(D8)kg$+FC=m zNYU|fgjs})-2(c3Bq1AWG2&3;X$XE*ENS<)p|q+KO?yp~-1gb0{&0fyZ~sPwK`k^v zzi-ZyO@6lct%Ka=qgrrZoYXcq$Yccy;>>uqJ*eb<_v^2);68ruVAr8dN5O5KI0|lc zzq3}J20H1BxsC!JDN?VWZMVYe_3$PQXxY@ZmR;Sn(KZ#ok~+6+q?Rph2kuFeP{05B zbEunt{P~m;kupSZSQI9imx+xia0e06b7zpQ46t!}gss5_2H{!c>6Ro{&$$IUXCA^+ z7iaJv9&r#?Bty&&H!vRFcV$;~wmn^OJX65MTJ)gB9J z|7#*ICyuKv4Pk!Rmf(^k-KttOX}s&0Y2eJX5_GBiP~rA~Q~eOn{khjExNrKMJ0G}b zxa;tiyF=rt8Bin@mgopJk|CEq0>)aRiM$sA?pi$OMDW{fzmzaGKW|L&GXQI^46!QM zkjU`>oOXC-dy%yl=622_dET0}C6}%Mv{%!FH-{|Oz^g1A0cX!4UK?P2CBfF#7B)6c zYk%=9LeF>L4F=c~ZQ=#{m+>n4bbA>xiNWg&W!Sgb8ZXfS+@o^kiPDuyl0 
z3sKq*+|w#Bm4jqr94e~O-hpbK(N;xN@gUMLOT4?EURW4725`}Qbsr9ARX+R@kvNw8q| zH#GnXu^Z!s+*5sbwWrSt#&i2j z3Db|-;J)#-A-sNu{kU z_c1XsCF<@%C3hKOTJK6wOQWhhZOug>A-VM)jpjba2GT6}HXW&stH9mZj!SUs_q1jZ z+$`#oj8a7FA>yqe;^7G4AjW7d!N$s2c)%nhDe4XMr$JswlkUV5`8E51=k# z+ksPK<0|Q4O~6e9+xT3X&J4EP*%(ZlCD|-}tnQEDwwmcwzFJnbwm`8?J^kD|5*6Gc zn4L_lCC`8F8Rrh97^{1`AF$J>i zAe-Q+s)M_21sPui7Y5RbW7~nfu-~=8txKKuZq{?5y|q9*$`Edi1l;K$MluR9+}yxu za}&{UfFh;9V67KTSL+oayx|F4zv$t{M_-NIrys&4y*{F?6{KAzRkQbE0zdQ6N&C{$ zC1kfXCx_4Uwr#+@om7-DsDXPDJ*Y2^iF0ZTk4yk|o5omhSLeAnaO)*n!qN8Xq|zce>PXs{X)yNZYW;O6>ObSb=bI62vrcTf6qSwc|CG^CxrG+iP!6~OVyX;&|; zBzJ@MY!j7OEl|w_p*fD#0^|?Uk|;&I6(fsc0XN?X>cIx3_Xd(UR3+F}Iw}&(!ok95 z9m|8$_?{;|29JwF?9GPKc7%DBorD2ZWVTE%qa2^r^fDIQl#HPBgL@*?+eVobIy$v3 zikX24-OlXqWLK**j)}=E6^^C@yNRy~@6->Q;HJo7Y)J|3H+<;Gv18Gm%j)2!uU`2O zp}jr0$C?hAtG7cQ+n|+I&ZLq?h0_XE^8ISmD}Pa1%{idaavG7?;TX$w8b|mlVC_+3OU^E;ExRW@B zRB$EVOcV4veH2M75`8WoVqbO^ulc$w@w8-weWMMN>X&MqH=X`cW2I_NdOlh*jgPm- zcajuuE{f^aut`E?nxL`QY4^k#zy4%huDmB=M>a3JXtn`&8Km?zfV(|V(@cYHaPxtx zDuYeM@qZ<_>-RY|(}(VGDoxZKvI1F>>%UPZMfd5%vEZiFg!F_`JPsoyX@r2?07hGI z{XoXRW!0)8L~Hzu$7(y+_04E3FepEJSo3NKlKEngVi-3and8 zq?YxB+R{2mMzO2atZQuZHiUCRUvl|%E2^)#Md_LNfMqB zQ|yjb@bW!z`lBL0iyA8QP9JSmB(+juVh3M_7)V=f!u%5W90x$a6l zX}E>GNyz3ZQrW7AMp7fs)(&q=-R&qs1LRYx2UxIO2)L&S9!pQ#_i{dv*M(zatm}?F zL)FENDEE1omPQ)pKo&zG60i&{BMme3vKYPL79RIMy1xC#>n_8d)zet;yKqAG4C3_? 
zjRk5U`+WSm@qKOQnzTU1CFo>t=J^3Uy?jF(KU0OFxxxDP1h)hON=p_NBuGV}e8GEu zSVLalc*|{f-xn@FVj?Xu;6`RbUXA;mb@t?BrR#QkCr%sO3W9d+!eHjS?vn;Y2Kn>>CKZeWs}1JquKMi&uHRLbk@m5ZXh`FjRuY6zQh97Gnn`fW@ocs`7vTl( zexqjMyz#@g-Fe?=`RZxlCM~g&TM8VNw8Vn@e89FPX~!$)bAhyDt{W0$g5kd{Vd{Kr zy!~@4gw<4awZ9R{C1Yf0)RYbeWprB$^p%ocfm;fQ8gQp^QG%OFWEQg8R4Uezgy3cz zNs=%)$O==L13!wezw4m8dIHb8Y%iX_G>c2(A^d2B88$Sg^q!E3bH1&= z4f3W}QM2kNO4KQ3^QP{k5-Hd;_f5LD@$0wdZGxxMmiQ14rI}auXljD{mfP;QFI>Jx zvfrg#LQSE(Ajv%)+j68wlid> z@_gXF?Y?mN+BUf9p=6rbbcOQ#;I?fc>O8eBIWHF6?d|VEz^!9#yr^|>PYAVa0JpZ; z=nkY!lI$_Ocg9YL^vGE4H5Y3M6+}vIN@RAn6$g*-Z}RL^X^2o0N_bX#r9)?3IxNRA zTa*)0Y=2lFNizh!9+HrCQ3g2VZsL*AI{iHrDF6T<07*naQ~<6(QNM0{%r!VLSVJ!h z;pYsVvI)bc5{TTpmS`4bT#Ei{fRcTdY8lsQLtAk+lKEc&+%k9DP^h*pH3{7H`R7zh zUQ%AQJLEH-nF;ZNcipIUOK!U5wj;-)J&)eRg3uE2_}F-E`je%cnz5FM)54kprx&f! zrt{k;4fA`3-cM41N{KVYmRbi`d#kO>UMeb<8dVfYkFaGKbb)aawJ44RLz#xEQWxBS zTwJ_19Ds?b{98H~aPgQFv$Rm~#)^CeH{VH3QmN%_!A%=I<4{OXhWSVu%hH_4{4$@h zC+uXws!9R3-VkmcVMnrseZwKXacL)B_Q*@I=iJ@c7(E;1&WbXi<8L5K`Je+E8j ze32vZBB|-YU@I2SFT_7V4`-DcJ;Hh0w;l+pdT-r#dB^4?mFnFf!jXd zBzjVRxbb9!CI_ttw@(^)X`DI%+>{A4%~kr7N-(RqFiAi>leWc$Y0p8)EsdNa18z!k z;oM41@;DU5izMOar&o1HM*px3AaUS*5ME0k>V8!nuF$_1Y6>n&h_N))L0sgS%}>8!Iq5 z#ZyTG{6xuZo@x>|oCNM^yTp0HZMqB#Z<|nNxh&Kv+c{Io4EL5atsJQoE>c^o^thqm zodRxE9F=QZa1+er>QR~?&KO&gqDV5h0`43!r7aa4rlvV;-;$y;8emWE;p_SizVpgu z912%4M|x7Q-wqc;Alo(he2;E@lkduI4Wah=Mknftv`ZZ}7ci+{6TschTpAnamItpr zmvwNTZ!V|L;Wz^jp(1=s=Y!8O72HR|) zmeeG-a&T2Bd5lB4lEm_V_N%8vx15|UwFRIoufYmxtlP#OnRL-dA-Zvb!)b^Y?Ont- z9omJ?*<)A=GFf2SNXpS>QR1G~{__NJ=%w;3otSMI*{Dl*=4r2K<5wQP4Y=(w9P@hQ zs2y{@w*#lz3t+pyCxM&Ciz)D3uh%NCq$NjN3MHcg3GU2Pd9{+;+PSsqDhEZuT?d`q zknL?VQ5w|pKfXqA*JDqx%Kc5c#=Hn^p2NSsP#FY<1JJ5Lqh1U%0Z(E_9G=QYNMG==w?#n=^&a7_~8 zC0Fjl6S`a2m2H5oHZCYWO1IlZOb;kM%BJ6BCck&#rJ5E;s&X;l9-|2p)%{S{aqoWg7tqmw3*_9o9N!bvSAQrt&A)TW*~^vzm*?Va(w$;0P~$Db7I zU&lmyel8B&d=@nrkd`#S{fn>D;`29t=t#p_B7rE33Y6@3qfHymUH4FG-IDDkx4mH7 z&0`;;?EPLZ-B1sZv5R5cJla_D6O~2T_S&+hmR&CZ4%BQSS^k^~dR;hZYx<| 
z1%N?h3UFbrtmrSsB%1#WElFJ?!A&p4gdNT_pvv-TOG{E(G1!WbH<>0VD7901FcT!? z--gz30a!<>9|O9_i+O3;*oJEt>;ZMMa7 zqKc*jWQMUem1RO~PO~06a#36)aIy?L;{gtBoWX0Ja5bJ#Y+%Q56&+^xNbPE))5udQ zxb=BM4QCd*GCcv|eANuj2X0Yt>m{ID=3x~c)oT*Co4BbKlLVHU=i5$l-*GftzNSt~ z2yVuKTX5S1Vb(3NcJ8+1w&0fcS^!gf#n#=#(^~x5bB$GwYUNv6pw@Yu{Y>bMlrE?Z zUR&vv`?T%vQUR@IQ&V!Y(}9DOF7SXc5~)~67>Vb4J+Veew=uDKE7y+WRKP9$967Np z^F?aG;3xxf^<=8@7Ykh>xKk8agd~qdiDW5b#*Gw70YA;*g_Pb(hf+635}2Tz0_Y<{ zcjXMOpKlNGyoiM_0eFZSRmJ~6xpb+edb<-QmMywr( zsUT)qgtUkd7Ae9ama+wiaw**7#~GX?se#)FT;@=^E_!i-L-8h_+Xr5H`CcpyA4YGq ziN4ztr8jlhA-a$@CB6rANwL_ko*jJ0x_DMIda>ZP9J?uPYr>@sZdnbrox?PN8UxiN zcRuBY)M;#T?gqGT`p}U(j*gbEVS+Hh!HHsmum!iZb4y8qT0FNjQYN8qibnaX+%8%n zsrJyyjol1nvfsB8bxV#u!CqdM6m`W_KcBqi+6AcFHYGO~vFWkGKAV|`Ad3;NpTZ4K zc{GanEc!tVAp?A{~Y$bKZDQ91k7WM{jKfS0x*G<)bdeQ{54E z4Oh^YZWsnLV+^1iPNbQ$EtK@3T}yJBw&mqgVwDeiLhM6c&r!W zyAChm8Ye_&xQW1Z5cys4b~@e=Zpu0+u9ydHAA=6382l_ zO-%Q;;1*M0sI+7|a5INeskC)jUpTl+$y*ci%84fr-ub{?O8!?a<5%DF4D@v|^yg!kJz!?)X>k zpTl$ZEnv@Z4KrCN9UthL4}9GX+{8mC=!sRv*8dnEJ0V!Q6HfpdKTpS!I$E52%C0c& ze07Q51by>9%ALbBS!Lz2x`(pl+&6vb$ZbbQ%Mx31G2mwIA5DSV(ULLFZ4Rzz#@7b! zQt}&cw`q%=knuVhwZZ3hW^?h_Q=K`9^z0QtCymi7)-Z>M@cy?xRlq%q2qYu2R~54u zI6l7XXYa*Bqh$fNEPDg)IxUfVGawa*PEDbdNJAmTw3cuMq>=!f@SPUMEJI2&W0q)Q z0>PapQeIj_KNlr?=VTqYj*lo$FrRMWk?{&%e8pbeu;61UTthL~gvT;?y{@Fg<)I_F zm@MAj4o__YZ+mcC$vsW-Poq53z+En+8V|42%WC&kI;pfI8*{%m6w32SZre7*N^S{v zkSeb&aGOa@vx=R2aRgo0jebO;NEuxY?`SP-F@3C2Tv= z=+w_*^ouq0vj_1DZ~F%9bSb%0bfGzqo->aDfbV+iF+4QfSt^nQw|FXZE#F%@ciOas z6sDlMt_mfUMOs6scuI|LhR_-gFXlg%31;aqf}Fgce7uOI2RwJbAe#|~Qj#W^@e6q2 zNnAe%eCOevcuYRP(r6t;93ks3B4$I@Vf{{8|cism|uMe-+o{g z|8Bv@qnsi9txcrC95S~f3&<0Tf$lg~VoJGf#?&`O6zgM)R4sK6PVQ#9xl5S@8cy6(C7iCYJ@zB=h&G@xG zkM-?Er5n3EXksVJ8Q5l`tk~)0-LjOPoUw>exQWdl1XzKa-j82>$1~Z44>N{$Q?cZ0wE$lC z*8A{qxQs9~b!Xqke9mT zbsiXIz%eXiPQ~y4OFfKcc{0TaJESZT#%VK zcQj6#-I9Z3!GN0+%t$%b1=&E~U&Hsm<}p|dfE`T89hiG^fvo~~*Z*}t*5Vx{EzuGW z>XDR5;=3u>L%>>a^IgDd3JWQ4)!8GG+<;rTHUVzItzf4mC6Qf+J7_9pklABiYRd>! 
z?7>eW?8!FpXlE1O_lW(tA{n4RWN2)L{$d|zS6498>&P59l0B|Mfm!&h39jSC?*jN~ zS7P_2KBN;P^;vC}g-o^SYx^|`+;z%Qm*zCUO3$X@ zzG=4{FDzNRX`3Ky+RX_h8L4NuFTq!K409q0x|iNFsV@zd1UxR?$gI4?rlUN!iNF@n zlX_?X6v53PBL2&yr8MooZ2!%uImU?0ipyPh5&9zO?v4?URaoP%Ua&3|L zG!rrjffAn*oZT9vo{W}R%h=&7j}fFRxH4SFYY*+m^&MbuK7^MJ;q<%Mh=%BRO0gUY z6zRRgaU#HLnLFTkQ$jObfv*&qM`6Rb8Bc0sw*%uY6x`Dk(QzxyG;4`63D%334cdbH z-9KPMUTd8DA|0ic6eq_Lk; ziuB;6v|j+3Kav;;f?KbxZdwJkN)=7Mmnj$NukGzL7>e7gaLcSOu5{Z9gk*)s^a3Bz z<|aCMijEr~4+AM7M^B&hbIDR^BVKCpHWP=JBCRuuq{N6&`(h9v!CQjaq^H%u-H_ZS z^i^pIEhPlM+Q2oiirk&MD=jGnJhso^Wh8l$j-L%&nnrkD*T+i_?#AWG3cBG263>Ct z4Mb6u5LgK}pczy{Yo)rct0q#J7t$6J`pey%MU?q+C(sfV5LQu~VkbA4u!ezgmeQs( zPH#uKQai}XKx}b!r08v0`&hu(uKNpj5PL>ER ze93VPa*ygJy6C`jdI%%!2`NQ%ib&$x$q;0sjKnPimTKVESQ8r>t29L7!G(sHcyI%9 z;op3Y7PzT|T7c_Lz#uMKU39@P$%Nynk~I;sHb2EF==Pj+ZKcEieDATCxg2Z zqE&(n$4MG)DFZ|s+?AdCUrBP;x%M>El7O2NBCRm=u~d3%@RHDN;gppvR^qQE9JQ;` zn6!k^ab=1ar5Gg*fA1la=sr$3HGf`D3%Prw1roNd^$*GINk-s|L}5)u>IVp%zHHs2 zwGD~Q?sgWC<_;21l{{7_Z-ZN&*C-;QP#Qb8CNt1xUC|DLTfV;_a8pWG(%VXSCI-i{ z`*EF9N|@cwNkq}=r7>pWO+5eL4*Z9m9UR;^iy7BJoD9Lp0TMY%ZVeXG41XzdZ}hF4 zfm)J$tGfcnO6QG@O*ziz2X|c;6Cq$xJgwr_SiH-4B6(eMTh7hXHo)ERP=1Zzwg8@# zhge^2^?r~NbxJ-b14e1U8790riR6vk8l~`3X-N^zEnYl~!KC!&r&7qBC4jjDNULdXzbs=h(jMyt&mSNfMwp-J%c7uEP7?=FQ6TXP@$lt? zRP5sQ?Nuc>Hv%f+N;_!7)|e8bNK)!*mphf*1ity51+7SKBgspJvITByaUsA-dl>8` zaklJ!meUjIpxg5?jMi{D2Ke4XOL%(U!;bY;%s44xH&&~M#Ch|XHTa*>U$cJHprAV| zU)r08O5-o6-5__zW>14k}Nqx!$VS9%BUq^C_O_b4awd=Rb znbl)IN-j*miKT$R?Fcs&X-a@IhS>m=N?q;|@dxQPW?c3MV~- z67Nz3vX!#col0l5$2+3gQ&}XPpm&hg0^3KOR)R>3VL8j0nRa?bv;`kSbld_7BLKS{ zY=i?iUJr=+aP#gMxW!hTk(;t&W!=PQ=!c;%47UH5fjd>j(wHD+Q1b*{PN~i72jbGt zIw@wNHGF-(h3|X(wRpt(3KqjPxE@Wt4BaJtGnN)Hsi|4e$(VDgN+#Pls+9N9F*zLx zL*5OzRj6qA!M`=QMc-n`?j&%}hIr9?UZ>gbKm6g_kKD7h^ymRQ2(vUmQlRT}MDY@7 zRx<@^R3IxM*&c+t(x-DJ^Ih42w^9s93phr;?~A>ajtokLzTd-OFu?5WtmYBMTbSuF z_<0opRn$OQ=#GvMg(=RQiE(B%!7vVxcpVAsACd~tf$q$RiHuaC z6J#j79EnX@5CxQ4pivHGo91pXLXfSYldq!ZX9!2Mw|gi$KEkwz9QzSP0X)|)?b4F? 
zYE6MusKrys+0b|OTw2qg0fUv^R+3xJZB2wmnWW{UG#gT>EHq;$mP%D7JD5FmV%;?o zkK^kijRu%?63nIRc=BQo-+gEq`_@ij9wBDj0`V|HZ)O&oQH0$0;ROMbXoQ|b(Qx;p zE@Ns#=4=_L=D(CXr&K1ZGGzFWy?cZ-~IJkT3<%=VM#Nq9D1x< zQiA)9zjgbU@7-K_%+~_$aqpxq<92jj)k~)4W+k{OfmwJx1!g*dw6&u%g8zza2rnC9 z*J1~ce#9X>`YH!kUjfW>!la;&X$n^Ib+!72IdIQ0;MO}L+1+pL`{PHy{T5k5)gSIi4>wYq&YgV z7N@78>32GC!$|alFyF#qjPM-?=JCv3JuGdUL$I}ho&7$-FhtP}5IZ@dyg-+~i{jGz z$DLFZ%iM#Xu@eOQ(W^pUf60AM@miLPU`jw$3F0a4_X^|+ZZXCg52cJdv)>8scfU?e zfo*UTZynkPj3kpLqQ;41B{$vg^%BOF^loaoRcNby{JLVGDlP}L-<^R^4L#b#tQTUb z8{vj0UWw~2_i$(l;zY6QpyL57E&pm(< z4k7V&W5^7Yu7@PwQqIrh1g4jXv}SCHgKj>6J3NjLzW<4sXQ(U3g#w=DJRjI}f$w|E z12{EY6d(%d*nG&+Yk|?ZQ+v-*kv2++GlR9todzOw+$!--*3-Y?eW~>R7Xey{` z-66P1!7>AGFIMG{#Wg)YKxd>pBIy9Z@C+X7Z{T_R=5fRPEOrkzvCDNa+FD0&`dWJz?gC-j*H?O>d76(lF(AVU;G~30H%g<^xWUQpFOK%xl z$0)S4f*Ff5!F_gagm3w|?^jy#BOkuwi+6AB{5mVS|EvePsbP@u4h!;NV zDtz!}#TQJ`UXwSY&WXnQ`o5-2uMuZAbCo zU{@(krEMHJexflOmWD{#wYtgANM;)nYq!_Z{#^sNN_Z=+RpMK4(}O7si`1lG`8D-q zx+KFaGf||3BK$%|wWGM!5All2F2$1(FSIdL1cGOP%0S=6=gi#kQI;BNn}#D)QPWlA28P83>VvfnMZd8=YA zdA9-g`6RbwNt@#H7b3Y;acnl|tN>jz>eJC(`0%{~Gl_t=qV{MEhdnFbCE-ATt_?ibhY5n&9@cZP+yfaMDfh-gjR@Ss;-mVM1d}63KwaHYV7> z7%^3Ge2CB8eOYHJcZ> zJIe3M0+WdCX|(8W#O0L5TvNo|u8b_D*AX_Jj1U%^g>)Ro#0?TL6UD% zunJikA(v8GW;*qzGAfl(S(#ui88sogG$XtS+)cjSo8g;5NZe zp(U%M2M=!ER63Lz53LmSFn9KiJjVfB=bMDY` zhv4?PP&I+T!?Yhw@rRkp4Ea_fh5l*M6KgpeoyPv|7>~X%#>-xC1m~?NNZHgfoC0^8 z2EO(EA-;F>Jvh0;*)S>{%#hy3S_fA=@&KOs#8o`}kODj!Ptb1De*kui7?T$8AHFrg zzyIr7ktK&QVRyScIxad6USe2G%P4!r5Ut7W_}quC#QvBxiCes^XpF`MvThr%t-x&$ zNtmcuZqgH4KIk zaM6}7lj%6G0JRB5E4rv@#opjne9WiU4di=8<9u3T;O4Qk+&;J^6i9H3$2ps!<%0R5 zZ6V7<0l@4XZ?snH)#8H&@I^nsM{)oFAOJ~3K~&&g2!6(uWCS3)NfONRwrJb2p1GS0 z(VpFnjinJ@{DLcR`NM%5ZUFxBul@nYPUgs4hcHeKVkWU@4N0acn{@k_jqk*T2MWCK zX_w%6k6*?<5(o*iUxV4M!_Puy!(!#4A zc?ljmz7MPUF1p>eIQlX7LJ8xtZoMdl-%<7>>tCyD5sO zi%GtOIO^);ftcwQtfv8A!+*-XCEF=Zn>w1NX;No2^*F0NVR@`Xzgz_-GntYJ zj>cFoOROE=#fvUFg6HoW<6+4F#c-gbx8oF9nM)xS4PT4#{~TyBvP706vr5(eRe%h% 
zOT;#Myrnu$pfO)qrGIzwbyZkJk27t@kh-YUCU=rl#OGh~uU_jUjK6U0k8a;N@|a<~ zE`>t|Zf=3%D3lRe^x(Ks6Kw`CrJ}tiBUlJfZhYWL^E@(b%$2t)SR~6KR>4xg;y@J% zI_)JVU^CM4PPvd4T1i*tr85JUthO=A$B0r|l^NRE5LaB-#Xo!1LE+16_?4!_R!bbg z*L~ziI9?oP$-eMyf?IC2b7Yu#?G&f6KD`~EC%9vVMP zml50?6Gz2STJEY&`=BG0-h4OKZIb z$A8HrU0|Gz5Oq3IyiYJKrHRP=E@gM(s<-?TT(VzV#H4v4ig22e3?I7zKh8E$Fbd?r zEjx;Zpnghlvp`Cd3iIGrAUkVkAU3VRzyYlQZXDfNYWux542KDyI&N;@rv9LT+bp9> z0GCLj63Jj251Eefb7_f}Uv@Dr8vYm?`B1C=WHgEvQ&|k28A@X0OyKsd`I*2i)FTk% zsg!4fWDXab&&4H>=atw^**~>3!AmP}f8p9|ZreHXSOYgLH&@EpfSbJ;{)ec(w+@*F zN|Z)RBr+k@XLNMBD)8e%PP$>ukfa6jY^Y!;$;p@G+R}E8n03W*gz@!ZF+2)IeC=v41Sz&roVow)I24^!r+d0yoM!j}bn4%{2FV-L!5 zF9Nqblz^6S8x*I@B#;BkZNTNV84andd}X<7t8IUyqbc9J4jzJAsEjKemVllBmOD{~ zc(RTCD6!EW;mRZHc|-8sUtm@f|*HuL=?A?Wi4c+80kp*E?JryCq3jjX$oVqGt9=jSdL4qMJMp$ zryjtIuG|no9Y$TqS%-0o_kZma{{F_1)SQs_ON2z|(gO$gv0Y{;og%Cxf~Yp-5*w}o zm+@!?x6zF{Dxx4X2h%x5W~m)WD^QAaS^pPWZtLvfj%VClXbFYwnL!mv2-SNOqY~3$ zWOy(`IUb-LcX6b@h2OpC2%fSOV|{!AUCyYVI>VDrbe!XiwMTL$J)h?lT z;FiRhRc?CqCslsaX+2h4+jchdjU>>)&%#h84TaK@mwx|*8J>qBtM$Px!`%Xn&JD@EK%5of0)n7~@~jckf1jfZ&C;~s&F zhj(Hl>q}p)6lNvb$I2>132lNpiSs?e25t)lE(SMgp!yzjRfH9RtsP09j_7CBpoFvw z65QgOcW`oPjF)|)vfM|1c-!{jigPcr+-C!Bv(!GgeZFnL*1x+EpDZ{r_s%9s}g6Ud9eO;H{?l=la2T5f$htPEtRjI||dx!EhJ zrpYpmPOrsTtg1TDnid>Av8V~}QmUeN$bW85Hf=e3>ay~E7RAjlZ7x8FD{G8}djRIkp zo`trtF%{jgzqu%iZ?WYzdeJPw5iPf<8Zbwp(KlM|-!NM8mMU~^6N`gs4ixsqlG zk9Km^;vboO9=k;tzGD%TySM63mKekl-v7mIeD7F+QNfyT1-H(+(Wk>^iFT>ck_Q5A zwcuLv`ar-f*30AG25!4g75DZmw}t&0;J0eR0Nkt~G;mi{g%W$0f>!DFr_@8v$;a6Y z`@757y!RO1@u)+1ba4v%S`&1nA=ycBq}oH7B z8R0j~p?vF?uKVGw+lMb3$arjRMCFz$+mItv1UJ9JP~alb8y;sqMxCK*axh6VGfrA$ zSYGa7Hrf?E>2=mL?A(Q?J>dd8|LKQtu9ldi>&hEh`6u9Wz^-f0{5?WJPT+S$3{~q- zblC&L1o+Tb2Kd(XePk4fv|4T#3N&zci__Rw9K+{7@boj&5*G@r#Ogh8TUf+Gc;-Or z^fWEktU*fNZk;(c2GI3fR?(6m`BRMx)Ofc6z2@Rg{S$jHje1bXM{^9*n7$2bNi@Tn zMG-b9J9tud60d&j`M7j?4|*f!5jK%?W=}ChhjMJJ{auvra2DKZNPKIp_g9~sS?;i* zSmq2#B;L`IietO_cPW)%WSi7hyRk&tqwI*~e)&~zQqKMVE4Zb)4@gD>tR`cuCEIw> 
zvoFKXUk)i&q8JV>Z$6%3shfzD7`b-@maDS7)SzoOVdMfsJ*UW;NjC{?zO=N(z#sxX z@TFb+x0{B@Ip>-?`0ISOzqq%uBTwV@douPnHeZ*x9_ zfmh?-eAtV@tsVH9g zIrsoKPxANe~>K$D75NIL|?cjt@AIY#YCJesWvh_vI~o=avk))TwLdyIK-= zv&%@d0@r4@-;d??!Ofek-QX21sk-(TTU={OumYByi{4bFuzq64dNmuo+)86D zzOG!~l&fmX%^iVtD9qeOh}knD7EsFx`5MU>Px_+y{ZB7CfS+qkv5%celU$-IS;mqE z8-*3@oZ*B8@8=K`YRPS9H|-kzmrq3&shcz%>VeaNmV!5S>3f#@MW1+MWw|9SS+vej*5e!+4x^un_da(OEpgyx!!2#aI*O-T`YalPMBI*vJj#7jBE2r?^kL8$To;{c^St>*XyH=7%HjyPxM4n6vj3L$CeO048b14}x`ctR*D=zMghA6iG;F6aQu0u*Uz#9$VSw#K#h0blsW zH?cEZL00TTHfd`uFB1xGOEUXR@puBcH#>!2yYeA;;nR;`DNzTLh4*X^d;eGO!FO*N zBb%+D0Cn(O;gqx7e7GGK3M9A>{^T_jo)2qFUi;n!;HKr~uEN?97w6s!+(t{Pt=goI zJ;%~_6t!%@Ii0eLnN&ivkjgB#sbSgBR?+4*xd>UH6JtDQ;(%j2v$xoyp-uLAn;~#Ei+I|I7&vJ_;6n+>9En)iofr48~ z7f4+h=XQ=A!L5-D*D3BYZ7R=POB9gS$Z>mQEJJV?F1coL`U=$MP>R2y1zVxz*6VRm z76*D6TqPwDZfy3fL>i}vc1F0UHNz$2lX&G77vX~4+i_U5;iw~%&Vk!3bEwZ97F1Qz zc8)Vj35JwEu+B*7uMU}2aO<==uiL%?w-+s0$?(#TzEP_J-^Nhj?xD*#`(0$T3YKf~ zf;k9Di-~098&4j~q{d3cDcoKv65<8wePYt!+@kR}!g^~9Pr0OrH@xtCtaAK?^es^Y zGsac~{O{k|#b5s2z1WQoV>buVZdd5YWV9>RRKHXgkT*vrBg}y1;$FP;>1%k|l?Tyd z+*|AHF>C>U_T}UF?lI1MToRrp>FDaFX;)5nwk@GR)^jsM$&y#wB6;KJn7)n~%Ko|| z{W>dN!L8MhR`E9vZr5XI&YyR zdFHNau++jT-QVbZOeC!xryb1tBaz?ejr%w!@8ikK1zz=t!&p6aw@^swUzB^O)Xx`2 zRdA~z(;`Ui$)tFuSZps!qTtovs~(OymkMs}%M@>*dRAPFx)^x15vEZKTTH)yO#_|0b@#u~v(s~vORVhhIseC``};qR``F=-t{NuhP#k}BypEgdd&#URbz^;y3Cq;m zu1;I15<7&4Bc*nXrP?insmIp~)P8RnOw$}R@Y}VhtO~7Ot}+{1bgr#Wq$G(OEkbTS z8)5&rk4t75p0}}q=bXQZ?&+IwuoGiC;;z|}NESsp+r2h&p2HPE706H~tpCv7fbOI% z&n;`QkJdG8xRMO{j1J5i2+-$Z)koTaqmp%kL$ThhV&wp<2&Y$uc-cq(#|qp(xOsTs z@_xKeg5k{4u@i^jmejZt%@7k*1B^;Vl9-b|}DQnH?NX$wHWEdr8btVE~r!Y6Lx z*PpQuo1*rhBf0aqjnfhE;V;~ZYwztKOV%-VRc>otrrW2OOiQ%Wt`yN{oCwBlkrpEX z8M>1@@ye&|$1ATq7pr<_MCGbjomr&d=6|yI_yFd>O<_qTlY}|J z{oC)o33u)6ulx?)Bk7?Gz-`GI1E)MPHYAgNSZi6MAj;y`F6~{#wVQ=iypNjMtdlrG z3xl-%Uf>p5O`}lcgtzLR&>^P~)hq&s2V``LxfP}}vt(Wtr%A9h>H{HxzoaKG~l*M0w{!RF)61a6AKivsT%z}?`~7Q*s4 
ziEr7fy6K|8YP5~#K7Io)dqxLGs70a?eoT`H$7jHwf9)8q{c(hyVg_K%#!H@@;+4-mq+RqbTC)2S=km|4z7OBNaSRkID5%RV zPN^17N_7ZzO9b4v;jrvA!re9K*x zEy(P51F~Q3XHgQpfjX%{o1ts!p*w&8!`jt?IC$M~TsxBXdM-F_l8m%u)IMRyjZnoY z(y879T1Ntu216dqWCz{tZCo0)@YLlNUVhFJ4h>JB8_$qW$6ygpi&pgX2<`5Q?2?p< zuJ{Xmj)&x>EbVhT~QQ9PFIN%b&4?=RaW?Ytjl&tCV0n zj_`rMIF7%+ag1`-(>S+Ol`#axBn-7nBJ_$KtQNQ8(;s}gw3(5XXe|bnBegF44ew?5 zdkt<|%U1Hb2;55Z`2R@a;T1pw%dH@EJEA$FEGF>M>?ZDS;5J%W`40gtYNqJfd#Po> z6IKL*g=BORspG;?DwG6c;ZB0F+&ju5&(J9|Yz!y3c({XCoU@K!*yy0$zX!{m0)y!Q zow$weSVf-)ZQbp;(-VUe>VlUsI+@NMVw)N%hL(CbJ}r9Sru6HqHo3N666@%W4iaE% zAN!3 zSZ*nhVuEM~7p#u(FMsa>1^09!_1bi9CRmlWun&Ij+qi4Hhdg-*2GbUj_7YAFPJv{) zm8R(A9i-C~N!&(@o!ir0B;#A~{@?#6c;sQAEfkAeTZ)rEaf06e7hCwwO?|0LR%eua zSyg9gt*AKFEw->y-j2^+^%Qj|2}tu_BY-==>q$!n`y`95qMt5p?CQBia9$#tP82`r zEv*!(D)2fh6||Yv8O&ytpsVSxonedny7#lReN{c^$+QWl=FU+T7^`IH9Jr&r5b>iv zWs#{K+hvFoXpII~-`U0!lNo;bqJwx`I>h>P8+p!}%LGg5I>uQk`X@97y-WrFhiNKJ z<}Im7(clFClLk1UAGMN{0G#)x+`4$Bty^|Sqb1WQ#@6bt2X2p+gy4>x2s!1{ z7J=I(Fg^02-x}xMt8hxeO-(=+>-4c6ox*$m;88fg%Pw=i69q}Q+SkxWj1w8~m4AN= zuDPj?qP>CB{UMe%R#Bv5jCLuWvyA0V5B>f&vTO^FdenJ%_LI)Xm5)~CD$4mri~vOt za9?HM7BPETYkK=Cd{3*Lh$zn%R*Kv4nGgPK!QEKw76qAWLa0Mta(uwc%>s)<>PMy) zfZMMgGz+ghU1<%!&ya~jDIB>dgq9?nj>alMHer*Jrfw{EJ`$N^y5gJtZ9H?mi{HBF zAT~}N!wQ=j@(fYb!I-iWDH}*iEK#Swh%juWV#%#}Lv|;Zon)Mv4F}RuFTmCs6Z<>C z9_qdepJ({6VBAb6B5$wk;B_Bz_1r$^{%OHoh5F9W8Sh6+IJc!!4zN_5#;;!aC_Mj3 zsT06)4q&9ZfSCxz19xo$-~R90*qzMqlD}~_ETFkf2XuM@yf9@ zF(50va_>hOHrAJL^oTTn2=!7fZs67?<_NPSLYA_tXRMOa?F6lCjE7J9c;lfBJTaZ( zU@}0I?MMVf!hUfq#zOeJ1SDdiBI9HVYKo^EH02r}WNa(}Ekmmaa3OG>!|jCeCCbE> zH}ZLwEVH04;m`^JcYmdiSAX;s=gc{j4;0*zlnAm>zUB7yxkFkap{s)Gqa!Sp1DwCV z#P7f6QXKqANh*OvaEd;P3qKN{RGe~6!Qli37H%f<`5F z5aFX=KY_nJx{W+$hLYfBTN}Zdh&WJOklCNYa(N3r{lRAoU7}#I=;P@<$2KUZc70QE z%Hp@yW>&?lT_(4xB{@5AOXXgJiyJMmriZ|CyHKuK=re&^xGtwi$oLX=HECUufScv8 zMAwA9Qi)82$T)|-m4KU_7F_X8PqDtUg{MwO_^ro29OsQrVr{yEIAv#*jx`a1QCbyF zHj0$m65JyHKkGQF%NR8&3g;n_gKHAUG!d+}#Om$U(JYZCEX|IQa|EI2y#u4=o$L)) 
zy;_qv@BYe7-#fas_H&GLlLfWnmO2u`uQe3tM@x9(w3_){hXNZ8q&09lS%;8!O9#6X zb!oB7W7J2d*uoW;F5`JWe+Z8~m*c3l(@~`*Mb<}=P<%g@xHc{Fhzem;g`H9xJw}$UAVVtU%ar}kBc&D-w(9YD za^cBzIm$M0m+|h0(}jMYfI);PwXbE0_jpTABm_q)G(^S7?sTKffpn=3MnQbasU z0B(zO^9J6ZL)jR&5HND1B{HCu3a{A=og|Ub(|P}PEXUh;%4HYhc~3cj^VWeS5ya65 zL)FkE%?n!3EtxSVj5%bcne^op@E`xL@8IDNc_^Ou?EO-o%rE{!oAR+Q?BLti50EV_ z3Ai~ZNp>8Go3bm1^UO}8Q{09>|G;x(2cS>7;?6Uc!5qMwKX4@MXm)P{SaRr1!-F5Itt?A@h;9Cf_u*5hca$1LP1~{`v zIW0m0lGa>@uF=|Q&>wf3?%E=LNe=M8YPk)CTKZW ziq`qmG{TssYsOgnjHw=(lcpA4sgM>#*|dhbUsI*0sW}rNj#}E{7Z(_}PvUK#c-3J6 z_domU&3|+4*4nc|aO>pi+H#+bLs?m9f0oFcyQ%G5w7?wyOvi!A0^Xnm?3c{Yp|o*1 z#*-g=F&=dZ@aRi{9_20Exm=RiS^paJ_v`@w_1ihVd(Exr?+)>tpMM;F@wx2zQ|%JI zZaf1%`$gc!yEE(*ncPHCV6xae!%H&;mUgky-opR%M~}t1ow}_}hW%0_oZ$Y`+pslR z6;(LVDRCLUYHFKF5j$!ji>DD8X*!Mu-kN*6Itss@!V~Z@N3#wm261rvOqHaBrRVj1 z>z+%7n?;>8yn!7sLAoEsH5m1XG13AWL~Zj_)eJ0AB>N=tJ$ZOQIoX>}>1b@>RI3l)V@ z6<_P_($Zw7ENhM39Qj=oXhjO{cYO3shXgBsa`ml$eeJ2`XAjd2S*;A^&~nF8&-lQ= zZCj_+CojOYQdZ}10nIZq%$gb%r+P7~Co$GIMh6KxEZ`ZPLN^_u*B)VgCC1@{t2lUI z1D#e|(!6ET!e}(XaJP>;f4qfbcaD+AtB9A@kmos;q71$E4C8zYaXdvk=RBN!*v=1O z72Znj-4VaJ-+R>^&4imM!uDP?y^k zJefigm8Z;d`@pVgNS*xgsfo4kRhD`YxW%eEm0i>Oc9DlFnsNqk>)c9RJQgLPzo8Xl7wURD857f8zEt0huffyil$6sYf0wkh#U*kz2y>$q$Becr`ZTg z{7W`N+U=oaa%Y@L_OP>g5clnDqucIk*9p6*C8Wl5F)0O;5IH)?Fdc1UHHom4b}$*G zm`o-}S4tVJkx|%~gH0oyqHFPF_Mpn`ufVMw*0{H%6KDiof-R&G4;0+C2<&xMeJJDf zY=EmQhMKeT*wkqb(uxXabvujHn?xl>GI5#8UmQom%%!IK$y#Z>g%8fyC>*2LN-)V! 
zVgGm=FFEfV>>G__(M+QhImPH`U6VFS;)=U4!^}c`NCQbPoFak?C3B&rgt8CrY8Dgy zz!Ig5Oe`cbO}a#IXRQ->&&Pj9!Tm>Hy5R@co?g9t+};!(WtKaoXH3P=pTeQs%W@mI z858YLVwhtqCM70}ueMSoDJS!0qC_gQ9H$Y|ltWJ4Y;=(%NHLmZSX)^}k&gwmNo!fg zJ`ZL)SXu4@qYUl1hZ)#YnV`qkDCEdG5ysToriCy^Qc7@jG&Vzo!qJOdoF+;kv-^aB zO4heb<33^zC>3cpJ}B2Y!rg}t$LW@vP6Oq69^rqZ{$w zP>QdQOAoB#VQer?UHcauKas7|=PdU}qAj@$2~c{ZVLpkD(U)?g(^<=7f=NP0MEG8< zHdFdCP*xOZwQw$tOH5iP@h?99dkXG%eewDqT(`Zi0=E=3O-qE}ey|RuKm5rakkOJf zjxd>yRP;S)_OYm{pLN-NrWsUxj6fao}gs-8qL~l*o9hutrPf zx%xb~4Mg^OjS4X&S7I1PO2zV+^-)4Ucw8oFd2(zms8VqAHL6F*sSVssIc|iR<-UHW+(sCc&TDyGGIZ(|-XDE>1=74+U$eMna_IW9J)d%M* z37M4^3l)R{{l8hXL~rsGQ5uPttHt!x=@z+p5y2Gs0cn(EKbbN?{>=EOY@^Wxopy@Z zm;+Ebfon->&WE#;V&!9c)2y#fbR1xtlfK$*^mA@$jBhgpNKR)Xp>;H&f?HF>VmS{{V`ZJ2ABfE=;oSW|H3>hu=M_>OCYgYKdZG-)nvsuAB{v=8;WmqW5h1iPwY^TtPz@xq z&NYHA&9HPPcZGW{?^VJ^)vWDT=4nBIbkhLw0p0t)dIw zjF&1L=&Jm5H#1S5z*2kjJJo13Gm=`HQn#IjbjC87=3x4Zh{wj9|6Ym|raIPKs3M?G zuAF1c3BzSBNgbBWvRsw^(S#wy8OJJ)q(goqO>x1#HJsDxp_^rhru3Ou*~zKQZf=fi zX5h$~mP-=pr%}Cg{jQFRG@>I-g>QGP&7bFqTN0D+mLY8^CV@z$|BYe%U#y#o3HnSbDrVZ`BsoEZ#Bw4S#a7dH?nq?dQd`!|sa-V|s=A z7I$~xH*ouod&wR{kEVpR7gaaCJI2>=)Fh2-B$jqH1vk&f@?T1(kydcG1mpOO`mrgb zh;=c{I3Q{Z>v4fY?Jf?sd)Q=@i`JZzhhu8F)7jISZpl*+*eo=ppJM%>wfU+*ZUG4q z!1s(}EhJ&ir;bvm!Y_CEZW&?Rx)1OE?CZ{zC;HYeU;m@q`x`1gPX~_Mk_T>X3N^T` zHRx=?t$^NZCcGc>T?Fo`hQj@EFMux+{+tuPzFq)%Ntx8?QUI7z- zw#HaLE!TT1J$BHak#gPl`=v%o1()Zxgrgb>x?Dd zOqJIXhmqkth9JdxtSqt_N&N)M!%Z!$-uVsOwlJjnTe5g9p-gS2_R|yilh3{0>6Uol z<{1^-Oo`^Q`h0LRHL7&lwr?%`*o&6bH@mis4G=G|xc=X(&Wr`!JU)m>P4)u54~8>= z+qZ}YZlfFK$Qiipy=z%8Z%SLEO3x)rJX+#kkE)z%<(+StYw$OBifTHVrR-I9q%L{2 zh6zs}T`l`OxJlgdSW5)DSqT{fU+UbWiTE9w%wb%c($HdvW~@WeRD5TaW1|@3NUw{H zc2}lXq!UJR=|CH!0S=Xvn> zbi<+*KIgReZh%&5u4~I|w5fit`tNF~T_bN$|1m$|w%(x{i_D3vPe2lfSz(xt2+ijyg zndn$?8c_B!G5*cI7$#~=OqJF-%Y?;jA(9a?U2CLHJHhuzz9H%&EL?)|T3G{J?x)bv(|DNvdqo(E}t zUfae6R11bm$t?@EZ2x=;^5%{Za`O;;Mo&VJhv&7$>}P7tdK-7v#jtl_l-wP6d%}3P z-nu#z*f_Vh%aHcW1KV~1<=1*Wnt9adCo2INB4vV$bFvr`RGI0d7-O07>>|RUIKrAt 
z8P+VLS?D;TkEQWx@@fM(iH1FG$xu4Wt=wC`YDdz3wrG|*q!OjG$ci|d&H+!Tjc(vR zfwz9j>6XxP-?qCKEm3I)qb0uOHZbZ#I(u*{5b98#eK7xgd-Lb!qOr!=pBCJ67T7FE z$(?}q)KLu=Y7OhU{qqD}I=+Rs#j^5}hrM^4+XqAw;5GOwOBz__3SdF?VYn!KN7HxY?)UXoJlVK( zZC!jT8A@2xj^N((r2gETz@U~V++nJP;y8QAp+gSetTa>yRV{zBNus`X)t*Unt;Zjv z)D6@G4e3d1>V(~yR4*uAG*$-^IS!Ol9Gd1>X62;J5>jzg8JXvT4Q7>D8^We}r4ah1 z_CgmGC#m`IY(M7BQ_1y+<^Yc+UDkw{_Qb2@=9h>u$&lV0)9x1D{PEXo_xszvalQB{b`CD_BSE$7*R`~J9ht>W5@!7FEO(x-6ogy8oVX;t#-(J$Kp8&qXZ z`a!|n&RH*AV92R)lEq>T2`88-07dzUaqH;>OS2pYXF2v4IrdQ(#zjpee<Stdy9!mfNK5o@Y3BUogWAQypTE8YE*XpH0BEFCCf1$Uk!?``1^ zKKXhlr}mYjKRiaoDaxraS|Y_>oE0M)PO?G>Zbk^^X^B?9g-IGe|rQSvXj#Nu9r@25k^VmK3a>JNC-2Vm}5E5u|A#Rz%<8N zIYqaihKq&-<>Nb}XIHBU8;yoYt%`-Z>yJ|z6YX!eOt%5svX-^Qm)a8(pK$`p!sl5u z#RL?KvS_)Nw(;gqyiURW_OD*&(GqS=1UDm|6AC>mEc& z?3M%%*WNdLeQ5Ekou-n7vz;U4%1v<97Ba|STJj{g4}NCW!02BaYeVeI2YJP@z3Xc% zGMmTfCyb;Vq8TUmdnOh5&A57_7DW;(re0MbEvINrM_8Fou|LnTF`Hp!mZ2?O@=Vav zd9)#@&f+?d8rfqpeA#UH{EqBgVv8Y^5-4I1%z)Q5tshke1ML zhv0680-fb1xLK{o6}pI)_?EM>P}M_j?mvD=&3~9-H`-nRUu3~$y?aA9<-9@0bMNkZ z$FCm*xb0!cwFa*bdq)?=y?x7Uaq9Y>)DKyX3&3snr~2GMhmL{UcCtnBZCjvH32n(F z>G;*UYSx>u6t+|3Sjt9Nn`YRUW>}fBd4qo=xRqbCz|L<2s~NL?4(X=1f+$zvr)l-t z8vB-8v-EW=yE>n<@KS3+%Wc-1u=gxCcM5Xuw|(NZMoX@}_SoRyqne!i>}iRD%q-A+ z1)qI5c`zP5ksB)&i#6xk%i^91+`d(|??cWXE}Fe9w~PBaZ{DXn{-U&$cUZ9HM^D1V z=UeW|3cF~?{LT`TvR08umoYUOWYLfX8B~40#ig|Tn>1<3&dlvxQqNlXILjn<-J4FZ zF&Sasbc~hRSS&WnRaN3it2K2;cCBHKewcG8(hM$8FNISL>#cr))Nad%Z)S#hEU(dO zycsQ#`{`vmld5rnw8}+G@*LUHDZKqtuQOWmk4KLU4n1Z*b?!3;4rTq&=aW0Sbq6$JkDX&La4&L1{q1{EVVC}W zo|eeX59!$+8OoYsxQ82X{JEVjD0MZXb5jjDu^ zFkXiOStBEUvY5Y&*DP~Zd^E%&D#xZn1bqt_oBY(9LHY-pTYJWA&3 zDLX`WdM4oJ=Yj`hferh(r8N6ms$P7t=v5dt*Y%h zaBM@1KcBI*!A7>an%CoJB+U}npUF)(@kMj0RcZ|kO-HVN|N8g3e;c=Na%OYH{iKdq zG$-=TXJL^KQJIpmezlPjAC?3&o1h_aY&)JcAnH7|q=tl5fNZd16Txye!T#YG>*Fyt z#yPq(7Px9WTO3DGigLz?;XZM*kjI?hJV84jVIM&~9ASNw zVPBr3EooRy)ac`?yU^txxDR1b68>0?lW}>9x@MjnYdc zFXxDJ(h`C@TiU`Oe*Co;tL6UcbvN8G*t}E=r}hH3(h>@V*D*~`J1?+s^Ok&Z$!Zcl 
zEN2LSB`=vm0ojuJAWLOeg0mB33BFk;RTV%QYI$plbVY@%sBcXRNa~C zrU$vlq$1+CxWT9{0VA_wq&824=rj)OivKmn&7=(#p4bOjU}ZAJ%6N>m;Sek1304T~ z8T%YX^}4d$0&b_t$hYq>TI?05X-eFzv)c7$?MSUTA{N=pK5KZdZZos%4S_Akh`^G5 zjz!>p%O_v^P>pk6ef^Ec`uoLlTb#R$I18nwCDw^UnX3l_?(pFn2z z?b>wLqXH_m{WR*qzlnyUj6I>_&Yf0AVu{MC{9TXFB_qi;!YMk8VfQE4*UzwTFvjvk zBO_w5CD)-63bK}{z)Ecps<#TbwerswAn#Z{Y`2*oT0cg`V3u4qNx#8gl1Va}z`Ye&hqa0X8E znk`Y~Pr~v6?AQA)0>2MP9~AXDY75>3YtXakGGi5pg3mP5>3@#8W2fezV#zs&QXEB1tdW-JIaKGG&x?kIliXMkMIVX0dnltkrszf!ERV)m z-{KBL8LEkrk$CfBG)NVBTz_o63e4fTDz*GbZa8A27N(I|tB9MuU0w6wI^?TI!c zu%0B33iMla3dh(t9$>9Mz}jwxwE^h}$AYjza4OyJoIob?CJGnob8|X*UNvqkz^ZFZ z1e6TpQS8!%O9iY|vw=QAE(}lCpyJsEehVr3ud{n)^hGI;8Ygz+vyzqI&U)K;>!)6? zwB#LEAHDV1;NV5ptRQ-K)cSB4%0=Mj?|9N$hOCC2|FG-si)Px_03D{w>(}~j$|8VU z+P`UeLvZ^UGqapOD{zbD47y9s7Th7ts;?=Gdxt+eaCqYNy8G#UpdHipF{g&gMq7eA zbC#P@XcVDLX6WQ2te-xGef3iP#0LL>s>&6`{Mz#RF!UdOSao-Qmiuf$9#;9)ud9V}nwGD5t<`lbu$1<> zYJ#JK#y>M(#2e@1;FdJ!>&P z;(#)uq2np2gk7Rd2{fwc_II#;;y$cTCc>TDBC?}yD92-yzF6>y;HD8LxV8B}Kc_`t zEFfqJ9I?(e_RzrVV&pzXWb%R+;WDimduAetPk5|iYNi;XSmEhLGsRG&Zjt}nZ zjvX5uINzrw5t|i6YpiZosN&pmpIynh1NUA44{3yNH3c;O2Me7uFD<{wcFPS2GIwVJ zw||Wx4XQ(o&GLZ?{Cj{~i{|T_J?FT(IC!`?9w@kdx>PMXsi|wQ^PlF=t4|0+Xd3yyY`*c$m?WJMI`BIFFWF`7~2b+JT{* z+iNp(oO=LD(GYBGqCUqtj@2~GSrHDCal64`y zJjZH(g7s4atnLi3IvFArC0|kvb>>KM1nf3{z(b>iCQWtK@#`>irm|=@9-Pi;t)=MG z523KG_D2>oyXro$+Uu;TDeq|(Ks+2J03?FDWDDX&dPx$B0X>%DEBttyyu5NkYN zp*h;rJaM%pq$Le-zvJrbj@>cbJYT`>H1cAKyF1HWft%@f4L#|@W)ep`aTZza@G=)! 
zIBi+0E!|#)N_7vk1En}DeoaObtn4pz4MIIum$8kJn>f-;mS znHbef=R38WB9wufp+>nKt`g6p8CrU(fAPO8q9NMb5uH?Ly~C!4YDco2vS}#i5ux(X*t}3Vw~i`~5`x<`2rmG4O-q`^>U)EHF}QeR z`Bk@LEeZ?PRfBSKO^nk`I}3Tdo-ec~^XY%NR(78Fi3BgsZGZO{WuxlPTjXIAK>hxK zbVOJyv`2f|TyiHUFspZx+Hc`CXt%qK4b(akCS=md5S`r}ENyK|bY$NoN0&oRc|5by zl?B$Mj9G5;Y5cl-1GZ)f?S+fME#XNAZn=*?1-LZ=V}?~1l+GL`J)>T`#N z7#_H=X}QU{HG{dVEWt4h2;dV#-} zhF!N))GkffX687z3zu2}l-(yknWOLB^l|KZYRlae^|4ruIiQ)0ZK1UF0&pK29k_4^ zqa`XVDEiVegk*};sen?gn%=2dZeK)Y5y0zkU{FJ0wH(f?-iy=Phmvt4|mRsMaCYgM#a_-uiRGNQw;hvnDygsnp7Ddqktv&^^IKF%IL2^nB zg3hVoXQeuyszfo6XJgDMV3|wSar3?{EFC|E)xj=$F++V*WV0z+ak~oZHA+ey;G`2s z261sl4O`FDRvTTB;xH}<2T!{@(hBD7o*K^ct**wym2S>gbJ{btW(K~upHuKb&tutU z7A3UYtP0%0TR-upOXc+M{@-r8{if};3&!bwZ7UNAEeQ;19f{1Op$I({idIp01uiBE zzSn@K>H~&pX7}(dlvY`efEM_`)ra5oV9Z+9rpGL((Vh7id}9#?Y4Uov(Dw;Lgw<-QxtT-%!0QR2T3ABI@@1tLHGwO2d9f?iB>aCNuP*gHq7i`?0a1#i4NEktP|B0Dq6 zF+?-0ZtdX6T_>=5atFQ11TD^yNg=~kSx+wp%FzY2HNF;~niS#nE~ltX^5C7FBrBGBU!6T=s7R}knSN^r@A~BLD7gRV z>(}3QQ-AZqETKO46baL|Y&)V}pj;sb5D#1lSIFv?Pi$;^cW zZM8axRn)}2zXMe*5m8eY-3gqiJ2-tU!i13n_Buud)}~XepV&g@z7y!3-oknTtWuYI zmSdPOgho5xawV&7OR1Uzn&8%F?|8AZnqo0S`r-3;_xH+@2jCV;vmlIoCX4MuU*qT0 z2;?MV73*XlLcg_zcYN|q50^Xmp8w<4zrAi}^C_cb6~$zPZW>`W;jEf2#!SPrm8l$S z+s~{g+X1VD32YY7oGrM;I)&}-3&3rb%eLP+tEL;)+>Kduqe2bC@R%^(T)$V-lJ5m> z%fi&>b&Dhr_1T-27zx)aA$-<`*?a(WlZ zc!1?{CZh{UV>n-uOt}>)EvcFZ7Jz$EGDkSL-*Mwm7dA*&nM!xwDsCP6T7elqlf`Zv z>J-vsJi0{r6x?t7#2b{BeE2`z`Hg?Lcl9O1bPeTXf*wb#O(uxa4u)}8!$h16#=$+a zmTM5zX49PGsXc9FABeKDgOyx4P-dYQ>`lro=i|DIKt4}Xn!M%yz+G9DFa+pXP<7t) z$oY1;x}Vgp&#vDVr@TJM#8$K)i_9>rJsp35IX0;PlwC%tU6`%`S=~^4XYYFS7|Kp zOfo4-h?U0c=RLG}+K#pg*&x{h9PFWT*#!dng8pZ|YPeD1Gqj(&GH*^hXZp_i11 z@(H52jbYpt>nyyR9Lq^#ahsJ4S)zJ!{z_{&QVYOsmnjebEWqs_?*mA`pFctK{r4{F zYTnGn(H!H@4+PwDUbjP8l&7L4p3aCK*Zcry18%{H9LNb|njk4S#ee{g3oK72Sia{3 z+V`BmdVh%Z8K)p{q@8x{%u?2)>hRS{C`WvC=bTl*R-SATr8;tW#_OyZ7!TgI_5dnE%&ovyR90*wo_aSwZNDRY#n(Ek}y_qIKI3%(I9^WV4r-fi#J zyH{D<`BmVbKU_d_O#=bfUNphM2Ww?{gAAk(^2YHMKXQ5{uH7zd(rC7s(XhCYfG9;U zXL&S4_v987#=>u5me3IrXNc>z 
zvZk3)&^HRBjPiILHC0vQUW>%>@}|2gRzchAV%-%x1UG&AC`Es1yZpn?|Mo$-rGIl% z`P6^?xA*+l6VU-AocT!gPtGUg0L`>37MgPfWqygYF_Cd=W>U77DpN-Uxa7lGfV<|G z^=)Z7jr_fxuX&8m$D7;RVsJN)(Q}31ZY)wSc@n_s%Wvum67Jq95B%{rZa#i8Jr^0;Xt!hJEUKYez0;9M9k^-DXRHV2Sf7YB zzN%*tTE0cwxWBUf2d-I1T`cp3PsYva=s~!ZEwhIt-YsQQIP7x z>RY<=N~U>^)ii%0!aNNL>t-gD7)*s(l#+C$KM|i>{P^wv+0CKk8@}!9x88YAbm-iZ z0}lLYVWpd5GI2U@oXpCu9Cl1|(rlT^G5ks;nJ{1W8G<|HP6nd-QkxmMd287Wjh!vP ze-_~OuO(azp~G4o>w#N-7mWC;ERm~{t1P$MQ4HMGgB2ZbSB5HEwN_OOry17U9jxvQ z(YoVqY~FD~q#0U5BVG2Cveaxhi`YpMOZSwGb+Nkbu-^J?z^!z~EU?GFJWE{zGpIdr ztb9IC5gzB(U==oOf}5>nX&PfP-bInj5HGjz_+Px@PoMMtC%m`f{a^ag^o{@NyLbF+ zcEE2>60~~DQd`0<4pI_oH1mt#&XFRIbQU`;I<0s8wRh}NAAHk-@ls8Js%hEvxVboW zm{Ii5D}WE89(#k^RVU1wjz);FT0|k;a2GodZYOcvm?*5-M4>Q2nw=3Se@Bq}A<-Gv zE9o3R(eQKP>pmsRiwp;%1k1-xAi3pEZ1zW3m9csg#EIr1nAqTRYu^+3TlnW2QU4;@IW%VCw@V%aAo=xliXy>)qdNaCW*U(DZ*d1)4 zvk_ww53zh;{jRrt`nMhyT^daqnTmh@e;)hp&G!|LpLP#mn`0wd9T@~fIRLgGaqG$0 z#1L~taw=O`OG+gD2GJ&X=RaGWEa8nezG*kFrYlWwn??E=1iXC6=SSGm^G&_Xgu7|ejmFR+{WN#{>k^GrqKmN@yp?Py*TR|6bts@uUkdM%l{IPflXGoa5zheAUY@d+m8&=Wz{ifAik*imU(c zAAN7wJ{+II60&4TCtK0Mblh?hYvM*qP9*(GE3kLW@=T#Rxv8{d{#69MjB^E0dpN2- z=zI%aZU4cJVc<3?iMocQxg34VJ@4##QUzg?MX<H-Q}{x@?aOUn{LOxyH4YPh*8dfX@-`QrX#qyZ;+MpGfKSKlUt}ht(6{E`r4ME zG&*R;M!_v{1X*kbYAcE|j_l(9uIjUTpD;8mzLwmb#*O#E?K_|q_(c>`m0h#e2&cRE z;E_+i+w$N%)NzIW?qPo@VjZmj|=0_RO}1uB{FHys5bQ)M0Z zUS#pq^5|a{ac+qS1ly8XJgZglXWH0moOAlzn>T>tz-61BA8GQV5I(r)t-EJE_n;-! 
z%?@ZtsM9Tah(U~d5x6abTXS+3PBX{W(Gvo7Ez6PIcoTZ}p1}Um3@a?4$|qvs+l-9l zIc6NCffyZ*-6J3gHqkk88iST~cUEMPL#bTfe4-lM`q}LSZrG_H%Hz!eZ6qN5FQ(7k zT|OhYed?q*P=|Lqo1vd>W98B5&F}ia?>|1eG}_q{-2B7G{-0C-;oEoTzp@>zN+~6W z*{kqMiA;j#IXWyD;+V9kI|tAv_?qj-{$`+J0?5EG|9T+d&*98?|7>#8t4vo-hs<&d zcdb@ky*(}h_acxs7p!}DjYr^FS}&fipmm>X0r&P#5rz`y+RSF9SZz^om844|KWCGP z)MxMe(aku_N1bSvl(!z56du{PnHd<+H{`PtpUFx+NdvjiDD#y=LiOi+d9DmYQNG zWo!D|`g2_h>HpoVq=8#X;3XR-xu>e=-=`)1qOpyv+R!Z9DRRBd^u2z@#o)H<@b7~< zbn~CbuB-mnD<<+RyDc91H=rxwR_Qkg%VKtPMAHW>f+}>7k(*c3LxOfb!|3D|k~{9k zh4-AofdUv8nRMN(yD`y>qQq%Sl5UKSvKJP-as7~sfGsZ%9QLXfzp~)=y!8y>(y4r12Lx$L#P!_H%=uC<%1gaP!&+&hx!-g z)ZA8`fvDM)t0k^JmV(*`&s>tkUEm_{`J(n!(!(hu`5+d{T5;mN$8L)VfV+PAe4^0q zYM-qRnJP~FqhboWU_$L_`I&Ip^6 z4819LP?vNu9_TuE32uH#=ZpymIl+Ias+iF<)geY}+zlQ45I9w}CwX_!*XSFeaEwS* zH(vFuYK@3IpQ;N;sgx9SF<;JL29If#n59#U%Mr?UiD>`y#2f$Q)xUb)m79OJ$Vl%A zZaesk-Fn&M=3Efn}b z|24oWkaguW|Me{gEw`kL8r;_}(6Z0m*dZ3oz_jD!0lPVhzP0vWUpu7p^h_LbM_<@| zu9lLa)!N6IP*F*Ss87+GEmjW;vEtdKl&d(GbUS ziLJ>t9&yDbKYYoXe&sg~KCOHGB4EpB-Rlqj@BikO@)6%Wde=v8J2m;mVYJ$rB|S_z za8-oOgHtT%q%jcs;I{L#CF)F|Hj7{H49&PWBROhixTti#i~Za9Y|he#7Iv>tVd%6j z`k9|xn*_X6c|b$z*ICrT+qb0tfDe??3MzAE=Q7L9dqXv;&a^~(r;pCD<5<4?1P-6( z0H!ITHg{8-i|!>jytXX4JCy;dfg^~-L1ol{=q`4DpF|- zC+mDu4M{Qt$2xP}!Q^)(OF&AFu*PRMy(a+KH1EJpz#7e(K zD$eO?)Z<2EJ@I(yE+Ta-%c!JcW5j%P?B0s`0x zcc?UwGeIpcEDEuyMwYZ15)re!tlB9{&WF<#s!;{87;$w2boEz=PRckc+!i8b3kq)5 z^hE?IugPsef{G%amSsLeXI8Y^JAzO3?od_Jej3cvP;;9ehz(G z>r}LV#Z|1Y$0=rsrK_46Wo!qduGq z>@K|TGX0^&a~Zf}I=nivRZ0UQ%8Sh6yqqR^|MDk4_OD+4+84j4_r&Pf{Q>)|E%#n0 zy79)cbLT#^T1|0%HfuIM2>pG6;gyM*x8R~i-tRBWP~P6I zcP0L8hmRh;u*$YNTkl##a?E!DJOSX({#)63d)A$`$IS_^XEf6H-F*Ve9Vx7~+0?TE@p6ra@J zv!56^4&Y$pEedy!i%qG#mCrMsxPM0|FS*d&cXuDKyT@~|_jhNHp6K-a9>j + + + + + + + + Chat Interface + + + + +
+ +
+ +
+ +
+ +
+ +
+ + + + +
+
+ + + +
+ +
+
+
+ + +
+ + +
+ +
+ +
+ + + + + + + \ No newline at end of file diff --git a/src/ai/.x/templates/openai-webpage-with-functions/package.json b/src/ai/.x/templates/openai-webpage-with-functions/package.json new file mode 100644 index 00000000..89463238 --- /dev/null +++ b/src/ai/.x/templates/openai-webpage-with-functions/package.json @@ -0,0 +1,22 @@ +{ + "name": "chat-interface", + "version": "1.0.0", + "description": "Chat Interface with OpenAI", + "main": "script.js", + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "author": "", + "license": "MIT", + "dependencies": { + "@azure/openai": "1.0.0-beta.10", + "highlight.js": "^11.7.2", + "marked": "^4.0.10" + }, + "keywords": [], + "devDependencies": { + "dotenv-webpack": "^7.0.3", + "webpack": "^5.89.0", + "webpack-cli": "^5.1.4" + } +} diff --git a/src/ai/.x/templates/openai-webpage/src/FunctionCallContext.js b/src/ai/.x/templates/openai-webpage-with-functions/src/FunctionCallContext.js similarity index 100% rename from src/ai/.x/templates/openai-webpage/src/FunctionCallContext.js rename to src/ai/.x/templates/openai-webpage-with-functions/src/FunctionCallContext.js diff --git a/src/ai/.x/templates/openai-webpage/src/FunctionFactory.js b/src/ai/.x/templates/openai-webpage-with-functions/src/FunctionFactory.js similarity index 100% rename from src/ai/.x/templates/openai-webpage/src/FunctionFactory.js rename to src/ai/.x/templates/openai-webpage-with-functions/src/FunctionFactory.js diff --git a/src/ai/.x/templates/openai-webpage/src/OpenAIChatCompletionsCustomFunctions.js b/src/ai/.x/templates/openai-webpage-with-functions/src/OpenAIChatCompletionsCustomFunctions.js similarity index 100% rename from src/ai/.x/templates/openai-webpage/src/OpenAIChatCompletionsCustomFunctions.js rename to src/ai/.x/templates/openai-webpage-with-functions/src/OpenAIChatCompletionsCustomFunctions.js diff --git a/src/ai/.x/templates/openai-webpage/src/OpenAIChatCompletionsFunctionsStreamingClass.js 
b/src/ai/.x/templates/openai-webpage-with-functions/src/OpenAIChatCompletionsFunctionsStreamingClass.js similarity index 100% rename from src/ai/.x/templates/openai-webpage/src/OpenAIChatCompletionsFunctionsStreamingClass.js rename to src/ai/.x/templates/openai-webpage-with-functions/src/OpenAIChatCompletionsFunctionsStreamingClass.js diff --git a/src/ai/.x/templates/openai-webpage/src/script.js b/src/ai/.x/templates/openai-webpage-with-functions/src/script.js similarity index 100% rename from src/ai/.x/templates/openai-webpage/src/script.js rename to src/ai/.x/templates/openai-webpage-with-functions/src/script.js diff --git a/src/ai/.x/templates/openai-webpage-with-functions/style.css b/src/ai/.x/templates/openai-webpage-with-functions/style.css new file mode 100644 index 00000000..2b1dd145 --- /dev/null +++ b/src/ai/.x/templates/openai-webpage-with-functions/style.css @@ -0,0 +1,367 @@ +:root { + --header-height: 0px; + --input-height: 0px; + --send-button-width: 36px; + --left-side-width: 250px; + --right-side-width: 0px; + --right-side-max-width: 768px; + --max-textarea-height: 200px; + --logo-size: 0.75in; + --logo-icon-size: 1.5em; + --border-radius: 10px; +} + +body { + background-color: #111; + color: #f2f2f2; + font-size: medium; + font-family: system-ui; + height: 100vh; + margin: 0px; + overflow: hidden; + max-height: 100vh; +} + +#header { + color: #222; +} + +body.light-theme #header { + color: #f2f2f2; +} + +#logo { + display: block; + margin-left: auto; + margin-right: auto; + margin-top: calc((100vh - var(--header-height) - var(--input-height) - 80px - var(--logo-size)) / 100 * 33); + filter: grayscale(50%); + width: var(--logo-size); + height: var(--logo-size); +} + +#logoIcon { + margin-bottom: calc(var(--logo-icon-size) / 4); + margin-right: calc(var(--logo-icon-size) / 4); + filter: grayscale(50%); + width: var(--logo-icon-size); + height: var(--logo-icon-size); +} + +#leftSide { + background-color: #000; + color: #f2f2f2; + width: 
var(--left-side-width); + max-width: var(--left-side-width); + height: 100vh; + max-height: 100vh; + overflow-y: auto; +} + +#newChatButton { + border: none; + cursor: pointer; + border-radius: var(--border-radius); + /* background-co lor: #557CB4; */ + width: calc(var(--left-side-width) - 16px); + margin-top: 16px; + margin-left: auto; + margin-right: auto; +} + +#rightSide { + width: 100%; + margin-left: auto; + margin-right: auto; + max-width: var(--right-side-max-width); +} + +#rightSideInside { + margin-left: auto; + margin-right: auto; + max-width: var(--right-side-max-width); +} + +#toggleThemeButton { + position: fixed; + top: 10px; + right: 0px; + cursor: pointer; + color: #fff; +} + +#chatPanel { + height: 100%; + max-height: calc(100vh - var(--header-height) - var(--input-height) - 32px); + overflow-y: auto; +} + +#sendButton { + border: none; + cursor: pointer; + font-size: 1em; + border-radius: var(--border-radius); + background-color: #557CB4; + width: var(--send-button-width); + padding: 0px; +} + +#userInputPanel { + display: flex; + max-width: 768px; +} + +#userInput { + margin-right: 15px; + width: 100%; + max-height: var(--max-textarea-height); + border-radius: var(--border-radius); + border-width: 2px; +} + +textarea { + resize: none; + background-color: #111; + color: #f2f2f2; +} + +body.light-theme textarea { + background-color: #fff; + color: #111; +} + +textarea.w3-border { + border-color: #333 !important; +} + +body.light-theme textarea.w3-border { + border-color: #ddd !important; +} + +textarea.w3-border:focus-visible { + border-color: #555 !important; + outline: none; +} + +body.light-theme textarea.w3-border:focus-visible { + border-color: #bbb !important; + outline: none; +} + +.user { + color: #d8d8d8; + background-color: #111; + border-radius: var(--border-radius); +} + +.computer { + color: #d8d8d8; + background-color: #111; + border-radius: var(--border-radius); +} + +div.user { + margin-bottom: 8px; + margin-right: 0px; + 
text-align: left; +} + +div.computer { + margin-bottom: 8px; + margin-right: 0px; + text-align: left; +} + +.message-author { + font-weight: bold; + padding-top: calc(var(--border-radius) / 2); + padding-left: var(--border-radius); + padding-right: var(--border-radius); +} + +p.message-author, p.message-author p { + margin: 0px; +} + +.message-content { + padding-left: var(--border-radius); + padding-bottom: calc(var(--border-radius) / 2); + padding-right: var(--border-radius); +} + +p.message-content, p.message-content p { + margin-top: 0px; + margin-left: 0px; + margin-right: 0px; +} + +.light-theme { + background-color: #fff; +} + +body.light-theme #toggleThemeButton { + color: #888; +} + +body.light-theme .user { + background-color: #fdfdfd; + color: #111; +} + +body.light-theme .computer { + background-color: #fdfdfd; + color: #111; +} + +#userInput::-webkit-scrollbar { + display: none; +} +#userInput { + -ms-overflow-style: none; + scrollbar-width: none; +} + +::-webkit-scrollbar { + height: 1rem; + width: .5rem; + background-color: #111; +} + +body.light-theme ::-webkit-scrollbar { + background-color: #fdfdfd; +} + +::-webkit-scrollbar:horizontal { + height: .5rem; + width: 1rem +} + +::-webkit-scrollbar:vertical { + height: .5rem; + width: 1rem +} + +::-webkit-scrollbar-track { + background-color: transparent; + border-radius: 9999px; +} + +::-webkit-scrollbar-thumb { + background-color: #0a0a0a; + border-color: rgba(255,255,255,var(--tw-border-opacity)); + border-radius: 9999px; + border-width: 1px; +} + +body.light-theme ::-webkit-scrollbar-thumb { + background-color: #fafafa; +} + +::-webkit-scrollbar-thumb:hover { + background-color: rgba(217,217,227,var(--tw-bg-opacity)) +} + + +.hljs { + margin: 0px; + padding: 16px; + padding-right: 0px; + border-radius: var(--border-radius); + overflow-x: auto; + max-width: 90vw; +} + +/* + +Atom One Dark by Daniel Gamage +Original One Dark Syntax theme from https://github.com/atom/one-dark-syntax + +base: #282c34 
+mono-1: #abb2bf +mono-2: #818896 +mono-3: #5c6370 +hue-1: #56b6c2 +hue-2: #61aeee +hue-3: #c678dd +hue-4: #98c379 +hue-5: #e06c75 +hue-5-2: #be5046 +hue-6: #d19a66 +hue-6-2: #e6c07b + +*/ + +.hljs { + color: #abb2bf; + background: #282c34; + } + + .hljs-comment, + .hljs-quote { + color: #5c6370; + font-style: italic; + } + + .hljs-doctag, + .hljs-keyword, + .hljs-formula { + color: #c678dd; + } + + .hljs-section, + .hljs-name, + .hljs-selector-tag, + .hljs-deletion, + .hljs-subst { + color: #e06c75; + } + + .hljs-literal { + color: #56b6c2; + } + + .hljs-string, + .hljs-regexp, + .hljs-addition, + .hljs-attribute, + .hljs-meta .hljs-string { + color: #98c379; + } + + .hljs-attr, + .hljs-variable, + .hljs-template-variable, + .hljs-type, + .hljs-selector-class, + .hljs-selector-attr, + .hljs-selector-pseudo, + .hljs-number { + color: #d19a66; + } + + .hljs-symbol, + .hljs-bullet, + .hljs-link, + .hljs-meta, + .hljs-selector-id, + .hljs-title { + color: #61aeee; + } + + .hljs-built_in, + .hljs-title.class_, + .hljs-class .hljs-title { + color: #e6c07b; + } + + .hljs-emphasis { + font-style: italic; + } + + .hljs-strong { + font-weight: bold; + } + + .hljs-link { + text-decoration: underline; + } diff --git a/src/ai/.x/templates/openai-webpage-with-functions/webpack.config.js b/src/ai/.x/templates/openai-webpage-with-functions/webpack.config.js new file mode 100644 index 00000000..b3b87bf1 --- /dev/null +++ b/src/ai/.x/templates/openai-webpage-with-functions/webpack.config.js @@ -0,0 +1,20 @@ +const path = require('path'); +const webpack = require('webpack'); +const Dotenv = require('dotenv-webpack'); + +module.exports = { + entry: './src/script.js', + output: { + filename: 'main.js', + path: path.resolve(__dirname, 'dist'), + }, + plugins: [ + new Dotenv(), + new webpack.DefinePlugin({ + 'process.env.ENDPOINT': JSON.stringify(process.env.ENDPOINT), + 'process.env.AZURE_API_KEY': JSON.stringify(process.env.AZURE_API_KEY), + 'process.env.DEPLOYMENT_NAME': 
JSON.stringify(process.env.DEPLOYMENT_NAME), + 'process.env.SYSTEM_PROMPT': JSON.stringify(process.env.SYSTEM_PROMPT), + }), + ], +}; \ No newline at end of file diff --git a/src/ai/.x/templates/openai-webpage/.vscode/launch.json b/src/ai/.x/templates/openai-webpage/.vscode/launch.json deleted file mode 100644 index 30fc6258..00000000 --- a/src/ai/.x/templates/openai-webpage/.vscode/launch.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "version": "0.2.0", - "configurations": [ - { - "type": "chrome", - "request": "launch", - "name": "Launch Chrome", - "file": "${workspaceFolder}/index.html" - } - ] -} \ No newline at end of file diff --git a/src/ai/.x/templates/openai-webpage/.vscode/tasks.json b/src/ai/.x/templates/openai-webpage/.vscode/tasks.json deleted file mode 100644 index d5460be9..00000000 --- a/src/ai/.x/templates/openai-webpage/.vscode/tasks.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - "version": "2.0.0", - "tasks": [ - { - "label": "install dependencies", - "type": "shell", - "command": "npm install", - "problemMatcher": [] - }, - { - "label": "build", - "type": "shell", - "command": "npx webpack", - "problemMatcher": [] - } - ] -} \ No newline at end of file diff --git a/src/common/details/named_values/tokens/programming_language_token.cs b/src/common/details/named_values/tokens/programming_language_token.cs index f8f0d80f..0e9ddffb 100644 --- a/src/common/details/named_values/tokens/programming_language_token.cs +++ b/src/common/details/named_values/tokens/programming_language_token.cs @@ -16,6 +16,7 @@ public static string GetExtension(string language) "java" => ".java", "javascript" => ".js", "python" => ".py", + "typescript" => ".ts", _ => string.Empty }; } @@ -27,7 +28,7 @@ public static string GetSuffix(string language) public static NamedValueTokenData Data() => new NamedValueTokenData(_optionName, _fullName, _optionExample, _requiredDisplayName); public static INamedValueTokenParser Parser() => new NamedValueTokenParserList( - new 
NamedValueTokenParser(_optionName, _fullName, "01", "1", "C#;c#;cs;Go;go;Java;java;JavaScript;javascript;js;Python;python;py"), + new NamedValueTokenParser(_optionName, _fullName, "01", "1", "C#;c#;cs;Go;go;Java;java;JavaScript;javascript;js;Python;python;py;TypeScript;typescript;ts"), new NamedValueTokenParser("--C#", "programming.language.csharp", "001", "0", null, null, "C#", _fullName), new NamedValueTokenParser("--CS", "programming.language.csharp", "001", "0", null, null, "C#", _fullName), new NamedValueTokenParser("--Go", "programming.language.go", "001", "0", null, null, "Go", _fullName), @@ -35,7 +36,9 @@ public static string GetSuffix(string language) new NamedValueTokenParser("--JavaScript", "programming.language.javascript", "001", "0", null, null, "JavaScript", _fullName), new NamedValueTokenParser("--JS", "programming.language.javascript", "001", "0", null, null, "JavaScript", _fullName), new NamedValueTokenParser("--Python", "programming.language.python", "001", "0", null, null, "Python", _fullName), - new NamedValueTokenParser("--PY", "programming.language.python", "001", "0", null, null, "Python", _fullName) + new NamedValueTokenParser("--PY", "programming.language.python", "001", "0", null, null, "Python", _fullName), + new NamedValueTokenParser("--TypeScript", "programming.language.typescript", "001", "0", null, null, "TypeScript", _fullName), + new NamedValueTokenParser("--TS", "programming.language.typescript", "001", "0", null, null, "TypeScript", _fullName) ); private const string _requiredDisplayName = "programming language"; From 31cae8f300635a90b38e787ea749200a015f5cd9 Mon Sep 17 00:00:00 2001 From: Rob Chambers Date: Wed, 17 Jan 2024 10:26:26 -0800 Subject: [PATCH 09/30] added test adapter/runner (#152) * added test adapter/runner * update cicd pipeline to allow for testing using test adapter... 
* try again * updated * try again * updates * update build pipeline and rethrow with correct stack info * one more change * updated again * udpate directory * fix tests * set the test run title correctly * updated for new templates * removed test runner; adjusted to run test yamls from their source directory * updated local bin output in build.yaml * update readme.md * setup a better working directory for test runs * update test results path --- .azure/pipelines/build.yaml | 161 +++- ai-cli.sln | 6 + src/ai/Program_AI.cs | 4 +- src/common/details/console/gui/Screen.cs | 20 +- .../Azure-AI-CLI-TestRunner-Default-Tags.yaml | 2 + tests/test.yaml | 93 ++ tests/test2.yaml | 16 + tests/test3.yaml | 2 + tests/testadapter/Logger.cs | 126 +++ tests/testadapter/Properties/AssemblyInfo.cs | 36 + tests/testadapter/README.md | 206 +++++ tests/testadapter/TestAdapterTest.runsettings | 7 + tests/testadapter/TestDiscoverer.cs | 40 + tests/testadapter/TestExecutor.cs | 41 + tests/testadapter/YamlHelpers.cs | 58 ++ tests/testadapter/YamlNodeExtensions.cs | 113 +++ tests/testadapter/YamlTagHelpers.cs | 126 +++ tests/testadapter/YamlTestAdapter.cs | 167 ++++ tests/testadapter/YamlTestAdapter.csproj | 9 + .../testadapter/YamlTestAdapterCommon.targets | 68 ++ tests/testadapter/YamlTestCaseFilter.cs | 74 ++ tests/testadapter/YamlTestCaseParser.cs | 426 ++++++++++ tests/testadapter/YamlTestCaseRunner.cs | 804 ++++++++++++++++++ tests/testadapter/YamlTestProperties.cs | 53 ++ .../YamlTestRunnerTriggerAttribute.cs | 21 + 25 files changed, 2671 insertions(+), 8 deletions(-) create mode 100644 tests/Azure-AI-CLI-TestRunner-Default-Tags.yaml create mode 100644 tests/test.yaml create mode 100644 tests/test2.yaml create mode 100644 tests/test3.yaml create mode 100644 tests/testadapter/Logger.cs create mode 100644 tests/testadapter/Properties/AssemblyInfo.cs create mode 100644 tests/testadapter/README.md create mode 100644 tests/testadapter/TestAdapterTest.runsettings create mode 100644 
tests/testadapter/TestDiscoverer.cs create mode 100644 tests/testadapter/TestExecutor.cs create mode 100644 tests/testadapter/YamlHelpers.cs create mode 100644 tests/testadapter/YamlNodeExtensions.cs create mode 100644 tests/testadapter/YamlTagHelpers.cs create mode 100644 tests/testadapter/YamlTestAdapter.cs create mode 100644 tests/testadapter/YamlTestAdapter.csproj create mode 100644 tests/testadapter/YamlTestAdapterCommon.targets create mode 100644 tests/testadapter/YamlTestCaseFilter.cs create mode 100644 tests/testadapter/YamlTestCaseParser.cs create mode 100644 tests/testadapter/YamlTestCaseRunner.cs create mode 100644 tests/testadapter/YamlTestProperties.cs create mode 100644 tests/testadapter/YamlTestRunnerTriggerAttribute.cs diff --git a/.azure/pipelines/build.yaml b/.azure/pipelines/build.yaml index ed75350a..23a2481b 100644 --- a/.azure/pipelines/build.yaml +++ b/.azure/pipelines/build.yaml @@ -187,8 +187,165 @@ stages: docker tag acrbn.azurecr.io/azure-ai-cli:bookworm-$(AICLIVersion) acrbn.azurecr.io/azure-ai-cli:latest docker push acrbn.azurecr.io/azure-ai-cli:latest -- stage: ManualApproval +- stage: TestStage dependsOn: [SetupStage, BuildStage] + condition: and(succeeded(), or(eq(variables['IsRelease'], 'true'), eq(variables['TestDevBuild'], 'true'))) + variables: + AICLIVersion: $[ stageDependencies.SetupStage.SetupJob.outputs['Variables.AICLIVersion']] + AICLISemVerVersion: $[ stageDependencies.SetupStage.SetupJob.outputs['Variables.AICLISemVerVersion']] + AICLINuPkgFileName: $[ stageDependencies.SetupStage.SetupJob.outputs['Variables.AICLINuPkgFileName']] + BuildConfiguration: 'ReleaseUnixOS' + LocalBinOutputPath: '$(Build.SourcesDirectory)/tests/build/bin' + LocalInputPath: '$(Build.SourcesDirectory)/tests' + TargetFolder: '$(System.DefaultWorkingDirectory)' + TestFilter: 'cli=ai&tag!=skip' + TestResultsPath: '$(Build.SourcesDirectory)/testresults' + TestRunTitle: 'Azure AI CLI $(AICLIVersion) ($(BuildConfiguration)-$(Build.BuildNumber))' + 
TestRunTrxFileName: '$(TestResultsPath)/ai-cli-test-results-$(BuildConfiguration)-$(Build.BuildNumber).trx' + TestBackupArtifactFile: '$(Build.ArtifactStagingDirectory)/ai-cli-test-run-backup-artifact-$(BuildConfiguration)-$(Build.BuildNumber).zip' + jobs: + - job: TestJob + steps: + + # ----------------------------------------------------------------------------- + # Download the ai CLI artifacts + # ----------------------------------------------------------------------------- + - task: DownloadPipelineArtifact@2 + displayName: Download ai-cli-artifacts + inputs: + artifact: 'ai-cli-artifacts' + targetPath: '$(Build.ArtifactStagingDirectory)/ai-cli-artifacts' + - task: CopyFiles@2 + displayName: Copy downloaded ai-cli-artifacts + continueOnError: true + inputs: + Contents: '**/*' + SourceFolder: $(Build.ArtifactStagingDirectory)/ai-cli-artifacts + TargetFolder: $(TargetFolder) + FlattenFolders: true + - task: Bash@3 + displayName: List files... + continueOnError: true + inputs: + targetType: 'inline' + script: | + echo listing for $(System.DefaultWorkingDirectory) + ls -la $(System.DefaultWorkingDirectory) + echo listing for $(Build.ArtifactStagingDirectory) + ls -la $(Build.ArtifactStagingDirectory) + + # ----------------------------------------------------------------------------- + # Install the ai CLI + # ----------------------------------------------------------------------------- + - task: DotNetCoreCLI@2 + displayName: INSTALL AI - Installing ai CLI via `dotnet tool install` + continueOnError: true + inputs: + includeNuGetOrg: false + command: custom + custom: tool + version: '7.0.x' + arguments: install + --ignore-failed-sources + --add-source "$(System.DefaultWorkingDirectory)" + --global Azure.AI.CLI + --version "$(AICLIVersion)" + + # ----------------------------------------------------------------------------- + # Finish job prep (mkdir, config cli) + # ----------------------------------------------------------------------------- + - bash: | + env | 
sort + which dotnet + dotnet --version + echo "TestResultsPath: $(TestResultsPath)" + mkdir $(TestResultsPath) + echo "LocalInputPath: $(LocalInputPath)" + ls -la $(LocalInputPath) + ai config system --set input.path $(LocalInputPath) + ai config --find + echo "DefaultWorkingDirectory: $(System.DefaultWorkingDirectory)" + ls -la $(System.DefaultWorkingDirectory) + displayName: Finish job prep (mkdir, config cli) + workingDirectory: '$(System.DefaultWorkingDirectory)' + + # ----------------------------------------------------------------------------- + # Build the YamlTestAdapter + # ----------------------------------------------------------------------------- + - task: DotNetCoreCLI@2 + displayName: Build YamlTestAdapter + continueOnError: true + inputs: + includeNuGetOrg: false + command: build + version: '7.0.x' + projects: '**/testadapter/YamlTestAdapter.csproj' + arguments: + -c $(BuildConfiguration) + /p:Platform=x64 + /p:LocalBinOutputPath="$(LocalBinOutputPath)" + + # ----------------------------------------------------------------------------- + # Run the tests + # ----------------------------------------------------------------------------- + - task: DotNetCoreCLI@2 + displayName: Run ai-cli tests + continueOnError: true + inputs: + includeNuGetOrg: false + command: test + version: '7.0.x' + arguments: + --logger:trx;LogFileName="$(TestRunTrxFileName)" + --logger:console;verbosity=normal + --filter "$(TestFilter)" + "$(LocalBinOutputPath)/$(BuildConfiguration)/net7.0/Azure.AI.CLI.TestAdapter.dll" + workingDirectory: '$(TestResultsPath)' + + # ----------------------------------------------------------------------------- + # Publish the test results + # ----------------------------------------------------------------------------- + - task: PublishTestResults@2 + displayName: Publish ai-cli test results + continueOnError: true + inputs: + testRunner: VSTest + testResultsFiles: '$(TestRunTrxFileName)' + testRunTitle: '$(TestRunTitle)' + failTaskOnFailedTests: 
true + + # ----------------------------------------------------------------------------- + # Archive and publish the test run backup artifact + # ----------------------------------------------------------------------------- + - task: ArchiveFiles@2 + displayName: Archive ai-cli-test run backup artifact (build/bin) + continueOnError: true + inputs: + rootFolderOrFile: '$(LocalBinOutputPath)' + includeRootFolder: false + archiveFile: '$(TestBackupArtifactFile)' + replaceExistingArchive: false + + - task: ArchiveFiles@2 + displayName: Archive ai-cli-test run backup artifact (testresults) + continueOnError: true + inputs: + rootFolderOrFile: '$(TestResultsPath)' + includeRootFolder: false + archiveFile: '$(TestBackupArtifactFile)' + replaceExistingArchive: false + + - task: PublishBuildArtifacts@1 + displayName: Publish ai-cli-test run backup artifact + continueOnError: true + retryCountOnTaskFailure: 5 + inputs: + parallel: true + pathToPublish: '$(TestBackupArtifactFile)' + artifactName: TestRunBackup + +- stage: ManualApproval + dependsOn: [SetupStage, BuildStage, TestStage] condition: and(succeeded(), or(eq(stageDependencies.SetupStage.outputs['SetupJob.Variables.IsRelease'], 'true'), eq(variables['PublishDevBuild'], 'true'))) variables: AICLIVersion: $[ stageDependencies.SetupStage.SetupJob.outputs['Variables.AICLIVersion']] @@ -208,7 +365,7 @@ stages: onTimeout: reject - stage: PublishPublic - dependsOn: [SetupStage, BuildStage, ManualApproval] + dependsOn: [SetupStage, BuildStage, TestStage, ManualApproval] condition: and(succeeded(), or(eq(stageDependencies.SetupStage.outputs['SetupJob.Variables.IsRelease'], 'true'), eq(variables['PublishDevBuild'], 'true'))) variables: AICLIVersion: $[ stageDependencies.SetupStage.SetupJob.outputs['Variables.AICLIVersion']] diff --git a/ai-cli.sln b/ai-cli.sln index 739356d3..c226ae76 100644 --- a/ai-cli.sln +++ b/ai-cli.sln @@ -15,6 +15,8 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "test_helper_functions_exten 
EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "template_extension", "src\extensions\template_extension\template_extension.csproj", "{023B4F9C-E2B3-4CCD-A993-87E337C16EDE}" EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "YamlTestAdapter", "tests\testadapter\YamlTestAdapter.csproj", "{7C3F1355-B679-487D-904D-7E5FEBA9E75C}" +EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution Debug|Any CPU = Debug|Any CPU @@ -41,6 +43,10 @@ Global {023B4F9C-E2B3-4CCD-A993-87E337C16EDE}.Debug|Any CPU.Build.0 = Debug|Any CPU {023B4F9C-E2B3-4CCD-A993-87E337C16EDE}.Release|Any CPU.ActiveCfg = Release|Any CPU {023B4F9C-E2B3-4CCD-A993-87E337C16EDE}.Release|Any CPU.Build.0 = Release|Any CPU + {7C3F1355-B679-487D-904D-7E5FEBA9E75C}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {7C3F1355-B679-487D-904D-7E5FEBA9E75C}.Debug|Any CPU.Build.0 = Debug|Any CPU + {7C3F1355-B679-487D-904D-7E5FEBA9E75C}.Release|Any CPU.ActiveCfg = Release|Any CPU + {7C3F1355-B679-487D-904D-7E5FEBA9E75C}.Release|Any CPU.Build.0 = Release|Any CPU EndGlobalSection GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE diff --git a/src/ai/Program_AI.cs b/src/ai/Program_AI.cs index 8a48972a..afcaa983 100644 --- a/src/ai/Program_AI.cs +++ b/src/ai/Program_AI.cs @@ -17,9 +17,9 @@ namespace Azure.AI.Details.Common.CLI { public class AiProgram { - static void Main(string[] args) + static int Main(string[] args) { - Program.Main(new AiProgramData(), args); + return Program.Main(new AiProgramData(), args); } } diff --git a/src/common/details/console/gui/Screen.cs b/src/common/details/console/gui/Screen.cs index f1ceac09..7fccb0e5 100644 --- a/src/common/details/console/gui/Screen.cs +++ b/src/common/details/console/gui/Screen.cs @@ -328,11 +328,23 @@ private static Colors GetColorsNow() return new Colors(Console.ForegroundColor, Console.BackgroundColor); } + private static int TryCatchNoThrow(Func function, int defaultResult) + { + try + { + return 
function(); + } + catch (Exception) + { + return defaultResult; + } + } + private static Screen _current = new Screen(); - private int _initialWidth = Console.WindowWidth; - private int _initialHeight = Console.WindowHeight; - private int _initialTop = Console.CursorTop; // Console.WindowTop; - private int _initialLeft = Console.WindowLeft; + private int _initialWidth = TryCatchNoThrow(() => Console.WindowWidth, 200); + private int _initialHeight = TryCatchNoThrow(() => Console.WindowHeight, 50); + private int _initialTop = TryCatchNoThrow(() => Console.CursorTop, 0); + private int _initialLeft = TryCatchNoThrow(() => Console.WindowLeft, 0); private bool _initialCursorVisible = GetCursorVisible(); private Colors _initialColors = GetColorsNow(); private int _biggestYSoFar = 0; diff --git a/tests/Azure-AI-CLI-TestRunner-Default-Tags.yaml b/tests/Azure-AI-CLI-TestRunner-Default-Tags.yaml new file mode 100644 index 00000000..1eb9a5b3 --- /dev/null +++ b/tests/Azure-AI-CLI-TestRunner-Default-Tags.yaml @@ -0,0 +1,2 @@ +cli: ai +workingDirectory: ../testruns diff --git a/tests/test.yaml b/tests/test.yaml new file mode 100644 index 00000000..5d7557bf --- /dev/null +++ b/tests/test.yaml @@ -0,0 +1,93 @@ +- name: simulate pass + simulate: Passed + +- name: simulate skipped + simulate: Skipped + +- test1: run --script "echo hello" +- test2: run --script "echo oh yeah?" + +- name: try1a command ... ai + command: ai + +- name: try1b command ... ai run --script "echo hello" + command: ai run --script "echo hello" + expect: hello + +- name: try2a script ... ai + script: ai + +- name: try2b script ... ai run --script "echo hello" + script: ai run --script "echo hello" + expect: hello + +- name: simple help test + script: ai + expect: | + (?# ---------- BANNER) + AI - Azure AI CLI, Version [01]\.[0-9].[0-9] + Copyright \(c\) 2023 Microsoft Corporation\. All Rights Reserved\. + + This PUBLIC PREVIEW version may change at any time\. 
+ See: https://aka\.ms/azure-ai-cli-public-preview + + ___ ____ ___ _____ + / _ /_ / / _ |/_ _/ + / __ |/ /_/ __ |_/ /_ + /_/ |_/___/_/ |_/____/ + + USAGE: ai \[\.\.\.\]\r?$\n + ^\r?$\n + ^HELP\r?$\n + ^\r?$\n + ^ ai help\r?$\n + ^ ai help init\r?$\n + ^\r?$\n + ^COMMANDS\r?$\n + ^\r?$\n + ^ ai init \[\.\.\.\] \(see: ai help init\)\r?$\n + ^ ai config \[\.\.\.\] \(see: ai help config\)\r?$\n + ^\r?$\n + ^ ai dev \[\.\.\.\] \(see: ai help dev\)\r?$\n + ^\r?$\n + ^ ai chat \[\.\.\.\] \(see: ai help chat\)\r?$\n + ^ ai flow \[\.\.\.\] \(see: ai help flow\)\r?$\n + ^\r?$\n + ^ ai search \[\.\.\.\] \(see: ai help search\)\r?$\n + ^ ai speech \[\.\.\.\] \(see: ai help speech\)\r?$\n + ^\r?$\n + ^ ai service \[\.\.\.\] \(see: ai help service\)\r?$\n + ^\r?$\n + ^EXAMPLES\r?$\n + ^\r?$\n + ^ ai init\r?$\n + ^ ai chat --interactive --system @prompt\.txt\r?$\n + ^\r?$\n + ^ ai search index update --name MyIndex --files \*\.md\r?$\n + ^ ai chat --interactive --system @prompt\.txt --index-name MyIndex\r?$\n + ^\r?$\n + ^SEE ALSO\r?$\n + ^\r?$\n + ^ ai help examples\r?$\n + ^\r?$\n + ^ ai help find "prompt"\r?$\n + ^ ai help find "prompt" --expand\r?$\n + ^\r?$\n + ^ ai help find topics "examples"\r?$\n + ^ ai help list topics\r?$\n + ^\r?$\n + ^ ai help documentation\r?$\n + ^\r?$\n + +- name: dev new list + command: dev new list + arguments: + expect: | + ^Name +Short +Name +Language +\r?$\n + ^-+ +-+ +-+\r?$\n + ^Environment +Variables +\.env *\r?$\n + ^Helper +Function +Class +Library +helper-functions +C# *\r?$\n + ^OpenAI +Chat +Completions +openai-chat +C#, +Go, +Java, +JavaScript, +Python *\r?$\n + ^OpenAI +Chat +Completions +\(Streaming\) +openai-chat-streaming +C#, +Go, +Java, +JavaScript, +Python *\r?$\n + ^OpenAI +Chat +Completions +\(w/ +Data +\+ +AI +Search\) +openai-chat-streaming-with-data +C#(, +Python){0,1} *\r?$\n + ^OpenAI +Chat +Completions +\(w/ +Functions\) +openai-chat-streaming-with-functions +C#, +Go, +JavaScript, +Python *\r?$\n diff --git 
a/tests/test2.yaml b/tests/test2.yaml new file mode 100644 index 00000000..a3ea4935 --- /dev/null +++ b/tests/test2.yaml @@ -0,0 +1,16 @@ +- name: simple help test + script: | + ai help + ai dev + ai dev new + ai dev new --help + + expect: | + AI + USAGE + COMMANDS + EXAMPLES + ADDITIONAL TOPICS + +- name: simple chat example + command: chat --question "tell me a joke" --save chat.job diff --git a/tests/test3.yaml b/tests/test3.yaml new file mode 100644 index 00000000..9791da9d --- /dev/null +++ b/tests/test3.yaml @@ -0,0 +1,2 @@ +- name: simple dev new command + command: ai dev new openai-chat --cs diff --git a/tests/testadapter/Logger.cs b/tests/testadapter/Logger.cs new file mode 100644 index 00000000..e4da6ec6 --- /dev/null +++ b/tests/testadapter/Logger.cs @@ -0,0 +1,126 @@ +using Microsoft.VisualStudio.TestPlatform.ObjectModel; +using Microsoft.VisualStudio.TestPlatform.ObjectModel.Adapter; +using Microsoft.VisualStudio.TestPlatform.ObjectModel.Logging; +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.IO; +using System.Linq; +using System.Text; +using System.Threading; +using System.Threading.Tasks; + +namespace TestAdapterTest +{ + public class Logger + { + public static void Log(IMessageLogger logger) + { + Logger.logger = logger; + } + + public static void Log(string text) + { + LogInfo(text); + Logger.DbgTraceInfo(text); + } + + public static void LogIf(bool log, string text) + { + if (log) Log(text); + } + + #region log methods + + public static void LogInfo(string text) + { + using (var mutex = new Mutex(false, "Logger Mutex")) + { + mutex.WaitOne(); + File.AppendAllText(_logPath, $"{DateTime.Now}: INFO: {text}\n"); + mutex.ReleaseMutex(); + } + } + + public static void LogWarning(string text) + { + using (var mutex = new Mutex(false, "Logger Mutex")) + { + mutex.WaitOne(); + File.AppendAllText(_logPath, $"{DateTime.Now}: WARNING: {text}\n"); + mutex.ReleaseMutex(); + } + } + + public static void LogError(string 
text) + { + using (var mutex = new Mutex(false, "Logger Mutex")) + { + mutex.WaitOne(); + File.AppendAllText(_logPath, $"{DateTime.Now}: ERROR: {text}\n"); + mutex.ReleaseMutex(); + } + } + + #endregion + + #region dbg trace methods + + public static void DbgTraceInfo(string text) + { +#if DEBUG + TraceInfo(text); +#endif + } + + public static void DbgTraceWarning(string text) + { +#if DEBUG + TraceWarning(text); +#endif + } + + public static void DbgTraceError(string text) + { +#if DEBUG + TraceError(text); +#endif + } + + #endregion + + #region trace methods + + public static void TraceInfo(string text) + { + logger?.SendMessage(TestMessageLevel.Informational, $"{DateTime.Now}: {text}"); + } + + public static void TraceWarning(string text) + { + logger?.SendMessage(TestMessageLevel.Warning, $"{DateTime.Now}: {text}"); + } + + public static void TraceError(string text) + { + logger?.SendMessage(TestMessageLevel.Error, $"{DateTime.Now}: {text}"); + } + + #endregion + + #region private methods and data + + private static string GetLogPath() + { + var pid = Process.GetCurrentProcess().Id.ToString(); + var time = DateTime.Now.ToFileTime().ToString(); + return $"log-test-adatper-{time}-{pid}.log"; + } + + private static IMessageLogger logger = null; + + private static string _logPath = GetLogPath(); + + #endregion + } +} diff --git a/tests/testadapter/Properties/AssemblyInfo.cs b/tests/testadapter/Properties/AssemblyInfo.cs new file mode 100644 index 00000000..8acd379b --- /dev/null +++ b/tests/testadapter/Properties/AssemblyInfo.cs @@ -0,0 +1,36 @@ +using System.Reflection; +using System.Runtime.CompilerServices; +using System.Runtime.InteropServices; + +// General Information about an assembly is controlled through the following +// set of attributes. Change these attribute values to modify the information +// associated with an assembly. 
+[assembly: AssemblyTitle("TestAdapterTest")] +[assembly: AssemblyDescription("")] +[assembly: AssemblyConfiguration("")] +[assembly: AssemblyCompany("")] +[assembly: AssemblyProduct("TestAdapterTest")] +[assembly: AssemblyCopyright("Copyright © 2022")] +[assembly: AssemblyTrademark("")] +[assembly: AssemblyCulture("")] + +// Setting ComVisible to false makes the types in this assembly not visible +// to COM components. If you need to access a type in this assembly from +// COM, set the ComVisible attribute to true on that type. +[assembly: ComVisible(false)] + +// The following GUID is for the ID of the typelib if this project is exposed to COM +[assembly: Guid("9409c89f-ae64-4d4f-820e-e4248512733a")] + +// Version information for an assembly consists of the following four values: +// +// Major Version +// Minor Version +// Build Number +// Revision +// +// You can specify all the values or you can default the Build and Revision Numbers +// by using the '*' as shown below: +// [assembly: AssemblyVersion("1.0.*")] +[assembly: AssemblyVersion("1.0.0.0")] +[assembly: AssemblyFileVersion("1.0.0.0")] diff --git a/tests/testadapter/README.md b/tests/testadapter/README.md new file mode 100644 index 00000000..40189c92 --- /dev/null +++ b/tests/testadapter/README.md @@ -0,0 +1,206 @@ +# `ai` CLI Yaml Test Adapter + +PRE-REQUISITES: +* `ai` must be accessible in `PATH` +* `ai` must be configured as required for tests (run `ai init`, or use `ai config --set KEY=VALUE` for all required information) +- see: https://crbn.us/searchdocs?ai +- OR ... + ```dotnetcli + dotnet tool install --global Azure.AI.CLI + ai init + ``` + +## Run ALL tests + +**dotnet test** +From fresh clone (one step, CLI): +* DEBUG: + ```dotnetcli + dotnet test --logger:trx + ``` +* RELEASE: + ```dotnetcli + dotnet test --configuration release --logger:trx + ``` + +OR ... 
[Build](#BUILD) first, then w/CLI: +* DEBUG: + ```dotnetcli + cd tests\testadapter\bin\Debug\net7.0 + dotnet test Azure.AI.CLI.TestAdapter.dll --logger:trx + ``` +* RELEASE: + ```dotnetcli + cd tests\testadapter\bin\Release\net7.0 + dotnet test Azure.AI.CLI.TestAdapter.dll --logger:trx --logger:console;verbosity=normal + ``` + +**dotnet vstest** +OR ... [Build](#BUILD) first, then w/CLI: +* DEBUG: + ```dotnetcli + cd tests\testadapter\bin\Debug\net7.0 + dotnet vstest Azure.AI.CLI.TestAdapter.dll --logger:trx + ``` +* RELEASE: + ```dotnetcli + cd tests\testadapter\bin\Release\net7.0 + dotnet vstest Azure.AI.CLI.TestAdapter.dll --logger:trx --logger:console;verbosity=normal + ``` + +**VS 2019+** +OR ... [Build](#BUILD) first, then w/Visual Studio 2019+: +* Open Test Explorer (`T`) +* Run all tests (`V`) + +--- +## LIST tests + +**dotnet test** +From fresh clone (one step, CLI): +* DEBUG: + ```dotnetcli + dotnet test -t + ``` +* RELEASE: + ```dotnetcli + dotnet test --configuration release -t + ``` + +OR ... [Build](#BUILD) first, then w/CLI: +* DEBUG: + ```dotnetcli + cd tests\testadapter\bin\Debug\net7.0 + dotnet test Azure.AI.CLI.TestAdapter.dll -t + ``` +* RELEASE: + ```dotnetcli + cd tests\testadapter\bin\Release\net7.0 + dotnet test Azure.AI.CLI.TestAdapter.dll -t + ``` + +**dotnet vstest** +OR ... [Build](#BUILD) first, then w/CLI: +* DEBUG: + ```dotnetcli + cd tests\testadapter\bin\Debug\net7.0 + dotnet vstest Azure.AI.CLI.TestAdapter.dll -lt + ``` +* RELEASE: + ```dotnetcli + cd tests\testadapter\bin\Release\net7.0 + dotnet vstest Azure.AI.CLI.TestAdapter.dll -lt + ``` + +--- +## Run SOME tests + +**dotnet test** +From fresh clone (one step, CLI): +* DEBUG: + ```dotnetcli + dotnet test --filter:name~PARTIAL_NAME + ``` +* RELEASE: + ```dotnetcli + dotnet test --configuration release --filter:name~PARTIAL_NAME + ``` + +OR ... 
[Build](#BUILD) first, then w/CLI: + +* DEBUG: + ```dotnetcli + cd tests\testadapter\bin\Debug\net7.0 + dotnet test --filter:name~PARTIAL_NAME Azure.AI.CLI.TestAdapter.dll + ``` +* RELEASE: + ```dotnetcli + cd tests\testadapter\bin\Release\net7.0 + dotnet test --filter:name~PARTIAL_NAME Azure.AI.CLI.TestAdapter.dll + ``` + +**dotnet vstest** +OR ... [Build](#BUILD) first, then w/CLI: +* DEBUG: + ```dotnetcli + cd tests\testadapter\bin\Debug\net7.0 + dotnet vstest Azure.AI.CLI.TestAdapter.dll --logger:trx --testcasefilter:name~PARTIAL_NAME + ``` +* RELEASE: + ```dotnetcli + cd tests\testadapter\bin\Release\net7.0 + dotnet vstest Azure.AI.CLI.TestAdapter.dll --logger:trx --testcasefilter:name~PARTIAL_NAME + ``` + +**VS 2019+** +OR ... [Build](#BUILD) first, then w/Visual Studio 2019+: +* Open Test Explorer (`T`) +- Select tests (w/ mouse: `Left-click`, extend w/`Shift-left-click` and/or `Ctrl-left-click`) +- OR ... ``, enter search criteria, press `` +* Run selected tests (w/ mouse: `Right-click`, click on `Run`) + +**Additional CLI test case filters** + +`Operator[|&]` + +Where Operator is one of `=`, `!=` or `~` (Operator ~ has 'contains' +semantics and is applicable for string properties like DisplayName). + +Parenthesis () can be used to group sub-expressions. 
+ +| property | aliases | example | +|-|-|-| +| Name | DisplayName | `Name=NAME` +| | | `Name!=NAME` +| | | `Name~PARTIAL` +| fqn | FullyQualifiedName | `fqn=yaml.FILE.AREA.CLASS.NAME` +| | | `fqn!=yaml.FILE.AREA.CLASS.NAME` +| | | `fqn~PARTIAL` +| command | | `command~recognize` +| | | `command~synthesize` +| | | `command~translate` +| | | `command~weather` +| | | `command~mp3` +| script | | `script~echo` +| | | `script~recognize` +| | | `script~weather` +| | | `script~mp3` +| expect | | `expect~RECOGNIZED:` +| not-expect | | `not-expect~ERROR` +| log-expect | | `log-expect~path:` +| log-not-expect | | `log-not-expect~ERROR` + +--- +# BUILD + +**dotnet build** +* DEBUG: `dotnet build` +* RELEASE: `dotnet build --configuration release` + +**VS 2019+** +* Open `ai-cli.sln` +* Select `Debug` or `Release` +* Run (``) + +--- + +## ADDITIONAL OPTIONS + +**dotnet test** +Console logging: `-v` or `--verbosity` followed one of: +* `q[uiet]` +* `m[inimal]` +* `n[ormal]` +* `d[etailed]` +* `diag[nostic]` + +e.g. `dotnet test --configuration release --v n` + +**dotnet vstest** +Console logging: `--logger:console`, optionally followed by one of: +* `;verbosity=quiet` +* `;verbosity=minimal` +* `;verbosity=normal` +* `;verbosity=detailed` + +e.g. `dotnet vstest Azure.AI.CLI.TestAdapter.dll --logger:trx --logger:console;verbosity=normal` diff --git a/tests/testadapter/TestAdapterTest.runsettings b/tests/testadapter/TestAdapterTest.runsettings new file mode 100644 index 00000000..dc0165f9 --- /dev/null +++ b/tests/testadapter/TestAdapterTest.runsettings @@ -0,0 +1,7 @@ + + + . 
+ + + + \ No newline at end of file diff --git a/tests/testadapter/TestDiscoverer.cs b/tests/testadapter/TestDiscoverer.cs new file mode 100644 index 00000000..f53ffd62 --- /dev/null +++ b/tests/testadapter/TestDiscoverer.cs @@ -0,0 +1,40 @@ +using Microsoft.VisualStudio.TestPlatform.ObjectModel; +using Microsoft.VisualStudio.TestPlatform.ObjectModel.Adapter; +using Microsoft.VisualStudio.TestPlatform.ObjectModel.Logging; +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.IO; +using System.Linq; +using System.Text; +using System.Threading; +using System.Threading.Tasks; + +namespace TestAdapterTest +{ + [FileExtension(YamlTestAdapter.FileExtensionYaml)] + [FileExtension(YamlTestAdapter.FileExtensionDll)] + [DefaultExecutorUri(YamlTestAdapter.Executor)] + public class TestDiscoverer : ITestDiscoverer + { + public void DiscoverTests(IEnumerable sources, IDiscoveryContext discoveryContext, IMessageLogger logger, ITestCaseDiscoverySink discoverySink) + { + try + { + Logger.Log(logger); + Logger.Log($"TestDiscoverer.DiscoverTests(): ENTER"); + Logger.Log($"TestDiscoverer.DiscoverTests(): count={sources.Count()}"); + foreach (var test in YamlTestAdapter.GetTestsFromFiles(sources)) + { + discoverySink.SendTestCase(test); + } + Logger.Log($"TestDiscoverer.DiscoverTests(): EXIT"); + } + catch (Exception ex) + { + Logger.Log($"EXCEPTION: {ex.Message}\nSTACK: {ex.StackTrace}"); + throw; + } + } + } +} diff --git a/tests/testadapter/TestExecutor.cs b/tests/testadapter/TestExecutor.cs new file mode 100644 index 00000000..9ebca03c --- /dev/null +++ b/tests/testadapter/TestExecutor.cs @@ -0,0 +1,41 @@ +using Microsoft.VisualStudio.TestPlatform.ObjectModel; +using Microsoft.VisualStudio.TestPlatform.ObjectModel.Adapter; +using Microsoft.VisualStudio.TestPlatform.ObjectModel.Logging; +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.IO; +using System.Linq; +using System.Text; +using System.Threading; 
+using System.Threading.Tasks; + +namespace TestAdapterTest +{ + [ExtensionUri(YamlTestAdapter.Executor)] + public class TextExecutor : ITestExecutor + { + public void RunTests(IEnumerable tests, IRunContext runContext, IFrameworkHandle frameworkHandle) + { + Logger.Log(frameworkHandle); + Logger.Log($"TextExecutor.RunTests(IEnumerable(): ENTER"); + Logger.Log($"TextExecutor.RunTests(IEnumerable(): count={tests.Count()}"); + YamlTestAdapter.RunTests(tests, runContext, frameworkHandle); + Logger.Log($"TextExecutor.RunTests(IEnumerable(): EXIT"); + } + + public void RunTests(IEnumerable sources, IRunContext runContext, IFrameworkHandle frameworkHandle) + { + Logger.Log(frameworkHandle); + Logger.Log($"TextExecutor.RunTests(IEnumerable(): ENTER"); + Logger.Log($"TextExecutor.RunTests(IEnumerable(): count={sources.Count()}"); + RunTests(YamlTestAdapter.GetTestsFromFiles(sources), runContext, frameworkHandle); + Logger.Log($"TextExecutor.RunTests(IEnumerable(): EXIT"); + } + + public void Cancel() + { + Logger.Log($"TextExecutor.Cancel(): ENTER/EXIT"); + } + } +} diff --git a/tests/testadapter/YamlHelpers.cs b/tests/testadapter/YamlHelpers.cs new file mode 100644 index 00000000..112ca04a --- /dev/null +++ b/tests/testadapter/YamlHelpers.cs @@ -0,0 +1,58 @@ +using System; +using System.IO; +using YamlDotNet.RepresentationModel; +using YamlDotNet.Serialization; + +namespace TestAdapterTest +{ + public class YamlHelpers + { + public static YamlStream ParseYamlStream(string fullName) + { + var stream = new YamlStream(); + var text = File.OpenText(fullName); + var error = string.Empty; + + try + { + stream.Load(text); + } + catch (YamlDotNet.Core.YamlException ex) + { + var where = $"{fullName}({ex.Start.Line},{ex.Start.Column})"; + error = $"Error parsing YAML (YamlException={ex.GetType()}):\n {where}\n {ex.Message}"; + } + catch (Exception ex) + { + var where = fullName; + error = $"Error parsing YAML (YamlException={ex.GetType()}):\n {where}\n {ex.Message}"; + } + + if 
(!string.IsNullOrEmpty(error)) + { + Logger.LogError(error); + Logger.TraceError(error); + } + + return stream; + } + + public static string ToYamlOrJsonString(YamlNode node, bool yaml) + { + var serializer = yaml + ? new SerializerBuilder().Build() + : new SerializerBuilder().JsonCompatible().Build(); + + using var writer = new StringWriter(); + var stream = new YamlStream { new YamlDocument(node) }; + stream.Save(writer); + + using var reader = new StringReader(writer.ToString()); + var deserializer = new Deserializer(); + var yamlObject = deserializer.Deserialize(reader); + + var trimmed = serializer.Serialize(yamlObject).Trim('\r', '\n'); + return yaml ? trimmed : trimmed.Replace("\t", "\\t").Replace("\f", "\\f"); + } + } +} diff --git a/tests/testadapter/YamlNodeExtensions.cs b/tests/testadapter/YamlNodeExtensions.cs new file mode 100644 index 00000000..5bb42ab8 --- /dev/null +++ b/tests/testadapter/YamlNodeExtensions.cs @@ -0,0 +1,113 @@ +using Microsoft.VisualStudio.TestPlatform.ObjectModel; +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Text; +using YamlDotNet.RepresentationModel; + +namespace TestAdapterTest +{ + public static class YamlNodeExtensions + { + public static string ToYamlString(this YamlNode node) + { + return YamlHelpers.ToYamlOrJsonString(node, true); + } + + public static string ToJsonString(this YamlNode node) + { + return YamlHelpers.ToYamlOrJsonString(node, false); + } + + public static YamlScalarNode ConvertScalarSequenceToMultiLineTsvScalarNode(this YamlNode yaml, TestCase test, string[] keys) + { + var text = yaml.ConvertScalarSequenceToMultilineTsvString(keys); + if (text == null) + { + text = $"Invalid sequence or sequence value at {test.CodeFilePath}({yaml.Start.Line},{yaml.Start.Column})"; + Logger.Log(text); + } + + return new YamlScalarNode(text); + } + + public static string ConvertScalarSequenceToMultilineTsvString(this YamlNode node, string[] keys = null) + { + // ensure 
it's a sequence + var ok = node is YamlSequenceNode; + if (!ok) return null; + + var lines = new List(); + foreach (var item in (node as YamlSequenceNode).Children) + { + var line = item is YamlScalarNode + ? (item as YamlScalarNode).Value + : item is YamlSequenceNode + ? item.ConvertScalarSequenceToTsvString(keys) + : item.ConvertScalarMapToTsvString(keys); + + // ensure each item is either scalar, or sequence of scalar + var invalidItem = (line == null); + Logger.LogIf(invalidItem, $"Invalid item at ({item.Start.Line},{item.Start.Column})"); + if (invalidItem) return null; + + lines.Add(line); + } + return string.Join("\n", lines); + } + + public static string ConvertScalarSequenceToTsvString(this YamlNode node, string[] keys = null) + { + // ensure it's a sequence (list/array) + var sequence = node as YamlSequenceNode; + if (sequence == null) return null; + + // ensure there are no non-scalar children + var count = sequence.Count(x => !(x is YamlScalarNode)); + Logger.LogIf(count > 0, $"Invalid: (non-scalar) count({count}) > 0"); + if (count > 0) return null; + + // join the scalar children separated by tabs + var tsv = string.Join("\t", sequence.Children + .Select(x => (x as YamlScalarNode).Value)); + + // if we don't have enough items, append empty string columns (count of items == count of tabs + 1) + while (tsv.Count(x => x == '\t') + 1 < keys?.Length) + { + tsv += "\t"; + } + + tsv = tsv.Replace('\n', '\f'); + Logger.Log($"YamlNodeExtensions.ConvertScalarSequenceToTsvString: tsv='{tsv}'"); + return tsv; + } + + public static string ConvertScalarMapToTsvString(this YamlNode node, string[] keys) + { + // ensure it's a mapping node and we have keys + var mapping = node as YamlMappingNode; + if (mapping == null || keys == null) return null; + + // ensure there are no non-scalar kvp children + var count = mapping.Count(x => !(x.Key is YamlScalarNode) || !(x.Value is YamlScalarNode)); + Logger.LogIf(count > 0, $"Invalid: (non-scalar key or value) count({count}) > 
0"); + if (count > 0) return null; + + // ensure the key specified is in the list of keys + count = mapping.Count(x => !keys.Contains((x.Key as YamlScalarNode).Value)); + Logger.LogIf(count > 0, $"Invalid: key not found count({count}) > 0"); + if (count > 0) return null; + + // join the scalar children ordered by keys, separated by tabs + var tsv = string.Join("\t", keys + .Select(key => mapping.Children.ContainsKey(key) + ? (mapping.Children[key] as YamlScalarNode).Value + : "")); + + tsv = tsv.Replace('\n', '\f'); + Logger.Log($"YamlNodeExtensions.ConvertScalarMapToTsvString: tsv='{tsv}'"); + return tsv; + } + } +} diff --git a/tests/testadapter/YamlTagHelpers.cs b/tests/testadapter/YamlTagHelpers.cs new file mode 100644 index 00000000..7b544c08 --- /dev/null +++ b/tests/testadapter/YamlTagHelpers.cs @@ -0,0 +1,126 @@ +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using YamlDotNet.RepresentationModel; + +namespace TestAdapterTest +{ + public class YamlTagHelpers + { + public static FileInfo GetYamlDefaultTagsFullFileName(DirectoryInfo directory) + { + var found = directory.GetFiles(YamlTestAdapter.YamlDefaultTagsFileName); + return found.Length == 1 + ? found[0] + : directory.Parent != null + ? 
GetYamlDefaultTagsFullFileName(directory.Parent) + : null; + } + + public static Dictionary> GetDefaultTags(DirectoryInfo directory) + { + var defaultTags = new Dictionary>(); + + var defaultsFile = GetYamlDefaultTagsFullFileName(directory)?.FullName; + if (defaultsFile != null) + { + Logger.Log($"Loading default tags from {defaultsFile}"); + var parsed = YamlHelpers.ParseYamlStream(defaultsFile); + if (parsed.Documents.Count() > 0) + { + var tagsNode = parsed.Documents[0].RootNode; + if (tagsNode != null) + { + defaultTags = UpdateCopyTags(defaultTags, null, tagsNode); + } + } + } + + return defaultTags; + } + + public static Dictionary> UpdateCopyTags(Dictionary> tags, YamlMappingNode mapping) + { + var tagNode = mapping.Children.ContainsKey("tag") ? mapping.Children["tag"] : null; + var tagsNode = mapping.Children.ContainsKey("tags") ? mapping.Children["tags"] : null; + if (tagNode == null && tagsNode == null) return tags; + + return UpdateCopyTags(tags, tagNode, tagsNode); + } + + private static Dictionary> UpdateCopyTags(Dictionary> tags, YamlNode tagNode, YamlNode tagsNode) + { + // make a copy that we'll update and return + tags = new Dictionary>(tags); + + var value = (tagNode as YamlScalarNode)?.Value; + AddOptionalTag(tags, "tag", value); + + var values = (tagsNode as YamlScalarNode)?.Value; + AddOptionalCommaSeparatedTags(tags, values); + + AddOptionalNameValueTags(tags, tagsNode as YamlMappingNode); + AddOptionalTagsForEachChild(tags, tagsNode as YamlSequenceNode); + + return tags; + } + + private static void AddOptionalTag(Dictionary> tags, string name, string value) + { + if (!string.IsNullOrEmpty(name) && !string.IsNullOrEmpty(value)) + { + if (!tags.ContainsKey(name)) + { + tags.Add(name, new List()); + } + tags[name].Add(value); + } + } + + private static void AddOptionalCommaSeparatedTags(Dictionary> tags, string values) + { + if (values != null) + { + foreach (var tag in values.Split(",".ToArray(), StringSplitOptions.RemoveEmptyEntries)) + { + 
AddOptionalTag(tags, "tag", tag); + } + } + } + + private static void AddOptionalNameValueTags(Dictionary> tags, YamlMappingNode mapping) + { + var children = mapping?.Children; + if (children == null) return; + + foreach (var child in children) + { + var key = (child.Key as YamlScalarNode)?.Value; + var value = (child.Value as YamlScalarNode)?.Value; + AddOptionalTag(tags, key, value); + } + } + + private static void AddOptionalTagsForEachChild(Dictionary> tags, YamlSequenceNode sequence) + { + var children = sequence?.Children; + if (children == null) return; + + foreach (var child in children) + { + if (child is YamlScalarNode) + { + AddOptionalTag(tags, "tag", (child as YamlScalarNode).Value); + continue; + } + + if (child is YamlMappingNode) + { + AddOptionalNameValueTags(tags, child as YamlMappingNode); + continue; + } + } + } + } +} diff --git a/tests/testadapter/YamlTestAdapter.cs b/tests/testadapter/YamlTestAdapter.cs new file mode 100644 index 00000000..0981a82c --- /dev/null +++ b/tests/testadapter/YamlTestAdapter.cs @@ -0,0 +1,167 @@ +using Microsoft.VisualStudio.TestPlatform.ObjectModel; +using Microsoft.VisualStudio.TestPlatform.ObjectModel.Adapter; +using Microsoft.VisualStudio.TestPlatform.ObjectModel.Logging; +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.IO; +using System.Linq; +using System.Reflection; +using System.Text; +using System.Threading; +using System.Threading.Tasks; +using System.Threading.Tasks.Dataflow; + +namespace TestAdapterTest +{ + public class YamlTestAdapter + { + public static IEnumerable GetTestsFromFiles(IEnumerable sources) + { + Logger.Log($"YamlTestAdapter.GetTestsFromFiles(source.Count={sources.Count()})"); + + var tests = new List(); + foreach (var source in sources) + { + Logger.Log($"YamlTestAdapter.GetTestsFromFiles('{source}')"); + tests.AddRange(GetTestsFromFile(source)); + } + + Logger.Log($"YamlTestAdapter.GetTestsFromFiles() found count={tests.Count()}"); + return 
tests; + } + + public static IEnumerable GetTestsFromFile(string source) + { + Logger.Log($"YamlTestAdapter.GetTestsFromFile('{source}')"); + + var file = new FileInfo(source); + Logger.Log($"YamlTestAdapter.GetTestsFromFile('{source}'): Extension={file.Extension}"); + + return file.Extension.Trim('.') == FileExtensionYaml.Trim('.') + ? GetTestsFromYaml(source, file) + : GetTestsFromSource(source, file); + } + + public static void RunTests(IEnumerable tests, IRunContext runContext, IFrameworkHandle frameworkHandle) + { + var parallelWorkers = Environment.ProcessorCount; + Logger.Log($"YamlTestAdapter.RunTests(): {parallelWorkers} parallel Workers"); + // Must run before, middle, and after testSets in certain order so cannot parallelize those + // Can parallelize tests within each testSet + foreach (var testSet in FilterTestCases(tests, runContext, frameworkHandle)) + { + if (!testSet.Any()) continue; + var parallelTestSet = testSet.Where(test => YamlTestProperties.Get(test, "parallelize") == "true"); + var nonParallelTestSet = testSet.Where(test => YamlTestProperties.Get(test, "parallelize") != "true"); + + var workerBlock = new ActionBlock( + test => RunAndRecordTestCase(test, frameworkHandle), + new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = parallelWorkers }); + foreach (var test in parallelTestSet) + { + workerBlock.Post(test); + } + workerBlock.Complete(); + workerBlock.Completion.Wait(); + + foreach (var test in nonParallelTestSet) + { + RunAndRecordTestCase(test, frameworkHandle); + } + } + } + + #region private methods + + private static IEnumerable GetTestsFromSource(string source, FileInfo file) + { + var sourceOk = + source.Contains("Azure.AI.CLI.TestAdapter") || + Assembly.LoadFile(source).GetReferencedAssemblies().Count(x => x.Name.Contains("Azure.AI.CLI.TestAdapter")) > 0; + + // foreach (var a in Assembly.LoadFile(source).GetReferencedAssemblies()) + // { + // Logger.Log($"a.Name={a.Name}"); + // Logger.Log($"a.FullName={a.FullName}"); 
+ // } + + Logger.Log($"YamlTestAdapter.GetTestsFromSource('{source}'): sourceOk = {sourceOk}"); + + return !sourceOk + ? Enumerable.Empty() + : GetTestsFromDirectory(source, file.Directory); + } + + private static IEnumerable GetTestsFromDirectory(string source, DirectoryInfo directory) + { + Logger.Log($"YamlTestAdapter.GetTestsFromDirectory('{source}', '{directory.FullName}'): ENTER"); + + directory = YamlTagHelpers.GetYamlDefaultTagsFullFileName(directory)?.Directory ?? directory; + foreach (var file in FindFiles(directory)) + { + foreach (var test in GetTestsFromYaml(source, file)) + { + yield return test; + } + } + Logger.Log($"YamlTestAdapter.GetTestsFromDirectory('{source}', '{directory.FullName}'): EXIT"); + } + + private static IEnumerable FindFiles(DirectoryInfo directory) + { + return directory.GetFiles($"*{FileExtensionYaml}", SearchOption.AllDirectories) + .Where(file => file.Name != YamlDefaultTagsFileName); + } + + private static IEnumerable GetTestsFromYaml(string source, FileInfo file) + { + Logger.Log($"YamlTestAdapter.GetTestsFromYaml('{source}', '{file.FullName}'): ENTER"); + foreach (var test in YamlTestCaseParser.TestCasesFromYaml(source, file)) + { + yield return test; + } + Logger.Log($"YamlTestAdapter.GetTestsFromYaml('{source}', '{file.FullName}'): EXIT"); + } + + private static bool IsTrait(Trait trait, string check) + { + return trait.Name == check || trait.Value == check; + } + + private static IEnumerable> FilterTestCases(IEnumerable tests, IRunContext runContext, IFrameworkHandle frameworkHandle) + { + Logger.Log($"YamlTestAdapter.FilterTestCases()"); + + tests = YamlTestCaseFilter.FilterTestCases(tests, runContext, frameworkHandle); + + var before = tests.Where(test => test.Traits.Count(x => IsTrait(x, "before")) > 0); + var after = tests.Where(test => test.Traits.Count(x => IsTrait(x, "after")) > 0); + var middle = tests.Where(test => !before.Contains(test) && !after.Contains(test)); + + var testsList = new List> { before, middle, 
after }; + Logger.Log("YamlTestAdapter.FilterTestCases() ==> {string.Join('\n', tests.Select(x => x.Name))}"); + + return testsList; + } + + private static TestOutcome RunAndRecordTestCase(TestCase test, IFrameworkHandle frameworkHandle) + { + Logger.Log($"YamlTestAdapter.TestRunAndRecord({test.DisplayName})"); + return YamlTestCaseRunner.RunAndRecordTestCase(test, frameworkHandle); + } + + #endregion + + #region test adapter registration data + public const string FileExtensionDll = ".dll"; + public const string FileExtensionYaml = ".yaml"; + public const string Executor = "executor://ai/yaml/VsTestRunner1"; + #endregion + + #region other constants + public const string YamlDefaultTagsFileName = "Azure-AI-CLI-TestRunner-Default-Tags.yaml"; + public const string DefaultTimeout = "600000"; + #endregion + } +} diff --git a/tests/testadapter/YamlTestAdapter.csproj b/tests/testadapter/YamlTestAdapter.csproj new file mode 100644 index 00000000..b2671a63 --- /dev/null +++ b/tests/testadapter/YamlTestAdapter.csproj @@ -0,0 +1,9 @@ + + + + net7.0 + + + + + \ No newline at end of file diff --git a/tests/testadapter/YamlTestAdapterCommon.targets b/tests/testadapter/YamlTestAdapterCommon.targets new file mode 100644 index 00000000..c18b567d --- /dev/null +++ b/tests/testadapter/YamlTestAdapterCommon.targets @@ -0,0 +1,68 @@ + + + + + net7.0 + Library + Azure.AI.CLI.TestAdapter + false + + + True + 1.1.0 + + + + $(LocalBuildSDKBinPath) + bin + + + + + + x64 + $(LocalBinOutputPath)\Release\ + + + + + x64 + $(LocalBinOutputPath)\Debug\ + + + + + $(DefineConstants);UNIX + $(LocalBinOutputPath)\ReleaseUnixOS + + + + + $(DefineConstants);UNIX + $(LocalBinOutputPath)\DebugUnixOS + + + + + + 1.0.0 + $(TAAssemblyVersion) + + + + $(TAAssemblyVersion) + $(TAAssemblyVersion) + $(TAAssemblyInformationalVersion) + + + + + + + + + + + + + diff --git a/tests/testadapter/YamlTestCaseFilter.cs b/tests/testadapter/YamlTestCaseFilter.cs new file mode 100644 index 00000000..d14a35af --- /dev/null 
+++ b/tests/testadapter/YamlTestCaseFilter.cs @@ -0,0 +1,74 @@ +using Microsoft.VisualStudio.TestPlatform.ObjectModel; +using Microsoft.VisualStudio.TestPlatform.ObjectModel.Adapter; +using Microsoft.VisualStudio.TestPlatform.ObjectModel.Logging; +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.IO; +using System.Linq; +using System.Text; +using System.Threading; +using System.Threading.Tasks; + +namespace TestAdapterTest +{ + public class YamlTestCaseFilter + { + public static IEnumerable FilterTestCases(IEnumerable tests, IRunContext runContext, IFrameworkHandle frameworkHandle) + { + var names = GetSupportedFilterableNames(tests); + var filter = runContext.GetTestCaseFilter(names, null); + return tests.Where(test => filter == null || filter.MatchTestCase(test, name => GetPropertyValue(test, name))); + } + + private static HashSet GetSupportedFilterableNames(IEnumerable tests) + { + var filterable = new HashSet(supportedFilterProperties); + foreach (var test in tests) + { + foreach (var trait in test.Traits) + { + filterable.Add(trait.Name); + } + } + + if (filterable.Contains("tag")) filterable.Add("tags"); + + return filterable; + } + + private static object GetPropertyValue(TestCase test, string name) + { + switch (name.ToLower()) + { + case "name": + case "displayname": return test.DisplayName; + + case "fqn": + case "fullyqualifiedname": return test.FullyQualifiedName; + + case "cli": return YamlTestProperties.Get(test, "cli"); + case "command": return YamlTestProperties.Get(test, "command"); + case "script": return YamlTestProperties.Get(test, "script"); + + case "foreach": return YamlTestProperties.Get(test, "foreach"); + case "arguments": return YamlTestProperties.Get(test, "arguments"); + + case "expect": return YamlTestProperties.Get(test, "expect"); + case "not-expect": return YamlTestProperties.Get(test, "not-expect"); + + case "simulate": return YamlTestProperties.Get(test, "simulate"); + + case "timeout": 
return YamlTestProperties.Get(test, "timeout"); + case "working-directory": return YamlTestProperties.Get(test, "working-directory"); + } + + var tags = test.Traits.Where(x => x.Name == name || name == "tags"); + if (tags.Count() == 0) return null; + + return tags.Select(x => x.Value).ToArray(); + } + + private static readonly string[] supportedFilterProperties = { "DisplayName", "FullyQualifiedName", "Category", "cli", "command", "script", "foreach", "arguments", "expect", "not-expect", "simulate" }; + } +} diff --git a/tests/testadapter/YamlTestCaseParser.cs b/tests/testadapter/YamlTestCaseParser.cs new file mode 100644 index 00000000..12fea61d --- /dev/null +++ b/tests/testadapter/YamlTestCaseParser.cs @@ -0,0 +1,426 @@ +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.IO; +using System.Linq; +using System.Text; +using Microsoft.VisualStudio.TestPlatform.ObjectModel; +using YamlDotNet.Helpers; +using YamlDotNet.RepresentationModel; + +namespace TestAdapterTest +{ + public class YamlTestCaseParser + { + public static IEnumerable TestCasesFromYaml(string source, FileInfo file) + { + var area = GetRootArea(file); + var parsed = YamlHelpers.ParseYamlStream(file.FullName); + return TestCasesFromYamlStream(source, file, area, parsed); + } + + #region private methods + + private static IEnumerable TestCasesFromYamlStream(string source, FileInfo file, string area, YamlStream parsed) + { + var tests = new List(); + var defaultTags = YamlTagHelpers.GetDefaultTags(file.Directory); + var parallelize = "false"; + if (defaultTags.ContainsKey("parallelize")) + { + parallelize = defaultTags["parallelize"].Last(); + } + foreach (var document in parsed?.Documents) + { + var fromDocument = TestCasesFromYamlNode(source, file, document.RootNode, area, defaultClassName, defaultTags, parallelize); + tests.AddRange(fromDocument); + } + return tests; + } + + private static IEnumerable TestCasesFromYamlNode(string source, FileInfo file, YamlNode 
node, string area, string @class, Dictionary> tags, string parallelize) + { + return node is YamlMappingNode + ? TestCasesFromYamlMapping(source, file, node as YamlMappingNode, area, @class, tags, parallelize) + : TestCasesFromYamlSequence(source, file, node as YamlSequenceNode, area, @class, tags, parallelize); + } + + private static IEnumerable TestCasesFromYamlMapping(string source, FileInfo file, YamlMappingNode mapping, string area, string @class, Dictionary> tags, string parallelize) + { + var children = CheckForChildren(source, file, mapping, area, @class, tags, parallelize); + if (children != null) + { + return children; + } + + var test = GetTestFromNode(source, file, mapping, area, @class, tags, parallelize); + if (test != null) + { + return new[] { test }; + } + + return null; + } + + private static IEnumerable TestCasesFromYamlSequence(string source, FileInfo file, YamlSequenceNode sequence, string area, string @class, Dictionary> tags, string parallelize) + { + var tests = new List(); + if (sequence == null) return tests; + + foreach (YamlMappingNode mapping in sequence.Children) + { + var fromMapping = TestCasesFromYamlMapping(source, file, mapping, area, @class, tags, parallelize); + if (fromMapping != null) + { + tests.AddRange(fromMapping); + } + } + + return tests; + } + + private static TestCase GetTestFromNode(string source, FileInfo file, YamlMappingNode mapping, string area, string @class, Dictionary> tags, string parallelize) + { + string simulate = GetScalarString(mapping, "simulate"); + var simulating = !string.IsNullOrEmpty(simulate); + + string cli = GetScalarString(mapping, tags, "cli"); + + string currentParallelize = GetScalarString(mapping, "parallelize"); + parallelize = currentParallelize == null ? parallelize : currentParallelize; + + string command = GetScalarString(mapping, "command"); + string script = GetScalarString(mapping, "script"); + + string fullyQualifiedName = command == null && script == null + ? 
GetFullyQualifiedNameAndCommandFromShortForm(mapping, area, @class, ref command) + : GetFullyQualifiedName(mapping, area, @class); + fullyQualifiedName ??= GetFullyQualifiedName(area, @class, $"Expected YAML node ('name') at {file.FullName}({mapping.Start.Line})"); + + var neitherOrBoth = (command == null) == (script == null); + if (neitherOrBoth && !simulating) + { + var message = $"Error parsing YAML: expected/unexpected key ('name', 'command', 'script', 'arguments') at {file.FullName}({mapping.Start.Line})"; + Logger.LogError(message); + Logger.TraceError(message); + return null; + } + + Logger.Log($"YamlTestCaseParser.GetTests(): new TestCase('{fullyQualifiedName}')"); + var test = new TestCase(fullyQualifiedName, new Uri(YamlTestAdapter.Executor), source) + { + CodeFilePath = file.FullName, + LineNumber = mapping.Start.Line + }; + + SetTestCaseProperty(test, "cli", cli); + SetTestCaseProperty(test, "command", command); + SetTestCaseProperty(test, "script", script); + SetTestCaseProperty(test, "simulate", simulate); + SetTestCaseProperty(test, "parallelize", parallelize); + + var timeout = GetScalarString(mapping, tags, "timeout", YamlTestAdapter.DefaultTimeout); + SetTestCaseProperty(test, "timeout", timeout); + + var workingDirectory = GetScalarString(mapping, tags, "workingDirectory", file.Directory.FullName); + SetTestCaseProperty(test, "working-directory", workingDirectory); + + SetTestCasePropertyMap(test, "foreach", mapping, "foreach", workingDirectory); + SetTestCasePropertyMap(test, "arguments", mapping, "arguments", workingDirectory); + + SetTestCaseProperty(test, "expect", mapping, "expect"); + SetTestCaseProperty(test, "not-expect", mapping, "not-expect"); + + SetTestCaseTagsAsTraits(test, YamlTagHelpers.UpdateCopyTags(tags, mapping)); + + CheckInvalidTestCaseNodes(file, mapping, test); + return test; + } + + private static IEnumerable CheckForChildren(string source, FileInfo file, YamlMappingNode mapping, string area, string @class, Dictionary> 
tags, string parallelize) + { + var sequence = mapping.Children.ContainsKey("tests") + ? mapping.Children["tests"] as YamlSequenceNode + : null; + if (sequence == null) return null; + + @class = GetScalarString(mapping, "class", @class); + area = UpdateArea(mapping, area); + tags = YamlTagHelpers.UpdateCopyTags(tags, mapping); + parallelize = GetParallelizeTag(mapping, parallelize); + + return TestCasesFromYamlSequence(source, file, sequence, area, @class, tags, parallelize); + } + + private static void CheckInvalidTestCaseNodes(FileInfo file, YamlMappingNode mapping, TestCase test) + { + foreach (YamlScalarNode key in mapping.Children.Keys) + { + if (!IsValidTestCaseNode(key.Value) && !test.DisplayName.EndsWith(key.Value)) + { + var error = $"Error parsing YAML: Unexpected YAML key/value ('{key.Value}', '{test.DisplayName}') in {file.FullName}({mapping[key].Start.Line})"; + test.DisplayName = error; + Logger.LogError(error); + Logger.TraceError(error); + } + } + } + + private static bool IsValidTestCaseNode(string value) + { + return ";area;class;name;cli;command;script;timeout;foreach;arguments;expect;not-expect;simulate;tag;tags;parallelize;workingDirectory;".IndexOf($";{value};") >= 0; + } + + private static void SetTestCaseProperty(TestCase test, string propertyName, YamlMappingNode mapping, string mappingName) + { + string value = GetScalarString(mapping, mappingName); + SetTestCaseProperty(test, propertyName, value); + } + + private static void SetTestCaseProperty(TestCase test, string propertyName, string value) + { + if (value != null) + { + YamlTestProperties.Set(test, propertyName, value); + } + } + + private static void SetTestCasePropertyMap(TestCase test, string propertyName, YamlMappingNode testNode, string mappingName, string workingDirectory) + { + var ok = testNode.Children.ContainsKey(mappingName); + if (!ok) return; + + var argumentsNode = testNode.Children[mappingName]; + if (argumentsNode == null) return; + + if (argumentsNode is 
YamlScalarNode) + { + var value = (argumentsNode as YamlScalarNode).Value; + SetTestCaseProperty(test, propertyName, $"\"{value}\""); + } + else if (argumentsNode is YamlMappingNode) + { + var asMapping = argumentsNode as YamlMappingNode; + SetTestCasePropertyMap(test, propertyName, asMapping + .Select(x => NormalizeToScalarKeyValuePair(test, x, workingDirectory))); + } + else if (argumentsNode is YamlSequenceNode) + { + var asSequence = argumentsNode as YamlSequenceNode; + + SetTestCasePropertyMap(test, propertyName, asSequence + .Select(mapping => (mapping as YamlMappingNode)? + .Select(x => NormalizeToScalarKeyValuePair(test, x, workingDirectory)))); + } + } + + private static void SetTestCasePropertyMap(TestCase test, string propertyName, IEnumerable>> kvss) + { + // flatten the kvs + var kvs = kvss.SelectMany(x => x); + + // ensure all keys are unique, if not, transform appropriately + var keys = kvs.GroupBy(kv => (kv.Key as YamlScalarNode)?.Value).Select(g => g.Key).ToArray(); + if (keys.Length < kvs.Count()) + { + Logger.Log($"keys.Length={keys.Length}, kvs.Count={kvs.Count()}"); + Logger.Log($"keys='{string.Join(",", keys)}'"); + + var values = new List(); + foreach (var items in kvss) + { + var map = new YamlMappingNode(items); + values.Add(map.ConvertScalarMapToTsvString(keys)); + } + + var combinedKey = new YamlScalarNode(string.Join("\t", keys)); + var combinedValue = new YamlScalarNode(string.Join("\n", values)); + var combinedKv = new KeyValuePair(combinedKey, combinedValue); + kvs = new List>(new[] { combinedKv }); + } + + SetTestCasePropertyMap(test, propertyName, kvs); + } + + private static void SetTestCasePropertyMap(TestCase test, string propertyName, IEnumerable> kvs) + { + var newMap = new YamlMappingNode(kvs); + SetTestCaseProperty(test, propertyName, newMap.ToJsonString()); + } + + private static KeyValuePair NormalizeToScalarKeyValuePair(TestCase test, KeyValuePair item, string workingDirectory = null) + { + var key = item.Key; + var keyOk 
= key is YamlScalarNode; + var value = item.Value; + var valueOk = value is YamlScalarNode; + if (keyOk && valueOk) return item; + + string[] keys = null; + if (!keyOk) + { + var text = key.ConvertScalarSequenceToTsvString(); + if (text == null) + { + text = $"Invalid key at {test.CodeFilePath}({key.Start.Line},{key.Start.Column})"; + Logger.Log(text); + } + else if (text.Contains('\t')) + { + keys = text.Split('\t'); + } + key = new YamlScalarNode(text); + } + + if (!valueOk) + { + value = value.ConvertScalarSequenceToMultiLineTsvScalarNode(test, keys); + } + else + { + var scalarValue = value.ToJsonString().Trim('\"'); + if (TryGetFileContentFromScalar(scalarValue, workingDirectory, out string fileContent)) + { + value = fileContent; + if (!(value is YamlScalarNode)) + { + value = value.ConvertScalarSequenceToMultiLineTsvScalarNode(test, keys); + } + } + } + + Logger.Log($"YamlTestCaseParser.NormalizeToScalarKeyValuePair: key='{(key as YamlScalarNode).Value}', value='{(value as YamlScalarNode).Value}'"); + return new KeyValuePair(key, value); + } + + private static bool TryGetFileContentFromScalar(string scalar, string workingDirectory, out string fileContent) + { + // Treat this scalar value as file if it starts with '@' and does not have InvalidFileNameChars + if (scalar.StartsWith("@") && Path.GetFileName(scalar).IndexOfAny(Path.GetInvalidFileNameChars()) == -1) + { + var fileName = scalar.Substring(1); + + // check if the file already exists + var filePath = fileName; + if (!File.Exists(filePath)) + { + filePath = Path.Combine(workingDirectory, fileName); + } + + Logger.Log($"YamlTestCaseParser.TryGetFileContentFromScalar: Read file contents from {filePath}"); + if (File.Exists(filePath)) + { + fileContent = File.ReadAllText(filePath); + return true; + } + } + + fileContent = ""; + return false; + } + + private static string GetScalarString(YamlMappingNode mapping, Dictionary> tags, string mappingName, string defaultValue = null) + { + var value = 
GetScalarString(mapping, mappingName, null); + if (value != null) return value; + + if (tags.ContainsKey(mappingName)) + { + value = tags[mappingName].Last(); + } + + return value ?? defaultValue; + } + + private static string GetScalarString(YamlMappingNode mapping, string mappingName, string defaultValue = null) + { + var ok = mapping.Children.ContainsKey(mappingName); + if (!ok) return defaultValue; + + var node = mapping.Children[mappingName] as YamlScalarNode; + var value = node?.Value; + + return value ?? defaultValue; + } + + private static string GetYamlNodeAsString(YamlMappingNode mapping, string nodeName, string defaultValue = null) + { + var ok = mapping.Children.ContainsKey(nodeName); + if (!ok) return defaultValue; + + var node = mapping.Children[nodeName]; + var value = node?.ToYamlString(); + + return value ?? defaultValue; + } + + private static string GetRootArea(FileInfo file) + { + return $"{file.Extension.TrimStart('.')}.{file.Name.Remove(file.Name.LastIndexOf(file.Extension))}"; + } + + private static string UpdateArea(YamlMappingNode mapping, string area) + { + var subArea = GetScalarString(mapping, "area"); + return string.IsNullOrEmpty(subArea) + ? 
area + : $"{area}.{subArea}"; + } + + private static string GetFullyQualifiedName(YamlMappingNode mapping, string area, string @class) + { + var name = GetScalarString(mapping, "name"); + if (name == null) return null; + + area = UpdateArea(mapping, area); + @class = GetScalarString(mapping, "class", @class); + + return GetFullyQualifiedName(area, @class, name); + } + + private static string GetFullyQualifiedNameAndCommandFromShortForm(YamlMappingNode mapping, string area, string @class, ref string command) + { + // if there's only one invalid mapping node, we'll treat it's key as "name" and value as "command" + var invalid = mapping.Children.Keys.Where(key => !IsValidTestCaseNode((key as YamlScalarNode).Value)); + if (invalid.Count() == 1 && command == null) + { + var name = (invalid.FirstOrDefault() as YamlScalarNode).Value; + if (name == null) return null; + + command = GetScalarString(mapping, name); + area = UpdateArea(mapping, area); + @class = GetScalarString(mapping, "class", @class); + + return GetFullyQualifiedName(area, @class, name); + } + + return null; + } + + private static string GetFullyQualifiedName(string area, string @class, string name) + { + return $"{area}.{@class}.{name}"; + } + + private static string GetParallelizeTag(YamlMappingNode mapping, string currentParallelize) + { + var parallelizeNode = mapping.Children.ContainsKey("parallelize") ? mapping.Children["parallelize"] : null; + return parallelizeNode == null ? 
currentParallelize : (parallelizeNode as YamlScalarNode)?.Value; + } + + private static void SetTestCaseTagsAsTraits(TestCase test, Dictionary> tags) + { + foreach (var tag in tags) + { + foreach (var value in tag.Value) + { + test.Traits.Add(tag.Key, value); + } + } + } + + private const string defaultClassName = "TestCases"; + + #endregion + } +} diff --git a/tests/testadapter/YamlTestCaseRunner.cs b/tests/testadapter/YamlTestCaseRunner.cs new file mode 100644 index 00000000..324032af --- /dev/null +++ b/tests/testadapter/YamlTestCaseRunner.cs @@ -0,0 +1,804 @@ +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.IO; +using System.Linq; +using System.Linq.Expressions; +using System.Runtime.InteropServices; +using System.Text; +using System.Threading.Tasks; +using Microsoft.VisualStudio.TestPlatform.ObjectModel; +using Microsoft.VisualStudio.TestPlatform.ObjectModel.Adapter; +using Newtonsoft.Json; +using Newtonsoft.Json.Linq; +using YamlDotNet.RepresentationModel; + +namespace TestAdapterTest +{ + public class YamlTestCaseRunner + { + public static TestOutcome RunAndRecordTestCase(TestCase test, IFrameworkHandle frameworkHandle) + { + TestCaseStart(test, frameworkHandle); + TestCaseRun(test, frameworkHandle, out TestOutcome outcome); + TestCaseStop(test, frameworkHandle, outcome); + return outcome; + } + + #region private methods + + private static void TestCaseStart(TestCase test, IFrameworkHandle frameworkHandle) + { + Logger.Log($"YamlTestCaseRunner.TestCaseStart({test.DisplayName})"); + frameworkHandle.RecordStart(test); + } + + private static TestOutcome TestCaseRun(TestCase test, IFrameworkHandle frameworkHandle, out TestOutcome outcome) + { + Logger.Log($"YamlTestCaseRunner.TestCaseRun({test.DisplayName})"); + + // run the test case, getting all the results, prior to recording any of those results + // (not doing this in this order seems to, for some reason, cause "foreach" test cases to run 5 times!?) 
+ var results = TestCaseGetResults(test).ToList(); + foreach (var result in results) + { + frameworkHandle.RecordResult(result); + } + + var failed = results.Count(x => x.Outcome == TestOutcome.Failed) > 0; + var skipped = results.Count(x => x.Outcome == TestOutcome.Skipped) > 0; + var notFound = results.Count(x => x.Outcome == TestOutcome.NotFound) > 0 || results.Count() == 0; + + return outcome = + failed ? TestOutcome.Failed + : skipped ? TestOutcome.Skipped + : notFound ? TestOutcome.NotFound + : TestOutcome.Passed; + } + + private static IEnumerable TestCaseGetResults(TestCase test) + { + Logger.Log($"YamlTestCaseRunner.TestCaseGetResults: ENTER"); + + var cli = YamlTestProperties.Get(test, "cli") ?? ""; + var command = YamlTestProperties.Get(test, "command"); + var script = YamlTestProperties.Get(test, "script"); + var @foreach = YamlTestProperties.Get(test, "foreach"); + var arguments = YamlTestProperties.Get(test, "arguments"); + var expect = YamlTestProperties.Get(test, "expect"); + var notExpect = YamlTestProperties.Get(test, "not-expect"); + var workingDirectory = YamlTestProperties.Get(test, "working-directory"); + var timeout = int.Parse(YamlTestProperties.Get(test, "timeout")); + var simulate = YamlTestProperties.Get(test, "simulate"); + + var basePath = new FileInfo(test.CodeFilePath).DirectoryName; + workingDirectory = Path.Combine(basePath, workingDirectory ?? ""); + var tryCreateWorkingDirectory = !string.IsNullOrEmpty(workingDirectory) && !Directory.Exists(workingDirectory); + if (tryCreateWorkingDirectory) Directory.CreateDirectory(workingDirectory); + + var expanded = ExpandForEachGroups(@foreach); + Logger.Log($"YamlTestCaseRunner.TestCaseGetResults: expanded count = {expanded.Count()}"); + + foreach (var foreachItem in expanded) + { + var start = DateTime.Now; + + var outcome = string.IsNullOrEmpty(simulate) + ? 
RunTestCase(test, cli, command, script, foreachItem, arguments, expect, notExpect, workingDirectory, timeout, out string stdOut, out string stdErr, out string errorMessage, out string stackTrace, out string additional, out string debugTrace) + : SimulateTestCase(test, simulate, cli, command, script, foreachItem, arguments, expect, notExpect, workingDirectory, out stdOut, out stdErr, out errorMessage, out stackTrace, out additional, out debugTrace); + + #if DEBUG + additional += outcome == TestOutcome.Failed ? $"\nEXTRA: {ExtraDebugInfo()}" : ""; + #endif + + var stop = DateTime.Now; + var result = CreateTestResult(test, start, stop, stdOut, stdErr, errorMessage, stackTrace, additional, debugTrace, outcome); + if (!string.IsNullOrEmpty(foreachItem) && foreachItem != "{}") + { + result.DisplayName = GetTestResultDisplayName(test.DisplayName, foreachItem); + } + yield return result; + } + + Logger.Log($"YamlTestCaseRunner.TestCaseGetResults: EXIT"); + } + + private static string GetTestResultDisplayName(string testDisplayName, string foreachItem) + { + var testResultDisplayName = testDisplayName; + + if(JToken.Parse(foreachItem).Type == JTokenType.Object) + { + // get JObject properties + JObject foreachItemObject = JObject.Parse(foreachItem); + foreach(var property in foreachItemObject.Properties()) + { + var keys = property.Name.Split(new char[] { '\t' }); + var values = property.Value.Value().Split(new char[] { '\t' }); + + for (int i = 0; i < keys.Length; i++) + { + if (testResultDisplayName.Contains("{" + keys[i] + "}")) + { + testResultDisplayName = testResultDisplayName.Replace("{" +keys[i] + "}", values[i]); + } + } + } + } + + // if the testDisplayName was not templatized, ie, it had no {} + if (testResultDisplayName == testDisplayName) + { + return $"{testDisplayName}: {RedactSensitiveDataFromForeachItem(foreachItem)}"; + } + + return testResultDisplayName; + } + + // Finds "token" in foreach key and redacts its value + private static string 
RedactSensitiveDataFromForeachItem(string foreachItem) + { + var foreachObject = JObject.Parse(foreachItem); + + var sb = new StringBuilder(); + var sw = new StringWriter(sb); + + using (JsonWriter writer = new JsonTextWriter(sw){Formatting = Formatting.None}) + { + writer.WriteStartObject(); + foreach (var item in foreachObject) + { + if (string.IsNullOrWhiteSpace(item.Value.ToString())) + { + continue; + } + var keys = item.Key.ToLower().Split(new char[] {'\t'}); + + // find index of "token" in foreach key and redact its value to avoid getting it displayed + var tokenIndex = Array.IndexOf(keys, "token"); + var valueString = item.Value; + + if (tokenIndex >= 0) + { + var values = item.Value.ToString().Split(new char[] {'\t'}); + if (values.Count() == keys.Count()) + { + values[tokenIndex] = "***"; + valueString = string.Join("\t", values); + } + } + writer.WritePropertyName(item.Key); + writer.WriteValue(valueString); + } + + writer.WriteEndObject(); + } + + return sb.ToString(); + } + + private static IEnumerable ExpandForEachGroups(string @foreach) + { + var kvs = KeyValuePairsFromJson(@foreach, false) + .Select(kv => new KeyValuePair>( + kv.Key, + kv.Value.Split("\n".ToCharArray(), StringSplitOptions.RemoveEmptyEntries))); + + var dicts = new[] { new Dictionary() }.ToList(); + foreach (var item in kvs) + { + var lines = item.Value; + dicts = lines.SelectMany( + line => dicts.Select( + d => DupAndAdd(d, item.Key, line))) + .ToList(); + } + + return dicts.Select(d => JsonConvert.SerializeObject(d)); + } + + private static Dictionary DupAndAdd(Dictionary d, string key, string value) + { + var dup = new Dictionary(d); + dup[key] = value; + return dup; + } + + private static TestOutcome RunTestCase(TestCase test, string cli, string command, string script, string @foreach, string arguments, string expect, string notExpect, string workingDirectory, int timeout, out string stdOut, out string stdErr, out string errorMessage, out string stackTrace, out string additional, 
out string debugTrace) + { + var outcome = TestOutcome.None; + + additional = $"START TIME: {DateTime.Now}"; + debugTrace = ""; + stackTrace = script; + + Task stdOutTask = null; + Task stdErrTask = null; + List filesToDelete = null; + + try + { + var isWindows = RuntimeInformation.IsOSPlatform(OSPlatform.Windows); + script = WriteTextToTempFile(script, isWindows ? "cmd" : null); + + expect = WriteTextToTempFile(expect); + notExpect = WriteTextToTempFile(notExpect); + + var kvs = KeyValuePairsFromJson(arguments, true); + kvs.AddRange(KeyValuePairsFromJson(@foreach, false)); + kvs = ConvertValuesToAtArgs(kvs, ref filesToDelete); + + var startArgs = GetStartInfo(out string startProcess, cli, command, script, kvs, expect, notExpect, ref filesToDelete); + stackTrace = stackTrace ?? $"{startProcess} {startArgs}"; + + Logger.Log($"Process.Start('{startProcess} {startArgs}')"); + var startInfo = new ProcessStartInfo(startProcess, startArgs) + { + UseShellExecute = false, + RedirectStandardInput = true, + RedirectStandardError = true, + RedirectStandardOutput = true, + WorkingDirectory = workingDirectory + }; + UpdatePathEnvironment(startInfo); + + var process = Process.Start(startInfo); + stdOutTask = process.StandardOutput.ReadToEndAsync(); + stdErrTask = process.StandardError.ReadToEndAsync(); + + var exitedNotKilled = WaitForExit(process, timeout); + outcome = exitedNotKilled && process.ExitCode == 0 + ? TestOutcome.Passed + : TestOutcome.Failed; + + var exitCode = exitedNotKilled + ? process.ExitCode.ToString() + : $"(did not exit; timedout; killed)"; + var exitTime = exitedNotKilled + ? 
process.ExitTime.ToString() + : DateTime.UtcNow.ToString(); + + errorMessage = $"EXIT CODE: {exitCode}"; + additional = additional + + $" STOP TIME: {exitTime}" + + $" EXIT CODE: {exitCode}"; + } + catch (Exception ex) + { + outcome = TestOutcome.Failed; + errorMessage = ex.Message; + debugTrace = ex.ToString(); + stackTrace = $"{stackTrace}\n{ex.StackTrace}"; + } + finally + { + if (script != null) File.Delete(script); + if (expect != null) File.Delete(expect); + if (notExpect != null) File.Delete(notExpect); + filesToDelete?.ForEach(x => File.Delete(x)); + } + + stdOut = stdOutTask?.Result; + stdErr = stdErrTask?.Result; + + return outcome; + } + + private static List> ConvertValuesToAtArgs(List> kvs, ref List files) + { + var newList = new List>(); + foreach (var item in kvs) + { + if (item.Value.Count(x => x == '\t' || x == '\r' || x == '\n' || x == '\f' || x == '\"') > 0) + { + string file = WriteMultilineTsvToTempFile(item.Value, ref files); + newList.Add(new KeyValuePair(item.Key, $"@{file}")); + } + else + { + newList.Add(item); + } + } + + return newList; + } + + private static List> KeyValuePairsFromJson(string json, bool allowSimpleString) + { + var kvs = new List>(); + if (!string.IsNullOrEmpty(json)) + { + Logger.Log($"KeyValuePairsFromJson: 'json'='{json}'"); + var parsed = JToken.Parse(json); + if (parsed.Type == JTokenType.String && allowSimpleString) + { + // if it's a simple string, there is no "key" for the argument... pass it as value with an empty string as key + // this will ensure that an additional '--' isn't emitted preceding the string-only arguments + kvs.Add(new KeyValuePair("", parsed.Value())); + } + else if (parsed.Type != JTokenType.Object) + { + // if it's not a simple string, it must be an object... 
if it's not, we'll just log and continue + Logger.Log("KeyValuePairsFromJson: Invalid json (only supports `\"string\"`, or `{\"mapItem1\": \"value1\", \"...\": \"...\"}`!"); + } + else + { + foreach (var item in parsed as JObject) + { + kvs.Add(new KeyValuePair(item.Key, item.Value.Value())); + } + } + } + return kvs; + } + + private static string WriteMultilineTsvToTempFile(string text, ref List files) + { + files ??= new List(); + + var lines = text.Split('\r', '\n'); + var newLines = new List(); + foreach (var line in lines) + { + if (!line.Contains('\f')) + { + newLines.Add(line); + continue; + } + + var values = line.Split('\t'); + var newValues = new List(); + foreach (var value in values) + { + if (!value.Contains('\f')) + { + newValues.Add(value); + continue; + } + + var newValue = WriteTextToTempFile(value.Replace('\f', '\n')); + files.Add(newValue); + + newValues.Add($"@{newValue}"); + } + + newLines.Add(string.Join("\t", newValues)); + } + + var newText = string.Join("\n", newLines); + var file = WriteTextToTempFile(newText); + files.Add(file); + return file; + } + + private static string WriteTextToTempFile(string text, string extension = null) + { + if (!string.IsNullOrEmpty(text)) + { + var tempFile = Path.GetTempFileName(); + if (!string.IsNullOrEmpty(extension)) + { + tempFile = $"{tempFile}.{extension}"; + } + + File.WriteAllText(tempFile, text); + + var content = File.ReadAllText(tempFile).Replace("\n", "\\n"); + Logger.Log($"FILE: {tempFile}: '{content}'"); + + return tempFile; + } + return null; + } + + + private static string FindCacheCli(string cli) + { + if (_cliCache.ContainsKey(cli)) + { + return _cliCache[cli]; + } + + var found = FindCli(cli); + _cliCache[cli] = found; + + return found; + } + + private static string FindCli(string cli) + { + var specified = !string.IsNullOrEmpty(cli); + if (specified) + { + var found = FindCliOrNull(cli); + return found != null + ? 
CliFound(cli, found) // use what we found + : CliNotFound(cli); // use what was specified + } + else + { + var clis = new[] { "ai", "spx", "vz" }; + var found = PickCliOrNull(clis); + return found != null + ? PickCliFound(clis, found) // use what we found + : PickCliNotFound(clis, clis[0]); // use ai + } + } + + private static string FindCliOrNull(string cli) + { + var dll = $"{cli}.dll"; + var exe = RuntimeInformation.IsOSPlatform(OSPlatform.Windows) ? $"{cli}.exe" : cli; + + var path1 = Environment.GetEnvironmentVariable("PATH"); + var path2 = Directory.GetCurrentDirectory(); + var path3 = (new FileInfo(typeof(YamlTestCaseRunner).Assembly.Location)).DirectoryName; + var path = $"{path3}{Path.PathSeparator}{path2}{Path.PathSeparator}{path1}"; + + var paths = path.Split(Path.PathSeparator); + foreach (var part2 in new string[]{ "", "net6.0"}) + { + foreach (var part1 in paths) + { + var checkExe = Path.Combine(part1, part2, exe); + if (File.Exists(checkExe)) + { + // Logger.TraceInfo($"FindCliOrNull: Found CLI: {checkExe}"); + var checkDll = FindCliDllOrNull(checkExe, dll); + if (checkDll != null) + { + // Logger.TraceInfo($"FindCliOrNull: Found DLL: {checkDll}"); + return checkExe; + } + } + } + } + + return null; + } + + private static string FindCliDllOrNull(string cli, string dll) + { + var fi = new FileInfo(cli); + if (!fi.Exists) return null; + + var check = Path.Combine(fi.DirectoryName, dll); + if (File.Exists(check)) return check; + + var matches = fi.Directory.GetFiles(dll, SearchOption.AllDirectories); + if (matches.Length == 1) return matches.First().FullName; + + return null; + } + + private static string CliFound(string cli, string found) + { + Logger.Log($"CliFound: CLI specified ({cli}); found; using {found}"); + return found; + } + + private static string CliNotFound(string cli) + { + var message = $"CliNotFound: CLI specified ({cli}); tried searching PATH and working directory; not found; using {cli}"; + Logger.LogWarning(message); + // 
Logger.TraceWarning(message); + return cli; + } + + private static string PickCliOrNull(IEnumerable clis) + { + var cliOrNulls = new List(); + foreach (var cli in clis) + { + cliOrNulls.Add(FindCliOrNull(cli)); + } + + var clisFound = cliOrNulls.Where(cli => !string.IsNullOrEmpty(cli)); + return clisFound.Count() == 1 + ? clisFound.First() + : null; + } + + private static void PickCliUpdateYamlDefaultsFileWarning(IEnumerable clis) + { + var message = string.Join(" or ", clis.Select(cli => $"`cli: {cli}`")); + message = $"PickCli: CLI not specified; please create/update {YamlTestAdapter.YamlDefaultTagsFileName} with one of: {message}"; + Logger.LogWarning(message); + Logger.TraceWarning(message); + } + + private static string PickCliFound(IEnumerable clis, string cli) + { + PickCliUpdateYamlDefaultsFileWarning(clis); + + var message = $"PickCliFound: CLI not specified; found 1 CLI; using {cli}"; + Logger.LogInfo(message); + Logger.TraceInfo(message); + return cli; + } + + private static string PickCliNotFound(IEnumerable clis, string cli) + { + PickCliUpdateYamlDefaultsFileWarning(clis); + + var message = $"PickCliNotFound: CLI not specified; tried searching PATH and working directory; found 0 or >1 CLIs; using {cli}"; + Logger.LogInfo(message); + Logger.TraceInfo(message); + return cli; + } + + private static IEnumerable GetPossibleRunTimeLocations() + { + if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows)) + { + return new string[]{ "", "runtimes/win-x64/native/", "../runtimes/win-x64/native/" }; + } + else if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux)) + { + return new string[]{ "", "runtimes/linux-x64/native/", "../../runtimes/linux-x64/native/" }; + } + else if (RuntimeInformation.IsOSPlatform(OSPlatform.OSX)) + { + return new string[]{ "", "runtimes/osx-x64/native/", "../../runtimes/osx-x64/native/" }; + } + return new string[]{ "" }; + } + + static void UpdatePathEnvironment(ProcessStartInfo startInfo) + { + var cli = new 
FileInfo(startInfo.FileName); + if (cli.Exists) + { + var dll = FindCliDllOrNull(cli.FullName, cli.Name.Replace(".exe", "") + ".dll"); + if (dll != null) + { + var cliPath = cli.Directory.FullName; + var dllPath = new FileInfo(dll).Directory.FullName; + + var isWindows = RuntimeInformation.IsOSPlatform(OSPlatform.Windows); + var pathVar = isWindows ? "PATH" : "LD_LIBRARY_PATH"; + var path = Environment.GetEnvironmentVariable(pathVar) ?? ""; + + var locations = GetPossibleRunTimeLocations(); + path = AddToPath(path, cliPath, locations); + path = AddToPath(path, dllPath, locations); + + startInfo.Environment.Add(pathVar, path); + Logger.LogInfo($"UpdatePathEnvironment: {pathVar}={path}"); + } + } + } + + private static string AddToPath(string path, string value, IEnumerable locations) + { + foreach (var location in locations) + { + var check = Path.Combine(value, location); + if (Directory.Exists(check)) + { + path = AddToPath(path, check); + } + } + return path; + } + + private static string AddToPath(string path, string value) + { + var paths = path.Split(Path.PathSeparator); + return !paths.Contains(value) + ? $"{value}{Path.PathSeparator}{path}".Trim(Path.PathSeparator) + : path; + } + + private static bool WaitForExit(Process process, int timeout) + { + var completed = process.WaitForExit(timeout); + if (!completed) + { + var name = process.ProcessName; + var message = $"Timedout! Stopping process ({name})..."; + Logger.LogWarning(message); + Logger.TraceWarning(message); + + process.StandardInput.WriteLine("\x3"); // try ctrl-c first + process.StandardInput.Close(); + completed = process.WaitForExit(200); + + message = "Timedout! Sent " + (completed ? "; stopped" : "; trying Kill()"); + Logger.LogWarning(message); + Logger.TraceWarning(message); + + if (!completed) + { + process.Kill(); + var killed = process.HasExited ? "Done." : "Failed!"; + + message = $"Timedout! Killing process ({name})... 
{killed}"; + Logger.LogWarning(message); + Logger.TraceWarning(message); + } + } + + return completed; + } + + private static string GetStartInfo(out string startProcess, string cli, string command, string script, List> kvs, string expect, string notExpect, ref List files) + { + startProcess = FindCacheCli(cli); + + var isCommand = !string.IsNullOrEmpty(command) || string.IsNullOrEmpty(script); + if (isCommand) + { + command = $"{command} {GetKeyValueArgs(kvs)}"; + + var expectLess = string.IsNullOrEmpty(expect) && string.IsNullOrEmpty(notExpect); + if (expectLess) return command; + + command = WriteTextToTempFile(command); + files ??= new List(); + files.Add(command); + + return $"quiet run --command @{command} {GetAtArgs(expect, notExpect)}"; + } + + var isWindows = RuntimeInformation.IsOSPlatform(OSPlatform.Windows); + return isWindows + ? $"quiet run --cmd --script {script} {GetKeyValueArgs(kvs)} {GetAtArgs(expect, notExpect)}" + : $"quiet run --process /bin/bash --pre.script -l --script {script} {GetKeyValueArgs(kvs)} {GetAtArgs(expect, notExpect)}"; + } + + private static string GetAtArgs(string expect, string notExpect) + { + var atArgs = $""; + if (!string.IsNullOrEmpty(expect)) atArgs += $" --expect @{expect}"; + if (!string.IsNullOrEmpty(notExpect)) atArgs += $" --not expect @{notExpect}"; + return atArgs.TrimStart(' '); + } + + private static string GetKeyValueArgs(List> kvs) + { + var args = new StringBuilder(); + foreach (var item in kvs) + { + if (!string.IsNullOrEmpty(item.Key)) + { + if (item.Key.Contains('\t')) + { + var key = item.Key.Replace('\t', ';'); + args.Append($"--foreach {key} in "); + } + else + { + args.Append($"--{item.Key} "); + } + + if (!string.IsNullOrEmpty(item.Value)) + { + args.Append($"\"{item.Value}\" "); + } + } + else if (!string.IsNullOrEmpty(item.Value)) + { + args.Append(item.Value); + } + } + return args.ToString().TrimEnd(); + } + + private static TestOutcome SimulateTestCase(TestCase test, string simulate, string cli, 
string command, string script, string @foreach, string arguments, string expect, string notExpect, string workingDirectory, out string stdOut, out string stdErr, out string errorMessage, out string stackTrace, out string additional, out string debugTrace) + { + var sb = new StringBuilder(); + sb.AppendLine($"cli='{cli?.Replace("\n", "\\n")}'"); + sb.AppendLine($"command='{command?.Replace("\n", "\\n")}'"); + sb.AppendLine($"script='{script?.Replace("\n", "\\n")}'"); + sb.AppendLine($"foreach='{@foreach?.Replace("\n", "\\n")}'"); + sb.AppendLine($"arguments='{arguments?.Replace("\n", "\\n")}'"); + sb.AppendLine($"expect='{expect?.Replace("\n", "\\n")}'"); + sb.AppendLine($"not-expect='{notExpect?.Replace("\n", "\\n")}'"); + sb.AppendLine($"working-directory='{workingDirectory}'"); + + stdOut = sb.ToString(); + stdErr = "STDERR"; + additional = "ADDITIONAL-INFO"; + debugTrace = "DEBUG-TRACE"; + errorMessage = "ERRORMESSAGE"; + stackTrace = "STACKTRACE"; + + var outcome = OutcomeFromString(simulate); + if (outcome == TestOutcome.Passed) + { + stdErr = null; + debugTrace = null; + errorMessage = null; + } + + return outcome; + } + + private static TestOutcome OutcomeFromString(string simulate) + { + TestOutcome outcome = TestOutcome.None; + switch (simulate?.ToLower()) + { + case "failed": + outcome = TestOutcome.Failed; + break; + + case "skipped": + outcome = TestOutcome.Skipped; + break; + + case "passed": + outcome = TestOutcome.Passed; + break; + } + + return outcome; + } + + private static void TestCaseStop(TestCase test, IFrameworkHandle frameworkHandle, TestOutcome outcome) + { + Logger.Log($"YamlTestCaseRunner.TestCaseStop({test.DisplayName})"); + frameworkHandle.RecordEnd(test, outcome); + } + + private static TestResult CreateTestResult(TestCase test, DateTime start, DateTime stop, string stdOut, string stdErr, string errorMessage, string stackTrace, string additional, string debugTrace, TestOutcome outcome) + { + 
Logger.Log($"YamlTestCaseRunner.TestRecordResult({test.DisplayName})"); + + var result = new TestResult(test) { Outcome = outcome }; + result.Messages.Add(new TestResultMessage(TestResultMessage.StandardOutCategory, stdOut)); + result.Messages.Add(new TestResultMessage(TestResultMessage.StandardErrorCategory, stdErr)); + result.Messages.Add(new TestResultMessage(TestResultMessage.AdditionalInfoCategory, additional)); + result.Messages.Add(new TestResultMessage(TestResultMessage.DebugTraceCategory, debugTrace)); + result.ErrorMessage = errorMessage; + result.ErrorStackTrace = stackTrace; + result.StartTime = start; + result.EndTime = stop; + result.Duration = stop - start; + + Logger.Log("----------------------------\n\n"); + Logger.Log($" STDOUT: {stdOut}"); + Logger.Log($" STDERR: {stdErr}"); + Logger.Log($" STACK: {stackTrace}"); + Logger.Log($" ERROR: {errorMessage}"); + Logger.Log($" OUTCOME: {outcome}"); + Logger.Log($"ADDITIONAL: {additional}"); + Logger.Log($"DEBUGTRACE: {debugTrace}"); + Logger.Log("----------------------------\n\n"); + + return result; + } + + private static string ExtraDebugInfo() + { + var sb = new StringBuilder(); + + var cwd = new DirectoryInfo(Directory.GetCurrentDirectory()); + sb.AppendLine($"CURRENT DIRECTORY: {cwd.FullName}"); + + var files = cwd.GetFiles("*", SearchOption.AllDirectories); + foreach (var file in files) + { + sb.AppendLine($"{file.Length,10} {file.CreationTime.Date:MM/dd/yyyy} {file.CreationTime:hh:mm:ss tt} {file.FullName}"); + } + + var variables = Environment.GetEnvironmentVariables(); + var keys = new List(variables.Count); + foreach (var key in variables.Keys) keys.Add(key as string); + + keys.Sort(); + foreach (var key in keys) + { + var value = variables[key] as string; + sb.AppendLine($"{key,-20} {value}"); + } + + return sb.ToString(); + } + + #endregion + + private static Dictionary _cliCache = new Dictionary(); + } +} diff --git a/tests/testadapter/YamlTestProperties.cs 
b/tests/testadapter/YamlTestProperties.cs new file mode 100644 index 00000000..309b97b0 --- /dev/null +++ b/tests/testadapter/YamlTestProperties.cs @@ -0,0 +1,53 @@ +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.IO; +using System.Linq; +using Microsoft.VisualStudio.TestPlatform.ObjectModel; +using Microsoft.VisualStudio.TestPlatform.ObjectModel.Adapter; +using YamlDotNet.RepresentationModel; + +namespace TestAdapterTest +{ + public class YamlTestProperties + { + public static void Set(TestCase test, string name, string value) + { + Logger.Log($"YamlTestProperties.Set('{name}'='{value.Replace("\n", "\\n")}')"); + if (!string.IsNullOrEmpty(value)) + { + var property = properties[name]; + test.SetPropertyValue(property, value); + } + } + + public static string Get(TestCase test, string name, string defaultValue = null) + { + var value = test.GetPropertyValue(properties[name], defaultValue); + Logger.LogIf(!string.IsNullOrEmpty(value), $"TestCaseProperties.Get('{name}') = '{value?.Replace("\n", "\\n")}'"); + return value; + } + + #region private methods and data + private static TestProperty RegisterTestCaseProperty(string name) + { + return TestProperty.Register($"YamlTestCase.{name}", name, typeof(string), TestPropertyAttributes.Hidden, typeof(TestCase)); + } + + private static readonly Dictionary properties = new Dictionary() { + { "cli", RegisterTestCaseProperty("CLI") }, + { "command", RegisterTestCaseProperty("Command") }, + { "script", RegisterTestCaseProperty("Script") }, + { "parallelize", RegisterTestCaseProperty("Parallelize") }, + { "foreach", RegisterTestCaseProperty("ForEach") }, + { "arguments", RegisterTestCaseProperty("Arguments") }, + { "expect", RegisterTestCaseProperty("Expect") }, + { "not-expect", RegisterTestCaseProperty("NotExpect") }, + { "simulate", RegisterTestCaseProperty("Simulate") }, + { "timeout", RegisterTestCaseProperty("Timeout") }, + { "working-directory", 
RegisterTestCaseProperty("WorkingDirectory") } + }; + + #endregion + } +} diff --git a/tests/testadapter/YamlTestRunnerTriggerAttribute.cs b/tests/testadapter/YamlTestRunnerTriggerAttribute.cs new file mode 100644 index 00000000..5e8bf874 --- /dev/null +++ b/tests/testadapter/YamlTestRunnerTriggerAttribute.cs @@ -0,0 +1,21 @@ +using Microsoft.VisualStudio.TestPlatform.ObjectModel; +using Microsoft.VisualStudio.TestPlatform.ObjectModel.Adapter; +using Microsoft.VisualStudio.TestPlatform.ObjectModel.Logging; +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.IO; +using System.Linq; +using System.Text; +using System.Threading; +using System.Threading.Tasks; + +namespace TestAdapterTest +{ + public class YamlTestRunnerTriggerAttribute : Attribute + { + public YamlTestRunnerTriggerAttribute() + { + } + } +} From 70522c05f030264f00dc30d5fb60c15ff7a9bc6b Mon Sep 17 00:00:00 2001 From: Christopher Schraer <32145632+chschrae@users.noreply.github.com> Date: Wed, 17 Jan 2024 13:24:33 -0800 Subject: [PATCH 10/30] added javascript chat streaming with data template and fixed minor things in others (#153) Co-authored-by: Chris Schraer --- src/ai/.x/templates/openai-chat-js/Main.js | 1 + .../openai-chat-streaming-js/Main.js | 1 + .../OpenAIChatCompletionsStreamingClass.js | 4 +- .../Main.js | 56 ++++++++++++++++ ...AIChatCompletionsStreamingWithDataClass.js | 66 +++++++++++++++++++ .../openai-chat-streaming-with-data-js/_.json | 16 +++++ .../package.json | 15 +++++ .../openai-chat-streaming-with-data-py/_.json | 4 +- .../main.py | 2 +- 9 files changed, 161 insertions(+), 4 deletions(-) create mode 100644 src/ai/.x/templates/openai-chat-streaming-with-data-js/Main.js create mode 100644 src/ai/.x/templates/openai-chat-streaming-with-data-js/OpenAIChatCompletionsStreamingWithDataClass.js create mode 100644 src/ai/.x/templates/openai-chat-streaming-with-data-js/_.json create mode 100644 
src/ai/.x/templates/openai-chat-streaming-with-data-js/package.json diff --git a/src/ai/.x/templates/openai-chat-js/Main.js b/src/ai/.x/templates/openai-chat-js/Main.js index 305234e7..afbf924e 100644 --- a/src/ai/.x/templates/openai-chat-js/Main.js +++ b/src/ai/.x/templates/openai-chat-js/Main.js @@ -32,6 +32,7 @@ async function main() { } console.log('Bye!'); + process.exit(); } main().catch((err) => { diff --git a/src/ai/.x/templates/openai-chat-streaming-js/Main.js b/src/ai/.x/templates/openai-chat-streaming-js/Main.js index cbe7bac3..6bad2289 100644 --- a/src/ai/.x/templates/openai-chat-streaming-js/Main.js +++ b/src/ai/.x/templates/openai-chat-streaming-js/Main.js @@ -35,6 +35,7 @@ async function main() { } console.log('Bye!'); + process.exit(); } main().catch((err) => { diff --git a/src/ai/.x/templates/openai-chat-streaming-js/OpenAIChatCompletionsStreamingClass.js b/src/ai/.x/templates/openai-chat-streaming-js/OpenAIChatCompletionsStreamingClass.js index 8b204001..eeed181c 100644 --- a/src/ai/.x/templates/openai-chat-streaming-js/OpenAIChatCompletionsStreamingClass.js +++ b/src/ai/.x/templates/openai-chat-streaming-js/OpenAIChatCompletionsStreamingClass.js @@ -32,7 +32,9 @@ class <#= ClassName #> { } if (content != null) { - callback(content); + if(callback != null) { + callback(content); + } await new Promise(r => setTimeout(r, 50)); // delay to simulate real-time output, word by word contentComplete += content; } diff --git a/src/ai/.x/templates/openai-chat-streaming-with-data-js/Main.js b/src/ai/.x/templates/openai-chat-streaming-with-data-js/Main.js new file mode 100644 index 00000000..fa1ee010 --- /dev/null +++ b/src/ai/.x/templates/openai-chat-streaming-with-data-js/Main.js @@ -0,0 +1,56 @@ +<#@ template hostspecific="true" #> +<#@ output extension=".js" encoding="utf-8" #> +<#@ parameter type="System.String" name="ClassName" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_ENDPOINT" #> +<#@ parameter type="System.String" 
name="AZURE_OPENAI_KEY" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_API_VERSION" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_CHAT_DEPLOYMENT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_SYSTEM_PROMPT" #> +<#@ parameter type="System.String" name="AZURE_AI_SEARCH_ENDPOINT" #> +<#@ parameter type="System.String" name="AZURE_AI_SEARCH_KEY" #> +<#@ parameter type="System.String" name="AZURE_AI_SEARCH_INDEX_NAME" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_EMBEDDING_DEPLOYMENT" #> +const { <#= ClassName #> } = require("./OpenAIChatCompletionsStreamingWithDataClass"); + +const readline = require('readline'); +const rl = readline.createInterface({ + input: process.stdin, + output: process.stdout +}); + +async function main() { + + const openAIEndpoint = process.env["AZURE_OPENAI_ENDPOINT"] || "<#= AZURE_OPENAI_ENDPOINT #>"; + const openAIKey = process.env["AZURE_OPENAI_KEY"] || "<#= AZURE_OPENAI_KEY #>"; + const openAIChatDeploymentName = process.env["AZURE_OPENAI_CHAT_DEPLOYMENT"] || "<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>" ; + const openAISystemPrompt = process.env["AZURE_OPENAI_SYSTEM_PROMPT"] || "<#= AZURE_OPENAI_SYSTEM_PROMPT #>" ; + const openAIAPIVersion = process.env["AZURE_OPENAI_API_VERSION"] || "<#= AZURE_OPENAI_API_VERSION #>" ; + const searchEndpoint = process.env["AZURE_AI_SEARCH_ENDPOINT"] || "<#= AZURE_AI_SEARCH_ENDPOINT #>" ; + const searchAPIKey = process.env["AZURE_AI_SEARCH_KEY"] || "<#= AZURE_AI_SEARCH_KEY #>" ; + const searchIndexName = process.env["AZURE_AI_SEARCH_INDEX_NAME"] || "<#= AZURE_AI_SEARCH_INDEX_NAME #>" ; + const openAIEmbeddingsDeploymentName = process.env["AZURE_OPENAI_EMBEDDING_DEPLOYMENT"] || "<#= AZURE_OPENAI_EMBEDDING_DEPLOYMENT #>" ; + const openAIEmbeddingsEndpoint = `${openAIEndpoint.replace(/\/+$/, '')}/openai/deployments/${openAIEmbeddingsDeploymentName}/embeddings?api-version=${openAIAPIVersion}`; + + const chat = new <#= ClassName #>(openAIEndpoint, openAIKey, 
openAIChatDeploymentName, openAISystemPrompt, searchEndpoint, searchAPIKey, searchIndexName, openAIEmbeddingsEndpoint); + + while (true) { + + const input = await new Promise(resolve => rl.question('User: ', resolve)); + if (input === 'exit' || input === '') break; + + let response = await chat.getChatCompletions(input, (content) => { + console.log(`assistant-streaming: ${content}`); + }); + + console.log(`\nAssistant: ${response}\n`); + } + + console.log('Bye!'); + process.exit(); +} + +main().catch((err) => { + console.error("The sample encountered an error:", err); +}); + +module.exports = { main }; diff --git a/src/ai/.x/templates/openai-chat-streaming-with-data-js/OpenAIChatCompletionsStreamingWithDataClass.js b/src/ai/.x/templates/openai-chat-streaming-with-data-js/OpenAIChatCompletionsStreamingWithDataClass.js new file mode 100644 index 00000000..0d4446a6 --- /dev/null +++ b/src/ai/.x/templates/openai-chat-streaming-with-data-js/OpenAIChatCompletionsStreamingWithDataClass.js @@ -0,0 +1,66 @@ +<#@ template hostspecific="true" #> +<#@ output extension=".js" encoding="utf-8" #> +<#@ parameter type="System.String" name="ClassName" #> +const { OpenAIClient, AzureKeyCredential } = require("@azure/openai"); + +class <#= ClassName #> { + constructor(openAIEndpoint, openAIKey, openAIChatDeploymentName, openAISystemPrompt, searchEndpoint, searchAPIKey, searchIndexName, openAIEmbeddingsEndpoint) { + this.openAISystemPrompt = openAISystemPrompt; + this.openAIChatDeploymentName = openAIChatDeploymentName; + this.client = new OpenAIClient(openAIEndpoint, new AzureKeyCredential(openAIKey)); + + this.azureExtensionOptions = { + azureExtensionOptions: { + extensions: [ + { + type: "AzureCognitiveSearch", + endpoint: searchEndpoint, + key: searchAPIKey, + indexName: searchIndexName, + embeddingEndpoint: openAIEmbeddingsEndpoint, + embeddingKey: openAIKey, + queryType: "vectorSimpleHybrid" + }, + ], + } + } + + this.clearConversation(); + } + + clearConversation() { + 
this.messages = [ + { role: 'system', content: this.openAISystemPrompt } + ]; + } + + async getChatCompletions(userInput, callback) { + this.messages.push({ role: 'user', content: userInput }); + + let contentComplete = ''; + const events = await this.client.streamChatCompletions(this.openAIChatDeploymentName, this.messages, this.azureExtensionOptions); + + for await (const event of events) { + for (const choice of event.choices) { + + let content = choice.delta?.content; + if (choice.finishReason === 'length') { + content = `${content}\nERROR: Exceeded token limit!`; + } + + if (content != null) { + if(callback != null) { + callback(content); + } + await new Promise(r => setTimeout(r, 50)); // delay to simulate real-time output, word by word + contentComplete += content; + } + } + } + + this.messages.push({ role: 'assistant', content: contentComplete }); + return contentComplete; + } +} + +exports.<#= ClassName #> = <#= ClassName #>; \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-streaming-with-data-js/_.json b/src/ai/.x/templates/openai-chat-streaming-with-data-js/_.json new file mode 100644 index 00000000..a44ddf53 --- /dev/null +++ b/src/ai/.x/templates/openai-chat-streaming-with-data-js/_.json @@ -0,0 +1,16 @@ +{ + "_LongName": "OpenAI Chat Completions (w/ Data + AI Search)", + "_ShortName": "openai-chat-streaming-with-data", + "_Language": "JavaScript", + "ClassName": "OpenAIChatCompletionsStreamingWithDataClass", + "AZURE_OPENAI_ENDPOINT": "", + "AZURE_OPENAI_KEY": "", + "AZURE_OPENAI_API_VERSION": "", + "AZURE_OPENAI_CHAT_DEPLOYMENT": "", + "AZURE_OPENAI_EMBEDDING_DEPLOYMENT": "", + "AZURE_OPENAI_SYSTEM_PROMPT": "You are a helpful AI assistant.", + "AZURE_AI_SEARCH_ENDPOINT": "", + "AZURE_AI_SEARCH_KEY": "", + "AZURE_AI_SEARCH_INDEX_NAME": "", + "OPENAI_API_VERSION": "" +} \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-streaming-with-data-js/package.json 
b/src/ai/.x/templates/openai-chat-streaming-with-data-js/package.json new file mode 100644 index 00000000..17195509 --- /dev/null +++ b/src/ai/.x/templates/openai-chat-streaming-with-data-js/package.json @@ -0,0 +1,15 @@ +{ + "name": "openai-chat-streaming", + "version": "1.0.0", + "description": "", + "main": "Main.js", + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "author": "", + "license": "MIT", + "dependencies": { + "@azure/openai": "1.0.0-beta.10" + } + } + \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-streaming-with-data-py/_.json b/src/ai/.x/templates/openai-chat-streaming-with-data-py/_.json index e4b7b894..a4cfdfd5 100644 --- a/src/ai/.x/templates/openai-chat-streaming-with-data-py/_.json +++ b/src/ai/.x/templates/openai-chat-streaming-with-data-py/_.json @@ -2,10 +2,10 @@ "_LongName": "OpenAI Chat Completions (w/ Data + AI Search)", "_ShortName": "openai-chat-streaming-with-data", "_Language": "Python", - "ClassName": "OpenAIChatCompletionsStreamingWithDataAISearch", + "ClassName": "OpenAIChatCompletionsStreamingWithData", "AZURE_OPENAI_ENDPOINT": "", "AZURE_OPENAI_KEY": "", - "AZURE_OPENAI_API_VERSION": "2023-12-01-preview", + "AZURE_OPENAI_API_VERSION": "", "AZURE_OPENAI_CHAT_DEPLOYMENT": "", "AZURE_OPENAI_EMBEDDING_DEPLOYMENT": "", "AZURE_OPENAI_SYSTEM_PROMPT": "You are a helpful AI assistant.", diff --git a/src/ai/.x/templates/openai-chat-streaming-with-data-py/main.py b/src/ai/.x/templates/openai-chat-streaming-with-data-py/main.py index 632f4a3b..ad2ba210 100644 --- a/src/ai/.x/templates/openai-chat-streaming-with-data-py/main.py +++ b/src/ai/.x/templates/openai-chat-streaming-with-data-py/main.py @@ -23,7 +23,7 @@ def main(): search_api_key = os.getenv('AZURE_AI_SEARCH_KEY', '<#= AZURE_AI_SEARCH_KEY #>') search_index_name = os.getenv('AZURE_AI_SEARCH_INDEX_NAME', '<#= AZURE_AI_SEARCH_INDEX_NAME #>') openai_embeddings_deployment_name = os.getenv('AZURE_OPENAI_EMBEDDING_DEPLOYMENT', '<#= 
AZURE_OPENAI_EMBEDDING_DEPLOYMENT #>') - openai_embeddings_endpoint = f"{openai_endpoint.rstrip('/')}/openai/deployments/{openai_embeddings_deployment_name}/embeddings?api-version={openai_api_version}"; + openai_embeddings_endpoint = f"{openai_endpoint.rstrip('/')}/openai/deployments/{openai_embeddings_deployment_name}/embeddings?api-version={openai_api_version}" chat = <#= ClassName #>(openai_api_version, openai_endpoint, openai_key, openai_chat_deployment_name, openai_system_prompt, search_endpoint, search_api_key, search_index_name, openai_embeddings_endpoint) From 06fc8aec8430c40ea0b992c0338acddc00e1fb4b Mon Sep 17 00:00:00 2001 From: Rob Chambers Date: Thu, 18 Jan 2024 16:27:31 -0800 Subject: [PATCH 11/30] added first draft of tests for `ai dev new` template instantiation (#154) * added first draft of tests for `ai dev new` template instantiation * added test for with-data in js * add testing of c# templates run with inputs piped from stdin * update failed test due to new javascript template for openai-chat-streaming-with-data * updated with CR feedback * add quotes to java openai-chat run template test * update test to have quotes to fix linux run issues * update build script to not continue on error (was in place as I developed the changes to build.yaml) * try to fix build issues * remove a few continueOnErrors we don't need --- .azure/pipelines/build.yaml | 4 - src/ai/commands/dev_command.cs | 43 +++- src/common/details/helpers/process_helpers.cs | 7 +- .../Azure-AI-CLI-TestRunner-Default-Tags.yaml | 2 +- tests/test.yaml | 2 +- tests/test3.yaml | 218 +++++++++++++++++- tests/testadapter/YamlTestCaseFilter.cs | 3 +- tests/testadapter/YamlTestCaseParser.cs | 3 +- tests/testadapter/YamlTestCaseRunner.cs | 12 +- tests/testadapter/YamlTestProperties.cs | 1 + 10 files changed, 267 insertions(+), 28 deletions(-) diff --git a/.azure/pipelines/build.yaml b/.azure/pipelines/build.yaml index 23a2481b..e433eba3 100644 --- a/.azure/pipelines/build.yaml +++ 
b/.azure/pipelines/build.yaml @@ -217,7 +217,6 @@ stages: targetPath: '$(Build.ArtifactStagingDirectory)/ai-cli-artifacts' - task: CopyFiles@2 displayName: Copy downloaded ai-cli-artifacts - continueOnError: true inputs: Contents: '**/*' SourceFolder: $(Build.ArtifactStagingDirectory)/ai-cli-artifacts @@ -225,7 +224,6 @@ stages: FlattenFolders: true - task: Bash@3 displayName: List files... - continueOnError: true inputs: targetType: 'inline' script: | @@ -239,7 +237,6 @@ stages: # ----------------------------------------------------------------------------- - task: DotNetCoreCLI@2 displayName: INSTALL AI - Installing ai CLI via `dotnet tool install` - continueOnError: true inputs: includeNuGetOrg: false command: custom @@ -274,7 +271,6 @@ stages: # ----------------------------------------------------------------------------- - task: DotNetCoreCLI@2 displayName: Build YamlTestAdapter - continueOnError: true inputs: includeNuGetOrg: false command: build diff --git a/src/ai/commands/dev_command.cs b/src/ai/commands/dev_command.cs index 5292d97b..e0b5dda5 100644 --- a/src/ai/commands/dev_command.cs +++ b/src/ai/commands/dev_command.cs @@ -113,11 +113,16 @@ private void DoDevShell() Console.WriteLine(); var runCommand = RunCommandToken.Data().GetOrDefault(_values); - UpdateFileNameArguments(runCommand, ref fileName, ref arguments); + UpdateFileNameArguments(runCommand, ref fileName, ref arguments, out var deleteWhenDone); var process = ProcessHelpers.StartProcess(fileName, arguments, env, false); process.WaitForExit(); + if (!string.IsNullOrEmpty(deleteWhenDone)) + { + File.Delete(deleteWhenDone); + } + if (process.ExitCode != 0) { Console.WriteLine("\n(ai dev shell) FAILED!\n"); @@ -129,22 +134,38 @@ private void DoDevShell() } } - private static void UpdateFileNameArguments(string runCommand, ref string fileName, ref string arguments) + private static void UpdateFileNameArguments(string runCommand, ref string fileName, ref string arguments, out string? 
deleteTempFileWhenDone) { + deleteTempFileWhenDone = null; + if (!string.IsNullOrEmpty(runCommand)) { - var parts = runCommand.Split(new char[] { ' ' }, 2); - var inPath = FileHelpers.FileExistsInOsPath(parts[0]) || (OS.IsWindows() && FileHelpers.FileExistsInOsPath(parts[0] + ".exe")); + var isSingleLine = !runCommand.Contains('\n') && !runCommand.Contains('\r'); + if (isSingleLine) + { + var parts = runCommand.Split(new char[] { ' ' }, 2); + var inPath = FileHelpers.FileExistsInOsPath(parts[0]) || (OS.IsWindows() && FileHelpers.FileExistsInOsPath(parts[0] + ".exe")); - var filePart = parts[0]; - var argsPart = parts.Length == 2 ? parts[1] : null; + var filePart = parts[0]; + var argsPart = parts.Length == 2 ? parts[1] : null; - fileName = inPath ? filePart : fileName; - arguments = inPath ? argsPart : (OS.IsLinux() - ? $"-lic \"{runCommand}\"" - : $"/c \"{runCommand}\""); + fileName = inPath ? filePart : fileName; + arguments = inPath ? argsPart : (OS.IsLinux() + ? $"-lic \"{runCommand}\"" + : $"/c \"{runCommand}\""); - Console.WriteLine($"Running command: {runCommand}\n"); + Console.WriteLine($"Running command: {runCommand}\n"); + } + else + { + deleteTempFileWhenDone = Path.GetTempFileName() + (OS.IsWindows() ? ".cmd" : ".sh"); + File.WriteAllText(deleteTempFileWhenDone, runCommand); + + fileName = OS.IsLinux() ? "bash" : "cmd.exe"; + arguments = OS.IsLinux() ? 
$"-lic \"{deleteTempFileWhenDone}\"" : $"/c \"{deleteTempFileWhenDone}\""; + + Console.WriteLine($"Running script:\n\n{runCommand}\n"); + } } } diff --git a/src/common/details/helpers/process_helpers.cs b/src/common/details/helpers/process_helpers.cs index 5448718d..d2781d45 100644 --- a/src/common/details/helpers/process_helpers.cs +++ b/src/common/details/helpers/process_helpers.cs @@ -47,12 +47,13 @@ public static Process StartBrowser(string url) : null; } - public static Process StartProcess(string fileName, string arguments, Dictionary addToEnvironment = null, bool redirect = true) + public static Process StartProcess(string fileName, string arguments, Dictionary addToEnvironment = null, bool redirectOutput = true, bool redirectInput = false) { var start = new ProcessStartInfo(fileName, arguments); start.UseShellExecute = false; - start.RedirectStandardOutput = redirect; - start.RedirectStandardError = redirect; + start.RedirectStandardOutput = redirectOutput; + start.RedirectStandardError = redirectOutput; + start.RedirectStandardInput = redirectInput; if (addToEnvironment != null) { diff --git a/tests/Azure-AI-CLI-TestRunner-Default-Tags.yaml b/tests/Azure-AI-CLI-TestRunner-Default-Tags.yaml index 1eb9a5b3..a254aab6 100644 --- a/tests/Azure-AI-CLI-TestRunner-Default-Tags.yaml +++ b/tests/Azure-AI-CLI-TestRunner-Default-Tags.yaml @@ -1,2 +1,2 @@ cli: ai -workingDirectory: ../testruns +workingDirectory: ../testresults diff --git a/tests/test.yaml b/tests/test.yaml index 5d7557bf..24268c25 100644 --- a/tests/test.yaml +++ b/tests/test.yaml @@ -89,5 +89,5 @@ ^Helper +Function +Class +Library +helper-functions +C# *\r?$\n ^OpenAI +Chat +Completions +openai-chat +C#, +Go, +Java, +JavaScript, +Python *\r?$\n ^OpenAI +Chat +Completions +\(Streaming\) +openai-chat-streaming +C#, +Go, +Java, +JavaScript, +Python *\r?$\n - ^OpenAI +Chat +Completions +\(w/ +Data +\+ +AI +Search\) +openai-chat-streaming-with-data +C#(, +Python){0,1} *\r?$\n + ^OpenAI +Chat +Completions 
+\(w/ +Data +\+ +AI +Search\) +openai-chat-streaming-with-data +C#, +JavaScript, +Python *\r?$\n ^OpenAI +Chat +Completions +\(w/ +Functions\) +openai-chat-streaming-with-functions +C#, +Go, +JavaScript, +Python *\r?$\n diff --git a/tests/test3.yaml b/tests/test3.yaml index 9791da9d..5769668a 100644 --- a/tests/test3.yaml +++ b/tests/test3.yaml @@ -1,2 +1,216 @@ -- name: simple dev new command - command: ai dev new openai-chat --cs +# AI - Azure AI CLI, Version 1.0.0-DEV-robc-20240117 +# Copyright (c) 2023 Microsoft Corporation. All Rights Reserved. + +# This PUBLIC PREVIEW version may change at any time. +# See: https://aka.ms/azure-ai-cli-public-preview + +# Name Short Name Language +# --------------------------------------------- ------------------------------------ -------------------------------- +# Environment Variables .env +# Helper Function Class Library helper-functions C# +# OpenAI Chat Completions openai-chat C#, Go, Java, JavaScript, Python +# OpenAI Chat Completions (Streaming) openai-chat-streaming C#, Go, Java, JavaScript, Python +# OpenAI Chat Completions (w/ Data + AI Search) openai-chat-streaming-with-data C#, JavaScript, Python +# OpenAI Chat Completions (w/ Functions) openai-chat-streaming-with-functions C#, Go, JavaScript, Python +# OpenAI Webpage openai-webpage JavaScript, TypeScript +# OpenAI Webpage (w/ Functions) openai-webpage-with-functions JavaScript, TypeScript + +- name: dev new environment + command: ai dev new .env + +- class: dev new helper-functions + tests: + - name: 1-generate template + command: ai dev new helper-functions + - name: 2-build template + script: | + cd helper-functions + dotnet build + - name: 3-run template + command: ai chat --interactive --helper-functions helper-functions/bin/Debug/net7.0/HelperFunctionsProject.dll + input: | + What is my name? 
+ expect: | + assistant-function: GetUsersName\({}\) = + tag: skip + +- area: ai dev new openai-chat + tests: + + - class: dev new openai-chat (c#) + tests: + - name: 1-generate template + command: ai dev new openai-chat --cs + - name: 2-build template + script: | + cd openai-chat-cs + dotnet build + - name: 3-run template + command: ai dev shell --run "openai-chat-cs/bin/Debug/net7.0/OpenAIChatCompletions" + input: |- + Tell me a joke + Tell me another joke + tag: skip + + - class: dev new openai-chat (go) + tests: + - name: 1-generate template + command: ai dev new openai-chat --go + - name: 2-build template + script: | + cd openai-chat-go + go mod tidy && go build + - name: 3-run template + command: ai dev shell --run "openai-chat-go/openai_chat_completions_hello_world" + input: |- + Tell me a joke + Tell me another joke + tag: skip + + - class: dev new openai-chat (java) + tests: + - name: 1-generate template + command: ai dev new openai-chat --java + - name: 2-restore packages + script: | + cd openai-chat-java + mvn clean package + - name: 3-build template + script: | + cd openai-chat-java + javac -cp "target/lib/*" src/OpenAIQuickstart.java -d out + - name: 4-run template + command: ai dev shell --run "java -cp \"openai-chat-java/out;openai-chat-java/target/lib/*\" OpenAIQuickstart" + input: |- + Tell me a joke + Tell me another joke + tag: skip + + - class: dev new openai-chat (javascript) + tests: + - name: 1-generate template + command: ai dev new openai-chat --javascript + - name: 2-build template + script: | + cd openai-chat-js + npm install + - name: 3-run template + command: ai dev shell --run "cd openai-chat-js && node main.js" + input: | + Tell me a joke + Tell me another joke + tag: skip + + - class: dev new openai-chat (python) + tests: + - name: 1-generate template + command: ai dev new openai-chat --python + - name: 2-build template + script: | + cd openai-chat-py + pip install -r requirements.txt + - name: 3-run template + command: ai dev shell 
--run "cd openai-chat-py && python openai_chat_completions.py" + input: | + Tell me a joke + Tell me another joke + tag: skip + +- area: ai dev new openai-chat-streaming + tests: + + - class: dev new openai-chat-streaming (c#) + tests: + - name: generate template + command: ai dev new openai-chat-streaming --cs + - name: run template + command: ai dev shell + arguments: + run: | + cd openai-chat-streaming-cs + dotnet run + stdin: | + Tell me a joke + Tell me another joke + tag: skip + + - name: dev new openai-chat-streaming (go) + command: ai dev new openai-chat-streaming --go + + - name: dev new openai-chat-streaming (java) + command: ai dev new openai-chat-streaming --java + + - name: dev new openai-chat-streaming (javascript) + command: ai dev new openai-chat-streaming --javascript + + - name: dev new openai-chat-streaming (python) + command: ai dev new openai-chat-streaming --python + +- area: ai dev new openai-chat-streaming-with-data + tests: + + - class: dev new openai-chat-streaming-with-data (c#) + tests: + - name: generate template + command: ai dev new openai-chat-streaming-with-data --cs + - name: run template + command: ai dev shell + arguments: + run: | + cd openai-chat-streaming-with-data-cs + dotnet run + stdin: | + What parameter should i use to initialize? + tag: skip + + - name: dev new openai-chat-streaming-with-data (javascript) + command: ai dev new openai-chat-streaming-with-data --javascript + + - name: dev new openai-chat-streaming-with-data (python) + command: ai dev new openai-chat-streaming-with-data --python + +- area: ai dev new openai-chat-streaming-with-functions + tests: + + - class: dev new openai-chat-streaming-with-functions (c#) + tests: + - name: generate template + command: ai dev new openai-chat-streaming-with-functions --cs + - name: run template + command: ai dev shell + arguments: + run: | + cd openai-chat-streaming-with-functions-cs + dotnet run + stdin: | + What is the date? + What is the time? 
+ tag: skip + + - name: dev new openai-chat-streaming-with-functions (go) + command: ai dev new openai-chat-streaming-with-functions --go + + - name: dev new openai-chat-streaming-with-functions (javascript) + command: ai dev new openai-chat-streaming-with-functions --javascript + + - name: dev new openai-chat-streaming-with-functions (python) + command: ai dev new openai-chat-streaming-with-functions --python + +- area: ai dev new openai-webpage + tests: + + - name: dev new openai-webpage (javascript) + command: ai dev new openai-webpage --javascript + + - name: dev new openai-webpage (typescript) + command: ai dev new openai-webpage --typescript + +- area: ai dev new openai-webpage-with-functions + tests: + + - name: dev new openai-webpage-with-functions (javascript) + command: ai dev new openai-webpage-with-functions --javascript + + - name: dev new openai-webpage-with-functions (typescript) + command: ai dev new openai-webpage-with-functions --typescript + \ No newline at end of file diff --git a/tests/testadapter/YamlTestCaseFilter.cs b/tests/testadapter/YamlTestCaseFilter.cs index d14a35af..29e20362 100644 --- a/tests/testadapter/YamlTestCaseFilter.cs +++ b/tests/testadapter/YamlTestCaseFilter.cs @@ -53,6 +53,7 @@ private static object GetPropertyValue(TestCase test, string name) case "foreach": return YamlTestProperties.Get(test, "foreach"); case "arguments": return YamlTestProperties.Get(test, "arguments"); + case "input": return YamlTestProperties.Get(test, "input"); case "expect": return YamlTestProperties.Get(test, "expect"); case "not-expect": return YamlTestProperties.Get(test, "not-expect"); @@ -69,6 +70,6 @@ private static object GetPropertyValue(TestCase test, string name) return tags.Select(x => x.Value).ToArray(); } - private static readonly string[] supportedFilterProperties = { "DisplayName", "FullyQualifiedName", "Category", "cli", "command", "script", "foreach", "arguments", "expect", "not-expect", "simulate" }; + private static readonly 
string[] supportedFilterProperties = { "DisplayName", "FullyQualifiedName", "Category", "cli", "command", "script", "foreach", "arguments", "input", "expect", "not-expect", "simulate" }; } } diff --git a/tests/testadapter/YamlTestCaseParser.cs b/tests/testadapter/YamlTestCaseParser.cs index 12fea61d..14c92736 100644 --- a/tests/testadapter/YamlTestCaseParser.cs +++ b/tests/testadapter/YamlTestCaseParser.cs @@ -127,6 +127,7 @@ private static TestCase GetTestFromNode(string source, FileInfo file, YamlMappin SetTestCasePropertyMap(test, "foreach", mapping, "foreach", workingDirectory); SetTestCasePropertyMap(test, "arguments", mapping, "arguments", workingDirectory); + SetTestCasePropertyMap(test, "input", mapping, "input", workingDirectory); SetTestCaseProperty(test, "expect", mapping, "expect"); SetTestCaseProperty(test, "not-expect", mapping, "not-expect"); @@ -168,7 +169,7 @@ private static void CheckInvalidTestCaseNodes(FileInfo file, YamlMappingNode map private static bool IsValidTestCaseNode(string value) { - return ";area;class;name;cli;command;script;timeout;foreach;arguments;expect;not-expect;simulate;tag;tags;parallelize;workingDirectory;".IndexOf($";{value};") >= 0; + return ";area;class;name;cli;command;script;timeout;foreach;arguments;input;expect;not-expect;simulate;tag;tags;parallelize;workingDirectory;".IndexOf($";{value};") >= 0; } private static void SetTestCaseProperty(TestCase test, string propertyName, YamlMappingNode mapping, string mappingName) diff --git a/tests/testadapter/YamlTestCaseRunner.cs b/tests/testadapter/YamlTestCaseRunner.cs index 324032af..37339886 100644 --- a/tests/testadapter/YamlTestCaseRunner.cs +++ b/tests/testadapter/YamlTestCaseRunner.cs @@ -65,6 +65,7 @@ private static IEnumerable TestCaseGetResults(TestCase test) var script = YamlTestProperties.Get(test, "script"); var @foreach = YamlTestProperties.Get(test, "foreach"); var arguments = YamlTestProperties.Get(test, "arguments"); + var input = YamlTestProperties.Get(test, 
"input"); var expect = YamlTestProperties.Get(test, "expect"); var notExpect = YamlTestProperties.Get(test, "not-expect"); var workingDirectory = YamlTestProperties.Get(test, "working-directory"); @@ -84,8 +85,8 @@ private static IEnumerable TestCaseGetResults(TestCase test) var start = DateTime.Now; var outcome = string.IsNullOrEmpty(simulate) - ? RunTestCase(test, cli, command, script, foreachItem, arguments, expect, notExpect, workingDirectory, timeout, out string stdOut, out string stdErr, out string errorMessage, out string stackTrace, out string additional, out string debugTrace) - : SimulateTestCase(test, simulate, cli, command, script, foreachItem, arguments, expect, notExpect, workingDirectory, out stdOut, out stdErr, out errorMessage, out stackTrace, out additional, out debugTrace); + ? RunTestCase(test, cli, command, script, foreachItem, arguments, input, expect, notExpect, workingDirectory, timeout, out string stdOut, out string stdErr, out string errorMessage, out string stackTrace, out string additional, out string debugTrace) + : SimulateTestCase(test, simulate, cli, command, script, foreachItem, arguments, input, expect, notExpect, workingDirectory, out stdOut, out stdErr, out errorMessage, out stackTrace, out additional, out debugTrace); #if DEBUG additional += outcome == TestOutcome.Failed ? 
$"\nEXTRA: {ExtraDebugInfo()}" : ""; @@ -204,7 +205,7 @@ private static Dictionary DupAndAdd(Dictionary d return dup; } - private static TestOutcome RunTestCase(TestCase test, string cli, string command, string script, string @foreach, string arguments, string expect, string notExpect, string workingDirectory, int timeout, out string stdOut, out string stdErr, out string errorMessage, out string stackTrace, out string additional, out string debugTrace) + private static TestOutcome RunTestCase(TestCase test, string cli, string command, string script, string @foreach, string arguments, string input, string expect, string notExpect, string workingDirectory, int timeout, out string stdOut, out string stdErr, out string errorMessage, out string stackTrace, out string additional, out string debugTrace) { var outcome = TestOutcome.None; @@ -243,6 +244,8 @@ private static TestOutcome RunTestCase(TestCase test, string cli, string command UpdatePathEnvironment(startInfo); var process = Process.Start(startInfo); + process.StandardInput.WriteLine(input ?? 
string.Empty); + process.StandardInput.Close(); stdOutTask = process.StandardOutput.ReadToEndAsync(); stdErrTask = process.StandardError.ReadToEndAsync(); @@ -685,7 +688,7 @@ private static string GetKeyValueArgs(List> kvs) return args.ToString().TrimEnd(); } - private static TestOutcome SimulateTestCase(TestCase test, string simulate, string cli, string command, string script, string @foreach, string arguments, string expect, string notExpect, string workingDirectory, out string stdOut, out string stdErr, out string errorMessage, out string stackTrace, out string additional, out string debugTrace) + private static TestOutcome SimulateTestCase(TestCase test, string simulate, string cli, string command, string script, string @foreach, string arguments, string input, string expect, string notExpect, string workingDirectory, out string stdOut, out string stdErr, out string errorMessage, out string stackTrace, out string additional, out string debugTrace) { var sb = new StringBuilder(); sb.AppendLine($"cli='{cli?.Replace("\n", "\\n")}'"); @@ -693,6 +696,7 @@ private static TestOutcome SimulateTestCase(TestCase test, string simulate, stri sb.AppendLine($"script='{script?.Replace("\n", "\\n")}'"); sb.AppendLine($"foreach='{@foreach?.Replace("\n", "\\n")}'"); sb.AppendLine($"arguments='{arguments?.Replace("\n", "\\n")}'"); + sb.AppendLine($"input='{input?.Replace("\n", "\\n")}'"); sb.AppendLine($"expect='{expect?.Replace("\n", "\\n")}'"); sb.AppendLine($"not-expect='{notExpect?.Replace("\n", "\\n")}'"); sb.AppendLine($"working-directory='{workingDirectory}'"); diff --git a/tests/testadapter/YamlTestProperties.cs b/tests/testadapter/YamlTestProperties.cs index 309b97b0..89c23cd0 100644 --- a/tests/testadapter/YamlTestProperties.cs +++ b/tests/testadapter/YamlTestProperties.cs @@ -41,6 +41,7 @@ private static TestProperty RegisterTestCaseProperty(string name) { "parallelize", RegisterTestCaseProperty("Parallelize") }, { "foreach", RegisterTestCaseProperty("ForEach") }, { 
"arguments", RegisterTestCaseProperty("Arguments") }, + { "input", RegisterTestCaseProperty("Input")}, { "expect", RegisterTestCaseProperty("Expect") }, { "not-expect", RegisterTestCaseProperty("NotExpect") }, { "simulate", RegisterTestCaseProperty("Simulate") }, From 399e7af78945859065a5ec58c7246d7120ec92bf Mon Sep 17 00:00:00 2001 From: Rob Chambers Date: Thu, 18 Jan 2024 18:09:46 -0800 Subject: [PATCH 12/30] update dev new template tests for streaming and with data --- tests/test3.yaml | 165 ++++++++++++++++++++++++++++++++--------------- 1 file changed, 112 insertions(+), 53 deletions(-) diff --git a/tests/test3.yaml b/tests/test3.yaml index 5769668a..dfe2e4d4 100644 --- a/tests/test3.yaml +++ b/tests/test3.yaml @@ -1,21 +1,4 @@ -# AI - Azure AI CLI, Version 1.0.0-DEV-robc-20240117 -# Copyright (c) 2023 Microsoft Corporation. All Rights Reserved. - -# This PUBLIC PREVIEW version may change at any time. -# See: https://aka.ms/azure-ai-cli-public-preview - -# Name Short Name Language -# --------------------------------------------- ------------------------------------ -------------------------------- -# Environment Variables .env -# Helper Function Class Library helper-functions C# -# OpenAI Chat Completions openai-chat C#, Go, Java, JavaScript, Python -# OpenAI Chat Completions (Streaming) openai-chat-streaming C#, Go, Java, JavaScript, Python -# OpenAI Chat Completions (w/ Data + AI Search) openai-chat-streaming-with-data C#, JavaScript, Python -# OpenAI Chat Completions (w/ Functions) openai-chat-streaming-with-functions C#, Go, JavaScript, Python -# OpenAI Webpage openai-webpage JavaScript, TypeScript -# OpenAI Webpage (w/ Functions) openai-webpage-with-functions JavaScript, TypeScript - -- name: dev new environment +- name: dev new environment command: ai dev new .env - class: dev new helper-functions @@ -46,7 +29,7 @@ cd openai-chat-cs dotnet build - name: 3-run template - command: ai dev shell --run "openai-chat-cs/bin/Debug/net7.0/OpenAIChatCompletions" + 
command: ai dev shell --run "openai-chat-cs\bin\Debug\net7.0\OpenAIChatCompletions" input: |- Tell me a joke Tell me another joke @@ -61,7 +44,7 @@ cd openai-chat-go go mod tidy && go build - name: 3-run template - command: ai dev shell --run "openai-chat-go/openai_chat_completions_hello_world" + command: ai dev shell --run "openai-chat-go\openai_chat_completions_hello_world" input: |- Tell me a joke Tell me another joke @@ -80,7 +63,7 @@ cd openai-chat-java javac -cp "target/lib/*" src/OpenAIQuickstart.java -d out - name: 4-run template - command: ai dev shell --run "java -cp \"openai-chat-java/out;openai-chat-java/target/lib/*\" OpenAIQuickstart" + command: ai dev shell --run "cd openai-chat-java && java -cp \"out;target/lib/*\" OpenAIQuickstart" input: |- Tell me a joke Tell me another joke @@ -96,7 +79,7 @@ npm install - name: 3-run template command: ai dev shell --run "cd openai-chat-js && node main.js" - input: | + input: |- Tell me a joke Tell me another joke tag: skip @@ -111,7 +94,7 @@ pip install -r requirements.txt - name: 3-run template command: ai dev shell --run "cd openai-chat-py && python openai_chat_completions.py" - input: | + input: |- Tell me a joke Tell me another joke tag: skip @@ -121,53 +104,129 @@ - class: dev new openai-chat-streaming (c#) tests: - - name: generate template + - name: 1-generate template command: ai dev new openai-chat-streaming --cs - - name: run template - command: ai dev shell - arguments: - run: | - cd openai-chat-streaming-cs - dotnet run - stdin: | - Tell me a joke - Tell me another joke + - name: 2-build template + script: | + cd openai-chat-streaming-cs + dotnet build + - name: 3-run template + command: ai dev shell --run "openai-chat-streaming-cs\bin\Debug\net7.0\OpenAIChatCompletionsStreaming" + input: |- + Tell me a joke + Tell me another joke tag: skip - - name: dev new openai-chat-streaming (go) - command: ai dev new openai-chat-streaming --go + - class: dev new openai-chat-streaming (go) + tests: + - name: 
1-generate template + command: ai dev new openai-chat-streaming --go + - name: 2-build template + script: | + cd openai-chat-streaming-go + go mod tidy && go build + - name: 3-run template + command: ai dev shell --run "openai-chat-streaming-go\openai_chat_completions_streaming_hello_world" + input: |- + Tell me a joke + Tell me another joke + tag: skip + + - class: dev new openai-chat-streaming (java) + tests: + - name: 1-generate template + command: ai dev new openai-chat-streaming --java + - name: 2-restore packages + script: | + cd openai-chat-streaming-java + mvn clean package + - name: 3-build template + script: | + cd openai-chat-streaming-java + javac -cp "target/lib/*" src/OpenAIQuickstartStreaming.java -d out + - name: 4-run template + command: ai dev shell --run "cd openai-chat-streaming-java && java -cp \"out;target/lib/*\" OpenAIQuickstartStreaming" + input: |- + Tell me a joke + Tell me another joke + tag: skip - - name: dev new openai-chat-streaming (java) command: ai dev new openai-chat-streaming --java - - name: dev new openai-chat-streaming (javascript) - command: ai dev new openai-chat-streaming --javascript + - class: dev new openai-chat-streaming (javascript) + tests: + - name: 1-generate template + command: ai dev new openai-chat-streaming --javascript + - name: 2-build template + script: | + cd openai-chat-streaming-js + npm install + - name: 3-run template + command: ai dev shell --run "cd openai-chat-streaming-js && node main.js" + input: |- + Tell me a joke + Tell me another joke + tag: skip - - name: dev new openai-chat-streaming (python) - command: ai dev new openai-chat-streaming --python + - class: dev new openai-chat-streaming (python) + tests: + - name: 1-generate template + command: ai dev new openai-chat-streaming --python + - name: 2-build template + script: | + cd openai-chat-streaming-py + pip install -r requirements.txt + - name: 3-run template + command: ai dev shell --run "cd openai-chat-streaming-py && python main.py" + 
input: |- + Tell me a joke + Tell me another joke + tag: skip - area: ai dev new openai-chat-streaming-with-data tests: - class: dev new openai-chat-streaming-with-data (c#) tests: - - name: generate template + - name: 1-generate template command: ai dev new openai-chat-streaming-with-data --cs - - name: run template - command: ai dev shell - arguments: - run: | - cd openai-chat-streaming-with-data-cs - dotnet run - stdin: | - What parameter should i use to initialize? + - name: 2-build template + script: | + cd openai-chat-streaming-with-data-cs + dotnet build + - name: 3-run template + command: ai dev shell --run "openai-chat-streaming-with-data-cs\bin\Debug\net7.0\OpenAIChatCompletionsWithDataStreaming" + input: |- + What parameter should i use to initialize? tag: skip - - name: dev new openai-chat-streaming-with-data (javascript) - command: ai dev new openai-chat-streaming-with-data --javascript + - class: dev new openai-chat-streaming-with-data (javascript) + tests: + - name: 1-generate template + command: ai dev new openai-chat-streaming-with-data --javascript + - name: 2-build template + script: | + cd openai-chat-streaming-with-data-js + npm install + - name: 3-run template + command: ai dev shell --run "cd openai-chat-streaming-with-data-js && node main.js" + input: |- + What parameter should i use to initialize? + tag: skip - - name: dev new openai-chat-streaming-with-data (python) - command: ai dev new openai-chat-streaming-with-data --python + - class: dev new openai-chat-streaming-with-data (python) + tests: + - name: 1-generate template + command: ai dev new openai-chat-streaming-with-data --python + - name: 2-build template + script: | + cd openai-chat-streaming-with-data-py + pip install -r requirements.txt + - name: 3-run template + command: ai dev shell --run "cd openai-chat-streaming-with-data-py && python main.py" + input: |- + What parameter should i use to initialize? 
+ tag: skip - area: ai dev new openai-chat-streaming-with-functions tests: From 26835010e5c1db48916d7cb1a0d077b8d3ded1b6 Mon Sep 17 00:00:00 2001 From: Rob Chambers Date: Thu, 18 Jan 2024 18:20:18 -0800 Subject: [PATCH 13/30] template testing for dev new with-functions, and openai-webpage-* --- tests/test3.yaml | 124 ++++++++++++++++++++++++++++++++++++++--------- 1 file changed, 100 insertions(+), 24 deletions(-) diff --git a/tests/test3.yaml b/tests/test3.yaml index dfe2e4d4..6d5793e4 100644 --- a/tests/test3.yaml +++ b/tests/test3.yaml @@ -233,43 +233,119 @@ - class: dev new openai-chat-streaming-with-functions (c#) tests: - - name: generate template + - name: 1-generate template command: ai dev new openai-chat-streaming-with-functions --cs - - name: run template - command: ai dev shell - arguments: - run: | - cd openai-chat-streaming-with-functions-cs - dotnet run - stdin: | - What is the date? - What is the time? + - name: 2-build template + script: | + cd openai-chat-streaming-with-functions-cs + dotnet build + - name: 3-run template + command: ai dev shell --run "cd openai-chat-streaming-with-functions-cs && bin\Debug\net7.0\OpenAIChatCompletionsFunctionsStreaming" + input: |- + What is the date? + What is the time? tag: skip - - name: dev new openai-chat-streaming-with-functions (go) - command: ai dev new openai-chat-streaming-with-functions --go + - class: dev new openai-chat-streaming-with-functions (go) + tests: + - name: 1-generate template + command: ai dev new openai-chat-streaming-with-functions --go + - name: 2-build template + script: | + cd openai-chat-streaming-with-functions-go + go mod tidy && go build + - name: 3-run template + command: ai dev shell --run "cd openai-chat-streaming-with-functions-go && openai_chat_completions_functions_streaming_hello_world" + input: |- + What is the date? + What is the time? 
+ tag: skip - - name: dev new openai-chat-streaming-with-functions (javascript) - command: ai dev new openai-chat-streaming-with-functions --javascript + - class: dev new openai-chat-streaming-with-functions (javascript) + tests: + - name: 1-generate template + command: ai dev new openai-chat-streaming-with-functions --javascript + - name: 2-build template + script: | + cd openai-chat-streaming-with-functions-js + npm install + - name: 3-run template + command: ai dev shell --run "cd openai-chat-streaming-with-functions-js && node main.js" + input: |- + What is the date? + What is the time? + tag: skip - - name: dev new openai-chat-streaming-with-functions (python) - command: ai dev new openai-chat-streaming-with-functions --python + - class: dev new openai-chat-streaming-with-functions (python) + tests: + - name: 1-generate template + command: ai dev new openai-chat-streaming-with-functions --python + - name: 2-build template + script: | + cd openai-chat-streaming-with-functions-py + pip install -r requirements.txt + - name: 3-run template + command: ai dev shell --run "cd openai-chat-streaming-with-functions-py && python main.py" + input: |- + What is the date? + What is the time? 
+ tag: skip - area: ai dev new openai-webpage tests: - - name: dev new openai-webpage (javascript) - command: ai dev new openai-webpage --javascript + - class: dev new openai-webpage (javascript) + tests: + - name: 1-generate template + command: ai dev new openai-webpage --javascript + - name: 2-build template + script: | + cd openai-webpage-js + npm install + - name: 3-pack template + script: | + cd openai-webpage-js + npx webpack - - name: dev new openai-webpage (typescript) - command: ai dev new openai-webpage --typescript + - class: dev new openai-webpage (typescript) + tests: + - name: 1-generate template + command: ai dev new openai-webpage --typescript + - name: 2-build template + script: | + cd openai-webpage-ts + npm install + - name: 3-pack template + script: | + cd openai-webpage-ts + npx webpack - area: ai dev new openai-webpage-with-functions tests: - - name: dev new openai-webpage-with-functions (javascript) - command: ai dev new openai-webpage-with-functions --javascript + - class: dev new openai-webpage-with-functions (javascript) + tests: + - name: 1-generate template + command: ai dev new openai-webpage-with-functions --javascript + - name: 2-build template + script: | + cd openai-webpage-with-functions-js + npm install + - name: 3-pack template + script: | + cd openai-webpage-with-functions-js + npx webpack - - name: dev new openai-webpage-with-functions (typescript) - command: ai dev new openai-webpage-with-functions --typescript + - class: dev new openai-webpage-with-functions (typescript) + tests: + - name: 1-generate template + command: ai dev new openai-webpage-with-functions --typescript + - name: 2-build template + script: | + cd openai-webpage-with-functions-ts + npm install + - name: 3-pack template + script: | + cd openai-webpage-with-functions-ts + npx webpack \ No newline at end of file From e874da08717c7168b6f48c9a816d6a552473f805 Mon Sep 17 00:00:00 2001 From: Christopher Schraer <32145632+chschrae@users.noreply.github.com> Date: 
Fri, 19 Jan 2024 11:48:21 -0800 Subject: [PATCH 14/30] Made java templates a lot more like the others. (#156) Co-authored-by: Chris Schraer --- src/ai/.x/templates/openai-chat-java/_.json | 1 + src/ai/.x/templates/openai-chat-java/pom.xml | 2 +- .../openai-chat-java/scripts/2-compile.bat | 2 +- .../openai-chat-java/scripts/3-run.bat | 2 +- .../templates/openai-chat-java/src/Main.java | 29 +++++++ .../src/OpenAIChatCompletionsClass.java | 50 +++++++++++++ .../src/OpenAIQuickstart.java | 69 ----------------- .../openai-chat-streaming-java/_.json | 1 + .../scripts/2-compile.bat | 2 +- .../scripts/3-run.bat | 2 +- .../openai-chat-streaming-java/src/Main.java | 38 ++++++++++ ... OpenAIChatCompletionsStreamingClass.java} | 75 +++++-------------- 12 files changed, 144 insertions(+), 129 deletions(-) create mode 100644 src/ai/.x/templates/openai-chat-java/src/Main.java create mode 100644 src/ai/.x/templates/openai-chat-java/src/OpenAIChatCompletionsClass.java delete mode 100644 src/ai/.x/templates/openai-chat-java/src/OpenAIQuickstart.java create mode 100644 src/ai/.x/templates/openai-chat-streaming-java/src/Main.java rename src/ai/.x/templates/openai-chat-streaming-java/src/{OpenAIQuickstartStreaming.java => OpenAIChatCompletionsStreamingClass.java} (50%) diff --git a/src/ai/.x/templates/openai-chat-java/_.json b/src/ai/.x/templates/openai-chat-java/_.json index 27a24b45..d4485593 100644 --- a/src/ai/.x/templates/openai-chat-java/_.json +++ b/src/ai/.x/templates/openai-chat-java/_.json @@ -2,6 +2,7 @@ "_LongName": "OpenAI Chat Completions", "_ShortName": "openai-chat", "_Language": "Java", + "ClassName": "OpenAIChatCompletionsClass", "AZURE_OPENAI_ENDPOINT": "", "AZURE_OPENAI_KEY": "", "AZURE_OPENAI_CHAT_DEPLOYMENT": "", diff --git a/src/ai/.x/templates/openai-chat-java/pom.xml b/src/ai/.x/templates/openai-chat-java/pom.xml index ec38d9fa..50d91b14 100644 --- a/src/ai/.x/templates/openai-chat-java/pom.xml +++ b/src/ai/.x/templates/openai-chat-java/pom.xml @@ -10,7 +10,7 
@@ com.azure azure-ai-openai - 1.0.0-beta.5 + 1.0.0-beta.6 diff --git a/src/ai/.x/templates/openai-chat-java/scripts/2-compile.bat b/src/ai/.x/templates/openai-chat-java/scripts/2-compile.bat index a0ab8417..13f0dc8c 100644 --- a/src/ai/.x/templates/openai-chat-java/scripts/2-compile.bat +++ b/src/ai/.x/templates/openai-chat-java/scripts/2-compile.bat @@ -1 +1 @@ -javac -cp target/lib/* src/OpenAIQuickstart.java -d out +javac -cp target/lib/* src/OpenAIChatCompletionsClass.java src/Main.java -d out diff --git a/src/ai/.x/templates/openai-chat-java/scripts/3-run.bat b/src/ai/.x/templates/openai-chat-java/scripts/3-run.bat index 6423520e..6d301cb6 100644 --- a/src/ai/.x/templates/openai-chat-java/scripts/3-run.bat +++ b/src/ai/.x/templates/openai-chat-java/scripts/3-run.bat @@ -1 +1 @@ -java -cp out;target/lib/* OpenAIQuickstart +java -cp out;target/lib/* Main diff --git a/src/ai/.x/templates/openai-chat-java/src/Main.java b/src/ai/.x/templates/openai-chat-java/src/Main.java new file mode 100644 index 00000000..8fca7f50 --- /dev/null +++ b/src/ai/.x/templates/openai-chat-java/src/Main.java @@ -0,0 +1,29 @@ +<#@ template hostspecific="true" #> +<#@ output extension=".java" encoding="utf-8" #> +<#@ parameter type="System.String" name="ClassName" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_ENDPOINT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_KEY" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_CHAT_DEPLOYMENT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_SYSTEM_PROMPT" #> +import java.util.Scanner; +public class Main { + public static void main(String[] args) { + String openAIKey = (System.getenv("AZURE_OPENAI_KEY") != null) ? System.getenv("AZURE_OPENAI_KEY") : ""; + String openAIEndpoint = (System.getenv("AZURE_OPENAI_ENDPOINT") != null) ? System.getenv("AZURE_OPENAI_ENDPOINT") : ""; + String openAIChatDeployment = (System.getenv("AZURE_OPENAI_CHAT_DEPLOYMENT") != null) ? 
System.getenv("AZURE_OPENAI_CHAT_DEPLOYMENT") : ""; + String openAISystemPrompt = (System.getenv("AZURE_OPENAI_SYSTEM_PROMPT") != null) ? System.getenv("AZURE_OPENAI_SYSTEM_PROMPT") : "You are a helpful AI assistant."; + + <#= ClassName #> chat = new <#= ClassName #>(openAIKey, openAIEndpoint, openAIChatDeployment, openAISystemPrompt); + + Scanner scanner = new Scanner(System.in); + while (true) { + System.out.print("User: "); + String userPrompt = scanner.nextLine(); + if (userPrompt.isEmpty() || "exit".equals(userPrompt)) break; + + String response = chat.getChatCompletion(userPrompt); + System.out.println("\nAssistant: " + response + "\n"); + } + scanner.close(); + } +} \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-java/src/OpenAIChatCompletionsClass.java b/src/ai/.x/templates/openai-chat-java/src/OpenAIChatCompletionsClass.java new file mode 100644 index 00000000..3db87e5e --- /dev/null +++ b/src/ai/.x/templates/openai-chat-java/src/OpenAIChatCompletionsClass.java @@ -0,0 +1,50 @@ +<#@ template hostspecific="true" #> +<#@ output extension=".java" encoding="utf-8" #> +<#@ parameter type="System.String" name="ClassName" #> +import com.azure.ai.openai.OpenAIClient; +import com.azure.ai.openai.OpenAIClientBuilder; +import com.azure.ai.openai.models.ChatCompletions; +import com.azure.ai.openai.models.ChatCompletionsOptions; +import com.azure.ai.openai.models.ChatMessage; +import com.azure.ai.openai.models.ChatRole; +import com.azure.core.credential.AzureKeyCredential; + +import java.util.ArrayList; +import java.util.List; + +public class <#= ClassName #> { + + private OpenAIClient client; + private ChatCompletionsOptions options; + private String openAIChatDeployment; + private String openAISystemPrompt; + + public <#= ClassName #> (String openAIKey, String openAIEndpoint, String openAIChatDeployment, String openAISystemPrompt) { + this.openAIChatDeployment = openAIChatDeployment; + this.openAISystemPrompt = openAISystemPrompt; + client = 
new OpenAIClientBuilder() + .endpoint(openAIEndpoint) + .credential(new AzureKeyCredential(openAIKey)) + .buildClient(); + + List chatMessages = new ArrayList<>(); + options = new ChatCompletionsOptions(chatMessages); + ClearConversation(); + } + + public void ClearConversation(){ + List chatMessages = options.getMessages(); + chatMessages.clear(); + chatMessages.add(new ChatMessage(ChatRole.SYSTEM, this.openAISystemPrompt)); + } + + public String getChatCompletion(String userPrompt) { + options.getMessages().add(new ChatMessage(ChatRole.USER, userPrompt)); + + ChatCompletions chatCompletions = client.getChatCompletions(this.openAIChatDeployment, options); + String responseContent = chatCompletions.getChoices().get(0).getMessage().getContent(); + options.getMessages().add(new ChatMessage(ChatRole.ASSISTANT, responseContent)); + + return responseContent; + } +} \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-java/src/OpenAIQuickstart.java b/src/ai/.x/templates/openai-chat-java/src/OpenAIQuickstart.java deleted file mode 100644 index dbe10257..00000000 --- a/src/ai/.x/templates/openai-chat-java/src/OpenAIQuickstart.java +++ /dev/null @@ -1,69 +0,0 @@ -<#@ template hostspecific="true" #> -<#@ output extension=".java" encoding="utf-8" #> -<#@ parameter type="System.String" name="ClassName" #> -<#@ parameter type="System.String" name="AZURE_OPENAI_ENDPOINT" #> -<#@ parameter type="System.String" name="AZURE_OPENAI_KEY" #> -<#@ parameter type="System.String" name="AZURE_OPENAI_CHAT_DEPLOYMENT" #> -<#@ parameter type="System.String" name="AZURE_OPENAI_SYSTEM_PROMPT" #> -import com.azure.ai.openai.OpenAIClient; -import com.azure.ai.openai.OpenAIClientBuilder; -import com.azure.ai.openai.models.ChatChoice; -import com.azure.ai.openai.models.ChatCompletions; -import com.azure.ai.openai.models.ChatCompletionsOptions; -import com.azure.ai.openai.models.ChatMessage; -import com.azure.ai.openai.models.ChatRole; -import 
com.azure.ai.openai.models.CompletionsUsage; -import com.azure.core.credential.AzureKeyCredential; - -import java.util.ArrayList; -import java.util.List; -import java.util.Scanner; - -public class OpenAIQuickstart { - - private OpenAIClient client; - private ChatCompletionsOptions options; - - private String key = (System.getenv("AZURE_OPENAI_KEY") != null) ? System.getenv("AZURE_OPENAI_KEY") : "<#= AZURE_OPENAI_KEY #>"; - private String endpoint = (System.getenv("AZURE_OPENAI_ENDPOINT") != null) ? System.getenv("AZURE_OPENAI_ENDPOINT") : "<#= AZURE_OPENAI_ENDPOINT #>"; - private String deploymentName = (System.getenv("AZURE_OPENAI_CHAT_DEPLOYMENT") != null) ? System.getenv("AZURE_OPENAI_CHAT_DEPLOYMENT") : "<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>"; - private String systemPrompt = (System.getenv("AZURE_OPENAI_SYSTEM_PROMPT") != null) ? System.getenv("AZURE_OPENAI_SYSTEM_PROMPT") : "<#= AZURE_OPENAI_SYSTEM_PROMPT #>"; - - public OpenAIQuickstart() { - - client = new OpenAIClientBuilder() - .endpoint(endpoint) - .credential(new AzureKeyCredential(key)) - .buildClient(); - - List chatMessages = new ArrayList<>(); - chatMessages.add(new ChatMessage(ChatRole.SYSTEM, systemPrompt)); - - options = new ChatCompletionsOptions(chatMessages); - } - - public String getChatCompletion(String userPrompt) { - options.getMessages().add(new ChatMessage(ChatRole.USER, userPrompt)); - - ChatCompletions chatCompletions = client.getChatCompletions(deploymentName, options); - String responseContent = chatCompletions.getChoices().get(0).getMessage().getContent(); - options.getMessages().add(new ChatMessage(ChatRole.ASSISTANT, responseContent)); - - return responseContent; - } - - public static void main(String[] args) { - OpenAIQuickstart chat = new OpenAIQuickstart(); - - Scanner scanner = new Scanner(System.in); - while (true) { - System.out.print("User: "); - String userPrompt = scanner.nextLine(); - if (userPrompt.isEmpty() || "exit".equals(userPrompt)) break; - - String response = 
chat.getChatCompletion(userPrompt); - System.out.println("\nAssistant: " + response + "\n"); - } - scanner.close(); - } -} \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-streaming-java/_.json b/src/ai/.x/templates/openai-chat-streaming-java/_.json index 137c0bfe..5c2e64d0 100644 --- a/src/ai/.x/templates/openai-chat-streaming-java/_.json +++ b/src/ai/.x/templates/openai-chat-streaming-java/_.json @@ -2,6 +2,7 @@ "_LongName": "OpenAI Chat Completions (Streaming)", "_ShortName": "openai-chat-streaming", "_Language": "Java", + "ClassName": "OpenAIChatCompletionsStreamingClass", "AZURE_OPENAI_ENDPOINT": "", "AZURE_OPENAI_KEY": "", "AZURE_OPENAI_CHAT_DEPLOYMENT": "", diff --git a/src/ai/.x/templates/openai-chat-streaming-java/scripts/2-compile.bat b/src/ai/.x/templates/openai-chat-streaming-java/scripts/2-compile.bat index f50d9fb1..a2fac8c8 100644 --- a/src/ai/.x/templates/openai-chat-streaming-java/scripts/2-compile.bat +++ b/src/ai/.x/templates/openai-chat-streaming-java/scripts/2-compile.bat @@ -1 +1 @@ -javac -cp target/lib/* src/OpenAIQuickstartStreaming.java -d out +javac -cp target/lib/* src/OpenAIChatCompletionsStreamingClass.java src/Main.java -d out diff --git a/src/ai/.x/templates/openai-chat-streaming-java/scripts/3-run.bat b/src/ai/.x/templates/openai-chat-streaming-java/scripts/3-run.bat index 4f50c0b2..6d301cb6 100644 --- a/src/ai/.x/templates/openai-chat-streaming-java/scripts/3-run.bat +++ b/src/ai/.x/templates/openai-chat-streaming-java/scripts/3-run.bat @@ -1 +1 @@ -java -cp out;target/lib/* OpenAIQuickstartStreaming +java -cp out;target/lib/* Main diff --git a/src/ai/.x/templates/openai-chat-streaming-java/src/Main.java b/src/ai/.x/templates/openai-chat-streaming-java/src/Main.java new file mode 100644 index 00000000..a3d3a0ca --- /dev/null +++ b/src/ai/.x/templates/openai-chat-streaming-java/src/Main.java @@ -0,0 +1,38 @@ +<#@ template hostspecific="true" #> +<#@ output extension=".java" encoding="utf-8" #> +<#@ parameter 
type="System.String" name="ClassName" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_ENDPOINT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_KEY" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_CHAT_DEPLOYMENT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_SYSTEM_PROMPT" #> +import java.util.Scanner; +import reactor.core.publisher.Flux; +import com.azure.ai.openai.models.ChatCompletions; + +public class Main { + + public static void main(String[] args) { + String openAIKey = (System.getenv("AZURE_OPENAI_KEY") != null) ? System.getenv("AZURE_OPENAI_KEY") : ""; + String openAIEndpoint = (System.getenv("AZURE_OPENAI_ENDPOINT") != null) ? System.getenv("AZURE_OPENAI_ENDPOINT") : ""; + String openAIChatDeployment = (System.getenv("AZURE_OPENAI_CHAT_DEPLOYMENT") != null) ? System.getenv("AZURE_OPENAI_CHAT_DEPLOYMENT") : ""; + String openAISystemPrompt = (System.getenv("AZURE_OPENAI_SYSTEM_PROMPT") != null) ? System.getenv("AZURE_OPENAI_SYSTEM_PROMPT") : "You are a helpful AI assistant."; + + <#= ClassName #> chat = new <#= ClassName #>(openAIKey, openAIEndpoint, openAIChatDeployment, openAISystemPrompt); + + Scanner scanner = new Scanner(System.in); + while (true) { + System.out.print("User: "); + String userPrompt = scanner.nextLine(); + if (userPrompt.isEmpty() || "exit".equals(userPrompt)) + break; + + System.out.print("\nAssistant: "); + Flux responseFlux = chat.getChatCompletionsStreamingAsync(userPrompt, update -> { + System.out.print(update.getContent()); + }); + responseFlux.blockLast(); + System.out.println("\n"); + } + scanner.close(); + } +} \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-streaming-java/src/OpenAIQuickstartStreaming.java b/src/ai/.x/templates/openai-chat-streaming-java/src/OpenAIChatCompletionsStreamingClass.java similarity index 50% rename from src/ai/.x/templates/openai-chat-streaming-java/src/OpenAIQuickstartStreaming.java rename to 
src/ai/.x/templates/openai-chat-streaming-java/src/OpenAIChatCompletionsStreamingClass.java index aee6adcb..294a0470 100644 --- a/src/ai/.x/templates/openai-chat-streaming-java/src/OpenAIQuickstartStreaming.java +++ b/src/ai/.x/templates/openai-chat-streaming-java/src/OpenAIChatCompletionsStreamingClass.java @@ -1,12 +1,7 @@ <#@ template hostspecific="true" #> <#@ output extension=".java" encoding="utf-8" #> <#@ parameter type="System.String" name="ClassName" #> -<#@ parameter type="System.String" name="AZURE_OPENAI_ENDPOINT" #> -<#@ parameter type="System.String" name="AZURE_OPENAI_KEY" #> -<#@ parameter type="System.String" name="AZURE_OPENAI_CHAT_DEPLOYMENT" #> -<#@ parameter type="System.String" name="AZURE_OPENAI_SYSTEM_PROMPT" #> import com.azure.ai.openai.OpenAIAsyncClient; -import com.azure.ai.openai.OpenAIClient; import com.azure.ai.openai.OpenAIClientBuilder; import com.azure.ai.openai.models.ChatChoice; import com.azure.ai.openai.models.ChatCompletions; @@ -14,71 +9,58 @@ import com.azure.ai.openai.models.ChatRequestAssistantMessage; import com.azure.ai.openai.models.ChatRequestMessage; import com.azure.ai.openai.models.ChatRequestSystemMessage; -import com.azure.ai.openai.models.ChatRole; import com.azure.ai.openai.models.ChatRequestUserMessage; import com.azure.ai.openai.models.ChatResponseMessage; -import com.azure.ai.openai.models.CompletionsUsage; import com.azure.ai.openai.models.CompletionsFinishReason; import com.azure.core.credential.AzureKeyCredential; - import reactor.core.publisher.Flux; -import java.time.Duration; import java.util.ArrayList; -import java.util.concurrent.CompletableFuture; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.atomic.AtomicInteger; import java.util.function.Consumer; import java.util.List; -import java.util.Scanner; -public class OpenAIQuickstartStreaming { +public class <#= ClassName #> { private OpenAIAsyncClient client; private ChatCompletionsOptions options; + private String 
openAIChatDeployment; + private String openAISystemPrompt; - private String key = (System.getenv("AZURE_OPENAI_KEY") != null) ? System.getenv("AZURE_OPENAI_KEY") - : ""; - private String endpoint = (System.getenv("AZURE_OPENAI_ENDPOINT") != null) ? System.getenv("AZURE_OPENAI_ENDPOINT") - : ""; - private String deploymentName = (System.getenv("AZURE_OPENAI_CHAT_DEPLOYMENT") != null) - ? System.getenv("AZURE_OPENAI_CHAT_DEPLOYMENT") - : ""; - private String systemPrompt = (System.getenv("AZURE_OPENAI_SYSTEM_PROMPT") != null) - ? System.getenv("AZURE_OPENAI_SYSTEM_PROMPT") - : "You are a helpful AI assistant."; - - public OpenAIQuickstartStreaming() { + public <#= ClassName #> (String openAIKey, String openAIEndpoint, String openAIChatDeployment, String openAISystemPrompt) { + this.openAIChatDeployment = openAIChatDeployment; + this.openAISystemPrompt = openAISystemPrompt; client = new OpenAIClientBuilder() - .endpoint(endpoint) - .credential(new AzureKeyCredential(key)) - .buildAsyncClient(); + .endpoint(openAIEndpoint) + .credential(new AzureKeyCredential(openAIKey)) + .buildAsyncClient(); List chatMessages = new ArrayList<>(); - chatMessages.add(new ChatRequestSystemMessage(systemPrompt)); - options = new ChatCompletionsOptions(chatMessages); + ClearConversation(); options.setStream(true); } + public void ClearConversation(){ + List chatMessages = options.getMessages(); + chatMessages.clear(); + chatMessages.add(new ChatRequestSystemMessage(this.openAISystemPrompt)); + } + public Flux getChatCompletionsStreamingAsync(String userPrompt, Consumer callback) { options.getMessages().add(new ChatRequestUserMessage(userPrompt)); StringBuilder responseContent = new StringBuilder(); - Flux response = client.getChatCompletionsStream(deploymentName, options); + Flux response = client.getChatCompletionsStream(this.openAIChatDeployment, options); response.subscribe(chatResponse -> { if (chatResponse.getChoices() != null) { for (ChatChoice update : chatResponse.getChoices()) { 
if (update.getDelta() == null || update.getDelta().getContent() == null) continue; - callback.accept(update.getDelta()); String content = update.getDelta().getContent(); - if (update.getFinishReason() == null) - continue; if (update.getFinishReason() == CompletionsFinishReason.CONTENT_FILTERED) { content = content + "\nWARNING: Content filtered!"; } else if (update.getFinishReason() == CompletionsFinishReason.TOKEN_LIMIT_REACHED) { @@ -88,6 +70,9 @@ public Flux getChatCompletionsStreamingAsync(String userPrompt, if (content.isEmpty()) continue; + if(callback != null) { + callback.accept(update.getDelta()); + } responseContent.append(content); } @@ -97,24 +82,4 @@ public Flux getChatCompletionsStreamingAsync(String userPrompt, return response; } - - public static void main(String[] args) { - OpenAIQuickstartStreaming chat = new OpenAIQuickstartStreaming(); - - Scanner scanner = new Scanner(System.in); - while (true) { - System.out.print("User: "); - String userPrompt = scanner.nextLine(); - if (userPrompt.isEmpty() || "exit".equals(userPrompt)) - break; - - System.out.print("\nAssistant: "); - Flux responseFlux = chat.getChatCompletionsStreamingAsync(userPrompt, update -> { - System.out.print(update.getContent()); - }); - responseFlux.blockLast(Duration.ofSeconds(20)); - System.out.println("\n"); - } - scanner.close(); - } } \ No newline at end of file From b3eb69711b6832390f1aad13c4358cb19245cab8 Mon Sep 17 00:00:00 2001 From: Rob Chambers Date: Mon, 22 Jan 2024 06:35:21 -0800 Subject: [PATCH 15/30] update dev new template testing (#155) * update dev new template testing * automatic non-step parallelization in progress * completed parallelizing first test in "steps" test sets * better error handling * refactor for clarity * fix broken tests from main merge * update to fix `scanner.nextLine` issue * ensure that if there's an exception in the python templates caught in main, that they `exit(1)` * ensure python templates when redirecting stdin don't print exception 
when hitting eof * make sure js node templates exit(1) when there's an error, except when that error is EOF from redirected input * publish test results at end of test stage, and fail the stage if there are failed tests --- .azure/pipelines/build.yaml | 23 +-- .../templates/openai-chat-java/src/Main.java | 2 + .../src/OpenAIChatCompletionsClass.java | 15 +- src/ai/.x/templates/openai-chat-js/Main.js | 5 +- .../openai-chat-py/openai_chat_completions.py | 27 ++- .../openai-chat-streaming-java/src/Main.java | 2 + .../OpenAIChatCompletionsStreamingClass.java | 10 +- .../openai-chat-streaming-js/Main.js | 5 +- .../openai-chat-streaming-py/main.py | 6 +- .../Main.js | 5 +- .../main.py | 6 +- .../Main.js | 5 +- .../main.py | 6 +- src/ai/Program_AI.cs | 39 +++- tests/test3.yaml | 188 +++++++++--------- tests/testadapter/YamlTestAdapter.cs | 112 +++++++++-- tests/testadapter/YamlTestCaseFilter.cs | 3 +- tests/testadapter/YamlTestCaseParser.cs | 114 ++++++----- tests/testadapter/YamlTestProperties.cs | 1 + 19 files changed, 365 insertions(+), 209 deletions(-) diff --git a/.azure/pipelines/build.yaml b/.azure/pipelines/build.yaml index e433eba3..ef06de87 100644 --- a/.azure/pipelines/build.yaml +++ b/.azure/pipelines/build.yaml @@ -298,18 +298,6 @@ stages: "$(LocalBinOutputPath)/$(BuildConfiguration)/net7.0/Azure.AI.CLI.TestAdapter.dll" workingDirectory: '$(TestResultsPath)' - # ----------------------------------------------------------------------------- - # Publish the test results - # ----------------------------------------------------------------------------- - - task: PublishTestResults@2 - displayName: Publish ai-cli test results - continueOnError: true - inputs: - testRunner: VSTest - testResultsFiles: '$(TestRunTrxFileName)' - testRunTitle: '$(TestRunTitle)' - failTaskOnFailedTests: true - # ----------------------------------------------------------------------------- # Archive and publish the test run backup artifact # 
----------------------------------------------------------------------------- @@ -340,6 +328,17 @@ stages: pathToPublish: '$(TestBackupArtifactFile)' artifactName: TestRunBackup + # ----------------------------------------------------------------------------- + # Publish the test results + # ----------------------------------------------------------------------------- + - task: PublishTestResults@2 + displayName: Publish ai-cli test results + inputs: + testRunner: VSTest + testResultsFiles: '$(TestRunTrxFileName)' + testRunTitle: '$(TestRunTitle)' + failTaskOnFailedTests: true + - stage: ManualApproval dependsOn: [SetupStage, BuildStage, TestStage] condition: and(succeeded(), or(eq(stageDependencies.SetupStage.outputs['SetupJob.Variables.IsRelease'], 'true'), eq(variables['PublishDevBuild'], 'true'))) diff --git a/src/ai/.x/templates/openai-chat-java/src/Main.java b/src/ai/.x/templates/openai-chat-java/src/Main.java index 8fca7f50..c11db157 100644 --- a/src/ai/.x/templates/openai-chat-java/src/Main.java +++ b/src/ai/.x/templates/openai-chat-java/src/Main.java @@ -18,6 +18,8 @@ public static void main(String[] args) { Scanner scanner = new Scanner(System.in); while (true) { System.out.print("User: "); + if (!scanner.hasNextLine()) break; + String userPrompt = scanner.nextLine(); if (userPrompt.isEmpty() || "exit".equals(userPrompt)) break; diff --git a/src/ai/.x/templates/openai-chat-java/src/OpenAIChatCompletionsClass.java b/src/ai/.x/templates/openai-chat-java/src/OpenAIChatCompletionsClass.java index 3db87e5e..deccae33 100644 --- a/src/ai/.x/templates/openai-chat-java/src/OpenAIChatCompletionsClass.java +++ b/src/ai/.x/templates/openai-chat-java/src/OpenAIChatCompletionsClass.java @@ -3,10 +3,7 @@ <#@ parameter type="System.String" name="ClassName" #> import com.azure.ai.openai.OpenAIClient; import com.azure.ai.openai.OpenAIClientBuilder; -import com.azure.ai.openai.models.ChatCompletions; -import com.azure.ai.openai.models.ChatCompletionsOptions; -import 
com.azure.ai.openai.models.ChatMessage; -import com.azure.ai.openai.models.ChatRole; +import com.azure.ai.openai.models.*; import com.azure.core.credential.AzureKeyCredential; import java.util.ArrayList; @@ -27,23 +24,23 @@ public class <#= ClassName #> { .credential(new AzureKeyCredential(openAIKey)) .buildClient(); - List chatMessages = new ArrayList<>(); + List chatMessages = new ArrayList<>(); options = new ChatCompletionsOptions(chatMessages); ClearConversation(); } public void ClearConversation(){ - List chatMessages = options.getMessages(); + List chatMessages = options.getMessages(); chatMessages.clear(); - chatMessages.add(new ChatMessage(ChatRole.SYSTEM, this.openAISystemPrompt)); + chatMessages.add(new ChatRequestSystemMessage(this.openAISystemPrompt)); } public String getChatCompletion(String userPrompt) { - options.getMessages().add(new ChatMessage(ChatRole.USER, userPrompt)); + options.getMessages().add(new ChatRequestUserMessage(userPrompt)); ChatCompletions chatCompletions = client.getChatCompletions(this.openAIChatDeployment, options); String responseContent = chatCompletions.getChoices().get(0).getMessage().getContent(); - options.getMessages().add(new ChatMessage(ChatRole.ASSISTANT, responseContent)); + options.getMessages().add(new ChatRequestAssistantMessage(responseContent.toString())); return responseContent; } diff --git a/src/ai/.x/templates/openai-chat-js/Main.js b/src/ai/.x/templates/openai-chat-js/Main.js index afbf924e..0c9933f3 100644 --- a/src/ai/.x/templates/openai-chat-js/Main.js +++ b/src/ai/.x/templates/openai-chat-js/Main.js @@ -36,7 +36,10 @@ async function main() { } main().catch((err) => { - console.error("The sample encountered an error:", err); + if (err.code !== 'ERR_USE_AFTER_CLOSE') { // filter out expected error (EOF on redirected input) + console.error("The sample encountered an error:", err); + process.exit(1); + } }); module.exports = { main }; diff --git a/src/ai/.x/templates/openai-chat-py/openai_chat_completions.py 
b/src/ai/.x/templates/openai-chat-py/openai_chat_completions.py index 10229797..49cca028 100644 --- a/src/ai/.x/templates/openai-chat-py/openai_chat_completions.py +++ b/src/ai/.x/templates/openai-chat-py/openai_chat_completions.py @@ -5,8 +5,9 @@ <#@ parameter type="System.String" name="AZURE_OPENAI_API_VERSION" #> <#@ parameter type="System.String" name="AZURE_OPENAI_CHAT_DEPLOYMENT" #> <#@ parameter type="System.String" name="AZURE_OPENAI_SYSTEM_PROMPT" #> -import os from openai import AzureOpenAI +import os +import sys openai_api_version = os.getenv('AZURE_OPENAI_API_VERSION', '<#= AZURE_OPENAI_API_VERSION #>') openai_endpoint = os.getenv('AZURE_OPENAI_ENDPOINT', '<#= AZURE_OPENAI_ENDPOINT #>') @@ -37,10 +38,20 @@ def get_chat_completions(user_input) -> str: return response_content -while True: - user_input = input('User: ') - if user_input == 'exit' or user_input == '': - break - - response_content = get_chat_completions(user_input) - print(f"\nAssistant: {response_content}\n") \ No newline at end of file +def main(): + while True: + user_input = input('User: ') + if user_input == 'exit' or user_input == '': + break + + response_content = get_chat_completions(user_input) + print(f"\nAssistant: {response_content}\n") + +if __name__ == '__main__': + try: + main() + except EOFError: + pass + except Exception as e: + print(f"The sample encountered an error: {e}") + sys.exit(1) \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-streaming-java/src/Main.java b/src/ai/.x/templates/openai-chat-streaming-java/src/Main.java index a3d3a0ca..643227fd 100644 --- a/src/ai/.x/templates/openai-chat-streaming-java/src/Main.java +++ b/src/ai/.x/templates/openai-chat-streaming-java/src/Main.java @@ -22,6 +22,8 @@ public static void main(String[] args) { Scanner scanner = new Scanner(System.in); while (true) { System.out.print("User: "); + if (!scanner.hasNextLine()) break; + String userPrompt = scanner.nextLine(); if (userPrompt.isEmpty() || 
"exit".equals(userPrompt)) break; diff --git a/src/ai/.x/templates/openai-chat-streaming-java/src/OpenAIChatCompletionsStreamingClass.java b/src/ai/.x/templates/openai-chat-streaming-java/src/OpenAIChatCompletionsStreamingClass.java index 294a0470..227e29b8 100644 --- a/src/ai/.x/templates/openai-chat-streaming-java/src/OpenAIChatCompletionsStreamingClass.java +++ b/src/ai/.x/templates/openai-chat-streaming-java/src/OpenAIChatCompletionsStreamingClass.java @@ -3,15 +3,7 @@ <#@ parameter type="System.String" name="ClassName" #> import com.azure.ai.openai.OpenAIAsyncClient; import com.azure.ai.openai.OpenAIClientBuilder; -import com.azure.ai.openai.models.ChatChoice; -import com.azure.ai.openai.models.ChatCompletions; -import com.azure.ai.openai.models.ChatCompletionsOptions; -import com.azure.ai.openai.models.ChatRequestAssistantMessage; -import com.azure.ai.openai.models.ChatRequestMessage; -import com.azure.ai.openai.models.ChatRequestSystemMessage; -import com.azure.ai.openai.models.ChatRequestUserMessage; -import com.azure.ai.openai.models.ChatResponseMessage; -import com.azure.ai.openai.models.CompletionsFinishReason; +import com.azure.ai.openai.models.*; import com.azure.core.credential.AzureKeyCredential; import reactor.core.publisher.Flux; diff --git a/src/ai/.x/templates/openai-chat-streaming-js/Main.js b/src/ai/.x/templates/openai-chat-streaming-js/Main.js index 6bad2289..ca55cc11 100644 --- a/src/ai/.x/templates/openai-chat-streaming-js/Main.js +++ b/src/ai/.x/templates/openai-chat-streaming-js/Main.js @@ -39,7 +39,10 @@ async function main() { } main().catch((err) => { - console.error("The sample encountered an error:", err); + if (err.code !== 'ERR_USE_AFTER_CLOSE') { // filter out expected error (EOF on redirected input) + console.error("The sample encountered an error:", err); + process.exit(1); + } }); module.exports = { main }; diff --git a/src/ai/.x/templates/openai-chat-streaming-py/main.py b/src/ai/.x/templates/openai-chat-streaming-py/main.py 
index 68ec2c93..574cab71 100644 --- a/src/ai/.x/templates/openai-chat-streaming-py/main.py +++ b/src/ai/.x/templates/openai-chat-streaming-py/main.py @@ -8,6 +8,7 @@ <#@ parameter type="System.String" name="AZURE_OPENAI_SYSTEM_PROMPT" #> from openai_chat_completions_streaming import <#= ClassName #> import os +import sys def main(): openai_api_version = os.getenv('AZURE_OPENAI_API_VERSION', '<#= AZURE_OPENAI_API_VERSION #>') @@ -30,5 +31,8 @@ def main(): if __name__ == '__main__': try: main() + except EOFError: + pass except Exception as e: - print(f"The sample encountered an error: {e}") \ No newline at end of file + print(f"The sample encountered an error: {e}") + sys.exit(1) \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-streaming-with-data-js/Main.js b/src/ai/.x/templates/openai-chat-streaming-with-data-js/Main.js index fa1ee010..2123e1fe 100644 --- a/src/ai/.x/templates/openai-chat-streaming-with-data-js/Main.js +++ b/src/ai/.x/templates/openai-chat-streaming-with-data-js/Main.js @@ -50,7 +50,10 @@ async function main() { } main().catch((err) => { - console.error("The sample encountered an error:", err); + if (err.code !== 'ERR_USE_AFTER_CLOSE') { // filter out expected error (EOF on redirected input) + console.error("The sample encountered an error:", err); + process.exit(1); + } }); module.exports = { main }; diff --git a/src/ai/.x/templates/openai-chat-streaming-with-data-py/main.py b/src/ai/.x/templates/openai-chat-streaming-with-data-py/main.py index ad2ba210..210ba757 100644 --- a/src/ai/.x/templates/openai-chat-streaming-with-data-py/main.py +++ b/src/ai/.x/templates/openai-chat-streaming-with-data-py/main.py @@ -12,6 +12,7 @@ <#@ parameter type="System.String" name="AZURE_OPENAI_EMBEDDING_DEPLOYMENT" #> from openai_chat_completions_with_data_streaming import <#= ClassName #> import os +import sys def main(): openai_api_version = os.getenv('AZURE_OPENAI_API_VERSION', '<#= AZURE_OPENAI_API_VERSION #>') @@ -39,5 +40,8 @@ def 
main(): if __name__ == '__main__': try: main() + except EOFError: + pass except Exception as e: - print(f"The sample encountered an error: {e}") \ No newline at end of file + print(f"The sample encountered an error: {e}") + sys.exit(1) \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-streaming-with-functions-js/Main.js b/src/ai/.x/templates/openai-chat-streaming-with-functions-js/Main.js index 4ae24cf4..20c1cab3 100644 --- a/src/ai/.x/templates/openai-chat-streaming-with-functions-js/Main.js +++ b/src/ai/.x/templates/openai-chat-streaming-with-functions-js/Main.js @@ -39,7 +39,10 @@ async function main() { } main().catch((err) => { - console.error("The sample encountered an error:", err); + if (err.code !== 'ERR_USE_AFTER_CLOSE') { // filter out expected error (EOF on redirected input) + console.error("The sample encountered an error:", err); + process.exit(1); + } }); module.exports = { main }; diff --git a/src/ai/.x/templates/openai-chat-streaming-with-functions-py/main.py b/src/ai/.x/templates/openai-chat-streaming-with-functions-py/main.py index 0a77a8b9..112f102f 100644 --- a/src/ai/.x/templates/openai-chat-streaming-with-functions-py/main.py +++ b/src/ai/.x/templates/openai-chat-streaming-with-functions-py/main.py @@ -8,6 +8,7 @@ from openai_chat_completions_custom_functions import factory from openai_chat_completions_functions_streaming import OpenAIChatCompletionsFunctionsStreaming import os +import sys def main(): openai_api_version = os.getenv('AZURE_OPENAI_API_VERSION', '<#= AZURE_OPENAI_API_VERSION #>') @@ -30,5 +31,8 @@ def main(): if __name__ == '__main__': try: main() + except EOFError: + pass except Exception as e: - print(f"The sample encountered an error: {e}") \ No newline at end of file + print(f"The sample encountered an error: {e}") + sys.exit(1) \ No newline at end of file diff --git a/src/ai/Program_AI.cs b/src/ai/Program_AI.cs index afcaa983..40c850fb 100644 --- a/src/ai/Program_AI.cs +++ b/src/ai/Program_AI.cs @@ 
-19,8 +19,45 @@ public class AiProgram { static int Main(string[] args) { - return Program.Main(new AiProgramData(), args); + var debug = args.Length > 0 && args[0] == "debug"; + + if (debug) StartStopWatch(); + var result = Program.Main(new AiProgramData(), args); + if (debug) StopStopWatch(); + + return result; + } + + static void StartStopWatch() + { + Console.WriteLine($"StopWatch: Started at {DateTime.Now}"); + _stopwatch = new Stopwatch(); + _stopwatch.Start(); + } + + static void StopStopWatch() + { + _stopwatch.Stop(); + Console.WriteLine($"StopWatch: Stopped at {DateTime.Now} ({GetStopWatchElapsedAsString()})"); + } + + static string GetStopWatchElapsedAsString() + { + var elapsed = _stopwatch.Elapsed; + var elapsedMilliseconds = elapsed.TotalMilliseconds; + var elapsedSeconds = elapsed.TotalSeconds; + var elapsedMinutes = elapsed.TotalMinutes; + var elapsedHours = elapsed.TotalHours; + + var elapsedString = elapsedSeconds < 1 ? $"{elapsedMilliseconds} ms" + : elapsedMinutes < 1 ? $"{elapsedSeconds:0.00} sec" + : elapsedHours < 1 ? $"{elapsedMinutes:0.00} min" + : $"{elapsedHours:0.00} hr"; + + return elapsedString; } + + static Stopwatch _stopwatch = null; } public class AiProgramData : IProgramData diff --git a/tests/test3.yaml b/tests/test3.yaml index 6d5793e4..9a0a3e10 100644 --- a/tests/test3.yaml +++ b/tests/test3.yaml @@ -2,14 +2,14 @@ command: ai dev new .env - class: dev new helper-functions - tests: - - name: 1-generate template + steps: + - name: generate template command: ai dev new helper-functions - - name: 2-build template + - name: build template script: | cd helper-functions dotnet build - - name: 3-run template + - name: run template command: ai chat --interactive --helper-functions helper-functions/bin/Debug/net7.0/HelperFunctionsProject.dll input: | What is my name? 
@@ -21,14 +21,14 @@ tests: - class: dev new openai-chat (c#) - tests: - - name: 1-generate template + steps: + - name: generate template command: ai dev new openai-chat --cs - - name: 2-build template + - name: build template script: | cd openai-chat-cs dotnet build - - name: 3-run template + - name: run template command: ai dev shell --run "openai-chat-cs\bin\Debug\net7.0\OpenAIChatCompletions" input: |- Tell me a joke @@ -36,14 +36,14 @@ tag: skip - class: dev new openai-chat (go) - tests: - - name: 1-generate template + steps: + - name: generate template command: ai dev new openai-chat --go - - name: 2-build template + - name: build template script: | cd openai-chat-go go mod tidy && go build - - name: 3-run template + - name: run template command: ai dev shell --run "openai-chat-go\openai_chat_completions_hello_world" input: |- Tell me a joke @@ -51,33 +51,33 @@ tag: skip - class: dev new openai-chat (java) - tests: - - name: 1-generate template + steps: + - name: generate template command: ai dev new openai-chat --java - - name: 2-restore packages + - name: restore packages script: | cd openai-chat-java mvn clean package - - name: 3-build template + - name: build template script: | cd openai-chat-java - javac -cp "target/lib/*" src/OpenAIQuickstart.java -d out - - name: 4-run template - command: ai dev shell --run "cd openai-chat-java && java -cp \"out;target/lib/*\" OpenAIQuickstart" + javac -cp "target/lib/*" src/OpenAIChatCompletionsClass.java src/Main.java -d out + - name: run template + command: ai dev shell --run "cd openai-chat-java && java -cp \"out;target/lib/*\" Main" input: |- Tell me a joke Tell me another joke tag: skip - class: dev new openai-chat (javascript) - tests: - - name: 1-generate template + steps: + - name: generate template command: ai dev new openai-chat --javascript - - name: 2-build template + - name: build template script: | cd openai-chat-js npm install - - name: 3-run template + - name: run template command: ai dev shell --run 
"cd openai-chat-js && node main.js" input: |- Tell me a joke @@ -85,14 +85,14 @@ tag: skip - class: dev new openai-chat (python) - tests: - - name: 1-generate template + steps: + - name: generate template command: ai dev new openai-chat --python - - name: 2-build template + - name: build template script: | cd openai-chat-py pip install -r requirements.txt - - name: 3-run template + - name: run template command: ai dev shell --run "cd openai-chat-py && python openai_chat_completions.py" input: |- Tell me a joke @@ -103,14 +103,14 @@ tests: - class: dev new openai-chat-streaming (c#) - tests: - - name: 1-generate template + steps: + - name: generate template command: ai dev new openai-chat-streaming --cs - - name: 2-build template + - name: build template script: | cd openai-chat-streaming-cs dotnet build - - name: 3-run template + - name: run template command: ai dev shell --run "openai-chat-streaming-cs\bin\Debug\net7.0\OpenAIChatCompletionsStreaming" input: |- Tell me a joke @@ -118,14 +118,14 @@ tag: skip - class: dev new openai-chat-streaming (go) - tests: - - name: 1-generate template + steps: + - name: generate template command: ai dev new openai-chat-streaming --go - - name: 2-build template + - name: build template script: | cd openai-chat-streaming-go go mod tidy && go build - - name: 3-run template + - name: run template command: ai dev shell --run "openai-chat-streaming-go\openai_chat_completions_streaming_hello_world" input: |- Tell me a joke @@ -133,19 +133,19 @@ tag: skip - class: dev new openai-chat-streaming (java) - tests: - - name: 1-generate template + steps: + - name: generate template command: ai dev new openai-chat-streaming --java - - name: 2-restore packages + - name: restore packages script: | cd openai-chat-streaming-java mvn clean package - - name: 3-build template + - name: build template script: | cd openai-chat-streaming-java - javac -cp "target/lib/*" src/OpenAIQuickstartStreaming.java -d out - - name: 4-run template - command: ai dev 
shell --run "cd openai-chat-streaming-java && java -cp \"out;target/lib/*\" OpenAIQuickstartStreaming" + javac -cp "target/lib/*" src/OpenAIChatCompletionsStreamingClass.java src/Main.java -d out + - name: run template + command: ai dev shell --run "cd openai-chat-streaming-java && java -cp \"out;target/lib/*\" Main" input: |- Tell me a joke Tell me another joke @@ -154,14 +154,14 @@ command: ai dev new openai-chat-streaming --java - class: dev new openai-chat-streaming (javascript) - tests: - - name: 1-generate template + steps: + - name: generate template command: ai dev new openai-chat-streaming --javascript - - name: 2-build template + - name: build template script: | cd openai-chat-streaming-js npm install - - name: 3-run template + - name: run template command: ai dev shell --run "cd openai-chat-streaming-js && node main.js" input: |- Tell me a joke @@ -169,14 +169,14 @@ tag: skip - class: dev new openai-chat-streaming (python) - tests: - - name: 1-generate template + steps: + - name: generate template command: ai dev new openai-chat-streaming --python - - name: 2-build template + - name: build template script: | cd openai-chat-streaming-py pip install -r requirements.txt - - name: 3-run template + - name: run template command: ai dev shell --run "cd openai-chat-streaming-py && python main.py" input: |- Tell me a joke @@ -187,42 +187,42 @@ tests: - class: dev new openai-chat-streaming-with-data (c#) - tests: - - name: 1-generate template + steps: + - name: generate template command: ai dev new openai-chat-streaming-with-data --cs - - name: 2-build template + - name: build template script: | cd openai-chat-streaming-with-data-cs dotnet build - - name: 3-run template + - name: run template command: ai dev shell --run "openai-chat-streaming-with-data-cs\bin\Debug\net7.0\OpenAIChatCompletionsWithDataStreaming" input: |- What parameter should i use to initialize? 
tag: skip - class: dev new openai-chat-streaming-with-data (javascript) - tests: - - name: 1-generate template + steps: + - name: generate template command: ai dev new openai-chat-streaming-with-data --javascript - - name: 2-build template + - name: build template script: | cd openai-chat-streaming-with-data-js npm install - - name: 3-run template + - name: run template command: ai dev shell --run "cd openai-chat-streaming-with-data-js && node main.js" input: |- What parameter should i use to initialize? tag: skip - class: dev new openai-chat-streaming-with-data (python) - tests: - - name: 1-generate template + steps: + - name: generate template command: ai dev new openai-chat-streaming-with-data --python - - name: 2-build template + - name: build template script: | cd openai-chat-streaming-with-data-py pip install -r requirements.txt - - name: 3-run template + - name: run template command: ai dev shell --run "cd openai-chat-streaming-with-data-py && python main.py" input: |- What parameter should i use to initialize? @@ -232,14 +232,14 @@ tests: - class: dev new openai-chat-streaming-with-functions (c#) - tests: - - name: 1-generate template + steps: + - name: generate template command: ai dev new openai-chat-streaming-with-functions --cs - - name: 2-build template + - name: build template script: | cd openai-chat-streaming-with-functions-cs dotnet build - - name: 3-run template + - name: run template command: ai dev shell --run "cd openai-chat-streaming-with-functions-cs && bin\Debug\net7.0\OpenAIChatCompletionsFunctionsStreaming" input: |- What is the date? 
@@ -247,14 +247,14 @@ tag: skip - class: dev new openai-chat-streaming-with-functions (go) - tests: - - name: 1-generate template + steps: + - name: generate template command: ai dev new openai-chat-streaming-with-functions --go - - name: 2-build template + - name: build template script: | cd openai-chat-streaming-with-functions-go go mod tidy && go build - - name: 3-run template + - name: run template command: ai dev shell --run "cd openai-chat-streaming-with-functions-go && openai_chat_completions_functions_streaming_hello_world" input: |- What is the date? @@ -262,14 +262,14 @@ tag: skip - class: dev new openai-chat-streaming-with-functions (javascript) - tests: - - name: 1-generate template + steps: + - name: generate template command: ai dev new openai-chat-streaming-with-functions --javascript - - name: 2-build template + - name: build template script: | cd openai-chat-streaming-with-functions-js npm install - - name: 3-run template + - name: run template command: ai dev shell --run "cd openai-chat-streaming-with-functions-js && node main.js" input: |- What is the date? @@ -277,14 +277,14 @@ tag: skip - class: dev new openai-chat-streaming-with-functions (python) - tests: - - name: 1-generate template + steps: + - name: generate template command: ai dev new openai-chat-streaming-with-functions --python - - name: 2-build template + - name: build template script: | cd openai-chat-streaming-with-functions-py pip install -r requirements.txt - - name: 3-run template + - name: run template command: ai dev shell --run "cd openai-chat-streaming-with-functions-py && python main.py" input: |- What is the date? 
@@ -295,27 +295,27 @@ tests: - class: dev new openai-webpage (javascript) - tests: - - name: 1-generate template + steps: + - name: generate template command: ai dev new openai-webpage --javascript - - name: 2-build template + - name: build template script: | cd openai-webpage-js npm install - - name: 3-pack template + - name: pack template script: | cd openai-webpage-js npx webpack - class: dev new openai-webpage (typescript) - tests: - - name: 1-generate template + steps: + - name: generate template command: ai dev new openai-webpage --typescript - - name: 2-build template + - name: build template script: | cd openai-webpage-ts npm install - - name: 3-pack template + - name: pack template script: | cd openai-webpage-ts npx webpack @@ -324,27 +324,27 @@ tests: - class: dev new openai-webpage-with-functions (javascript) - tests: - - name: 1-generate template + steps: + - name: generate template command: ai dev new openai-webpage-with-functions --javascript - - name: 2-build template + - name: build template script: | cd openai-webpage-with-functions-js npm install - - name: 3-pack template + - name: pack template script: | cd openai-webpage-with-functions-js npx webpack - class: dev new openai-webpage-with-functions (typescript) - tests: - - name: 1-generate template + steps: + - name: generate template command: ai dev new openai-webpage-with-functions --typescript - - name: 2-build template + - name: build template script: | cd openai-webpage-with-functions-ts npm install - - name: 3-pack template + - name: pack template script: | cd openai-webpage-with-functions-ts npx webpack diff --git a/tests/testadapter/YamlTestAdapter.cs b/tests/testadapter/YamlTestAdapter.cs index 0981a82c..73b00e99 100644 --- a/tests/testadapter/YamlTestAdapter.cs +++ b/tests/testadapter/YamlTestAdapter.cs @@ -45,34 +45,104 @@ public static IEnumerable GetTestsFromFile(string source) public static void RunTests(IEnumerable tests, IRunContext runContext, IFrameworkHandle frameworkHandle) { 
- var parallelWorkers = Environment.ProcessorCount; - Logger.Log($"YamlTestAdapter.RunTests(): {parallelWorkers} parallel Workers"); - // Must run before, middle, and after testSets in certain order so cannot parallelize those - // Can parallelize tests within each testSet - foreach (var testSet in FilterTestCases(tests, runContext, frameworkHandle)) + var filteredBeforeMiddleAndAfterTestSets = FilterTestCases(tests, runContext, frameworkHandle); + foreach (var testSet in filteredBeforeMiddleAndAfterTestSets) { if (!testSet.Any()) continue; - var parallelTestSet = testSet.Where(test => YamlTestProperties.Get(test, "parallelize") == "true"); - var nonParallelTestSet = testSet.Where(test => YamlTestProperties.Get(test, "parallelize") != "true"); + RunAndRecordTests(frameworkHandle, testSet); + } + } - var workerBlock = new ActionBlock( - test => RunAndRecordTestCase(test, frameworkHandle), - new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = parallelWorkers }); - foreach (var test in parallelTestSet) - { - workerBlock.Post(test); - } - workerBlock.Complete(); - workerBlock.Completion.Wait(); + #region private methods + + private static void RunAndRecordTests(IFrameworkHandle frameworkHandle, IEnumerable tests) + { + InitRunAndRecordTestCaseMaps(tests, out var testFromIdMap, out var completionFromIdMap); + RunAndRecordParallelizedTestCases(frameworkHandle, testFromIdMap, completionFromIdMap, tests); + RunAndRecordRemainingTestCases(frameworkHandle, testFromIdMap, completionFromIdMap); + } + + private static void InitRunAndRecordTestCaseMaps(IEnumerable tests, out Dictionary testFromIdMap, out Dictionary> completionFromIdMap) + { + testFromIdMap = new Dictionary(); + completionFromIdMap = new Dictionary>(); + foreach (var test in tests) + { + var id = test.Id.ToString(); + testFromIdMap[id] = test; + completionFromIdMap[id] = new TaskCompletionSource(); + } + } - foreach (var test in nonParallelTestSet) + private static void 
RunAndRecordParallelizedTestCases(IFrameworkHandle frameworkHandle, Dictionary testFromIdMap, Dictionary> completionFromIdMap, IEnumerable tests) + { + var parallelTestSet = tests.Where(test => YamlTestProperties.Get(test, "parallelize") == "true"); + foreach (var test in parallelTestSet) + { + ThreadPool.QueueUserWorkItem(state => { - RunAndRecordTestCase(test, frameworkHandle); - } + var parallelTestId = test.Id.ToString(); + var parallelTest = testFromIdMap[parallelTestId]; + var parallelTestOutcome = RunAndRecordTestCase(parallelTest, frameworkHandle); + // defer setting completion outcome until all steps are complete + + var checkTest = parallelTest; + while (true) + { + var nextStepId = YamlTestProperties.Get(checkTest, "nextStepId"); + if (string.IsNullOrEmpty(nextStepId)) + { + Logger.LogInfo($"YamlTestAdapter.RunTests() ==> No nextStepId for test '{checkTest.DisplayName}'"); + break; + } + + var stepTest = testFromIdMap.ContainsKey(nextStepId) ? testFromIdMap[nextStepId] : null; + if (stepTest == null) + { + Logger.LogError($"YamlTestAdapter.RunTests() ==> ERROR: nextStepId '{nextStepId}' not found for test '{checkTest.DisplayName}'"); + break; + } + + var stepCompletion = completionFromIdMap.ContainsKey(nextStepId) ? 
completionFromIdMap[nextStepId] : null; + if (stepCompletion == null) + { + Logger.LogError($"YamlTestAdapter.RunTests() ==> ERROR: nextStepId '{nextStepId}' completion not found for test '{checkTest.DisplayName}'"); + break; + } + + var stepOutcome = RunAndRecordTestCase(stepTest, frameworkHandle); + Logger.Log($"YamlTestAdapter.RunTests() ==> Setting completion outcome for {stepTest.DisplayName} to {stepOutcome}"); + completionFromIdMap[nextStepId].SetResult(stepOutcome); + + checkTest = stepTest; + } + + // now that all steps are complete, set the completion outcome + completionFromIdMap[parallelTestId].SetResult(parallelTestOutcome); + Logger.Log($"YamlTestAdapter.RunTests() ==> Setting completion outcome for {parallelTest.DisplayName} to {parallelTestOutcome}"); + + }, test.Id); } + + Logger.Log($"YamlTestAdapter.RunTests() ==> Waiting for parallel tests to complete"); + var parallelCompletions = completionFromIdMap + .Where(x => parallelTestSet.Any(y => y.Id.ToString() == x.Key)) + .Select(x => x.Value.Task); + Task.WaitAll(parallelCompletions.ToArray()); + Logger.Log($"YamlTestAdapter.RunTests() ==> All parallel tests complete"); } - #region private methods + private static void RunAndRecordRemainingTestCases(IFrameworkHandle frameworkHandle, Dictionary testFromIdMap, Dictionary> completionFromIdMap) + { + var remainingTests = completionFromIdMap + .Where(x => x.Value.Task.Status != TaskStatus.RanToCompletion) + .Select(x => testFromIdMap[x.Key]); + foreach (var test in remainingTests) + { + var outcome = RunAndRecordTestCase(test, frameworkHandle); + completionFromIdMap[test.Id.ToString()].SetResult(outcome); + } + } private static IEnumerable GetTestsFromSource(string source, FileInfo file) { @@ -121,7 +191,7 @@ private static IEnumerable GetTestsFromYaml(string source, FileInfo fi { yield return test; } - Logger.Log($"YamlTestAdapter.GetTestsFromYaml('{source}', '{file.FullName}'): EXIT"); + Logger.Log($"YamlTestAdapter.GetTestsFromYaml('{source}', 
'{file.FullName}'): EXIT"); } private static bool IsTrait(Trait trait, string check) diff --git a/tests/testadapter/YamlTestCaseFilter.cs b/tests/testadapter/YamlTestCaseFilter.cs index 29e20362..c4f8ace2 100644 --- a/tests/testadapter/YamlTestCaseFilter.cs +++ b/tests/testadapter/YamlTestCaseFilter.cs @@ -58,6 +58,7 @@ private static object GetPropertyValue(TestCase test, string name) case "expect": return YamlTestProperties.Get(test, "expect"); case "not-expect": return YamlTestProperties.Get(test, "not-expect"); + case "parallelize": return YamlTestProperties.Get(test, "parallelize"); case "simulate": return YamlTestProperties.Get(test, "simulate"); case "timeout": return YamlTestProperties.Get(test, "timeout"); @@ -70,6 +71,6 @@ private static object GetPropertyValue(TestCase test, string name) return tags.Select(x => x.Value).ToArray(); } - private static readonly string[] supportedFilterProperties = { "DisplayName", "FullyQualifiedName", "Category", "cli", "command", "script", "foreach", "arguments", "input", "expect", "not-expect", "simulate" }; + private static readonly string[] supportedFilterProperties = { "DisplayName", "FullyQualifiedName", "Category", "cli", "command", "script", "foreach", "arguments", "input", "expect", "not-expect", "parallelize", "simulate" }; } } diff --git a/tests/testadapter/YamlTestCaseParser.cs b/tests/testadapter/YamlTestCaseParser.cs index 14c92736..ed6a850d 100644 --- a/tests/testadapter/YamlTestCaseParser.cs +++ b/tests/testadapter/YamlTestCaseParser.cs @@ -25,35 +25,30 @@ private static IEnumerable TestCasesFromYamlStream(string source, File { var tests = new List(); var defaultTags = YamlTagHelpers.GetDefaultTags(file.Directory); - var parallelize = "false"; - if (defaultTags.ContainsKey("parallelize")) - { - parallelize = defaultTags["parallelize"].Last(); - } foreach (var document in parsed?.Documents) { - var fromDocument = TestCasesFromYamlNode(source, file, document.RootNode, area, defaultClassName, defaultTags, 
parallelize); + var fromDocument = TestCasesFromYamlDocumentRootNode(source, file, document.RootNode, area, defaultClassName, defaultTags); tests.AddRange(fromDocument); } return tests; } - private static IEnumerable TestCasesFromYamlNode(string source, FileInfo file, YamlNode node, string area, string @class, Dictionary> tags, string parallelize) + private static IEnumerable TestCasesFromYamlDocumentRootNode(string source, FileInfo file, YamlNode node, string area, string @class, Dictionary> tags) { return node is YamlMappingNode - ? TestCasesFromYamlMapping(source, file, node as YamlMappingNode, area, @class, tags, parallelize) - : TestCasesFromYamlSequence(source, file, node as YamlSequenceNode, area, @class, tags, parallelize); + ? TestCasesFromYamlMapping(source, file, node as YamlMappingNode, area, @class, tags) + : TestCasesFromYamlSequence(source, file, node as YamlSequenceNode, area, @class, tags); } - private static IEnumerable TestCasesFromYamlMapping(string source, FileInfo file, YamlMappingNode mapping, string area, string @class, Dictionary> tags, string parallelize) + private static IEnumerable TestCasesFromYamlMapping(string source, FileInfo file, YamlMappingNode mapping, string area, string @class, Dictionary> tags) { - var children = CheckForChildren(source, file, mapping, area, @class, tags, parallelize); + var children = CheckForChildren(source, file, mapping, area, @class, tags); if (children != null) { return children; } - var test = GetTestFromNode(source, file, mapping, area, @class, tags, parallelize); + var test = GetTestFromNode(source, file, mapping, area, @class, tags); if (test != null) { return new[] { test }; @@ -62,14 +57,14 @@ private static IEnumerable TestCasesFromYamlMapping(string source, Fil return null; } - private static IEnumerable TestCasesFromYamlSequence(string source, FileInfo file, YamlSequenceNode sequence, string area, string @class, Dictionary> tags, string parallelize) + private static IEnumerable 
TestCasesFromYamlSequence(string source, FileInfo file, YamlSequenceNode sequence, string area, string @class, Dictionary> tags) { var tests = new List(); if (sequence == null) return tests; foreach (YamlMappingNode mapping in sequence.Children) { - var fromMapping = TestCasesFromYamlMapping(source, file, mapping, area, @class, tags, parallelize); + var fromMapping = TestCasesFromYamlMapping(source, file, mapping, area, @class, tags); if (fromMapping != null) { tests.AddRange(fromMapping); @@ -79,24 +74,21 @@ private static IEnumerable TestCasesFromYamlSequence(string source, Fi return tests; } - private static TestCase GetTestFromNode(string source, FileInfo file, YamlMappingNode mapping, string area, string @class, Dictionary> tags, string parallelize) + private static TestCase GetTestFromNode(string source, FileInfo file, YamlMappingNode mapping, string area, string @class, Dictionary> tags, int stepNumber = 0) { - string simulate = GetScalarString(mapping, "simulate"); - var simulating = !string.IsNullOrEmpty(simulate); - string cli = GetScalarString(mapping, tags, "cli"); + string parallelize = GetScalarString(mapping, tags, "parallelize"); - string currentParallelize = GetScalarString(mapping, "parallelize"); - parallelize = currentParallelize == null ? parallelize : currentParallelize; - + string simulate = GetScalarString(mapping, "simulate"); string command = GetScalarString(mapping, "command"); string script = GetScalarString(mapping, "script"); string fullyQualifiedName = command == null && script == null - ? GetFullyQualifiedNameAndCommandFromShortForm(mapping, area, @class, ref command) - : GetFullyQualifiedName(mapping, area, @class); - fullyQualifiedName ??= GetFullyQualifiedName(area, @class, $"Expected YAML node ('name') at {file.FullName}({mapping.Start.Line})"); + ? 
GetFullyQualifiedNameAndCommandFromShortForm(mapping, area, @class, ref command, stepNumber) + : GetFullyQualifiedName(mapping, area, @class, stepNumber); + fullyQualifiedName ??= GetFullyQualifiedName(area, @class, $"Expected YAML node ('name') at {file.FullName}({mapping.Start.Line})", 0); + var simulating = !string.IsNullOrEmpty(simulate); var neitherOrBoth = (command == null) == (script == null); if (neitherOrBoth && !simulating) { @@ -138,19 +130,51 @@ private static TestCase GetTestFromNode(string source, FileInfo file, YamlMappin return test; } - private static IEnumerable CheckForChildren(string source, FileInfo file, YamlMappingNode mapping, string area, string @class, Dictionary> tags, string parallelize) + private static IEnumerable CheckForChildren(string source, FileInfo file, YamlMappingNode mapping, string area, string @class, Dictionary> tags) { - var sequence = mapping.Children.ContainsKey("tests") - ? mapping.Children["tests"] as YamlSequenceNode - : null; - if (sequence == null) return null; + if (mapping.Children.ContainsKey("steps") && mapping.Children["steps"] is YamlSequenceNode stepsSequence) + { + @class = GetScalarString(mapping, "class", @class); + area = UpdateArea(mapping, area); + tags = YamlTagHelpers.UpdateCopyTags(tags, mapping); - @class = GetScalarString(mapping, "class", @class); - area = UpdateArea(mapping, area); - tags = YamlTagHelpers.UpdateCopyTags(tags, mapping); - parallelize = GetParallelizeTag(mapping, parallelize); + return TestCasesFromYamlSequenceOfSteps(source, file, stepsSequence, area, @class, tags); + } + + if (mapping.Children.ContainsKey("tests") && mapping.Children["tests"] is YamlSequenceNode testsSequence) + { + @class = GetScalarString(mapping, "class", @class); + area = UpdateArea(mapping, area); + tags = YamlTagHelpers.UpdateCopyTags(tags, mapping); + + return TestCasesFromYamlSequence(source, file, testsSequence, area, @class, tags).ToList(); + } - return TestCasesFromYamlSequence(source, file, sequence, 
area, @class, tags, parallelize); + return null; + } + + private static IEnumerable TestCasesFromYamlSequenceOfSteps(string source, FileInfo file, YamlSequenceNode sequence, string area, string @class, Dictionary> tags) + { + var tests = new List(); + for (int i = 0; i < sequence.Children.Count; i++) + { + var mapping = sequence.Children[i] as YamlMappingNode; + var test = GetTestFromNode(source, file, mapping, area, @class, tags, i + 1); + tests.Add(test); + } + + if (tests.Count > 0) + { + SetTestCaseProperty(tests[0], "parallelize", "true"); + } + + for (int i = 1; i < tests.Count; i++) + { + SetTestCaseProperty(tests[i - 1], "nextStepId", tests[i].Id.ToString()); + SetTestCaseProperty(tests[i], "parallelize", "false"); + } + + return tests; } private static void CheckInvalidTestCaseNodes(FileInfo file, YamlMappingNode mapping, TestCase test) @@ -169,7 +193,7 @@ private static void CheckInvalidTestCaseNodes(FileInfo file, YamlMappingNode map private static bool IsValidTestCaseNode(string value) { - return ";area;class;name;cli;command;script;timeout;foreach;arguments;input;expect;not-expect;simulate;tag;tags;parallelize;workingDirectory;".IndexOf($";{value};") >= 0; + return ";area;class;name;cli;command;script;timeout;foreach;arguments;input;expect;not-expect;parallelize;simulate;tag;tags;workingDirectory;".IndexOf($";{value};") >= 0; } private static void SetTestCaseProperty(TestCase test, string propertyName, YamlMappingNode mapping, string mappingName) @@ -368,7 +392,7 @@ private static string UpdateArea(YamlMappingNode mapping, string area) : $"{area}.{subArea}"; } - private static string GetFullyQualifiedName(YamlMappingNode mapping, string area, string @class) + private static string GetFullyQualifiedName(YamlMappingNode mapping, string area, string @class, int stepNumber) { var name = GetScalarString(mapping, "name"); if (name == null) return null; @@ -376,10 +400,10 @@ private static string GetFullyQualifiedName(YamlMappingNode mapping, string area area 
= UpdateArea(mapping, area); @class = GetScalarString(mapping, "class", @class); - return GetFullyQualifiedName(area, @class, name); + return GetFullyQualifiedName(area, @class, name, stepNumber); } - private static string GetFullyQualifiedNameAndCommandFromShortForm(YamlMappingNode mapping, string area, string @class, ref string command) + private static string GetFullyQualifiedNameAndCommandFromShortForm(YamlMappingNode mapping, string area, string @class, ref string command, int stepNumber) { // if there's only one invalid mapping node, we'll treat it's key as "name" and value as "command" var invalid = mapping.Children.Keys.Where(key => !IsValidTestCaseNode((key as YamlScalarNode).Value)); @@ -392,21 +416,17 @@ private static string GetFullyQualifiedNameAndCommandFromShortForm(YamlMappingNo area = UpdateArea(mapping, area); @class = GetScalarString(mapping, "class", @class); - return GetFullyQualifiedName(area, @class, name); + return GetFullyQualifiedName(area, @class, name, stepNumber); } return null; } - private static string GetFullyQualifiedName(string area, string @class, string name) - { - return $"{area}.{@class}.{name}"; - } - - private static string GetParallelizeTag(YamlMappingNode mapping, string currentParallelize) + private static string GetFullyQualifiedName(string area, string @class, string name, int stepNumber) { - var parallelizeNode = mapping.Children.ContainsKey("parallelize") ? mapping.Children["parallelize"] : null; - return parallelizeNode == null ? currentParallelize : (parallelizeNode as YamlScalarNode)?.Value; + return stepNumber > 0 + ? 
$"{area}.{@class}.{stepNumber:D2}.{name}" + : $"{area}.{@class}.{name}"; } private static void SetTestCaseTagsAsTraits(TestCase test, Dictionary> tags) diff --git a/tests/testadapter/YamlTestProperties.cs b/tests/testadapter/YamlTestProperties.cs index 89c23cd0..204d6de1 100644 --- a/tests/testadapter/YamlTestProperties.cs +++ b/tests/testadapter/YamlTestProperties.cs @@ -39,6 +39,7 @@ private static TestProperty RegisterTestCaseProperty(string name) { "command", RegisterTestCaseProperty("Command") }, { "script", RegisterTestCaseProperty("Script") }, { "parallelize", RegisterTestCaseProperty("Parallelize") }, + { "nextStepId", RegisterTestCaseProperty("nextStepId") }, { "foreach", RegisterTestCaseProperty("ForEach") }, { "arguments", RegisterTestCaseProperty("Arguments") }, { "input", RegisterTestCaseProperty("Input")}, From d93a8fe2a0c43fb1c01b7c299f76e88ca64a838e Mon Sep 17 00:00:00 2001 From: Rob Chambers Date: Mon, 22 Jan 2024 15:51:27 -0800 Subject: [PATCH 16/30] try `ai init speech` in tests (#157) * try `ai init speech` in tests * added skipOnFailure * remove debug output * try this * bash in azcli task not ps * try doing ai init from azcli task * run tests under AI_CLI_TestAdapter service connection via AzureCLI * fix quotes problem on linux * try to fix problem --- .azure/pipelines/build.yaml | 20 ++++++++++---------- src/common/details/commands/init_command.cs | 14 +++++++++++++- tests/test3.yaml | 11 ++++++++++- tests/testadapter/YamlTestCaseFilter.cs | 3 ++- tests/testadapter/YamlTestCaseParser.cs | 4 +++- tests/testadapter/YamlTestCaseRunner.cs | 9 ++++++--- tests/testadapter/YamlTestProperties.cs | 1 + 7 files changed, 45 insertions(+), 17 deletions(-) diff --git a/.azure/pipelines/build.yaml b/.azure/pipelines/build.yaml index ef06de87..9e20f9ec 100644 --- a/.azure/pipelines/build.yaml +++ b/.azure/pipelines/build.yaml @@ -284,19 +284,19 @@ stages: # ----------------------------------------------------------------------------- # Run the tests # 
----------------------------------------------------------------------------- - - task: DotNetCoreCLI@2 + - task: AzureCLI@2 displayName: Run ai-cli tests continueOnError: true inputs: - includeNuGetOrg: false - command: test - version: '7.0.x' - arguments: - --logger:trx;LogFileName="$(TestRunTrxFileName)" - --logger:console;verbosity=normal - --filter "$(TestFilter)" - "$(LocalBinOutputPath)/$(BuildConfiguration)/net7.0/Azure.AI.CLI.TestAdapter.dll" - workingDirectory: '$(TestResultsPath)' + azureSubscription: 'AI_CLI_TestAdapter' + scriptType: 'bash' + scriptLocation: 'inlineScript' + inlineScript: | + az --version + az account show + cd $(TestResultsPath) + echo dotnet test --logger trx --results-directory "$(Agent.TempDirectory)" --logger:"trx;LogFileName=$(TestRunTrxFileName)" --logger:"console;verbosity=normal" --filter "$(TestFilter)" "$(LocalBinOutputPath)/$(BuildConfiguration)/net7.0/Azure.AI.CLI.TestAdapter.dll" + dotnet test --logger trx --results-directory "$(Agent.TempDirectory)" --logger:"trx;LogFileName=$(TestRunTrxFileName)" --logger:"console;verbosity=normal" --filter "$(TestFilter)" "$(LocalBinOutputPath)/$(BuildConfiguration)/net7.0/Azure.AI.CLI.TestAdapter.dll" # ----------------------------------------------------------------------------- # Archive and publish the test run backup artifact diff --git a/src/common/details/commands/init_command.cs b/src/common/details/commands/init_command.cs index 029f6d1d..1f273c82 100644 --- a/src/common/details/commands/init_command.cs +++ b/src/common/details/commands/init_command.cs @@ -59,7 +59,6 @@ private async Task DoCommand(string command) CheckPath(); var interactive = _values.GetOrDefault("init.service.interactive", true); - if (!interactive) ThrowInteractiveNotSupportedApplicationException(); // POST-IGNITE: TODO: Add back non-interactive mode support switch (command) { @@ -94,6 +93,7 @@ private async Task DoCommand(string command) private async Task DoInitRootAsync() { var interactive = 
_values.GetOrDefault("init.service.interactive", true); + if (!interactive) ThrowInteractiveNotSupportedApplicationException(); // POST-IGNITE: TODO: Add back non-interactive mode support ConsoleHelpers.WriteLineWithHighlight("`AI INIT`\n\n Initializes (creates, selects, or attaches to) AI Projects and services.\n"); @@ -347,6 +347,8 @@ private async Task DoInitSubscriptionId(bool interactive) private async Task DoInitRootHubResource(bool interactive) { + if (!interactive) ThrowInteractiveNotSupportedApplicationException(); // POST-IGNITE: TODO: Add back non-interactive mode support + await DoInitSubscriptionId(interactive); await DoInitHubResource(interactive); } @@ -359,6 +361,8 @@ private async Task DoInitHubResource(bool interactive) private async Task DoInitRootProject(bool interactive, bool allowCreate = true, bool allowPick = true) { + if (!interactive) ThrowInteractiveNotSupportedApplicationException(); // POST-IGNITE: TODO: Add back non-interactive mode support + await DoInitSubscriptionId(interactive); await DoInitProject(interactive, allowCreate, allowPick); } @@ -387,6 +391,8 @@ private async Task DoInitProject(bool interactive, bool allowCreate = true, bool private async Task DoInitRootOpenAi(bool interactive) { + if (!interactive) ThrowInteractiveNotSupportedApplicationException(); // POST-IGNITE: TODO: Add back non-interactive mode support + await DoInitSubscriptionId(interactive); await DoInitOpenAi(interactive); } @@ -416,6 +422,8 @@ private async Task DoInitOpenAi(bool interactive, bool allowSkipDeployments = tr private async Task DoInitRootCognitiveServicesAIServicesKind(bool interactive) { + if (!interactive) ThrowInteractiveNotSupportedApplicationException(); // POST-IGNITE: TODO: Add back non-interactive mode support + await DoInitSubscriptionId(interactive); await DoInitCognitiveServicesAIServicesKind(interactive); } @@ -444,6 +452,8 @@ private async Task DoInitCognitiveServicesAIServicesKind(bool interactive, bool private async Task 
DoInitRootCognitiveServicesCognitiveServicesKind(bool interactive) { + if (!interactive) ThrowInteractiveNotSupportedApplicationException(); // POST-IGNITE: TODO: Add back non-interactive mode support + await DoInitSubscriptionId(interactive); await DoInitCognitiveServicesCognitiveServicesKind(interactive); } @@ -470,6 +480,8 @@ private async Task DoInitCognitiveServicesCognitiveServicesKind(bool interactive private async Task DoInitRootSearch(bool interactive) { + if (!interactive) ThrowInteractiveNotSupportedApplicationException(); // POST-IGNITE: TODO: Add back non-interactive mode support + await DoInitSubscriptionId(interactive); await DoInitSearch(interactive, false); } diff --git a/tests/test3.yaml b/tests/test3.yaml index 9a0a3e10..957dc8ba 100644 --- a/tests/test3.yaml +++ b/tests/test3.yaml @@ -1,4 +1,13 @@ -- name: dev new environment +- class: setup + tags: [before] + steps: + - name: az login + script: az login --identity + skipOnFailure: true + - name: ai init + command: ai init speech --subscription e72e5254-f265-4e95-9bd2-9ee8e7329051 --name robch-cranky-red-koala-ais --interactive false + +- name: dev new environment command: ai dev new .env - class: dev new helper-functions diff --git a/tests/testadapter/YamlTestCaseFilter.cs b/tests/testadapter/YamlTestCaseFilter.cs index c4f8ace2..25a22d7d 100644 --- a/tests/testadapter/YamlTestCaseFilter.cs +++ b/tests/testadapter/YamlTestCaseFilter.cs @@ -60,6 +60,7 @@ private static object GetPropertyValue(TestCase test, string name) case "parallelize": return YamlTestProperties.Get(test, "parallelize"); case "simulate": return YamlTestProperties.Get(test, "simulate"); + case "skipOnFailure": return YamlTestProperties.Get(test, "skipOnFailure"); case "timeout": return YamlTestProperties.Get(test, "timeout"); case "working-directory": return YamlTestProperties.Get(test, "working-directory"); @@ -71,6 +72,6 @@ private static object GetPropertyValue(TestCase test, string name) return tags.Select(x => 
x.Value).ToArray(); } - private static readonly string[] supportedFilterProperties = { "DisplayName", "FullyQualifiedName", "Category", "cli", "command", "script", "foreach", "arguments", "input", "expect", "not-expect", "parallelize", "simulate" }; + private static readonly string[] supportedFilterProperties = { "DisplayName", "FullyQualifiedName", "Category", "cli", "command", "script", "foreach", "arguments", "input", "expect", "not-expect", "parallelize", "simulate", "skipOnFailure" }; } } diff --git a/tests/testadapter/YamlTestCaseParser.cs b/tests/testadapter/YamlTestCaseParser.cs index ed6a850d..a772198a 100644 --- a/tests/testadapter/YamlTestCaseParser.cs +++ b/tests/testadapter/YamlTestCaseParser.cs @@ -78,6 +78,7 @@ private static TestCase GetTestFromNode(string source, FileInfo file, YamlMappin { string cli = GetScalarString(mapping, tags, "cli"); string parallelize = GetScalarString(mapping, tags, "parallelize"); + string skipOnFailure = GetScalarString(mapping, tags, "skipOnFailure"); string simulate = GetScalarString(mapping, "simulate"); string command = GetScalarString(mapping, "command"); @@ -110,6 +111,7 @@ private static TestCase GetTestFromNode(string source, FileInfo file, YamlMappin SetTestCaseProperty(test, "script", script); SetTestCaseProperty(test, "simulate", simulate); SetTestCaseProperty(test, "parallelize", parallelize); + SetTestCaseProperty(test, "skipOnFailure", skipOnFailure); var timeout = GetScalarString(mapping, tags, "timeout", YamlTestAdapter.DefaultTimeout); SetTestCaseProperty(test, "timeout", timeout); @@ -193,7 +195,7 @@ private static void CheckInvalidTestCaseNodes(FileInfo file, YamlMappingNode map private static bool IsValidTestCaseNode(string value) { - return ";area;class;name;cli;command;script;timeout;foreach;arguments;input;expect;not-expect;parallelize;simulate;tag;tags;workingDirectory;".IndexOf($";{value};") >= 0; + return 
";area;class;name;cli;command;script;timeout;foreach;arguments;input;expect;not-expect;parallelize;simulate;skipOnFailure;tag;tags;workingDirectory;".IndexOf($";{value};") >= 0; } private static void SetTestCaseProperty(TestCase test, string propertyName, YamlMappingNode mapping, string mappingName) diff --git a/tests/testadapter/YamlTestCaseRunner.cs b/tests/testadapter/YamlTestCaseRunner.cs index 37339886..1413b953 100644 --- a/tests/testadapter/YamlTestCaseRunner.cs +++ b/tests/testadapter/YamlTestCaseRunner.cs @@ -71,6 +71,7 @@ private static IEnumerable TestCaseGetResults(TestCase test) var workingDirectory = YamlTestProperties.Get(test, "working-directory"); var timeout = int.Parse(YamlTestProperties.Get(test, "timeout")); var simulate = YamlTestProperties.Get(test, "simulate"); + var skipOnFailure = YamlTestProperties.Get(test, "skipOnFailure") switch { "true" => true, _ => false }; var basePath = new FileInfo(test.CodeFilePath).DirectoryName; workingDirectory = Path.Combine(basePath, workingDirectory ?? ""); @@ -85,7 +86,7 @@ private static IEnumerable TestCaseGetResults(TestCase test) var start = DateTime.Now; var outcome = string.IsNullOrEmpty(simulate) - ? RunTestCase(test, cli, command, script, foreachItem, arguments, input, expect, notExpect, workingDirectory, timeout, out string stdOut, out string stdErr, out string errorMessage, out string stackTrace, out string additional, out string debugTrace) + ? 
RunTestCase(test, skipOnFailure, cli, command, script, foreachItem, arguments, input, expect, notExpect, workingDirectory, timeout, out string stdOut, out string stdErr, out string errorMessage, out string stackTrace, out string additional, out string debugTrace) : SimulateTestCase(test, simulate, cli, command, script, foreachItem, arguments, input, expect, notExpect, workingDirectory, out stdOut, out stdErr, out errorMessage, out stackTrace, out additional, out debugTrace); #if DEBUG @@ -205,7 +206,7 @@ private static Dictionary DupAndAdd(Dictionary d return dup; } - private static TestOutcome RunTestCase(TestCase test, string cli, string command, string script, string @foreach, string arguments, string input, string expect, string notExpect, string workingDirectory, int timeout, out string stdOut, out string stdErr, out string errorMessage, out string stackTrace, out string additional, out string debugTrace) + private static TestOutcome RunTestCase(TestCase test, bool skipOnFailure, string cli, string command, string script, string @foreach, string arguments, string input, string expect, string notExpect, string workingDirectory, int timeout, out string stdOut, out string stdErr, out string errorMessage, out string stackTrace, out string additional, out string debugTrace) { var outcome = TestOutcome.None; @@ -252,7 +253,9 @@ private static TestOutcome RunTestCase(TestCase test, string cli, string command var exitedNotKilled = WaitForExit(process, timeout); outcome = exitedNotKilled && process.ExitCode == 0 ? TestOutcome.Passed - : TestOutcome.Failed; + : skipOnFailure + ? TestOutcome.Skipped + : TestOutcome.Failed; var exitCode = exitedNotKilled ? 
process.ExitCode.ToString() diff --git a/tests/testadapter/YamlTestProperties.cs b/tests/testadapter/YamlTestProperties.cs index 204d6de1..a7ee22b6 100644 --- a/tests/testadapter/YamlTestProperties.cs +++ b/tests/testadapter/YamlTestProperties.cs @@ -46,6 +46,7 @@ private static TestProperty RegisterTestCaseProperty(string name) { "expect", RegisterTestCaseProperty("Expect") }, { "not-expect", RegisterTestCaseProperty("NotExpect") }, { "simulate", RegisterTestCaseProperty("Simulate") }, + { "skipOnFailure", RegisterTestCaseProperty("SkipOnFailure") }, { "timeout", RegisterTestCaseProperty("Timeout") }, { "working-directory", RegisterTestCaseProperty("WorkingDirectory") } }; From d7051e85d5d048a0fb02f0535be68ef0e3ae7daa Mon Sep 17 00:00:00 2001 From: Rob Chambers Date: Mon, 22 Jan 2024 19:20:13 -0800 Subject: [PATCH 17/30] fix bug found by Hanchi (#159) * fix bug found by Hanchi * updated requirements based on Hanchi's updated SDK * update requirements --- requirements.txt | 11 ++++++++--- .../azcli/AzCliConsoleGui_SubscriptionPicker.cs | 2 +- 2 files changed, 9 insertions(+), 4 deletions(-) diff --git a/requirements.txt b/requirements.txt index 5e3d0e43..46183daa 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,8 +1,12 @@ # other app dependencies ipykernel +openai>1.0 + +azure-ai-ml @https://pkgs.dev.azure.com/azure-sdk/29ec6040-b234-4e31-b139-33dc4287b756/_packaging/3572dbf9-b5ef-433b-9137-fc4d7768e7cc/pypi/download/azure-ai-ml/1.13a20240110002/azure_ai_ml-1.13.0a20240110002-py3-none-any.whl +azure-ai-resources @https://pkgs.dev.azure.com/azure-sdk/29ec6040-b234-4e31-b139-33dc4287b756/_packaging/3572dbf9-b5ef-433b-9137-fc4d7768e7cc/pypi/download/azure-ai-resources/1a20240112004/azure_ai_resources-1.0.0a20240112004-py3-none-any.whl # generative ai SDK dependencies -azure-ai-generative[evaluate,index,promptflow] +azure-ai-generative[evaluate,index] 
@https://pkgs.dev.azure.com/azure-sdk/29ec6040-b234-4e31-b139-33dc4287b756/_packaging/3572dbf9-b5ef-433b-9137-fc4d7768e7cc/pypi/download/azure-ai-generative/1a20240112004/azure_ai_generative-1.0.0a20240112004-py3-none-any.whl # hardcoded the version of azureml-mlflow here for faster Docker image building speed azureml-mlflow==1.53.0 @@ -11,5 +15,6 @@ pytest # langchain dependencies, these should be optional in the future # langchain moved Embeddings from langchain.embeddings.base to langchain.schema.embeddings while azureml-rag is still referencing it. # once azureml-rag fixes, we should remove the langchain reference from this file -langchain==0.0.324 -semantic-kernel +langchain==0.1.1 +langchain-openai==0.0.2.post1 +semantic-kernel \ No newline at end of file diff --git a/src/common/details/azcli/AzCliConsoleGui_SubscriptionPicker.cs b/src/common/details/azcli/AzCliConsoleGui_SubscriptionPicker.cs index ea74b7a1..5c5c4cb9 100644 --- a/src/common/details/azcli/AzCliConsoleGui_SubscriptionPicker.cs +++ b/src/common/details/azcli/AzCliConsoleGui_SubscriptionPicker.cs @@ -77,7 +77,7 @@ public static async Task PickSubscriptionIdAsync(bool allowInteractiveLo throw new ApplicationException($"*** ERROR: Loading subscriptions ***\n{response.Output.StdError}"); } - var needLogin = response.Output.StdError != null && (response.Output.StdError.Contains("az login") || response.Output.StdError.Contains("refresh token")); + var needLogin = response.Output.StdError != null && (response.Output.StdError.Split('\'', '"').Contains("az login") || response.Output.StdError.Contains("refresh token")); if (needLogin) { bool cancelLogin = !allowInteractiveLogin; From 675ae176612708a1528eac75d2a4d0fdef30644d Mon Sep 17 00:00:00 2001 From: Rob Chambers Date: Mon, 29 Jan 2024 07:52:19 -0800 Subject: [PATCH 18/30] init openai w/ deployments almost works non-interactively (#162) * init openai w/ deployments almost works non-interactively * re-enable "--interactive false" for `ai init openai` 
path --- ...ig_CognitiveServicesResource_OpenAiKind.cs | 41 ++++++++++++++++--- src/common/details/commands/init_command.cs | 8 ++-- .../commands/parsers/init_command_parser.cs | 4 ++ tests/test3.yaml | 27 ++++++++---- 4 files changed, 64 insertions(+), 16 deletions(-) diff --git a/src/common/details/azcli/AzCliConsoleGui_PickOrCreateAndConfig_CognitiveServicesResource_OpenAiKind.cs b/src/common/details/azcli/AzCliConsoleGui_PickOrCreateAndConfig_CognitiveServicesResource_OpenAiKind.cs index 78b74169..1b69e166 100644 --- a/src/common/details/azcli/AzCliConsoleGui_PickOrCreateAndConfig_CognitiveServicesResource_OpenAiKind.cs +++ b/src/common/details/azcli/AzCliConsoleGui_PickOrCreateAndConfig_CognitiveServicesResource_OpenAiKind.cs @@ -19,7 +19,19 @@ namespace Azure.AI.Details.Common.CLI { public partial class AzCliConsoleGui { - public static async Task PickOrCreateAndConfigCognitiveServicesOpenAiKindResource(bool interactive, bool allowSkipDeployments, string subscriptionId, string regionFilter = null, string groupFilter = null, string resourceFilter = null, string kinds = null, string sku = null, bool yes = false) + public static async Task PickOrCreateAndConfigCognitiveServicesOpenAiKindResource( + bool interactive, + bool allowSkipDeployments, + string subscriptionId, + string regionFilter = null, + string groupFilter = null, + string resourceFilter = null, + string kinds = null, + string sku = null, + bool yes = false, + string chatDeploymentFilter = null, + string embeddingsDeploymentFilter = null, + string evaluationsDeploymentFilter = null) { kinds ??= "OpenAI;AIServices"; var sectionHeader = "AZURE OPENAI RESOURCE"; @@ -27,7 +39,15 @@ public partial class AzCliConsoleGui var regionLocation = !string.IsNullOrEmpty(regionFilter) ? 
await AzCliConsoleGui.PickRegionLocationAsync(interactive, regionFilter) : new AzCli.AccountRegionLocationInfo(); var resource = await AzCliConsoleGui.PickOrCreateCognitiveResource(sectionHeader, interactive, subscriptionId, regionLocation.Name, groupFilter, resourceFilter, kinds, sku, yes); - var (chatDeployment, embeddingsDeployment, evaluationDeployment, keys) = await PickOrCreateAndConfigCognitiveServicesOpenAiKindResourceDeployments(sectionHeader, interactive, allowSkipDeployments, subscriptionId, resource); + var (chatDeployment, embeddingsDeployment, evaluationDeployment, keys) = await PickOrCreateAndConfigCognitiveServicesOpenAiKindResourceDeployments( + sectionHeader, + interactive, + allowSkipDeployments, + subscriptionId, + resource, + chatDeploymentFilter, + embeddingsDeploymentFilter, + evaluationsDeploymentFilter); return new AzCli.CognitiveServicesResourceInfoEx { @@ -44,11 +64,20 @@ public partial class AzCliConsoleGui }; } - public static async Task<(AzCli.CognitiveServicesDeploymentInfo?, AzCli.CognitiveServicesDeploymentInfo?, AzCli.CognitiveServicesDeploymentInfo?, AzCli.CognitiveServicesKeyInfo)> PickOrCreateAndConfigCognitiveServicesOpenAiKindResourceDeployments(string sectionHeader, bool interactive, bool allowSkipDeployments, string subscriptionId, AzCli.CognitiveServicesResourceInfo resource) + public static async Task<(AzCli.CognitiveServicesDeploymentInfo?, AzCli.CognitiveServicesDeploymentInfo?, AzCli.CognitiveServicesDeploymentInfo?, AzCli.CognitiveServicesKeyInfo)> + PickOrCreateAndConfigCognitiveServicesOpenAiKindResourceDeployments( + string sectionHeader, + bool interactive, + bool allowSkipDeployments, + string subscriptionId, + AzCli.CognitiveServicesResourceInfo resource, + string chatDeploymentFilter = null, + string embeddingsDeploymentFilter = null, + string evaluationsDeploymentFilter = null) { - var chatDeployment = await AzCliConsoleGui.PickOrCreateCognitiveServicesResourceDeployment(interactive, allowSkipDeployments, 
"Chat", subscriptionId, resource.Group, resource.RegionLocation, resource.Name, null); - var embeddingsDeployment = await AzCliConsoleGui.PickOrCreateCognitiveServicesResourceDeployment(interactive, allowSkipDeployments, "Embeddings", subscriptionId, resource.Group, resource.RegionLocation, resource.Name, null); - var evaluationDeployment = await AzCliConsoleGui.PickOrCreateCognitiveServicesResourceDeployment(interactive, allowSkipDeployments, "Evaluation", subscriptionId, resource.Group, resource.RegionLocation, resource.Name, null); + var chatDeployment = await AzCliConsoleGui.PickOrCreateCognitiveServicesResourceDeployment(interactive, allowSkipDeployments, "Chat", subscriptionId, resource.Group, resource.RegionLocation, resource.Name, chatDeploymentFilter); + var embeddingsDeployment = await AzCliConsoleGui.PickOrCreateCognitiveServicesResourceDeployment(interactive, allowSkipDeployments, "Embeddings", subscriptionId, resource.Group, resource.RegionLocation, resource.Name, embeddingsDeploymentFilter); + var evaluationDeployment = await AzCliConsoleGui.PickOrCreateCognitiveServicesResourceDeployment(interactive, allowSkipDeployments, "Evaluation", subscriptionId, resource.Group, resource.RegionLocation, resource.Name, evaluationsDeploymentFilter); var keys = await AzCliConsoleGui.LoadCognitiveServicesResourceKeys(sectionHeader, subscriptionId, resource); if (resource.Kind == "AIServices") diff --git a/src/common/details/commands/init_command.cs b/src/common/details/commands/init_command.cs index 1f273c82..236fc7ef 100644 --- a/src/common/details/commands/init_command.cs +++ b/src/common/details/commands/init_command.cs @@ -391,8 +391,6 @@ private async Task DoInitProject(bool interactive, bool allowCreate = true, bool private async Task DoInitRootOpenAi(bool interactive) { - if (!interactive) ThrowInteractiveNotSupportedApplicationException(); // POST-IGNITE: TODO: Add back non-interactive mode support - await DoInitSubscriptionId(interactive); await 
DoInitOpenAi(interactive); } @@ -407,7 +405,11 @@ private async Task DoInitOpenAi(bool interactive, bool allowSkipDeployments = tr var sku = _values.GetOrDefault("init.service.cognitiveservices.resource.sku", Program.CognitiveServiceResourceSku); var yes = _values.GetOrDefault("init.service.cognitiveservices.terms.agree", false); - var resource = await AzCliConsoleGui.PickOrCreateAndConfigCognitiveServicesOpenAiKindResource(interactive, allowSkipDeployments, subscriptionId, regionFilter, groupFilter, resourceFilter, kind, sku, yes); + var chatDeploymentFilter = _values.GetOrDefault("init.chat.model.deployment.name", ""); + var embeddingsDeploymentFilter = _values.GetOrDefault("init.embeddings.model.deployment.name", ""); + var evaluationsDeploymentFilter = _values.GetOrDefault("init.evaluation.model.deployment.name", ""); + + var resource = await AzCliConsoleGui.PickOrCreateAndConfigCognitiveServicesOpenAiKindResource(interactive, allowSkipDeployments, subscriptionId, regionFilter, groupFilter, resourceFilter, kind, sku, yes, chatDeploymentFilter, embeddingsDeploymentFilter, evaluationsDeploymentFilter); _values.Reset("service.openai.deployments.picked", "true"); SubscriptionToken.Data().Set(_values, subscriptionId); diff --git a/src/common/details/commands/parsers/init_command_parser.cs b/src/common/details/commands/parsers/init_command_parser.cs index 3c5cab3a..5c5ba615 100644 --- a/src/common/details/commands/parsers/init_command_parser.cs +++ b/src/common/details/commands/parsers/init_command_parser.cs @@ -81,6 +81,10 @@ public CommonInitNamedValueTokenParsers() : base( new NamedValueTokenParser("--sku", "init.service.cognitiveservices.resource.sku", "00001", "1"), new NamedValueTokenParser("--yes", "init.service.cognitiveservices.terms.agree", "00001", "1;0", "true;false", null, "true"), + new NamedValueTokenParser(null, "init.chat.model.deployment.name", "01010", "1"), + new NamedValueTokenParser(null, "init.embeddings.model.deployment.name", "01010", "1"), + 
new NamedValueTokenParser(null, "init.evaluation.model.deployment.name", "01010", "1"), + new NamedValueTokenParser("--interactive", "init.service.interactive", "001", "1;0", "true;false", null, "true") // new NamedValueTokenParser(null, "init.output.azcli.command.file", "01100", "1", "@@"), diff --git a/tests/test3.yaml b/tests/test3.yaml index 957dc8ba..164395be 100644 --- a/tests/test3.yaml +++ b/tests/test3.yaml @@ -1,11 +1,24 @@ -- class: setup +- area: ai init tags: [before] - steps: - - name: az login - script: az login --identity - skipOnFailure: true - - name: ai init - command: ai init speech --subscription e72e5254-f265-4e95-9bd2-9ee8e7329051 --name robch-cranky-red-koala-ais --interactive false + tests: + - name: ai init openai + command: ai init openai + arguments: + subscription: e72e5254-f265-4e95-9bd2-9ee8e7329051 + name: robch-oai-eastus2 + chat-deployment-name: gpt-4-32k-0613 + embeddings-deployment-name: text-embedding-ada-002-2 + evaluation-deployment-name: gpt-4-32k-0613 + interactive: false + - name: ai init speech + command: ai init speech + arguments: + subscription: e72e5254-f265-4e95-9bd2-9ee8e7329051 + name: robch-cranky-red-koala-ais + interactive: false + +- name: test ai chat + command: ai chat --question "Why is the sky blue" - name: dev new environment command: ai dev new .env From 931d7951dd11f16194996c7e7bf091db2a2f9ff8 Mon Sep 17 00:00:00 2001 From: Rob Chambers Date: Tue, 30 Jan 2024 06:01:45 -0800 Subject: [PATCH 19/30] Robch/2401 jan29 expect gpt (#163) * first steps for expect-gpt * actually implement `expect-gpt` * fix list connections issue --- .../include.python.script.connection_list.py | 2 +- tests/test3.yaml | 5 +- tests/testadapter/YamlTestCaseFilter.cs | 3 +- tests/testadapter/YamlTestCaseParser.cs | 3 +- tests/testadapter/YamlTestCaseRunner.cs | 111 ++++++++++++++++-- tests/testadapter/YamlTestProperties.cs | 1 + 6 files changed, 110 insertions(+), 15 deletions(-) diff --git 
a/src/ai/.x/help/include.python.script.connection_list.py b/src/ai/.x/help/include.python.script.connection_list.py index 928b7f29..cc056ef6 100644 --- a/src/ai/.x/help/include.python.script.connection_list.py +++ b/src/ai/.x/help/include.python.script.connection_list.py @@ -22,7 +22,7 @@ def list_connections(subscription_id, resource_group_name, project_name): "name": item.name, "type": item.type, "target": item.target, - "credentials": item.credentials.values() + "credentials": item.credentials.values() if item.credentials else None, } connections.append(connection) diff --git a/tests/test3.yaml b/tests/test3.yaml index 164395be..ab7856a7 100644 --- a/tests/test3.yaml +++ b/tests/test3.yaml @@ -18,7 +18,8 @@ interactive: false - name: test ai chat - command: ai chat --question "Why is the sky blue" + command: ai chat --question "Why is the sky blue" --index-name @none + expect: Rayleigh - name: dev new environment command: ai dev new .env @@ -55,6 +56,8 @@ input: |- Tell me a joke Tell me another joke + expect-gpt: | + The output should contain exactly two jokes. 
tag: skip - class: dev new openai-chat (go) diff --git a/tests/testadapter/YamlTestCaseFilter.cs b/tests/testadapter/YamlTestCaseFilter.cs index 25a22d7d..c3975a0f 100644 --- a/tests/testadapter/YamlTestCaseFilter.cs +++ b/tests/testadapter/YamlTestCaseFilter.cs @@ -56,6 +56,7 @@ private static object GetPropertyValue(TestCase test, string name) case "input": return YamlTestProperties.Get(test, "input"); case "expect": return YamlTestProperties.Get(test, "expect"); + case "expect-gpt": return YamlTestProperties.Get(test, "expect-gpt"); case "not-expect": return YamlTestProperties.Get(test, "not-expect"); case "parallelize": return YamlTestProperties.Get(test, "parallelize"); @@ -72,6 +73,6 @@ private static object GetPropertyValue(TestCase test, string name) return tags.Select(x => x.Value).ToArray(); } - private static readonly string[] supportedFilterProperties = { "DisplayName", "FullyQualifiedName", "Category", "cli", "command", "script", "foreach", "arguments", "input", "expect", "not-expect", "parallelize", "simulate", "skipOnFailure" }; + private static readonly string[] supportedFilterProperties = { "DisplayName", "FullyQualifiedName", "Category", "cli", "command", "script", "foreach", "arguments", "input", "expect", "expect-gpt", "not-expect", "parallelize", "simulate", "skipOnFailure" }; } } diff --git a/tests/testadapter/YamlTestCaseParser.cs b/tests/testadapter/YamlTestCaseParser.cs index a772198a..dd385f02 100644 --- a/tests/testadapter/YamlTestCaseParser.cs +++ b/tests/testadapter/YamlTestCaseParser.cs @@ -124,6 +124,7 @@ private static TestCase GetTestFromNode(string source, FileInfo file, YamlMappin SetTestCasePropertyMap(test, "input", mapping, "input", workingDirectory); SetTestCaseProperty(test, "expect", mapping, "expect"); + SetTestCaseProperty(test, "expect-gpt", mapping, "expect-gpt"); SetTestCaseProperty(test, "not-expect", mapping, "not-expect"); SetTestCaseTagsAsTraits(test, YamlTagHelpers.UpdateCopyTags(tags, mapping)); @@ -195,7 +196,7 
@@ private static void CheckInvalidTestCaseNodes(FileInfo file, YamlMappingNode map private static bool IsValidTestCaseNode(string value) { - return ";area;class;name;cli;command;script;timeout;foreach;arguments;input;expect;not-expect;parallelize;simulate;skipOnFailure;tag;tags;workingDirectory;".IndexOf($";{value};") >= 0; + return ";area;class;name;cli;command;script;timeout;foreach;arguments;input;expect;expect-gpt;not-expect;parallelize;simulate;skipOnFailure;tag;tags;workingDirectory;".IndexOf($";{value};") >= 0; } private static void SetTestCaseProperty(TestCase test, string propertyName, YamlMappingNode mapping, string mappingName) diff --git a/tests/testadapter/YamlTestCaseRunner.cs b/tests/testadapter/YamlTestCaseRunner.cs index 1413b953..168def29 100644 --- a/tests/testadapter/YamlTestCaseRunner.cs +++ b/tests/testadapter/YamlTestCaseRunner.cs @@ -67,6 +67,7 @@ private static IEnumerable TestCaseGetResults(TestCase test) var arguments = YamlTestProperties.Get(test, "arguments"); var input = YamlTestProperties.Get(test, "input"); var expect = YamlTestProperties.Get(test, "expect"); + var expectGpt = YamlTestProperties.Get(test, "expect-gpt"); var notExpect = YamlTestProperties.Get(test, "not-expect"); var workingDirectory = YamlTestProperties.Get(test, "working-directory"); var timeout = int.Parse(YamlTestProperties.Get(test, "timeout")); @@ -86,8 +87,8 @@ private static IEnumerable TestCaseGetResults(TestCase test) var start = DateTime.Now; var outcome = string.IsNullOrEmpty(simulate) - ? 
RunTestCase(test, skipOnFailure, cli, command, script, foreachItem, arguments, input, expect, notExpect, workingDirectory, timeout, out string stdOut, out string stdErr, out string errorMessage, out string stackTrace, out string additional, out string debugTrace) - : SimulateTestCase(test, simulate, cli, command, script, foreachItem, arguments, input, expect, notExpect, workingDirectory, out stdOut, out stdErr, out errorMessage, out stackTrace, out additional, out debugTrace); + ? RunTestCase(test, skipOnFailure, cli, command, script, foreachItem, arguments, input, expect, expectGpt, notExpect, workingDirectory, timeout, out string stdOut, out string stdErr, out string errorMessage, out string stackTrace, out string additional, out string debugTrace) + : SimulateTestCase(test, simulate, cli, command, script, foreachItem, arguments, input, expect, expectGpt, notExpect, workingDirectory, out stdOut, out stdErr, out errorMessage, out stackTrace, out additional, out debugTrace); #if DEBUG additional += outcome == TestOutcome.Failed ? 
$"\nEXTRA: {ExtraDebugInfo()}" : ""; @@ -206,7 +207,7 @@ private static Dictionary DupAndAdd(Dictionary d return dup; } - private static TestOutcome RunTestCase(TestCase test, bool skipOnFailure, string cli, string command, string script, string @foreach, string arguments, string input, string expect, string notExpect, string workingDirectory, int timeout, out string stdOut, out string stdErr, out string errorMessage, out string stackTrace, out string additional, out string debugTrace) + private static TestOutcome RunTestCase(TestCase test, bool skipOnFailure, string cli, string command, string script, string @foreach, string arguments, string input, string expect, string expectGpt, string notExpect, string workingDirectory, int timeout, out string stdOut, out string stdErr, out string errorMessage, out string stackTrace, out string additional, out string debugTrace) { var outcome = TestOutcome.None; @@ -214,10 +215,12 @@ private static TestOutcome RunTestCase(TestCase test, bool skipOnFailure, string debugTrace = ""; stackTrace = script; - Task stdOutTask = null; - Task stdErrTask = null; List filesToDelete = null; + var sbOut = new StringBuilder(); + var sbErr = new StringBuilder(); + var sbMerged = new StringBuilder(); + try { var isWindows = RuntimeInformation.IsOSPlatform(OSPlatform.Windows); @@ -247,8 +250,11 @@ private static TestOutcome RunTestCase(TestCase test, bool skipOnFailure, string var process = Process.Start(startInfo); process.StandardInput.WriteLine(input ?? 
string.Empty); process.StandardInput.Close(); - stdOutTask = process.StandardOutput.ReadToEndAsync(); - stdErrTask = process.StandardError.ReadToEndAsync(); + + process.OutputDataReceived += (sender, e) => { if (e.Data != null) { sbOut.AppendLine(e.Data); sbMerged.AppendLine(e.Data); } }; + process.ErrorDataReceived += (sender, e) => { if (e.Data != null) { sbErr.AppendLine(e.Data); sbMerged.AppendLine(e.Data); } }; + process.BeginOutputReadLine(); + process.BeginErrorReadLine(); var exitedNotKilled = WaitForExit(process, timeout); outcome = exitedNotKilled && process.ExitCode == 0 @@ -284,10 +290,12 @@ private static TestOutcome RunTestCase(TestCase test, bool skipOnFailure, string filesToDelete?.ForEach(x => File.Delete(x)); } - stdOut = stdOutTask?.Result; - stdErr = stdErrTask?.Result; + stdOut = sbOut.ToString(); + stdErr = sbErr.ToString(); - return outcome; + return outcome == TestOutcome.Passed && !string.IsNullOrEmpty(expectGpt) + ? CheckExpectGptOutcome(sbMerged.ToString(), expectGpt, ref stdOut, ref stdErr) + : outcome; } private static List> ConvertValuesToAtArgs(List> kvs, ref List files) @@ -691,7 +699,7 @@ private static string GetKeyValueArgs(List> kvs) return args.ToString().TrimEnd(); } - private static TestOutcome SimulateTestCase(TestCase test, string simulate, string cli, string command, string script, string @foreach, string arguments, string input, string expect, string notExpect, string workingDirectory, out string stdOut, out string stdErr, out string errorMessage, out string stackTrace, out string additional, out string debugTrace) + private static TestOutcome SimulateTestCase(TestCase test, string simulate, string cli, string command, string script, string @foreach, string arguments, string input, string expect, string expectGpt, string notExpect, string workingDirectory, out string stdOut, out string stdErr, out string errorMessage, out string stackTrace, out string additional, out string debugTrace) { var sb = new StringBuilder(); 
sb.AppendLine($"cli='{cli?.Replace("\n", "\\n")}'"); @@ -804,6 +812,87 @@ private static string ExtraDebugInfo() return sb.ToString(); } + private static TestOutcome CheckExpectGptOutcome(string output, string expectGpt, ref string stdOut, ref string stdErr) + { + var outcome = ExpectGptOutcome(output, expectGpt, out var gptStdOut, out var gptStdErr, out var gptMerged); + if (outcome == TestOutcome.Failed) + { + if (!string.IsNullOrEmpty(gptStdOut)) stdOut = $"{stdOut}\n--expect-gpt--\n{gptStdOut}\n".Trim('\n'); + if (!string.IsNullOrEmpty(gptStdErr)) stdErr = $"{stdErr}\n--expect-gpt--\n{gptStdErr}\n".Trim('\n'); + } + return outcome; + } + + private static TestOutcome ExpectGptOutcome(string output, string expect, out string gptStdOut, out string gptStdErr, out string gptMerged) + { + var outcome = TestOutcome.None; + + var sbOut = new StringBuilder(); + var sbErr = new StringBuilder(); + var sbMerged = new StringBuilder(); + + var question = new StringBuilder(); + question.AppendLine($"Here's the console output:\n\n{output}\n"); + question.AppendLine($"Here's the expectation:\n\n{expect}\n"); + question.AppendLine("You **must always** answer \"PASS\" if the expectation is met."); + question.AppendLine("You **must always** answer \"FAIL\" if the expectation is not met."); + question.AppendLine("You **must only** answer \"PASS\" or \"FAIL\"."); + var questionTempFile = WriteTextToTempFile(question.ToString()); + + try + { + var startProcess = FindCacheCli("ai"); + var startArgs = $"chat --quiet true --index-name @none --question @{questionTempFile}"; + var startInfo = new ProcessStartInfo(startProcess, startArgs) + { + UseShellExecute = false, + RedirectStandardInput = true, + RedirectStandardError = true, + RedirectStandardOutput = true + }; + + Logger.Log($"Process.Start('{startProcess} {startArgs}')"); + var process = Process.Start(startInfo); + process.StandardInput.Close(); + + process.OutputDataReceived += (sender, e) => { if (e.Data != null) { 
sbOut.AppendLine(e.Data); sbMerged.AppendLine(e.Data); } }; + process.ErrorDataReceived += (sender, e) => { if (e.Data != null) { sbErr.AppendLine(e.Data); sbMerged.AppendLine(e.Data); } }; + process.BeginOutputReadLine(); + process.BeginErrorReadLine(); + + var exitedNotKilled = WaitForExit(process, 30000); + + outcome = exitedNotKilled && process.ExitCode == 0 + ? TestOutcome.Passed + : TestOutcome.Failed; + } + catch + { + outcome = TestOutcome.Failed; + } + finally + { + gptStdOut = sbOut.ToString(); + gptStdErr = sbErr.ToString(); + gptMerged = sbMerged.ToString(); + + File.Delete(questionTempFile); + } + + if (outcome == TestOutcome.Passed) + { + Logger.Log($"ExpectGptOutcome: Checking for 'PASS' in '{gptMerged}'"); + var passed = gptMerged.Contains("PASS") || gptMerged.Contains("TRUE") || gptMerged.Contains("YES"); + var failed = gptMerged.Contains("FAIL") || gptMerged.Contains("FALSE") || gptMerged.Contains("NO"); + outcome = passed && !failed + ? TestOutcome.Passed + : TestOutcome.Failed; + Logger.Log($"ExpectGptOutcome: {outcome}"); + } + + return outcome; + } + #endregion private static Dictionary _cliCache = new Dictionary(); diff --git a/tests/testadapter/YamlTestProperties.cs b/tests/testadapter/YamlTestProperties.cs index a7ee22b6..d73a9813 100644 --- a/tests/testadapter/YamlTestProperties.cs +++ b/tests/testadapter/YamlTestProperties.cs @@ -44,6 +44,7 @@ private static TestProperty RegisterTestCaseProperty(string name) { "arguments", RegisterTestCaseProperty("Arguments") }, { "input", RegisterTestCaseProperty("Input")}, { "expect", RegisterTestCaseProperty("Expect") }, + { "expect-gpt", RegisterTestCaseProperty("ExpectGpt") }, { "not-expect", RegisterTestCaseProperty("NotExpect") }, { "simulate", RegisterTestCaseProperty("Simulate") }, { "skipOnFailure", RegisterTestCaseProperty("SkipOnFailure") }, From 70f3274bed1f629bd79d7371c1d5949e721635cb Mon Sep 17 00:00:00 2001 From: Hanchi Wang Date: Tue, 30 Jan 2024 16:10:32 -0800 Subject: [PATCH 20/30] 
workaround for ai search index (#160) --- src/ai/.x/help/include.python.script.ml_index_update.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/src/ai/.x/help/include.python.script.ml_index_update.py b/src/ai/.x/help/include.python.script.ml_index_update.py index d8235f2f..3d2ee8bf 100644 --- a/src/ai/.x/help/include.python.script.ml_index_update.py +++ b/src/ai/.x/help/include.python.script.ml_index_update.py @@ -53,7 +53,13 @@ def search_index_update( ) openaiConnection = client.get_default_aoai_connection() - openaiConnection.set_current_environment() + # openaiConnection.set_current_environment() + + # This is a workaround for build_index(), as it has nested logic depending on openai 0.x environment variables. + # This sets environment variables in openai 0.x fashion. + openaiConnection._set_current_environment_old() + # This sets environment variables in openai 1.x fashion. + openaiConnection._set_current_environment_new() searchConnection = client.connections.get("AzureAISearch") searchConnection.set_current_environment() From 307288efe41e3fac7158f389858326796994a8e6 Mon Sep 17 00:00:00 2001 From: Christopher Schraer <32145632+chschrae@users.noreply.github.com> Date: Tue, 6 Feb 2024 12:00:58 -0800 Subject: [PATCH 21/30] added go template for streaming with your own data (#161) * added go template for streaming with your own data * updated go templates to latest API and add Go streaming with data to the test * added test for go streaming with data --------- Co-authored-by: Chris Schraer --- src/ai/.x/templates/openai-chat-go/go.mod | 4 +- .../openai_chat_completions_hello_world.go | 19 +-- .../templates/openai-chat-streaming-go/go.mod | 4 +- ..._chat_completions_streaming_hello_world.go | 21 +-- .../openai-chat-streaming-with-data-go/_.json | 15 +++ .../openai-chat-streaming-with-data-go/go.mod | 6 + .../main.go | 105 +++++++++++++++ ...letions_streaming_with_data_hello_world.go | 122 ++++++++++++++++++ tests/test.yaml | 2 +- 
tests/test3.yaml | 22 +++- 10 files changed, 291 insertions(+), 29 deletions(-) create mode 100644 src/ai/.x/templates/openai-chat-streaming-with-data-go/_.json create mode 100644 src/ai/.x/templates/openai-chat-streaming-with-data-go/go.mod create mode 100644 src/ai/.x/templates/openai-chat-streaming-with-data-go/main.go create mode 100644 src/ai/.x/templates/openai-chat-streaming-with-data-go/openai_chat_completions_streaming_with_data_hello_world.go diff --git a/src/ai/.x/templates/openai-chat-go/go.mod b/src/ai/.x/templates/openai-chat-go/go.mod index 6bcebc87..77cb86da 100644 --- a/src/ai/.x/templates/openai-chat-go/go.mod +++ b/src/ai/.x/templates/openai-chat-go/go.mod @@ -1,6 +1,6 @@ module openai_chat_completions_hello_world require ( - github.com/Azure/azure-sdk-for-go/sdk/ai/azopenai v0.3.0 - github.com/Azure/azure-sdk-for-go/sdk/azcore v1.9.0 + github.com/Azure/azure-sdk-for-go/sdk/ai/azopenai v0.4.1 + github.com/Azure/azure-sdk-for-go/sdk/azcore v1.9.1 ) diff --git a/src/ai/.x/templates/openai-chat-go/openai_chat_completions_hello_world.go b/src/ai/.x/templates/openai-chat-go/openai_chat_completions_hello_world.go index 11c1baeb..3f129cc1 100644 --- a/src/ai/.x/templates/openai-chat-go/openai_chat_completions_hello_world.go +++ b/src/ai/.x/templates/openai-chat-go/openai_chat_completions_hello_world.go @@ -7,6 +7,7 @@ import ( "context" "github.com/Azure/azure-sdk-for-go/sdk/ai/azopenai" + "github.com/Azure/azure-sdk-for-go/sdk/azcore" "github.com/Azure/azure-sdk-for-go/sdk/azcore/to" ) @@ -16,21 +17,21 @@ type <#= ClassName #> struct { } func New<#= ClassName #>(openAIEndpoint string, openAIKey string, openAIChatDeploymentName string, openAISystemPrompt string) (*<#= ClassName #>, error) { - keyCredential, err := azopenai.NewKeyCredential(openAIKey) - if err != nil { - return nil, err - } + keyCredential := azcore.NewKeyCredential(openAIKey) + client, err := azopenai.NewClientWithKeyCredential(openAIEndpoint, keyCredential, nil) if err != nil { return 
nil, err } - messages := []azopenai.ChatMessage{ - {Role: to.Ptr(azopenai.ChatRoleSystem), Content: to.Ptr(openAISystemPrompt)}, + messages := []azopenai.ChatRequestMessageClassification{ + &azopenai.ChatRequestSystemMessage{ + Content: &openAISystemPrompt, + }, } options := &azopenai.ChatCompletionsOptions{ - Deployment: openAIChatDeploymentName, + DeploymentName: &openAIChatDeploymentName, Messages: messages, } @@ -45,7 +46,7 @@ func (chat *<#= ClassName #>) ClearConversation() { } func (chat *<#= ClassName #>) GetChatCompletions(userPrompt string) (string, error) { - chat.options.Messages = append(chat.options.Messages, azopenai.ChatMessage{Role: to.Ptr(azopenai.ChatRoleUser), Content: to.Ptr(userPrompt)}) + chat.options.Messages = append(chat.options.Messages, &azopenai.ChatRequestUserMessage{Content: azopenai.NewChatRequestUserMessageContent(userPrompt)}) resp, err := chat.client.GetChatCompletions(context.TODO(), *chat.options, nil) if err != nil { @@ -53,7 +54,7 @@ func (chat *<#= ClassName #>) GetChatCompletions(userPrompt string) (string, err } responseContent := *resp.Choices[0].Message.Content - chat.options.Messages = append(chat.options.Messages, azopenai.ChatMessage{Role: to.Ptr(azopenai.ChatRoleAssistant), Content: to.Ptr(responseContent)}) + chat.options.Messages = append(chat.options.Messages, &azopenai.ChatRequestAssistantMessage{Content: to.Ptr(responseContent)}) return responseContent, nil } diff --git a/src/ai/.x/templates/openai-chat-streaming-go/go.mod b/src/ai/.x/templates/openai-chat-streaming-go/go.mod index 3d12c4f5..525650b3 100644 --- a/src/ai/.x/templates/openai-chat-streaming-go/go.mod +++ b/src/ai/.x/templates/openai-chat-streaming-go/go.mod @@ -1,6 +1,6 @@ module openai_chat_completions_streaming_hello_world require ( - github.com/Azure/azure-sdk-for-go/sdk/ai/azopenai v0.3.0 - github.com/Azure/azure-sdk-for-go/sdk/azcore v1.9.0 + github.com/Azure/azure-sdk-for-go/sdk/ai/azopenai v0.4.1 + github.com/Azure/azure-sdk-for-go/sdk/azcore 
v1.9.1 ) diff --git a/src/ai/.x/templates/openai-chat-streaming-go/openai_chat_completions_streaming_hello_world.go b/src/ai/.x/templates/openai-chat-streaming-go/openai_chat_completions_streaming_hello_world.go index 13e22361..a2220e83 100644 --- a/src/ai/.x/templates/openai-chat-streaming-go/openai_chat_completions_streaming_hello_world.go +++ b/src/ai/.x/templates/openai-chat-streaming-go/openai_chat_completions_streaming_hello_world.go @@ -9,6 +9,7 @@ import ( "io" "github.com/Azure/azure-sdk-for-go/sdk/ai/azopenai" + "github.com/Azure/azure-sdk-for-go/sdk/azcore" "github.com/Azure/azure-sdk-for-go/sdk/azcore/to" ) @@ -18,21 +19,21 @@ type <#= ClassName #> struct { } func New<#= ClassName #>(openAIEndpoint string, openAIKey string, openAIChatDeploymentName string, openAISystemPrompt string) (*<#= ClassName #>, error) { - keyCredential, err := azopenai.NewKeyCredential(openAIKey) - if err != nil { - return nil, err - } + keyCredential := azcore.NewKeyCredential(openAIKey) + client, err := azopenai.NewClientWithKeyCredential(openAIEndpoint, keyCredential, nil) if err != nil { return nil, err } - messages := []azopenai.ChatMessage{ - {Role: to.Ptr(azopenai.ChatRoleSystem), Content: to.Ptr(openAISystemPrompt)}, + messages := []azopenai.ChatRequestMessageClassification{ + &azopenai.ChatRequestSystemMessage{ + Content: &openAISystemPrompt, + }, } options := &azopenai.ChatCompletionsOptions{ - Deployment: openAIChatDeploymentName, + DeploymentName: &openAIChatDeploymentName, Messages: messages, } @@ -47,7 +48,7 @@ func (chat *<#= ClassName #>) ClearConversation() { } func (chat *<#= ClassName #>) GetChatCompletionsStream(userPrompt string, callback func(content string)) (string, error) { - chat.options.Messages = append(chat.options.Messages, azopenai.ChatMessage{Role: to.Ptr(azopenai.ChatRoleUser), Content: to.Ptr(userPrompt)}) + chat.options.Messages = append(chat.options.Messages, &azopenai.ChatRequestUserMessage{Content: 
azopenai.NewChatRequestUserMessageContent(userPrompt)}) resp, err := chat.client.GetChatCompletionsStream(context.TODO(), *chat.options, nil) if err != nil { @@ -74,7 +75,7 @@ func (chat *<#= ClassName #>) GetChatCompletionsStream(userPrompt string, callba if choice.FinishReason != nil { finishReason := *choice.FinishReason - if finishReason == azopenai.CompletionsFinishReasonLength { + if finishReason == azopenai.CompletionsFinishReasonTokenLimitReached { content = content + "\nWARNING: Exceeded token limit!" } } @@ -88,6 +89,6 @@ func (chat *<#= ClassName #>) GetChatCompletionsStream(userPrompt string, callba } } - chat.options.Messages = append(chat.options.Messages, azopenai.ChatMessage{Role: to.Ptr(azopenai.ChatRoleAssistant), Content: to.Ptr(responseContent)}) + chat.options.Messages = append(chat.options.Messages, &azopenai.ChatRequestAssistantMessage{Content: to.Ptr(responseContent)}) return responseContent, nil } diff --git a/src/ai/.x/templates/openai-chat-streaming-with-data-go/_.json b/src/ai/.x/templates/openai-chat-streaming-with-data-go/_.json new file mode 100644 index 00000000..b938bc70 --- /dev/null +++ b/src/ai/.x/templates/openai-chat-streaming-with-data-go/_.json @@ -0,0 +1,15 @@ +{ + "_LongName": "OpenAI Chat Completions (w/ Data + AI Search)", + "_ShortName": "openai-chat-streaming-with-data", + "_Language": "Go", + "ClassName": "OpenAIChatCompletionsWithDataStreamingExample", + "AZURE_OPENAI_API_VERSION": "", + "AZURE_OPENAI_ENDPOINT": "", + "AZURE_OPENAI_KEY": "", + "AZURE_OPENAI_CHAT_DEPLOYMENT": "", + "AZURE_OPENAI_EMBEDDING_DEPLOYMENT": "", + "AZURE_OPENAI_SYSTEM_PROMPT": "You are a helpful AI assistant.", + "AZURE_AI_SEARCH_ENDPOINT": "", + "AZURE_AI_SEARCH_KEY": "", + "AZURE_AI_SEARCH_INDEX_NAME": "" +} \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-streaming-with-data-go/go.mod b/src/ai/.x/templates/openai-chat-streaming-with-data-go/go.mod new file mode 100644 index 00000000..1d0b37af --- /dev/null +++ 
b/src/ai/.x/templates/openai-chat-streaming-with-data-go/go.mod @@ -0,0 +1,6 @@ +module openai_chat_completions_streaming_with_data_hello_world + +require ( + github.com/Azure/azure-sdk-for-go/sdk/ai/azopenai v0.4.1 + github.com/Azure/azure-sdk-for-go/sdk/azcore v1.9.1 +) diff --git a/src/ai/.x/templates/openai-chat-streaming-with-data-go/main.go b/src/ai/.x/templates/openai-chat-streaming-with-data-go/main.go new file mode 100644 index 00000000..c522b6b3 --- /dev/null +++ b/src/ai/.x/templates/openai-chat-streaming-with-data-go/main.go @@ -0,0 +1,105 @@ +<#@ template hostspecific="true" #> +<#@ output extension=".go" encoding="utf-8" #> +<#@ parameter type="System.String" name="ClassName" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_API_VERSION" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_ENDPOINT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_KEY" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_CHAT_DEPLOYMENT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_EMBEDDING_DEPLOYMENT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_SYSTEM_PROMPT" #> +<#@ parameter type="System.String" name="AZURE_AI_SEARCH_ENDPOINT" #> +<#@ parameter type="System.String" name="AZURE_AI_SEARCH_KEY" #> +<#@ parameter type="System.String" name="AZURE_AI_SEARCH_INDEX_NAME" #> +package main + +import ( + "bufio" + "fmt" + "log" + "os" + "strings" +) + +func main() { + openAIEndpoint := os.Getenv("AZURE_OPENAI_ENDPOINT") + if openAIEndpoint == "" { + openAIEndpoint = "<#= AZURE_OPENAI_ENDPOINT #>" + } + openAIKey := os.Getenv("AZURE_OPENAI_KEY") + if openAIKey == "" { + openAIKey = "<#= AZURE_OPENAI_KEY #>" + } + openAIChatDeploymentName := os.Getenv("AZURE_OPENAI_CHAT_DEPLOYMENT") + if openAIChatDeploymentName == "" { + openAIChatDeploymentName = "<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>" + } + openAISystemPrompt := os.Getenv("OPENAI_SYSTEM_PROMPT") + if openAISystemPrompt == "" { + openAISystemPrompt = "<#= 
AZURE_OPENAI_SYSTEM_PROMPT #>" + } + + openAIApiVersion := os.Getenv("AZURE_OPENAI_API_VERSION") + if openAIApiVersion == "" { + openAIApiVersion = "<#= AZURE_OPENAI_API_VERSION #>" + } + + openAIEmbeddingsDeploymentName := os.Getenv("AZURE_OPENAI_EMBEDDING_DEPLOYMENT") + if openAIEmbeddingsDeploymentName == "" { + openAIEmbeddingsDeploymentName = "<#= AZURE_OPENAI_EMBEDDING_DEPLOYMENT #>" + } + + openAIEndpoint = strings.TrimSuffix(openAIEndpoint, "/") + + azureSearchEndpoint := os.Getenv("AZURE_AI_SEARCH_ENDPOINT") + if azureSearchEndpoint == "" { + azureSearchEndpoint = "<#= AZURE_AI_SEARCH_ENDPOINT #>" + } + + azureSearchApiKey := os.Getenv("AZURE_AI_SEARCH_KEY") + if azureSearchApiKey == "" { + azureSearchApiKey = "<#= AZURE_AI_SEARCH_KEY #>" + } + + azureSearchIndexName := os.Getenv("AZURE_AI_SEARCH_INDEX_NAME") + if azureSearchIndexName == "" { + azureSearchIndexName = "<#= AZURE_AI_SEARCH_INDEX_NAME #>" + } + + if openAIEndpoint == "" || openAIKey == "" || openAIChatDeploymentName == "" || openAISystemPrompt == "" { + fmt.Println("Please set the environment variables.") + os.Exit(1) + } + + chat, err := New<#= ClassName #>(openAIEndpoint, openAIKey, openAIChatDeploymentName, openAISystemPrompt, azureSearchEndpoint, azureSearchApiKey, azureSearchIndexName, openAIEmbeddingsDeploymentName) + if err != nil { + log.Fatalf("ERROR: %s", err) + } + + for { + fmt.Print("User: ") + input, _ := getUserInput() + if input == "exit" || input == "" { + break + } + + fmt.Printf("\nAssistant: ") + _, err := chat.GetChatCompletionsStream(input, func(content string) { + fmt.Printf("%s", content) + }) + if err != nil { + log.Fatalf("ERROR: %s", err) + } + fmt.Printf("\n\n") + } +} + +func getUserInput() (string, error) { + reader := bufio.NewReader(os.Stdin) + userInput, err := reader.ReadString('\n') + if err != nil { + return "", err + } + userInput = strings.TrimSuffix(userInput, "\n") + userInput = strings.TrimSuffix(userInput, "\r") + return userInput, nil +} diff --git 
a/src/ai/.x/templates/openai-chat-streaming-with-data-go/openai_chat_completions_streaming_with_data_hello_world.go b/src/ai/.x/templates/openai-chat-streaming-with-data-go/openai_chat_completions_streaming_with_data_hello_world.go new file mode 100644 index 00000000..c606f996 --- /dev/null +++ b/src/ai/.x/templates/openai-chat-streaming-with-data-go/openai_chat_completions_streaming_with_data_hello_world.go @@ -0,0 +1,122 @@ +<#@ template hostspecific="true" #> +<#@ output extension=".go" encoding="utf-8" #> +<#@ parameter type="System.String" name="ClassName" #> +package main + +import ( + "context" + "errors" + "io" + + "github.com/Azure/azure-sdk-for-go/sdk/ai/azopenai" + "github.com/Azure/azure-sdk-for-go/sdk/azcore" + "github.com/Azure/azure-sdk-for-go/sdk/azcore/to" +) + +type <#= ClassName #> struct { + client *azopenai.Client + options *azopenai.ChatCompletionsOptions +} + +func New<#= ClassName #>( + openAIEndpoint string, + openAIKey string, + openAIChatDeploymentName string, + openAISystemPrompt string, + azureSearchEndpoint string, + azureSearchApiKey string, + azureSearchIndexName string, + openAIEmbeddingsDeploymentName string, + ) (*<#= ClassName #>, error) { + keyCredential := azcore.NewKeyCredential(openAIKey) + + client, err := azopenai.NewClientWithKeyCredential(openAIEndpoint, keyCredential, nil) + if err != nil { + return nil, err + } + + messages := []azopenai.ChatRequestMessageClassification{ + &azopenai.ChatRequestSystemMessage{ + Content: &openAISystemPrompt, + }, + } + + options := &azopenai.ChatCompletionsOptions{ + DeploymentName: &openAIChatDeploymentName, + Messages: messages, + AzureExtensionsOptions: []azopenai.AzureChatExtensionConfigurationClassification{ + &azopenai.AzureCognitiveSearchChatExtensionConfiguration{ + Parameters: &azopenai.AzureCognitiveSearchChatExtensionParameters{ + Endpoint: &azureSearchEndpoint, + IndexName: &azureSearchIndexName, + Authentication: &azopenai.OnYourDataAPIKeyAuthenticationOptions{ + Key: 
&azureSearchApiKey, + }, + QueryType: to.Ptr(azopenai.AzureCognitiveSearchQueryTypeVectorSimpleHybrid), + EmbeddingDependency: &azopenai.OnYourDataDeploymentNameVectorizationSource{ + DeploymentName: &openAIEmbeddingsDeploymentName, + Type: to.Ptr(azopenai.OnYourDataVectorizationSourceTypeDeploymentName), + }, + }, + }, + }, + } + + return &OpenAIChatCompletionsWithDataStreamingExample{ + client: client, + options: options, + }, nil + } + +func (chat *<#= ClassName #>) ClearConversation() { + chat.options.Messages = chat.options.Messages[:1] +} + +func (chat *<#= ClassName #>) GetChatCompletionsStream(userPrompt string, callback func(content string)) (string, error) { + chat.options.Messages = append(chat.options.Messages, &azopenai.ChatRequestUserMessage{Content: azopenai.NewChatRequestUserMessageContent(userPrompt)}) + + resp, err := chat.client.GetChatCompletionsStream(context.TODO(), *chat.options, nil) + if err != nil { + return "", err + } + defer resp.ChatCompletionsStream.Close() + + responseContent := "" + for { + chatCompletions, err := resp.ChatCompletionsStream.Read() + if errors.Is(err, io.EOF) { + break + } + if err != nil { + return "", err + } + + for _, choice := range chatCompletions.Choices { + + content := "" + if choice.Delta.Content != nil { + content = *choice.Delta.Content + } + + if choice.FinishReason != nil { + finishReason := *choice.FinishReason + if finishReason == azopenai.CompletionsFinishReasonTokenLimitReached { + content = content + "\nWARNING: Exceeded token limit!" 
+ } + } + + if content == "" { + continue + } + + if callback != nil { + callback(content) + } + responseContent += content + } + } + + chat.options.Messages = append(chat.options.Messages, &azopenai.ChatRequestAssistantMessage{Content: to.Ptr(responseContent)}) + return responseContent, nil +} + diff --git a/tests/test.yaml b/tests/test.yaml index 24268c25..757b8d3a 100644 --- a/tests/test.yaml +++ b/tests/test.yaml @@ -89,5 +89,5 @@ ^Helper +Function +Class +Library +helper-functions +C# *\r?$\n ^OpenAI +Chat +Completions +openai-chat +C#, +Go, +Java, +JavaScript, +Python *\r?$\n ^OpenAI +Chat +Completions +\(Streaming\) +openai-chat-streaming +C#, +Go, +Java, +JavaScript, +Python *\r?$\n - ^OpenAI +Chat +Completions +\(w/ +Data +\+ +AI +Search\) +openai-chat-streaming-with-data +C#, +JavaScript, +Python *\r?$\n + ^OpenAI +Chat +Completions +\(w/ +Data +\+ +AI +Search\) +openai-chat-streaming-with-data +C#, +Go, +JavaScript, +Python *\r?$\n ^OpenAI +Chat +Completions +\(w/ +Functions\) +openai-chat-streaming-with-functions +C#, +Go, +JavaScript, +Python *\r?$\n diff --git a/tests/test3.yaml b/tests/test3.yaml index ab7856a7..05ad5d69 100644 --- a/tests/test3.yaml +++ b/tests/test3.yaml @@ -37,7 +37,7 @@ input: | What is my name? expect: | - assistant-function: GetUsersName\({}\) = + assistant-function: GetUsersName\({}\) = tag: skip - area: ai dev new openai-chat @@ -252,6 +252,19 @@ input: |- What parameter should i use to initialize? tag: skip + - class: dev new openai-chat-streaming-with-data (go) + steps: + - name: generate template + command: ai dev new openai-chat-streaming-with-data --go + - name: build template + script: | + cd openai-chat-streaming-with-data-go + go mod tidy + - name: run template + command: ai dev shell --run "cd openai-chat-streaming-with-data-go && go run .\main.go .\openai_chat_completions_streaming_with_data_hello_world.go" + input: |- + What parameter should i use to initialize? 
+ tag: skip - area: ai dev new openai-chat-streaming-with-functions tests: @@ -351,12 +364,12 @@ - class: dev new openai-webpage-with-functions (javascript) steps: - name: generate template - command: ai dev new openai-webpage-with-functions --javascript - - name: build template + command: ai dev new openai-webpage-with-functions --javascript + - name: build template script: | cd openai-webpage-with-functions-js npm install - - name: pack template + - name: pack template script: | cd openai-webpage-with-functions-js npx webpack @@ -373,4 +386,3 @@ script: | cd openai-webpage-with-functions-ts npx webpack - \ No newline at end of file From a98813c6c7b3e8e6b2b3b7ad4e0d500538d253aa Mon Sep 17 00:00:00 2001 From: Rob Chambers Date: Tue, 6 Feb 2024 17:21:33 -0800 Subject: [PATCH 22/30] refactor into TestAdapter and lower level TestFramework (#164) * refactor into TestAdapter and lower level TestFramework * it's 2024 now... * update with first draft of test runner standalone exe * test runner can now "list" and the start of "run" * refactored reporter capabilities * go back to "skipping" the "run" part for now * with console reporter * test reporting from standalone exe works sometimes now * more refactor and optimizations * fixed bug * fix more bugs * better prompt for test * yaml test framework now allows bash scripts on windows using git-bash (bash.exe) * enable `dev shell` to user process helpers class; update framework's test runner with more logger output * remove unnecessary files * enable better diagnostics * more refactoring * improved outputs * `ait` filtering now works!! * better usage text for `ait` * better usage output per feedback * updated formating * better command line parameters and descriptions of such in usage * stdout works properly now, and stderr now in trx file * protect agains null when writing trx * fixed final bugs in `ait` ... 
i think it's almost ready * fix getting git bash.exe on windows, and also cache it * bad merge * renamed RunCommandToken to RunCommandScriptToken --- .azure/pipelines/build.yaml | 2 +- .gitignore | 1 + README.md | 2 +- ai-cli.sln | 17 + src/ai/Program_AI.cs | 11 +- src/ai/commands/code_command.cs | 67 --- src/ai/commands/complete_command.cs | 199 -------- src/ai/commands/dev_command.cs | 86 ++-- .../commands/parsers/code_command_parser.cs | 78 ---- .../parsers/complete_command_parser.cs | 48 -- src/ai/commands/parsers/dev_command_parser.cs | 2 +- .../parsers/samples_command_parser.cs | 78 ---- src/ai/commands/samples_command.cs | 67 --- src/ai/commands/service_command.cs | 1 - src/common/Program.cs | 46 +- src/common/details/commands/init_command.cs | 2 - src/common/details/commands/runjob_command.cs | 4 +- src/common/details/helpers/file_helpers.cs | 28 +- src/common/details/helpers/process_helpers.cs | 69 ++- ...d_token.cs => run_command_script_token.cs} | 12 +- src/spx/spx-cli.csproj | 2 +- src/vz/vz-cli.csproj | 2 +- ...re-AI-CLI-TestFramework-Default-Tags.yaml} | 0 tests/test.yaml | 2 +- tests/test3.yaml | 80 ++-- tests/testadapter/Properties/AssemblyInfo.cs | 11 +- tests/testadapter/TestDiscoverer.cs | 8 +- tests/testadapter/TestExecutor.cs | 7 +- tests/testadapter/YamlTestAdapter.cs | 180 +------ tests/testadapter/YamlTestAdapter.csproj | 4 + .../testadapter/YamlTestAdapterCommon.targets | 2 +- tests/testadapter/YamlTestCaseFilter.cs | 78 ---- .../YamlTestFrameworkHandleHost.cs | 30 ++ .../YamlTestRunnerTriggerAttribute.cs | 2 +- tests/testframework/IYamlTestFrameworkHost.cs | 11 + .../{testadapter => testframework}/Logger.cs | 4 +- .../testframework/Properties/AssemblyInfo.cs | 36 ++ tests/testframework/TestResultHelpers.cs | 21 + .../YamlHelpers.cs | 2 +- .../YamlNodeExtensions.cs | 2 +- .../YamlTagHelpers.cs | 4 +- tests/testframework/YamlTestCaseFilter.cs | 151 ++++++ .../YamlTestCaseParser.cs | 28 +- .../YamlTestCaseRunner.cs | 201 +++++--- 
tests/testframework/YamlTestFramework.cs | 226 +++++++++ tests/testframework/YamlTestFramework.csproj | 13 + .../YamlTestFrameworkCommon.targets | 68 +++ .../YamlTestProperties.cs | 3 +- tests/testrunner/Program.cs | 224 +++++++++ tests/testrunner/Properties/AssemblyInfo.cs | 36 ++ .../YamlTestFrameworkConsoleHost.cs | 438 ++++++++++++++++++ tests/testrunner/YamlTestRunner.csproj | 14 + tests/testrunner/YamlTestRunnerCommon.targets | 56 +++ 53 files changed, 1728 insertions(+), 1038 deletions(-) delete mode 100644 src/ai/commands/code_command.cs delete mode 100644 src/ai/commands/complete_command.cs delete mode 100644 src/ai/commands/parsers/code_command_parser.cs delete mode 100644 src/ai/commands/parsers/complete_command_parser.cs delete mode 100644 src/ai/commands/parsers/samples_command_parser.cs delete mode 100644 src/ai/commands/samples_command.cs rename src/common/details/named_values/tokens/{run_command_token.cs => run_command_script_token.cs} (53%) rename tests/{Azure-AI-CLI-TestRunner-Default-Tags.yaml => Azure-AI-CLI-TestFramework-Default-Tags.yaml} (100%) delete mode 100644 tests/testadapter/YamlTestCaseFilter.cs create mode 100644 tests/testadapter/YamlTestFrameworkHandleHost.cs create mode 100644 tests/testframework/IYamlTestFrameworkHost.cs rename tests/{testadapter => testframework}/Logger.cs (96%) create mode 100644 tests/testframework/Properties/AssemblyInfo.cs create mode 100644 tests/testframework/TestResultHelpers.cs rename tests/{testadapter => testframework}/YamlHelpers.cs (97%) rename tests/{testadapter => testframework}/YamlNodeExtensions.cs (98%) rename tests/{testadapter => testframework}/YamlTagHelpers.cs (96%) create mode 100644 tests/testframework/YamlTestCaseFilter.cs rename tests/{testadapter => testframework}/YamlTestCaseParser.cs (95%) rename tests/{testadapter => testframework}/YamlTestCaseRunner.cs (82%) create mode 100644 tests/testframework/YamlTestFramework.cs create mode 100644 tests/testframework/YamlTestFramework.csproj 
create mode 100644 tests/testframework/YamlTestFrameworkCommon.targets rename tests/{testadapter => testframework}/YamlTestProperties.cs (95%) create mode 100644 tests/testrunner/Program.cs create mode 100644 tests/testrunner/Properties/AssemblyInfo.cs create mode 100644 tests/testrunner/YamlTestFrameworkConsoleHost.cs create mode 100644 tests/testrunner/YamlTestRunner.csproj create mode 100644 tests/testrunner/YamlTestRunnerCommon.targets diff --git a/.azure/pipelines/build.yaml b/.azure/pipelines/build.yaml index 9e20f9ec..6134df89 100644 --- a/.azure/pipelines/build.yaml +++ b/.azure/pipelines/build.yaml @@ -19,7 +19,7 @@ stages: name: Variables inputs: filePath: ./.azure/pipelines/scripts/set-variables.sh - arguments: '0.0.0-dev2023.$(Build.BuildId)' + arguments: '0.0.0-dev2024.$(Build.BuildId)' displayName: 'Set up environment variables' - stage: BuildStage diff --git a/.gitignore b/.gitignore index 6bc5c4bd..6818c46c 100644 --- a/.gitignore +++ b/.gitignore @@ -5,3 +5,4 @@ **/bin/*/net7.0/* **/obj/* ideas/website/node_modules/** +testresults/** diff --git a/README.md b/README.md index 8b2e4cda..d2b20737 100644 --- a/README.md +++ b/README.md @@ -118,7 +118,7 @@ ai search index update --files "../../data/3-product-info/*.md" --index-name "pr ``` AI - Azure AI CLI, Version 1.0.0 -Copyright (c) 2023 Microsoft Corporation. All Rights Reserved. +Copyright (c) 2024 Microsoft Corporation. All Rights Reserved. This PUBLIC PREVIEW version may change at any time. 
See: https://aka.ms/azure-ai-cli-public-preview diff --git a/ai-cli.sln b/ai-cli.sln index c226ae76..605cfd9c 100644 --- a/ai-cli.sln +++ b/ai-cli.sln @@ -17,6 +17,12 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "template_extension", "src\e EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "YamlTestAdapter", "tests\testadapter\YamlTestAdapter.csproj", "{7C3F1355-B679-487D-904D-7E5FEBA9E75C}" EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "YamlTestFramework", "tests\testframework\YamlTestFramework.csproj", "{B0B3437F-1828-4A13-866F-1CF7C924015E}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "YamlTestRunner", "tests\testrunner\YamlTestRunner.csproj", "{39876475-2D98-40CF-8B08-CD423A5EB4E8}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Tests", "Tests", "{C8AFF891-D6AA-4B8F-BC21-10404DF4B355}" +EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution Debug|Any CPU = Debug|Any CPU @@ -47,6 +53,14 @@ Global {7C3F1355-B679-487D-904D-7E5FEBA9E75C}.Debug|Any CPU.Build.0 = Debug|Any CPU {7C3F1355-B679-487D-904D-7E5FEBA9E75C}.Release|Any CPU.ActiveCfg = Release|Any CPU {7C3F1355-B679-487D-904D-7E5FEBA9E75C}.Release|Any CPU.Build.0 = Release|Any CPU + {B0B3437F-1828-4A13-866F-1CF7C924015E}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {B0B3437F-1828-4A13-866F-1CF7C924015E}.Debug|Any CPU.Build.0 = Debug|Any CPU + {B0B3437F-1828-4A13-866F-1CF7C924015E}.Release|Any CPU.ActiveCfg = Release|Any CPU + {B0B3437F-1828-4A13-866F-1CF7C924015E}.Release|Any CPU.Build.0 = Release|Any CPU + {39876475-2D98-40CF-8B08-CD423A5EB4E8}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {39876475-2D98-40CF-8B08-CD423A5EB4E8}.Debug|Any CPU.Build.0 = Debug|Any CPU + {39876475-2D98-40CF-8B08-CD423A5EB4E8}.Release|Any CPU.ActiveCfg = Release|Any CPU + {39876475-2D98-40CF-8B08-CD423A5EB4E8}.Release|Any CPU.Build.0 = Release|Any CPU EndGlobalSection GlobalSection(SolutionProperties) = preSolution HideSolutionNode = 
FALSE @@ -55,6 +69,9 @@ Global {272E0B1B-6C05-428E-BF64-E30B1E5F603A} = {644B75F1-C768-4DB3-BAF2-C69A1F36DD28} {7BD6EF67-BA75-478D-9721-C1B2AB6DE3FF} = {644B75F1-C768-4DB3-BAF2-C69A1F36DD28} {023B4F9C-E2B3-4CCD-A993-87E337C16EDE} = {644B75F1-C768-4DB3-BAF2-C69A1F36DD28} + {7C3F1355-B679-487D-904D-7E5FEBA9E75C} = {C8AFF891-D6AA-4B8F-BC21-10404DF4B355} + {B0B3437F-1828-4A13-866F-1CF7C924015E} = {C8AFF891-D6AA-4B8F-BC21-10404DF4B355} + {39876475-2D98-40CF-8B08-CD423A5EB4E8} = {C8AFF891-D6AA-4B8F-BC21-10404DF4B355} EndGlobalSection GlobalSection(ExtensibilityGlobals) = postSolution SolutionGuid = {002655B1-E1E1-4F2A-8D53-C9CD55136AE2} diff --git a/src/ai/Program_AI.cs b/src/ai/Program_AI.cs index 40c850fb..dcbc2038 100644 --- a/src/ai/Program_AI.cs +++ b/src/ai/Program_AI.cs @@ -85,11 +85,11 @@ public class AiProgramData : IProgramData #endregion #region help command data - public string HelpCommandTokens => "wizard;dev;init;config;chat;flow;speech;vision;language;search;service;tool;samples;code;eval;run"; + public string HelpCommandTokens => "wizard;dev;init;config;chat;flow;speech;vision;language;search;service;tool;samples;eval;run"; #endregion #region config command data - public string ConfigScopeTokens => $"wizard;dev;init;chat;flow;speech;vision;language;search;service;tool;samples;code;eval;run;*"; + public string ConfigScopeTokens => $"wizard;dev;init;chat;flow;speech;vision;language;search;service;tool;samples;eval;run;*"; #endregion #region zip option data @@ -150,8 +150,6 @@ public bool DispatchRunCommand(ICommandValues values) "search" => (new SearchCommand(values)).RunCommand(), "service" => (new ServiceCommand(values)).RunCommand(), "tool" => (new ToolCommand(values)).RunCommand(), - "samples" => (new SamplesCommand(values)).RunCommand(), - "code" => (new CodeCommand(values)).RunCommand(), "eval" => (new EvalCommand(values)).RunCommand(), "wizard" => (new ScenarioWizardCommand(values)).RunCommand(), "dev" => (new DevCommand(values)).RunCommand(), @@ 
-179,8 +177,6 @@ public bool DispatchParseCommand(INamedValueTokens tokens, ICommandValues values "search" => SearchCommandParser.ParseCommand(tokens, values), "service" => ServiceCommandParser.ParseCommand(tokens, values), "tool" => ToolCommandParser.ParseCommand(tokens, values), - "samples" => SamplesCommandParser.ParseCommand(tokens, values), - "code" => CodeCommandParser.ParseCommand(tokens, values), "wizard" => ScenarioWizardCommandParser.ParseCommand(tokens, values), "dev" => DevCommandParser.ParseCommand(tokens, values), "run" => RunJobCommandParser.ParseCommand(tokens, values), @@ -204,9 +200,6 @@ public bool DispatchParseCommandValues(INamedValueTokens tokens, ICommandValues "search" => SearchCommandParser.ParseCommandValues(tokens, values), "service" => ServiceCommandParser.ParseCommandValues(tokens, values), "tool" => ToolCommandParser.ParseCommandValues(tokens, values), - "samples" => SamplesCommandParser.ParseCommandValues(tokens, values), - "code" => CodeCommandParser.ParseCommandValues(tokens, values), - "complete" => CompleteCommandParser.ParseCommandValues(tokens, values), "wizard" => ScenarioWizardCommandParser.ParseCommandValues(tokens, values), "dev" => DevCommandParser.ParseCommandValues(tokens, values), "run" => RunJobCommandParser.ParseCommandValues(tokens, values), diff --git a/src/ai/commands/code_command.cs b/src/ai/commands/code_command.cs deleted file mode 100644 index 91c77aa4..00000000 --- a/src/ai/commands/code_command.cs +++ /dev/null @@ -1,67 +0,0 @@ -// -// Copyright (c) Microsoft. All rights reserved. -// Licensed under the MIT license. See LICENSE.md file in the project root for full license information. 
-// - -using System; -using System.Collections.Generic; -using System.Diagnostics; -using System.IO; -using System.Linq; -using System.Net; -using System.Text; -using System.Text.RegularExpressions; -using System.Text.Json; -using System.Text.Json.Serialization; -using System.Threading; -using System.Threading.Tasks; -using Newtonsoft.Json.Linq; - -namespace Azure.AI.Details.Common.CLI -{ - public class CodeCommand : Command - { - internal CodeCommand(ICommandValues values) - { - _values = values.ReplaceValues(); - _quiet = _values.GetOrDefault("x.quiet", false); - _verbose = _values.GetOrDefault("x.verbose", true); - } - - internal bool RunCommand() - { - try - { - RunCodeCommand(); - } - catch (WebException ex) - { - ConsoleHelpers.WriteLineError($"\n ERROR: {ex.Message}"); - JsonHelpers.PrintJson(HttpHelpers.ReadWriteJson(ex.Response, _values, "code")); - } - - return _values.GetOrDefault("passed", true); - } - - private bool RunCodeCommand() - { - DoCommand(_values.GetCommand()); - return _values.GetOrDefault("passed", true); - } - - private void DoCommand(string command) - { - CheckPath(); - - switch (command) - { - default: - _values.AddThrowError("WARNING:", $"'{command.Replace('.', ' ')}' NOT YET IMPLEMENTED!!"); - break; - } - } - - private bool _quiet = false; - private bool _verbose = false; - } -} diff --git a/src/ai/commands/complete_command.cs b/src/ai/commands/complete_command.cs deleted file mode 100644 index a8020294..00000000 --- a/src/ai/commands/complete_command.cs +++ /dev/null @@ -1,199 +0,0 @@ -// -// Copyright (c) Microsoft. All rights reserved. -// Licensed under the MIT license. See LICENSE.md file in the project root for full license information. 
-// - -using System; -using System.Diagnostics; -using System.IO; -using System.Linq; -using System.Text; -using System.Threading; -using System.Threading.Tasks; -using System.Collections.Generic; -using Newtonsoft.Json; -using Newtonsoft.Json.Linq; -using System.Net; -using Azure.AI.OpenAI; - -namespace Azure.AI.Details.Common.CLI -{ - public class CompleteCommand : Command - { - internal CompleteCommand(ICommandValues values) - { - _values = values.ReplaceValues(); - } - - internal bool RunCommand() - { - Complete(); - return _values.GetOrDefault("passed", true); - } - - private void Complete() - { - StartCommand(); - - var kind = _values["complete.input.type"]; - switch (kind) - { - case "": - case null: - case "interactive": - // SynthesizeInteractive(false); - // break; - - case "interactive+": - CompleteInteractively(true); - break; - - // TODO: Add support for other input types - } - - StopCommand(); - DisposeAfterStop(); - DeleteTemporaryFiles(); - } - - private void CompleteInteractively(bool repeatedly = false) - { - var client = CreateOpenAIClient(out var deployment); - var options = CreateCompletionOptions(); - - while (true) - { - Console.Write("[complete] >>> "); - var text = ConsoleHelpers.ReadLineOrDefault("", "exit"); - - if (text.ToLower() == "") break; - if (text.ToLower() == "stop") break; - if (text.ToLower() == "quit") break; - if (text.ToLower() == "exit") break; - - var task = GetCompletionsAsync(client, deployment, options, text); - WaitForStopOrCancel(task); - - if (!repeatedly) break; - if (_canceledEvent.WaitOne(0)) break; - } - } - - private async Task> GetCompletionsAsync(OpenAIClient client, string deployment, CompletionsOptions options, string text) - { - options.Prompts.Clear(); - options.Prompts.Add(text); - options.DeploymentName = deployment; - var response = await client.GetCompletionsAsync(options); - - Console.WriteLine(); - Console.WriteLine(response.Value.Choices[0].Text); - Console.WriteLine(); - - return response; - } - - 
private CompletionsOptions CreateCompletionOptions() - { - var options = new CompletionsOptions(); - // messages.ToList().ForEach(m => options.Messages.Add(m)); - - // options.MaxTokens = TryParse(maxTokens, _defaultMaxTokens); - // options.Temperature = TryParse(temperature, _defaultTemperature); - // options.FrequencyPenalty = TryParse(frequencyPenalty, _defaultFrequencyPenalty); - // options.PresencePenalty = TryParse(presencePenalty, _defaultPresencePenalty); - - // if (!string.IsNullOrEmpty(stop)) - // { - // var stops = stop.Split('\n', StringSplitOptions.RemoveEmptyEntries).ToList(); - // stops.ForEach(s => options.StopSequences.Add(s)); - // } - - return options; - } - - private OpenAIClient CreateOpenAIClient(out string deployment) - { - var key = _values["service.config.key"]; - var host = _values["service.config.host"]; - var region = _values["service.config.region"]; - var endpoint = ConfigEndpointUriToken.Data().GetOrDefault(_values); - var tokenValue = _values["service.config.token.value"]; - - deployment = ConfigDeploymentToken.Data().GetOrDefault(_values); - - if (string.IsNullOrEmpty(endpoint) && string.IsNullOrEmpty(region) && string.IsNullOrEmpty(host)) - { - _values.AddThrowError("ERROR:", $"Creating OpenAIClient; requires one of: region, endpoint, or host."); - } - else if (!string.IsNullOrEmpty(region) && string.IsNullOrEmpty(tokenValue) && string.IsNullOrEmpty(key)) - { - _values.AddThrowError("ERROR:", $"Creating OpenAIClient; use of region requires one of: key or token."); - } - else if (string.IsNullOrEmpty(deployment)) - { - _values.AddThrowError("ERROR:", $"Creating OpenAIClient; requires deployment."); - } - - if (!string.IsNullOrEmpty(endpoint)) - { - return new OpenAIClient( - new Uri(endpoint!), - new AzureKeyCredential(key!)); - } - else if (!string.IsNullOrEmpty(host)) - { - _values.AddThrowError("ERROR:", $"Creating OpenAIClient; Not-yet-implemented create from host."); - return null; - } - else // if 
(!string.IsNullOrEmpty(region)) - { - _values.AddThrowError("ERROR:", $"Creating OpenAIClient; Not-yet-implemented create from region."); - return null; - } - } - - private void WaitForStopOrCancel(Task> task) - { - var interval = 100; - - while (!task.Wait(interval)) - { - if (_stopEvent.WaitOne(0)) break; - if (_canceledEvent.WaitOne(0)) break; - } - } - - private void StartCommand() - { - CheckPath(); - // CheckCompleteInput(); - - // _display = new DisplayHelper(_values); - - // _output = new OutputHelper(_values); - // _output.StartOutput(); - - // var id = _values["complete.input.id"]; - // _output.EnsureOutputAll("complete.input.id", id); - // _output.EnsureOutputEach("complete.input.id", id); - - _lock = new SpinLock(); - _lock.StartLock(); - } - - private void StopCommand() - { - _lock.StopLock(5000); - _stopEvent.Set(); - - // _output.CheckOutput(); - // _output.StopOutput(); - } - - private SpinLock _lock = null; - - // OutputHelper _output = null; - // DisplayHelper _display = null; - } -} diff --git a/src/ai/commands/dev_command.cs b/src/ai/commands/dev_command.cs index e0b5dda5..114a70fd 100644 --- a/src/ai/commands/dev_command.cs +++ b/src/ai/commands/dev_command.cs @@ -45,7 +45,7 @@ private bool RunDevCommand() private void DoCommand(string command) { - CheckPath(); + StartCommand(); switch (command) { @@ -57,6 +57,10 @@ private void DoCommand(string command) _values.AddThrowError("WARNING:", $"'{command.Replace('.', ' ')}' NOT YET IMPLEMENTED!!"); break; } + + StopCommand(); + DisposeAfterStop(); + DeleteTemporaryFiles(); } private void DoNew() @@ -112,21 +116,16 @@ private void DoDevShell() ConfigEnvironmentHelpers.SetEnvironment(env); Console.WriteLine(); - var runCommand = RunCommandToken.Data().GetOrDefault(_values); - UpdateFileNameArguments(runCommand, ref fileName, ref arguments, out var deleteWhenDone); - - var process = ProcessHelpers.StartProcess(fileName, arguments, env, false); - process.WaitForExit(); + var runCommand = 
RunCommandScriptToken.Data().GetOrDefault(_values); + var processOutput = string.IsNullOrEmpty(runCommand) + ? ProcessHelpers.RunShellCommandAsync(fileName, arguments, env, null, null, null, false).Result + : ProcessHelpers.RunShellCommandAsync(runCommand, env, null, null, null, false).Result; - if (!string.IsNullOrEmpty(deleteWhenDone)) - { - File.Delete(deleteWhenDone); - } - - if (process.ExitCode != 0) + var exitCode = processOutput.ExitCode; + if (exitCode != 0) { Console.WriteLine("\n(ai dev shell) FAILED!\n"); - _values.AddThrowError("ERROR:", $"Shell exited with code {process.ExitCode}"); + _values.AddThrowError("ERROR:", $"Shell exited with code {exitCode}"); } else { @@ -134,41 +133,6 @@ private void DoDevShell() } } - private static void UpdateFileNameArguments(string runCommand, ref string fileName, ref string arguments, out string? deleteTempFileWhenDone) - { - deleteTempFileWhenDone = null; - - if (!string.IsNullOrEmpty(runCommand)) - { - var isSingleLine = !runCommand.Contains('\n') && !runCommand.Contains('\r'); - if (isSingleLine) - { - var parts = runCommand.Split(new char[] { ' ' }, 2); - var inPath = FileHelpers.FileExistsInOsPath(parts[0]) || (OS.IsWindows() && FileHelpers.FileExistsInOsPath(parts[0] + ".exe")); - - var filePart = parts[0]; - var argsPart = parts.Length == 2 ? parts[1] : null; - - fileName = inPath ? filePart : fileName; - arguments = inPath ? argsPart : (OS.IsLinux() - ? $"-lic \"{runCommand}\"" - : $"/c \"{runCommand}\""); - - Console.WriteLine($"Running command: {runCommand}\n"); - } - else - { - deleteTempFileWhenDone = Path.GetTempFileName() + (OS.IsWindows() ? ".cmd" : ".sh"); - File.WriteAllText(deleteTempFileWhenDone, runCommand); - - fileName = OS.IsLinux() ? "bash" : "cmd.exe"; - arguments = OS.IsLinux() ? 
$"-lic \"{deleteTempFileWhenDone}\"" : $"/c \"{deleteTempFileWhenDone}\""; - - Console.WriteLine($"Running script:\n\n{runCommand}\n"); - } - } - } - private void DisplayBanner(string which) { if (_quiet) return; @@ -225,6 +189,32 @@ private void CheckGenerateTemplateFileWarnings(string templateName, string langu } } + private void StartCommand() + { + CheckPath(); + LogHelpers.EnsureStartLogFile(_values); + + // _display = new DisplayHelper(_values); + + // _output = new OutputHelper(_values); + // _output.StartOutput(); + + _lock = new SpinLock(); + _lock.StartLock(); + } + + private void StopCommand() + { + _lock.StopLock(5000); + + // LogHelpers.EnsureStopLogFile(_values); + // _output.CheckOutput(); + // _output.StopOutput(); + + _stopEvent.Set(); + } + + private SpinLock _lock = null; private readonly bool _quiet; private readonly bool _verbose; } diff --git a/src/ai/commands/parsers/code_command_parser.cs b/src/ai/commands/parsers/code_command_parser.cs deleted file mode 100644 index 967eb3ec..00000000 --- a/src/ai/commands/parsers/code_command_parser.cs +++ /dev/null @@ -1,78 +0,0 @@ -// -// Copyright (c) Microsoft. All rights reserved. -// Licensed under the MIT license. See LICENSE.md file in the project root for full license information. 
-// - -using System; -using System.Collections.Generic; -using System.Linq; - -namespace Azure.AI.Details.Common.CLI -{ - class CodeCommandParser : CommandParser - { - public static bool ParseCommand(INamedValueTokens tokens, ICommandValues values) - { - return ParseCommands(_commands, _partialCommands, tokens, values, x => GetCommandParsers(x)); - } - - public static bool ParseCommandValues(INamedValueTokens tokens, ICommandValues values) - { - return ParseCommandValues("code", GetCommandParsers(values), tokens, values); - } - - private static readonly (string name, bool valuesRequired)[] _commands = { - ("code", true) - }; - - private static readonly string[] _partialCommands = { - "code" - }; - - private static IEnumerable GetCommandParsers(ICommandValues values) - { - var commandName = values.GetCommand(); - foreach (var command in _commands) - { - if (commandName == command.name) - { - return _codePlaceHolderParsers; - } - } - - return null; - } - - #region private data - - public class CommonCodeNamedValueTokenParsers : NamedValueTokenParserList - { - public CommonCodeNamedValueTokenParsers() : base( - - new NamedValueTokenParser(null, "x.command", "11", "1"), - - new ExpectOutputTokenParser(), - new DiagnosticLogTokenParser(), - new CommonNamedValueTokenParsers(), - - new NamedValueTokenParser("--ini", "ini.file", "10", "1", "@"), - - new NamedValueTokenParser(null, "x.command.expand.file.name", "11111", "1"), - - ConfigEndpointUriToken.Parser(), - ConfigDeploymentToken.Parser() - - ) - { - } - } - - private static INamedValueTokenParser[] _codePlaceHolderParsers = { - - new CommonCodeNamedValueTokenParsers() - - }; - - #endregion - } -} diff --git a/src/ai/commands/parsers/complete_command_parser.cs b/src/ai/commands/parsers/complete_command_parser.cs deleted file mode 100644 index 9e193375..00000000 --- a/src/ai/commands/parsers/complete_command_parser.cs +++ /dev/null @@ -1,48 +0,0 @@ -// -// Copyright (c) Microsoft. All rights reserved. 
-// Licensed under the MIT license. See LICENSE.md file in the project root for full license information. -// - -using System; -using System.Collections.Generic; -using System.Linq; - -namespace Azure.AI.Details.Common.CLI -{ - class CompleteCommandParser : CommandParser - { - public static bool ParseCommand(INamedValueTokens tokens, ICommandValues values) - { - return ParseCommand("complete", completeCommandParsers, tokens, values); - } - - public static bool ParseCommandValues(INamedValueTokens tokens, ICommandValues values) - { - return ParseCommandValues("complete", completeCommandParsers, tokens, values); - } - - #region private data - - private static INamedValueTokenParser[] completeCommandParsers = { - - new NamedValueTokenParser(null, "x.command", "11", "1", "complete"), - - new ExpectOutputTokenParser(), - new DiagnosticLogTokenParser(), - new CommonNamedValueTokenParsers(), - - new NamedValueTokenParser("--ini", "ini.file", "10", "1", "@"), - - new NamedValueTokenParser(null, "x.command.expand.file.name", "11111", "1"), - - ConfigEndpointUriToken.Parser(), - ConfigDeploymentToken.Parser(), - - new NamedValueTokenParser("--interactive", "complete.input.interactive", "001", "0", null, null, "interactive", "complete.input.type"), - new NamedValueTokenParser("--interactive+", "complete.input.interactive+", "001", "0", null, null, "interactive+", "complete.input.type"), - new NamedValueTokenParser(null, "complete.input.type", "111", "1", "interactive;interactive+;text;ssml;text.file;ssml.file"), - }; - - #endregion - } -} diff --git a/src/ai/commands/parsers/dev_command_parser.cs b/src/ai/commands/parsers/dev_command_parser.cs index ad9281fa..5ddc0ab4 100644 --- a/src/ai/commands/parsers/dev_command_parser.cs +++ b/src/ai/commands/parsers/dev_command_parser.cs @@ -84,7 +84,7 @@ public CommonDevNamedValueTokenParsers() : base( private static INamedValueTokenParser[] _devShellParsers = { new CommonDevNamedValueTokenParsers(), - RunCommandToken.Parser() + 
RunCommandScriptToken.Parser() }; } } diff --git a/src/ai/commands/parsers/samples_command_parser.cs b/src/ai/commands/parsers/samples_command_parser.cs deleted file mode 100644 index bdbeb248..00000000 --- a/src/ai/commands/parsers/samples_command_parser.cs +++ /dev/null @@ -1,78 +0,0 @@ -// -// Copyright (c) Microsoft. All rights reserved. -// Licensed under the MIT license. See LICENSE.md file in the project root for full license information. -// - -using System; -using System.Collections.Generic; -using System.Linq; - -namespace Azure.AI.Details.Common.CLI -{ - class SamplesCommandParser : CommandParser - { - public static bool ParseCommand(INamedValueTokens tokens, ICommandValues values) - { - return ParseCommands(_commands, _partialCommands, tokens, values, x => GetCommandParsers(x)); - } - - public static bool ParseCommandValues(INamedValueTokens tokens, ICommandValues values) - { - return ParseCommandValues("samples", GetCommandParsers(values), tokens, values); - } - - private static readonly (string name, bool valuesRequired)[] _commands = { - ("samples", true) - }; - - private static readonly string[] _partialCommands = { - "samples" - }; - - private static IEnumerable GetCommandParsers(ICommandValues values) - { - var commandName = values.GetCommand(); - foreach (var command in _commands) - { - if (commandName == command.name) - { - return _samplesPlaceHolderParsers; - } - } - - return null; - } - - #region private data - - public class CommonSamplesNamedValueTokenParsers : NamedValueTokenParserList - { - public CommonSamplesNamedValueTokenParsers() : base( - - new NamedValueTokenParser(null, "x.command", "11", "1"), - - new ExpectOutputTokenParser(), - new DiagnosticLogTokenParser(), - new CommonNamedValueTokenParsers(), - - new NamedValueTokenParser("--ini", "ini.file", "10", "1", "@"), - - new NamedValueTokenParser(null, "x.command.expand.file.name", "11111", "1"), - - ConfigEndpointUriToken.Parser(), - ConfigDeploymentToken.Parser() - - ) - { - } - } 
- - private static INamedValueTokenParser[] _samplesPlaceHolderParsers = { - - new CommonSamplesNamedValueTokenParsers() - - }; - - #endregion - } -} diff --git a/src/ai/commands/samples_command.cs b/src/ai/commands/samples_command.cs deleted file mode 100644 index bbb6574a..00000000 --- a/src/ai/commands/samples_command.cs +++ /dev/null @@ -1,67 +0,0 @@ -// -// Copyright (c) Microsoft. All rights reserved. -// Licensed under the MIT license. See LICENSE.md file in the project root for full license information. -// - -using System; -using System.Collections.Generic; -using System.Diagnostics; -using System.IO; -using System.Linq; -using System.Net; -using System.Text; -using System.Text.RegularExpressions; -using System.Text.Json; -using System.Text.Json.Serialization; -using System.Threading; -using System.Threading.Tasks; -using Newtonsoft.Json.Linq; - -namespace Azure.AI.Details.Common.CLI -{ - public class SamplesCommand : Command - { - internal SamplesCommand(ICommandValues values) - { - _values = values.ReplaceValues(); - _quiet = _values.GetOrDefault("x.quiet", false); - _verbose = _values.GetOrDefault("x.verbose", true); - } - - internal bool RunCommand() - { - try - { - RunSamplesCommand(); - } - catch (WebException ex) - { - ConsoleHelpers.WriteLineError($"\n ERROR: {ex.Message}"); - JsonHelpers.PrintJson(HttpHelpers.ReadWriteJson(ex.Response, _values, "samples")); - } - - return _values.GetOrDefault("passed", true); - } - - private bool RunSamplesCommand() - { - DoCommand(_values.GetCommand()); - return _values.GetOrDefault("passed", true); - } - - private void DoCommand(string command) - { - CheckPath(); - - switch (command) - { - default: - _values.AddThrowError("WARNING:", $"'{command.Replace('.', ' ')}' NOT YET IMPLEMENTED!!"); - break; - } - } - - private bool _quiet = false; - private bool _verbose = false; - } -} diff --git a/src/ai/commands/service_command.cs b/src/ai/commands/service_command.cs index 8f952e99..b5409ad5 100644 --- 
a/src/ai/commands/service_command.cs +++ b/src/ai/commands/service_command.cs @@ -52,7 +52,6 @@ private bool RunServiceCommand() private void DoCommand(string command) { StartCommand(); - CheckPath(); switch (command) { diff --git a/src/common/Program.cs b/src/common/Program.cs index ba129695..e142736d 100644 --- a/src/common/Program.cs +++ b/src/common/Program.cs @@ -127,7 +127,7 @@ private static void DisplayBanner(ICommandValues values) if (values.GetOrDefault("x.cls", false)) Console.Clear(); Console.WriteLine(GetDisplayBannerText()); - Console.WriteLine("Copyright (c) 2023 Microsoft Corporation. All Rights Reserved."); + Console.WriteLine("Copyright (c) 2024 Microsoft Corporation. All Rights Reserved."); Console.WriteLine(""); var warning = Program.WarningBanner; @@ -324,45 +324,45 @@ private static bool RunCommand(ICommandValues values) private static IProgramData _data; - public static string Name => _data.Name; + public static string Name => _data?.Name; - public static string DisplayName => _data.DisplayName; + public static string DisplayName => _data?.DisplayName; - public static string WarningBanner => _data.WarningBanner; + public static string WarningBanner => _data?.WarningBanner; - public static string TelemetryUserAgent => _data.TelemetryUserAgent; + public static string TelemetryUserAgent => _data?.TelemetryUserAgent; - public static string Exe => _data.Exe; + public static string Exe => _data?.Exe; - public static string Dll => _data.Dll; + public static string Dll => _data?.Dll; - public static Type ResourceAssemblyType => _data.ResourceAssemblyType; + public static Type ResourceAssemblyType => _data?.ResourceAssemblyType; - public static Assembly ResourceAssembly => _data.ResourceAssemblyType.Assembly; + public static Assembly ResourceAssembly => _data?.ResourceAssemblyType.Assembly; - public static Type BindingAssemblySdkType => _data.BindingAssemblySdkType; + public static Type BindingAssemblySdkType => _data?.BindingAssemblySdkType; - public 
static string SERVICE_RESOURCE_DISPLAY_NAME_ALL_CAPS => _data.SERVICE_RESOURCE_DISPLAY_NAME_ALL_CAPS; + public static string SERVICE_RESOURCE_DISPLAY_NAME_ALL_CAPS => _data?.SERVICE_RESOURCE_DISPLAY_NAME_ALL_CAPS; - public static string CognitiveServiceResourceKind => _data.CognitiveServiceResourceKind; + public static string CognitiveServiceResourceKind => _data?.CognitiveServiceResourceKind; - public static string CognitiveServiceResourceSku => _data.CognitiveServiceResourceSku; + public static string CognitiveServiceResourceSku => _data?.CognitiveServiceResourceSku; - public static bool InitConfigsEndpoint => _data.InitConfigsEndpoint; + public static bool InitConfigsEndpoint => _data != null && _data.InitConfigsEndpoint; - public static bool InitConfigsSubscription => _data.InitConfigsSubscription; + public static bool InitConfigsSubscription => _data != null && _data.InitConfigsSubscription; - public static string HelpCommandTokens => _data.HelpCommandTokens; + public static string HelpCommandTokens => _data?.HelpCommandTokens; - public static string ConfigScopeTokens => _data.ConfigScopeTokens; + public static string ConfigScopeTokens => _data?.ConfigScopeTokens; - public static string[] ZipIncludes => _data.ZipIncludes; + public static string[] ZipIncludes => _data?.ZipIncludes; - public static bool DispatchRunCommand(ICommandValues values) => _data.DispatchRunCommand(values); - public static bool DispatchParseCommand(INamedValueTokens tokens, ICommandValues values) => _data.DispatchParseCommand(tokens, values); - public static bool DispatchParseCommandValues(INamedValueTokens tokens, ICommandValues values) => _data.DispatchParseCommandValues(tokens, values); - public static bool DisplayKnownErrors(ICommandValues values, Exception ex) => _data.DisplayKnownErrors(values, ex); + public static bool DispatchRunCommand(ICommandValues values) => _data != null && _data.DispatchRunCommand(values); + public static bool DispatchParseCommand(INamedValueTokens tokens, 
ICommandValues values) => _data != null && _data.DispatchParseCommand(tokens, values); + public static bool DispatchParseCommandValues(INamedValueTokens tokens, ICommandValues values) => _data != null && _data.DispatchParseCommandValues(tokens, values); + public static bool DisplayKnownErrors(ICommandValues values, Exception ex) => _data != null && _data.DisplayKnownErrors(values, ex); - public static IEventLoggerHelpers EventLoggerHelpers => _data.EventLoggerHelpers; + public static IEventLoggerHelpers EventLoggerHelpers => _data?.EventLoggerHelpers; } } diff --git a/src/common/details/commands/init_command.cs b/src/common/details/commands/init_command.cs index 236fc7ef..30f9c5d3 100644 --- a/src/common/details/commands/init_command.cs +++ b/src/common/details/commands/init_command.cs @@ -56,8 +56,6 @@ private async Task DoCommand(string command) DisplayInitServiceBanner(); - CheckPath(); - var interactive = _values.GetOrDefault("init.service.interactive", true); switch (command) diff --git a/src/common/details/commands/runjob_command.cs b/src/common/details/commands/runjob_command.cs index 6190723a..bcffced4 100644 --- a/src/common/details/commands/runjob_command.cs +++ b/src/common/details/commands/runjob_command.cs @@ -66,6 +66,8 @@ private bool DoRunJob() var itemArgs = _values.GetOrDefault("run.input.post.item.args", "").Replace(';', ' '); var preItemArgs = _values.GetOrDefault("run.input.pre.item.args", "").Replace(';', ' '); + var inputPath = _values.GetOrDefault("x.input.path", Directory.GetCurrentDirectory()); + var processOk = !string.IsNullOrEmpty(process); var commandOk = !string.IsNullOrEmpty(command); var scriptOk = !string.IsNullOrEmpty(script); @@ -75,7 +77,7 @@ private bool DoRunJob() var app = processOk && process == Program.Name; if (app && jobOk && !job.StartsWith("@")) job = $"@{job}"; - var startPath = UpdateJobStartPath(ref job, _values.GetOrDefault("x.input.path", Directory.GetCurrentDirectory())); + var startPath = UpdateJobStartPath(ref 
job, inputPath); if (!processOk && scriptOk) processOk = UpdateProcessIfFileNotExist(script, ref process); if (!processOk && commandOk) processOk = UpdateProcessIfFileNotExist(command, ref process); diff --git a/src/common/details/helpers/file_helpers.cs b/src/common/details/helpers/file_helpers.cs index 2be6afc0..fbaddedb 100644 --- a/src/common/details/helpers/file_helpers.cs +++ b/src/common/details/helpers/file_helpers.cs @@ -82,26 +82,32 @@ public static string AppendToFileName(string fileName, string appendBeforeExtens return Path.Combine(file.DirectoryName, $"{Path.GetFileNameWithoutExtension(file.FullName)}{appendBeforeExtension}{file.Extension}{appendAfterExtension}"); } - public static IEnumerable FindFiles(string path, string pattern, INamedValues values = null) + public static IEnumerable FindFiles(string path, string pattern, INamedValues values = null, bool checkOverrides = true, bool checkResources = true) { - return FindFiles(PathHelpers.Combine(path, pattern), values); + return FindFiles(PathHelpers.Combine(path, pattern), values, checkOverrides, checkResources); } - public static IEnumerable FindFiles(string fileNames, INamedValues values = null) + public static IEnumerable FindFiles(string fileNames, INamedValues values = null, bool checkOverrides = true, bool checkResources = true) { var currentDir = Directory.GetCurrentDirectory(); foreach (var item in fileNames.Split(new char[] { ';', '\r', '\n' }, StringSplitOptions.RemoveEmptyEntries)) { - var overrides = FindOverrides(item); - foreach (var name in overrides) + if (checkOverrides) { - yield return name; + var overrides = FindOverrides(item); + foreach (var name in overrides) + { + yield return name; + } } - var resources = FindResources(item); - foreach (var resource in resources) + if (checkResources) { - yield return resource; + var resources = FindResources(item); + foreach (var resource in resources) + { + yield return resource; + } } if (IsResource(item) || IsOverride(item)) continue; 
@@ -947,7 +953,7 @@ public static bool ResourceExists(string fileName) public static bool IsResource(string fileName) { - return !string.IsNullOrEmpty(fileName) && fileName.StartsWith(Program.Exe); + return !string.IsNullOrEmpty(fileName) && !string.IsNullOrEmpty(Program.Exe) && fileName.StartsWith(Program.Exe); } private static string ResourceNameFromFileName(string fileName) @@ -1439,7 +1445,7 @@ private static string CheckDotDirectory(string checkPath, bool mustExist = true, } private const string resourcePrefix = "Azure.AI.Details.Common.CLI.resources"; - private static readonly string overridePrefix = $"${Program.Name.ToUpper()}"; + private static readonly string overridePrefix = $"${Program.Name?.ToUpper()}"; private const string defaultDataPath = @";./;../;../../;../../../;../../../../;{config.path};"; diff --git a/src/common/details/helpers/process_helpers.cs b/src/common/details/helpers/process_helpers.cs index d2781d45..4f866452 100644 --- a/src/common/details/helpers/process_helpers.cs +++ b/src/common/details/helpers/process_helpers.cs @@ -66,49 +66,80 @@ public static Process StartProcess(string fileName, string arguments, Dictionary return Process.Start(start); } - public static async Task RunShellCommandAsync(string command, string arguments, Dictionary addToEnvironment = null, Action stdOutHandler = null, Action stdErrHandler = null, Action mergedOutputHandler = null) + public static async Task RunShellCommandAsync(string commandLine, Dictionary addToEnvironment = null, Action stdOutHandler = null, Action stdErrHandler = null, Action mergedOutputHandler = null, bool captureOutput = true) + { + var isWindows = RuntimeInformation.IsOSPlatform(OSPlatform.Windows); + var command = isWindows ? "cmd" : "bash"; + var arguments = isWindows ? 
$"/c \"{commandLine}\"" : $"-li \"{commandLine}\""; + return await RunShellCommandAsync(command, arguments, addToEnvironment, stdOutHandler, stdErrHandler, mergedOutputHandler, captureOutput); + } + + public static async Task RunShellCommandAsync(string command, string arguments, Dictionary addToEnvironment = null, Action stdOutHandler = null, Action stdErrHandler = null, Action mergedOutputHandler = null, bool captureOutput = true) { SHELL_DEBUG_TRACE($"COMMAND: {command} {arguments} {DictionaryToString(addToEnvironment)}"); - var stdOut = new StringBuilder(); - var stdErr = new StringBuilder(); - var mergedOutput = new StringBuilder(); + var redirectOutput = captureOutput || stdOutHandler != null || stdErrHandler != null || mergedOutputHandler != null; + + var outDoneSignal = new ManualResetEvent(false); + var errDoneSignal = new ManualResetEvent(false); + var sbOut = new StringBuilder(); + var sbErr = new StringBuilder(); + var sbMerged = new StringBuilder(); + var stdOutReceived = (string data) => { if (data != null) { - stdOut.AppendLine(data); - mergedOutput.AppendLine(data); + sbOut.AppendLine(data); + sbMerged.AppendLine(data); if (stdOutHandler != null) stdOutHandler(data); if (mergedOutputHandler != null) mergedOutputHandler(data); } + else + { + outDoneSignal.Set(); + } }; var stdErrReceived = (string data) => { if (data != null) { - stdErr.AppendLine(data); - mergedOutput.AppendLine(data); + sbErr.AppendLine(data); + sbMerged.AppendLine(data); if (stdErrHandler != null) stdErrHandler(data); if (mergedOutputHandler != null) mergedOutputHandler(data); } + else + { + errDoneSignal.Set(); + } }; - var process = TryCatchHelpers.TryCatchNoThrow(() => StartShellCommandProcess(command, arguments, addToEnvironment), null, out Exception processException); + var process = TryCatchHelpers.TryCatchNoThrow(() => StartShellCommandProcess(command, arguments, addToEnvironment, redirectOutput), null, out Exception processException); if (process == null) { 
SHELL_DEBUG_TRACE($"ERROR: {processException}"); return new ProcessOutput() { StdError = processException.ToString() }; } - process.OutputDataReceived += (sender, e) => stdOutReceived(e.Data); - process.ErrorDataReceived += (sender, e) => stdErrReceived(e.Data); - process.BeginOutputReadLine(); - process.BeginErrorReadLine(); + + if (redirectOutput) + { + process.OutputDataReceived += (sender, e) => stdOutReceived(e.Data); + process.ErrorDataReceived += (sender, e) => stdErrReceived(e.Data); + process.BeginOutputReadLine(); + process.BeginErrorReadLine(); + } await process.WaitForExitAsync(); + if (redirectOutput) + { + outDoneSignal.WaitOne(); + errDoneSignal.WaitOne(); + } + var output = new ProcessOutput(); - output.StdOutput = process != null ? stdOut.ToString().Trim(' ', '\r', '\n') : ""; - output.StdError = process != null ? stdErr.ToString().Trim(' ', '\r', '\n') : processException.ToString(); - output.MergedOutput = process != null ? mergedOutput.ToString().Trim(' ', '\r', '\n') : ""; + output.StdOutput = process != null ? sbOut.ToString().Trim(' ', '\r', '\n') : ""; + output.StdError = process != null ? sbErr.ToString().Trim(' ', '\r', '\n') : processException.ToString(); + output.MergedOutput = process != null ? sbMerged.ToString().Trim(' ', '\r', '\n') : ""; output.ExitCode = process != null ? process.ExitCode : -1; if (!string.IsNullOrEmpty(output.StdOutput)) SHELL_DEBUG_TRACE($"---\nSTDOUT\n---\n{output.StdOutput}"); @@ -130,12 +161,12 @@ public static async Task RunShellCommandAsync(string command, str return x; } - private static Process StartShellCommandProcess(string command, string arguments, Dictionary addToEnvironment = null) + private static Process StartShellCommandProcess(string command, string arguments, Dictionary addToEnvironment = null, bool captureOutput = true) { var isWindows = RuntimeInformation.IsOSPlatform(OSPlatform.Windows); return isWindows - ? 
StartProcess("cmd", $"/c {command} {arguments}", addToEnvironment) - : StartProcess(command, arguments, addToEnvironment); + ? StartProcess("cmd", $"/c {command} {arguments}", addToEnvironment, captureOutput) + : StartProcess(command, arguments, addToEnvironment, captureOutput); } private static void SHELL_DEBUG_TRACE(string message,[CallerLineNumber] int line = 0, [CallerMemberName] string? caller = null, [CallerFilePath] string? file = null) diff --git a/src/common/details/named_values/tokens/run_command_token.cs b/src/common/details/named_values/tokens/run_command_script_token.cs similarity index 53% rename from src/common/details/named_values/tokens/run_command_token.cs rename to src/common/details/named_values/tokens/run_command_script_token.cs index 77410da2..f3f0a934 100644 --- a/src/common/details/named_values/tokens/run_command_token.cs +++ b/src/common/details/named_values/tokens/run_command_script_token.cs @@ -5,14 +5,14 @@ namespace Azure.AI.Details.Common.CLI { - public class RunCommandToken + public class RunCommandScriptToken { public static NamedValueTokenData Data() => new NamedValueTokenData(_optionName, _fullName, _optionExample, _requiredDisplayName); - public static INamedValueTokenParser Parser() => new NamedValueTokenParser(_optionName, _fullName, "10;01", "1"); + public static INamedValueTokenParser Parser() => new NamedValueTokenParser(_optionName, _fullName, "100;010;010", "1"); - private const string _requiredDisplayName = "run command"; - private const string _optionName = "--run-command"; - private const string _optionExample = "COMMAND"; - private const string _fullName = "run.command"; + private const string _requiredDisplayName = "run shell command/script"; + private const string _optionName = "--script"; + private const string _optionExample = "COMMAND/SCRIPT"; + private const string _fullName = "run.command.script"; } } diff --git a/src/spx/spx-cli.csproj b/src/spx/spx-cli.csproj index 42bca76b..4c2752f8 100644 --- 
a/src/spx/spx-cli.csproj +++ b/src/spx/spx-cli.csproj @@ -90,7 +90,7 @@ - + diff --git a/src/vz/vz-cli.csproj b/src/vz/vz-cli.csproj index 16d4fd72..afbd9e3b 100644 --- a/src/vz/vz-cli.csproj +++ b/src/vz/vz-cli.csproj @@ -85,7 +85,7 @@ - + diff --git a/tests/Azure-AI-CLI-TestRunner-Default-Tags.yaml b/tests/Azure-AI-CLI-TestFramework-Default-Tags.yaml similarity index 100% rename from tests/Azure-AI-CLI-TestRunner-Default-Tags.yaml rename to tests/Azure-AI-CLI-TestFramework-Default-Tags.yaml diff --git a/tests/test.yaml b/tests/test.yaml index 757b8d3a..92a3747e 100644 --- a/tests/test.yaml +++ b/tests/test.yaml @@ -26,7 +26,7 @@ expect: | (?# ---------- BANNER) AI - Azure AI CLI, Version [01]\.[0-9].[0-9] - Copyright \(c\) 2023 Microsoft Corporation\. All Rights Reserved\. + Copyright \(c\) 2024 Microsoft Corporation\. All Rights Reserved\. This PUBLIC PREVIEW version may change at any time\. See: https://aka\.ms/azure-ai-cli-public-preview diff --git a/tests/test3.yaml b/tests/test3.yaml index 05ad5d69..dd402492 100644 --- a/tests/test3.yaml +++ b/tests/test3.yaml @@ -18,7 +18,7 @@ interactive: false - name: test ai chat - command: ai chat --question "Why is the sky blue" --index-name @none + command: ai chat --question "Why is the sky blue, what's it called" --index-name @none expect: Rayleigh - name: dev new environment @@ -29,7 +29,7 @@ - name: generate template command: ai dev new helper-functions - name: build template - script: | + bash: | cd helper-functions dotnet build - name: run template @@ -38,7 +38,6 @@ What is my name? 
expect: | assistant-function: GetUsersName\({}\) = - tag: skip - area: ai dev new openai-chat tests: @@ -48,7 +47,7 @@ - name: generate template command: ai dev new openai-chat --cs - name: build template - script: | + bash: | cd openai-chat-cs dotnet build - name: run template @@ -65,9 +64,10 @@ - name: generate template command: ai dev new openai-chat --go - name: build template - script: | + bash: | cd openai-chat-go - go mod tidy && go build + go mod tidy + go build - name: run template command: ai dev shell --run "openai-chat-go\openai_chat_completions_hello_world" input: |- @@ -80,11 +80,11 @@ - name: generate template command: ai dev new openai-chat --java - name: restore packages - script: | + bash: | cd openai-chat-java mvn clean package - name: build template - script: | + bash: | cd openai-chat-java javac -cp "target/lib/*" src/OpenAIChatCompletionsClass.java src/Main.java -d out - name: run template @@ -99,7 +99,7 @@ - name: generate template command: ai dev new openai-chat --javascript - name: build template - script: | + bash: | cd openai-chat-js npm install - name: run template @@ -114,7 +114,7 @@ - name: generate template command: ai dev new openai-chat --python - name: build template - script: | + bash: | cd openai-chat-py pip install -r requirements.txt - name: run template @@ -132,7 +132,7 @@ - name: generate template command: ai dev new openai-chat-streaming --cs - name: build template - script: | + bash: | cd openai-chat-streaming-cs dotnet build - name: run template @@ -147,9 +147,10 @@ - name: generate template command: ai dev new openai-chat-streaming --go - name: build template - script: | + bash: | cd openai-chat-streaming-go - go mod tidy && go build + go mod tidy + go build - name: run template command: ai dev shell --run "openai-chat-streaming-go\openai_chat_completions_streaming_hello_world" input: |- @@ -162,11 +163,11 @@ - name: generate template command: ai dev new openai-chat-streaming --java - name: restore packages - script: | + 
bash: | cd openai-chat-streaming-java mvn clean package - name: build template - script: | + bash: | cd openai-chat-streaming-java javac -cp "target/lib/*" src/OpenAIChatCompletionsStreamingClass.java src/Main.java -d out - name: run template @@ -183,7 +184,7 @@ - name: generate template command: ai dev new openai-chat-streaming --javascript - name: build template - script: | + bash: | cd openai-chat-streaming-js npm install - name: run template @@ -198,7 +199,7 @@ - name: generate template command: ai dev new openai-chat-streaming --python - name: build template - script: | + bash: | cd openai-chat-streaming-py pip install -r requirements.txt - name: run template @@ -216,13 +217,13 @@ - name: generate template command: ai dev new openai-chat-streaming-with-data --cs - name: build template - script: | + bash: | cd openai-chat-streaming-with-data-cs dotnet build - name: run template command: ai dev shell --run "openai-chat-streaming-with-data-cs\bin\Debug\net7.0\OpenAIChatCompletionsWithDataStreaming" input: |- - What parameter should i use to initialize? + What parameter should i use to select my resources? tag: skip - class: dev new openai-chat-streaming-with-data (javascript) @@ -230,13 +231,13 @@ - name: generate template command: ai dev new openai-chat-streaming-with-data --javascript - name: build template - script: | + bash: | cd openai-chat-streaming-with-data-js npm install - name: run template command: ai dev shell --run "cd openai-chat-streaming-with-data-js && node main.js" input: |- - What parameter should i use to initialize? + What parameter should i use to select my resources? 
tag: skip - class: dev new openai-chat-streaming-with-data (python) @@ -244,13 +245,13 @@ - name: generate template command: ai dev new openai-chat-streaming-with-data --python - name: build template - script: | + bash: | cd openai-chat-streaming-with-data-py pip install -r requirements.txt - name: run template command: ai dev shell --run "cd openai-chat-streaming-with-data-py && python main.py" input: |- - What parameter should i use to initialize? + What parameter should i use to select my resources? tag: skip - class: dev new openai-chat-streaming-with-data (go) steps: @@ -274,7 +275,7 @@ - name: generate template command: ai dev new openai-chat-streaming-with-functions --cs - name: build template - script: | + bash: | cd openai-chat-streaming-with-functions-cs dotnet build - name: run template @@ -289,9 +290,10 @@ - name: generate template command: ai dev new openai-chat-streaming-with-functions --go - name: build template - script: | + bash: | cd openai-chat-streaming-with-functions-go - go mod tidy && go build + go mod tidy + go build - name: run template command: ai dev shell --run "cd openai-chat-streaming-with-functions-go && openai_chat_completions_functions_streaming_hello_world" input: |- @@ -304,7 +306,7 @@ - name: generate template command: ai dev new openai-chat-streaming-with-functions --javascript - name: build template - script: | + bash: | cd openai-chat-streaming-with-functions-js npm install - name: run template @@ -319,7 +321,7 @@ - name: generate template command: ai dev new openai-chat-streaming-with-functions --python - name: build template - script: | + bash: | cd openai-chat-streaming-with-functions-py pip install -r requirements.txt - name: run template @@ -337,11 +339,11 @@ - name: generate template command: ai dev new openai-webpage --javascript - name: build template - script: | + bash: | cd openai-webpage-js npm install - name: pack template - script: | + bash: | cd openai-webpage-js npx webpack @@ -350,11 +352,11 @@ - name: generate 
template command: ai dev new openai-webpage --typescript - name: build template - script: | + bash: | cd openai-webpage-ts npm install - name: pack template - script: | + bash: | cd openai-webpage-ts npx webpack @@ -364,13 +366,13 @@ - class: dev new openai-webpage-with-functions (javascript) steps: - name: generate template - command: ai dev new openai-webpage-with-functions --javascript - - name: build template - script: | + command: ai dev new openai-webpage-with-functions --javascript + - name: build template + bash: | cd openai-webpage-with-functions-js npm install - - name: pack template - script: | + - name: pack template + bash: | cd openai-webpage-with-functions-js npx webpack @@ -379,10 +381,10 @@ - name: generate template command: ai dev new openai-webpage-with-functions --typescript - name: build template - script: | + bash: | cd openai-webpage-with-functions-ts npm install - name: pack template - script: | + bash: | cd openai-webpage-with-functions-ts npx webpack diff --git a/tests/testadapter/Properties/AssemblyInfo.cs b/tests/testadapter/Properties/AssemblyInfo.cs index 8acd379b..95e7b07d 100644 --- a/tests/testadapter/Properties/AssemblyInfo.cs +++ b/tests/testadapter/Properties/AssemblyInfo.cs @@ -1,16 +1,17 @@ -using System.Reflection; +using System; +using System.Reflection; using System.Runtime.CompilerServices; using System.Runtime.InteropServices; // General Information about an assembly is controlled through the following // set of attributes. Change these attribute values to modify the information // associated with an assembly. 
-[assembly: AssemblyTitle("TestAdapterTest")] +[assembly: AssemblyTitle("YamlTestAdapter")] [assembly: AssemblyDescription("")] [assembly: AssemblyConfiguration("")] [assembly: AssemblyCompany("")] -[assembly: AssemblyProduct("TestAdapterTest")] -[assembly: AssemblyCopyright("Copyright © 2022")] +[assembly: AssemblyProduct("YamlTestAdapter")] +[assembly: AssemblyCopyright("Copyright © 2024")] [assembly: AssemblyTrademark("")] [assembly: AssemblyCulture("")] @@ -20,7 +21,7 @@ [assembly: ComVisible(false)] // The following GUID is for the ID of the typelib if this project is exposed to COM -[assembly: Guid("9409c89f-ae64-4d4f-820e-e4248512733a")] +[assembly: Guid("EE266A17-DBFD-4C18-BCEB-C6F6CE76E6CC")] // Version information for an assembly consists of the following four values: // diff --git a/tests/testadapter/TestDiscoverer.cs b/tests/testadapter/TestDiscoverer.cs index f53ffd62..1f1a5d27 100644 --- a/tests/testadapter/TestDiscoverer.cs +++ b/tests/testadapter/TestDiscoverer.cs @@ -9,11 +9,12 @@ using System.Text; using System.Threading; using System.Threading.Tasks; +using Azure.AI.Details.Common.CLI.TestFramework; -namespace TestAdapterTest +namespace Azure.AI.Details.Common.CLI.TestAdapter { - [FileExtension(YamlTestAdapter.FileExtensionYaml)] - [FileExtension(YamlTestAdapter.FileExtensionDll)] + [FileExtension(YamlTestFramework.YamlFileExtension)] + [FileExtension(YamlTestAdapter.DllFileExtension)] [DefaultExecutorUri(YamlTestAdapter.Executor)] public class TestDiscoverer : ITestDiscoverer { @@ -26,6 +27,7 @@ public void DiscoverTests(IEnumerable sources, IDiscoveryContext discove Logger.Log($"TestDiscoverer.DiscoverTests(): count={sources.Count()}"); foreach (var test in YamlTestAdapter.GetTestsFromFiles(sources)) { + test.ExecutorUri = new Uri(YamlTestAdapter.Executor); discoverySink.SendTestCase(test); } Logger.Log($"TestDiscoverer.DiscoverTests(): EXIT"); diff --git a/tests/testadapter/TestExecutor.cs b/tests/testadapter/TestExecutor.cs index 
9ebca03c..0bcf0a65 100644 --- a/tests/testadapter/TestExecutor.cs +++ b/tests/testadapter/TestExecutor.cs @@ -9,14 +9,17 @@ using System.Text; using System.Threading; using System.Threading.Tasks; +using Azure.AI.Details.Common.CLI.TestFramework; -namespace TestAdapterTest +namespace Azure.AI.Details.Common.CLI.TestAdapter { [ExtensionUri(YamlTestAdapter.Executor)] public class TextExecutor : ITestExecutor { public void RunTests(IEnumerable tests, IRunContext runContext, IFrameworkHandle frameworkHandle) { + tests = tests.ToList(); // force enumeration + Logger.Log(frameworkHandle); Logger.Log($"TextExecutor.RunTests(IEnumerable(): ENTER"); Logger.Log($"TextExecutor.RunTests(IEnumerable(): count={tests.Count()}"); @@ -26,6 +29,8 @@ public void RunTests(IEnumerable tests, IRunContext runContext, IFrame public void RunTests(IEnumerable sources, IRunContext runContext, IFrameworkHandle frameworkHandle) { + sources = sources.ToList(); // force enumeration + Logger.Log(frameworkHandle); Logger.Log($"TextExecutor.RunTests(IEnumerable(): ENTER"); Logger.Log($"TextExecutor.RunTests(IEnumerable(): count={sources.Count()}"); diff --git a/tests/testadapter/YamlTestAdapter.cs b/tests/testadapter/YamlTestAdapter.cs index 73b00e99..956c5f43 100644 --- a/tests/testadapter/YamlTestAdapter.cs +++ b/tests/testadapter/YamlTestAdapter.cs @@ -11,8 +11,9 @@ using System.Threading; using System.Threading.Tasks; using System.Threading.Tasks.Dataflow; +using Azure.AI.Details.Common.CLI.TestFramework; -namespace TestAdapterTest +namespace Azure.AI.Details.Common.CLI.TestAdapter { public class YamlTestAdapter { @@ -38,113 +39,20 @@ public static IEnumerable GetTestsFromFile(string source) var file = new FileInfo(source); Logger.Log($"YamlTestAdapter.GetTestsFromFile('{source}'): Extension={file.Extension}"); - return file.Extension.Trim('.') == FileExtensionYaml.Trim('.') - ? 
GetTestsFromYaml(source, file) - : GetTestsFromSource(source, file); + return file.Extension.Trim('.') == YamlTestFramework.YamlFileExtension.Trim('.') + ? YamlTestFramework.GetTestsFromYaml(source, file).ToList() + : GetTestsFromTestAdapterOrReferenceDirectory(source, file).ToList(); } public static void RunTests(IEnumerable tests, IRunContext runContext, IFrameworkHandle frameworkHandle) { - var filteredBeforeMiddleAndAfterTestSets = FilterTestCases(tests, runContext, frameworkHandle); - foreach (var testSet in filteredBeforeMiddleAndAfterTestSets) - { - if (!testSet.Any()) continue; - RunAndRecordTests(frameworkHandle, testSet); - } + var filtered = YamlTestCaseFilter.FilterTestCases(tests, runContext); + YamlTestFramework.RunTests(filtered, new YamlTestFrameworkHandleHost(frameworkHandle)); } #region private methods - private static void RunAndRecordTests(IFrameworkHandle frameworkHandle, IEnumerable tests) - { - InitRunAndRecordTestCaseMaps(tests, out var testFromIdMap, out var completionFromIdMap); - RunAndRecordParallelizedTestCases(frameworkHandle, testFromIdMap, completionFromIdMap, tests); - RunAndRecordRemainingTestCases(frameworkHandle, testFromIdMap, completionFromIdMap); - } - - private static void InitRunAndRecordTestCaseMaps(IEnumerable tests, out Dictionary testFromIdMap, out Dictionary> completionFromIdMap) - { - testFromIdMap = new Dictionary(); - completionFromIdMap = new Dictionary>(); - foreach (var test in tests) - { - var id = test.Id.ToString(); - testFromIdMap[id] = test; - completionFromIdMap[id] = new TaskCompletionSource(); - } - } - - private static void RunAndRecordParallelizedTestCases(IFrameworkHandle frameworkHandle, Dictionary testFromIdMap, Dictionary> completionFromIdMap, IEnumerable tests) - { - var parallelTestSet = tests.Where(test => YamlTestProperties.Get(test, "parallelize") == "true"); - foreach (var test in parallelTestSet) - { - ThreadPool.QueueUserWorkItem(state => - { - var parallelTestId = test.Id.ToString(); - var 
parallelTest = testFromIdMap[parallelTestId]; - var parallelTestOutcome = RunAndRecordTestCase(parallelTest, frameworkHandle); - // defer setting completion outcome until all steps are complete - - var checkTest = parallelTest; - while (true) - { - var nextStepId = YamlTestProperties.Get(checkTest, "nextStepId"); - if (string.IsNullOrEmpty(nextStepId)) - { - Logger.LogInfo($"YamlTestAdapter.RunTests() ==> No nextStepId for test '{checkTest.DisplayName}'"); - break; - } - - var stepTest = testFromIdMap.ContainsKey(nextStepId) ? testFromIdMap[nextStepId] : null; - if (stepTest == null) - { - Logger.LogError($"YamlTestAdapter.RunTests() ==> ERROR: nextStepId '{nextStepId}' not found for test '{checkTest.DisplayName}'"); - break; - } - - var stepCompletion = completionFromIdMap.ContainsKey(nextStepId) ? completionFromIdMap[nextStepId] : null; - if (stepCompletion == null) - { - Logger.LogError($"YamlTestAdapter.RunTests() ==> ERROR: nextStepId '{nextStepId}' completion not found for test '{checkTest.DisplayName}'"); - break; - } - - var stepOutcome = RunAndRecordTestCase(stepTest, frameworkHandle); - Logger.Log($"YamlTestAdapter.RunTests() ==> Setting completion outcome for {stepTest.DisplayName} to {stepOutcome}"); - completionFromIdMap[nextStepId].SetResult(stepOutcome); - - checkTest = stepTest; - } - - // now that all steps are complete, set the completion outcome - completionFromIdMap[parallelTestId].SetResult(parallelTestOutcome); - Logger.Log($"YamlTestAdapter.RunTests() ==> Setting completion outcome for {parallelTest.DisplayName} to {parallelTestOutcome}"); - - }, test.Id); - } - - Logger.Log($"YamlTestAdapter.RunTests() ==> Waiting for parallel tests to complete"); - var parallelCompletions = completionFromIdMap - .Where(x => parallelTestSet.Any(y => y.Id.ToString() == x.Key)) - .Select(x => x.Value.Task); - Task.WaitAll(parallelCompletions.ToArray()); - Logger.Log($"YamlTestAdapter.RunTests() ==> All parallel tests complete"); - } - - private static void 
RunAndRecordRemainingTestCases(IFrameworkHandle frameworkHandle, Dictionary testFromIdMap, Dictionary> completionFromIdMap) - { - var remainingTests = completionFromIdMap - .Where(x => x.Value.Task.Status != TaskStatus.RanToCompletion) - .Select(x => testFromIdMap[x.Key]); - foreach (var test in remainingTests) - { - var outcome = RunAndRecordTestCase(test, frameworkHandle); - completionFromIdMap[test.Id.ToString()].SetResult(outcome); - } - } - - private static IEnumerable GetTestsFromSource(string source, FileInfo file) + private static IEnumerable GetTestsFromTestAdapterOrReferenceDirectory(string source, FileInfo file) { var sourceOk = source.Contains("Azure.AI.CLI.TestAdapter") || @@ -156,82 +64,18 @@ private static IEnumerable GetTestsFromSource(string source, FileInfo // Logger.Log($"a.FullName={a.FullName}"); // } - Logger.Log($"YamlTestAdapter.GetTestsFromSource('{source}'): sourceOk = {sourceOk}"); + Logger.Log($"YamlTestAdapter.GetTestsFromTestAdapterOrReferenceDirectory('{source}'): sourceOk = {sourceOk}"); return !sourceOk ? Enumerable.Empty() - : GetTestsFromDirectory(source, file.Directory); - } - - private static IEnumerable GetTestsFromDirectory(string source, DirectoryInfo directory) - { - Logger.Log($"YamlTestAdapter.GetTestsFromDirectory('{source}', '{directory.FullName}'): ENTER"); - - directory = YamlTagHelpers.GetYamlDefaultTagsFullFileName(directory)?.Directory ?? 
directory; - foreach (var file in FindFiles(directory)) - { - foreach (var test in GetTestsFromYaml(source, file)) - { - yield return test; - } - } - Logger.Log($"YamlTestAdapter.GetTestsFromDirectory('{source}', '{directory.FullName}'): EXIT"); - } - - private static IEnumerable FindFiles(DirectoryInfo directory) - { - return directory.GetFiles($"*{FileExtensionYaml}", SearchOption.AllDirectories) - .Where(file => file.Name != YamlDefaultTagsFileName); - } - - private static IEnumerable GetTestsFromYaml(string source, FileInfo file) - { - Logger.Log($"YamlTestAdapter.GetTestsFromYaml('{source}', '{file.FullName}'): ENTER"); - foreach (var test in YamlTestCaseParser.TestCasesFromYaml(source, file)) - { - yield return test; - } - Logger.Log($"YamlTestAdapter.GetTestsFromYaml('{source}', '{file.FullName}'): EXIT"); - } - - private static bool IsTrait(Trait trait, string check) - { - return trait.Name == check || trait.Value == check; - } - - private static IEnumerable> FilterTestCases(IEnumerable tests, IRunContext runContext, IFrameworkHandle frameworkHandle) - { - Logger.Log($"YamlTestAdapter.FilterTestCases()"); - - tests = YamlTestCaseFilter.FilterTestCases(tests, runContext, frameworkHandle); - - var before = tests.Where(test => test.Traits.Count(x => IsTrait(x, "before")) > 0); - var after = tests.Where(test => test.Traits.Count(x => IsTrait(x, "after")) > 0); - var middle = tests.Where(test => !before.Contains(test) && !after.Contains(test)); - - var testsList = new List> { before, middle, after }; - Logger.Log("YamlTestAdapter.FilterTestCases() ==> {string.Join('\n', tests.Select(x => x.Name))}"); - - return testsList; - } - - private static TestOutcome RunAndRecordTestCase(TestCase test, IFrameworkHandle frameworkHandle) - { - Logger.Log($"YamlTestAdapter.TestRunAndRecord({test.DisplayName})"); - return YamlTestCaseRunner.RunAndRecordTestCase(test, frameworkHandle); + : YamlTestFramework.GetTestsFromDirectory(source, file.Directory); } #endregion #region 
test adapter registration data - public const string FileExtensionDll = ".dll"; - public const string FileExtensionYaml = ".yaml"; - public const string Executor = "executor://ai/yaml/VsTestRunner1"; - #endregion - - #region other constants - public const string YamlDefaultTagsFileName = "Azure-AI-CLI-TestRunner-Default-Tags.yaml"; - public const string DefaultTimeout = "600000"; + public const string DllFileExtension = ".dll"; + public const string Executor = "executor://ai/cli/TestAdapter/v1"; #endregion } } diff --git a/tests/testadapter/YamlTestAdapter.csproj b/tests/testadapter/YamlTestAdapter.csproj index b2671a63..49849286 100644 --- a/tests/testadapter/YamlTestAdapter.csproj +++ b/tests/testadapter/YamlTestAdapter.csproj @@ -6,4 +6,8 @@ + + + + \ No newline at end of file diff --git a/tests/testadapter/YamlTestAdapterCommon.targets b/tests/testadapter/YamlTestAdapterCommon.targets index c18b567d..0a1f5428 100644 --- a/tests/testadapter/YamlTestAdapterCommon.targets +++ b/tests/testadapter/YamlTestAdapterCommon.targets @@ -57,7 +57,7 @@ - + diff --git a/tests/testadapter/YamlTestCaseFilter.cs b/tests/testadapter/YamlTestCaseFilter.cs deleted file mode 100644 index c3975a0f..00000000 --- a/tests/testadapter/YamlTestCaseFilter.cs +++ /dev/null @@ -1,78 +0,0 @@ -using Microsoft.VisualStudio.TestPlatform.ObjectModel; -using Microsoft.VisualStudio.TestPlatform.ObjectModel.Adapter; -using Microsoft.VisualStudio.TestPlatform.ObjectModel.Logging; -using System; -using System.Collections.Generic; -using System.Diagnostics; -using System.IO; -using System.Linq; -using System.Text; -using System.Threading; -using System.Threading.Tasks; - -namespace TestAdapterTest -{ - public class YamlTestCaseFilter - { - public static IEnumerable FilterTestCases(IEnumerable tests, IRunContext runContext, IFrameworkHandle frameworkHandle) - { - var names = GetSupportedFilterableNames(tests); - var filter = runContext.GetTestCaseFilter(names, null); - return tests.Where(test => filter 
== null || filter.MatchTestCase(test, name => GetPropertyValue(test, name))); - } - - private static HashSet GetSupportedFilterableNames(IEnumerable tests) - { - var filterable = new HashSet(supportedFilterProperties); - foreach (var test in tests) - { - foreach (var trait in test.Traits) - { - filterable.Add(trait.Name); - } - } - - if (filterable.Contains("tag")) filterable.Add("tags"); - - return filterable; - } - - private static object GetPropertyValue(TestCase test, string name) - { - switch (name.ToLower()) - { - case "name": - case "displayname": return test.DisplayName; - - case "fqn": - case "fullyqualifiedname": return test.FullyQualifiedName; - - case "cli": return YamlTestProperties.Get(test, "cli"); - case "command": return YamlTestProperties.Get(test, "command"); - case "script": return YamlTestProperties.Get(test, "script"); - - case "foreach": return YamlTestProperties.Get(test, "foreach"); - case "arguments": return YamlTestProperties.Get(test, "arguments"); - case "input": return YamlTestProperties.Get(test, "input"); - - case "expect": return YamlTestProperties.Get(test, "expect"); - case "expect-gpt": return YamlTestProperties.Get(test, "expect-gpt"); - case "not-expect": return YamlTestProperties.Get(test, "not-expect"); - - case "parallelize": return YamlTestProperties.Get(test, "parallelize"); - case "simulate": return YamlTestProperties.Get(test, "simulate"); - case "skipOnFailure": return YamlTestProperties.Get(test, "skipOnFailure"); - - case "timeout": return YamlTestProperties.Get(test, "timeout"); - case "working-directory": return YamlTestProperties.Get(test, "working-directory"); - } - - var tags = test.Traits.Where(x => x.Name == name || name == "tags"); - if (tags.Count() == 0) return null; - - return tags.Select(x => x.Value).ToArray(); - } - - private static readonly string[] supportedFilterProperties = { "DisplayName", "FullyQualifiedName", "Category", "cli", "command", "script", "foreach", "arguments", "input", "expect", 
"expect-gpt", "not-expect", "parallelize", "simulate", "skipOnFailure" }; - } -} diff --git a/tests/testadapter/YamlTestFrameworkHandleHost.cs b/tests/testadapter/YamlTestFrameworkHandleHost.cs new file mode 100644 index 00000000..6b9f08e1 --- /dev/null +++ b/tests/testadapter/YamlTestFrameworkHandleHost.cs @@ -0,0 +1,30 @@ +using Microsoft.VisualStudio.TestPlatform.ObjectModel; +using Microsoft.VisualStudio.TestPlatform.ObjectModel.Adapter; + +namespace Azure.AI.Details.Common.CLI.TestFramework +{ + public class YamlTestFrameworkHandleHost : IYamlTestFrameworkHost + { + private readonly IFrameworkHandle _frameworkHandle; + + public YamlTestFrameworkHandleHost(IFrameworkHandle frameworkHandle) + { + _frameworkHandle = frameworkHandle; + } + + public void RecordStart(TestCase testCase) + { + _frameworkHandle.RecordStart(testCase); + } + + public void RecordResult(TestResult testResult) + { + _frameworkHandle.RecordResult(testResult); + } + + public void RecordEnd(TestCase testCase, TestOutcome outcome) + { + _frameworkHandle.RecordEnd(testCase, outcome); + } + } +} diff --git a/tests/testadapter/YamlTestRunnerTriggerAttribute.cs b/tests/testadapter/YamlTestRunnerTriggerAttribute.cs index 5e8bf874..b2d3157c 100644 --- a/tests/testadapter/YamlTestRunnerTriggerAttribute.cs +++ b/tests/testadapter/YamlTestRunnerTriggerAttribute.cs @@ -10,7 +10,7 @@ using System.Threading; using System.Threading.Tasks; -namespace TestAdapterTest +namespace Azure.AI.Details.Common.CLI.TestAdapter { public class YamlTestRunnerTriggerAttribute : Attribute { diff --git a/tests/testframework/IYamlTestFrameworkHost.cs b/tests/testframework/IYamlTestFrameworkHost.cs new file mode 100644 index 00000000..383b278d --- /dev/null +++ b/tests/testframework/IYamlTestFrameworkHost.cs @@ -0,0 +1,11 @@ +using Microsoft.VisualStudio.TestPlatform.ObjectModel; + +namespace Azure.AI.Details.Common.CLI.TestFramework +{ + public interface IYamlTestFrameworkHost + { + void RecordStart(TestCase testCase); + void 
RecordResult(TestResult testResult); + void RecordEnd(TestCase testCase, TestOutcome outcome); + } +} diff --git a/tests/testadapter/Logger.cs b/tests/testframework/Logger.cs similarity index 96% rename from tests/testadapter/Logger.cs rename to tests/testframework/Logger.cs index e4da6ec6..6b6822ec 100644 --- a/tests/testadapter/Logger.cs +++ b/tests/testframework/Logger.cs @@ -10,7 +10,7 @@ using System.Threading; using System.Threading.Tasks; -namespace TestAdapterTest +namespace Azure.AI.Details.Common.CLI.TestFramework { public class Logger { @@ -114,7 +114,7 @@ private static string GetLogPath() { var pid = Process.GetCurrentProcess().Id.ToString(); var time = DateTime.Now.ToFileTime().ToString(); - return $"log-test-adatper-{time}-{pid}.log"; + return $"log-ai-cli-test-framework-{time}-{pid}.log"; } private static IMessageLogger logger = null; diff --git a/tests/testframework/Properties/AssemblyInfo.cs b/tests/testframework/Properties/AssemblyInfo.cs new file mode 100644 index 00000000..c553fe3d --- /dev/null +++ b/tests/testframework/Properties/AssemblyInfo.cs @@ -0,0 +1,36 @@ +using System.Reflection; +using System.Runtime.CompilerServices; +using System.Runtime.InteropServices; + +// General Information about an assembly is controlled through the following +// set of attributes. Change these attribute values to modify the information +// associated with an assembly. +[assembly: AssemblyTitle("YamlTestFramework")] +[assembly: AssemblyDescription("")] +[assembly: AssemblyConfiguration("")] +[assembly: AssemblyCompany("")] +[assembly: AssemblyProduct("YamlTestFramework")] +[assembly: AssemblyCopyright("Copyright © 2024")] +[assembly: AssemblyTrademark("")] +[assembly: AssemblyCulture("")] + +// Setting ComVisible to false makes the types in this assembly not visible +// to COM components. If you need to access a type in this assembly from +// COM, set the ComVisible attribute to true on that type. 
+[assembly: ComVisible(false)] + +// The following GUID is for the ID of the typelib if this project is exposed to COM +[assembly: Guid("9409c89f-ae64-4d4f-820e-e4248512733a")] + +// Version information for an assembly consists of the following four values: +// +// Major Version +// Minor Version +// Build Number +// Revision +// +// You can specify all the values or you can default the Build and Revision Numbers +// by using the '*' as shown below: +// [assembly: AssemblyVersion("1.0.*")] +[assembly: AssemblyVersion("1.0.0.0")] +[assembly: AssemblyFileVersion("1.0.0.0")] diff --git a/tests/testframework/TestResultHelpers.cs b/tests/testframework/TestResultHelpers.cs new file mode 100644 index 00000000..2416d953 --- /dev/null +++ b/tests/testframework/TestResultHelpers.cs @@ -0,0 +1,21 @@ +using System.Collections.Generic; +using System.Linq; +using Microsoft.VisualStudio.TestPlatform.ObjectModel; + +namespace Azure.AI.Details.Common.CLI.TestFramework +{ + public class TestResultHelpers + { + public static TestOutcome TestOutcomeFromResults(IEnumerable results) + { + var failed = results.Count(x => x.Outcome == TestOutcome.Failed) > 0; + var skipped = results.Count(x => x.Outcome == TestOutcome.Skipped) > 0; + var notFound = results.Count(x => x.Outcome == TestOutcome.NotFound) > 0 || results.Count() == 0; + + return failed ? TestOutcome.Failed + : skipped ? TestOutcome.Skipped + : notFound ? 
TestOutcome.NotFound + : TestOutcome.Passed; + } + } +} diff --git a/tests/testadapter/YamlHelpers.cs b/tests/testframework/YamlHelpers.cs similarity index 97% rename from tests/testadapter/YamlHelpers.cs rename to tests/testframework/YamlHelpers.cs index 112ca04a..33ce53b7 100644 --- a/tests/testadapter/YamlHelpers.cs +++ b/tests/testframework/YamlHelpers.cs @@ -3,7 +3,7 @@ using YamlDotNet.RepresentationModel; using YamlDotNet.Serialization; -namespace TestAdapterTest +namespace Azure.AI.Details.Common.CLI.TestFramework { public class YamlHelpers { diff --git a/tests/testadapter/YamlNodeExtensions.cs b/tests/testframework/YamlNodeExtensions.cs similarity index 98% rename from tests/testadapter/YamlNodeExtensions.cs rename to tests/testframework/YamlNodeExtensions.cs index 5bb42ab8..e8e8be3f 100644 --- a/tests/testadapter/YamlNodeExtensions.cs +++ b/tests/testframework/YamlNodeExtensions.cs @@ -6,7 +6,7 @@ using System.Text; using YamlDotNet.RepresentationModel; -namespace TestAdapterTest +namespace Azure.AI.Details.Common.CLI.TestFramework { public static class YamlNodeExtensions { diff --git a/tests/testadapter/YamlTagHelpers.cs b/tests/testframework/YamlTagHelpers.cs similarity index 96% rename from tests/testadapter/YamlTagHelpers.cs rename to tests/testframework/YamlTagHelpers.cs index 7b544c08..c9356c06 100644 --- a/tests/testadapter/YamlTagHelpers.cs +++ b/tests/testframework/YamlTagHelpers.cs @@ -4,13 +4,13 @@ using System.Linq; using YamlDotNet.RepresentationModel; -namespace TestAdapterTest +namespace Azure.AI.Details.Common.CLI.TestFramework { public class YamlTagHelpers { public static FileInfo GetYamlDefaultTagsFullFileName(DirectoryInfo directory) { - var found = directory.GetFiles(YamlTestAdapter.YamlDefaultTagsFileName); + var found = directory.GetFiles(YamlTestFramework.YamlDefaultTagsFileName); return found.Length == 1 ? 
found[0] : directory.Parent != null diff --git a/tests/testframework/YamlTestCaseFilter.cs b/tests/testframework/YamlTestCaseFilter.cs new file mode 100644 index 00000000..32b2837b --- /dev/null +++ b/tests/testframework/YamlTestCaseFilter.cs @@ -0,0 +1,151 @@ +using Microsoft.VisualStudio.TestPlatform.ObjectModel; +using Microsoft.VisualStudio.TestPlatform.ObjectModel.Adapter; +using Microsoft.VisualStudio.TestPlatform.ObjectModel.Logging; +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.IO; +using System.Linq; +using System.Text; +using System.Threading; +using System.Threading.Tasks; + +namespace Azure.AI.Details.Common.CLI.TestFramework +{ + public class YamlTestCaseFilter + { + public static IEnumerable FilterTestCases(IEnumerable tests, IEnumerable criteria) + { + // example 1: "ai init openai" "ai init speech" -skip -nightly + // > test must contain either: + // > * "ai", "init", and "openai" in EXACTLY that order in any one single field/property, or + // > * "ai", "init", and "speech" in EXACTLY that order in any one single field/property + // > test must not contain "skip" in any field/property + // > test must not contain "nightly" in any field/property + + // example 2: +ai +init +openai -skip -nightly + // > test must contain ALL three of "ai", "init", and "openai" in any field/property + // > * they do NOT need to be in the same field/property + // > test must not contain "skip" in any field/property + // > test must not contain "nightly" in any field/property + + // example 3: "ai dev new" "ai init speech" +java +build -skip + // > test must contain, either: + // > * "ai", "init", and "openai" in EXACTLY that order in any one single field/property, or + // > * "ai", "init", and "speech" in EXACTLY that order in any one single field/property + // > test must contain "java" in any field/property + // > test must contain "build" in any field/property + // > test must not contain "skip" in any field/property + + 
var sourceCriteria = new List(); + var mustMatchCriteria = new List(); + var mustNotMatchCriteria = new List(); + + foreach (var criterion in criteria) + { + var isMustMatch = criterion.StartsWith("+"); + var isMustNotMatch = criterion.StartsWith("-"); + var isSource = !isMustMatch && !isMustNotMatch; + + if (isSource) sourceCriteria.Add(criterion); + if (isMustMatch) mustMatchCriteria.Add(criterion.Substring(1)); + if (isMustNotMatch) mustNotMatchCriteria.Add(criterion.Substring(1)); + } + + var unfiltered = sourceCriteria.Count > 0 + ? tests.Where(test => + sourceCriteria.Any(criterion => + TestContainsText(test, criterion))) + : tests; + + if (mustMatchCriteria.Count > 0) + { + unfiltered = unfiltered.Where(test => + mustMatchCriteria.All(criterion => + TestContainsText(test, criterion))); + } + + if (mustNotMatchCriteria.Count > 0) + { + unfiltered = unfiltered.Where(test => + mustNotMatchCriteria.All(criterion => + !TestContainsText(test, criterion))); + } + + return unfiltered; + } + + public static IEnumerable FilterTestCases(IEnumerable tests, IRunContext runContext) + { + tests = tests.ToList(); // force enumeration + + var names = GetSupportedFilterableNames(tests); + var filter = runContext.GetTestCaseFilter(names, null); + return tests.Where(test => filter == null || filter.MatchTestCase(test, name => GetPropertyValue(test, name))).ToList(); + } + + private static HashSet GetSupportedFilterableNames(IEnumerable tests) + { + var filterable = new HashSet(supportedFilterProperties); + foreach (var test in tests) + { + foreach (var trait in test.Traits) + { + filterable.Add(trait.Name); + } + } + + if (filterable.Contains("tag")) filterable.Add("tags"); + + return filterable; + } + + private static object GetPropertyValue(TestCase test, string name) + { + switch (name.ToLower()) + { + case "name": + case "displayname": return test.DisplayName; + + case "fqn": + case "fullyqualifiedname": return test.FullyQualifiedName; + + case "cli": return 
YamlTestProperties.Get(test, "cli"); + case "command": return YamlTestProperties.Get(test, "command"); + case "script": return YamlTestProperties.Get(test, "script"); + case "bash": return YamlTestProperties.Get(test, "bash"); + + case "foreach": return YamlTestProperties.Get(test, "foreach"); + case "arguments": return YamlTestProperties.Get(test, "arguments"); + case "input": return YamlTestProperties.Get(test, "input"); + + case "expect": return YamlTestProperties.Get(test, "expect"); + case "expect-gpt": return YamlTestProperties.Get(test, "expect-gpt"); + case "not-expect": return YamlTestProperties.Get(test, "not-expect"); + + case "parallelize": return YamlTestProperties.Get(test, "parallelize"); + case "simulate": return YamlTestProperties.Get(test, "simulate"); + case "skipOnFailure": return YamlTestProperties.Get(test, "skipOnFailure"); + + case "timeout": return YamlTestProperties.Get(test, "timeout"); + case "working-directory": return YamlTestProperties.Get(test, "working-directory"); + } + + var tags = test.Traits.Where(x => x.Name == name || name == "tags"); + if (tags.Count() == 0) return null; + + return tags.Select(x => x.Value).ToArray(); + } + + private static bool TestContainsText(TestCase test, string text) + { + return test.DisplayName.Contains(text) + || test.FullyQualifiedName.Contains(text) + || test.Traits.Any(x => x.Name == text || x.Value.Contains(text)) + || supportedFilterProperties.Any(property => GetPropertyValue(test, property)?.ToString().Contains(text) == true); + } + + + private static readonly string[] supportedFilterProperties = { "DisplayName", "FullyQualifiedName", "Category", "cli", "command", "script", "bash", "foreach", "arguments", "input", "expect", "expect-gpt", "not-expect", "parallelize", "simulate", "skipOnFailure" }; + } +} diff --git a/tests/testadapter/YamlTestCaseParser.cs b/tests/testframework/YamlTestCaseParser.cs similarity index 95% rename from tests/testadapter/YamlTestCaseParser.cs rename to 
tests/testframework/YamlTestCaseParser.cs index dd385f02..4daeca02 100644 --- a/tests/testadapter/YamlTestCaseParser.cs +++ b/tests/testframework/YamlTestCaseParser.cs @@ -8,7 +8,7 @@ using YamlDotNet.Helpers; using YamlDotNet.RepresentationModel; -namespace TestAdapterTest +namespace Azure.AI.Details.Common.CLI.TestFramework { public class YamlTestCaseParser { @@ -16,7 +16,7 @@ public static IEnumerable TestCasesFromYaml(string source, FileInfo fi { var area = GetRootArea(file); var parsed = YamlHelpers.ParseYamlStream(file.FullName); - return TestCasesFromYamlStream(source, file, area, parsed); + return TestCasesFromYamlStream(source, file, area, parsed).ToList(); } #region private methods @@ -28,7 +28,10 @@ private static IEnumerable TestCasesFromYamlStream(string source, File foreach (var document in parsed?.Documents) { var fromDocument = TestCasesFromYamlDocumentRootNode(source, file, document.RootNode, area, defaultClassName, defaultTags); - tests.AddRange(fromDocument); + if (fromDocument != null) + { + tests.AddRange(fromDocument); + } } return tests; } @@ -83,24 +86,25 @@ private static TestCase GetTestFromNode(string source, FileInfo file, YamlMappin string simulate = GetScalarString(mapping, "simulate"); string command = GetScalarString(mapping, "command"); string script = GetScalarString(mapping, "script"); + string bash = GetScalarString(mapping, "bash"); - string fullyQualifiedName = command == null && script == null + string fullyQualifiedName = command == null && script == null && bash == null ? 
GetFullyQualifiedNameAndCommandFromShortForm(mapping, area, @class, ref command, stepNumber) : GetFullyQualifiedName(mapping, area, @class, stepNumber); fullyQualifiedName ??= GetFullyQualifiedName(area, @class, $"Expected YAML node ('name') at {file.FullName}({mapping.Start.Line})", 0); var simulating = !string.IsNullOrEmpty(simulate); - var neitherOrBoth = (command == null) == (script == null); + var neitherOrBoth = (command == null) == (script == null && bash == null); if (neitherOrBoth && !simulating) { - var message = $"Error parsing YAML: expected/unexpected key ('name', 'command', 'script', 'arguments') at {file.FullName}({mapping.Start.Line})"; + var message = $"Error parsing YAML: expected/unexpected key ('name', 'command', 'script', 'bash', 'arguments') at {file.FullName}({mapping.Start.Line})"; Logger.LogError(message); Logger.TraceError(message); return null; } Logger.Log($"YamlTestCaseParser.GetTests(): new TestCase('{fullyQualifiedName}')"); - var test = new TestCase(fullyQualifiedName, new Uri(YamlTestAdapter.Executor), source) + var test = new TestCase(fullyQualifiedName, new Uri(YamlTestFramework.FakeExecutor), source) { CodeFilePath = file.FullName, LineNumber = mapping.Start.Line @@ -109,11 +113,12 @@ private static TestCase GetTestFromNode(string source, FileInfo file, YamlMappin SetTestCaseProperty(test, "cli", cli); SetTestCaseProperty(test, "command", command); SetTestCaseProperty(test, "script", script); + SetTestCaseProperty(test, "bash", bash); SetTestCaseProperty(test, "simulate", simulate); SetTestCaseProperty(test, "parallelize", parallelize); SetTestCaseProperty(test, "skipOnFailure", skipOnFailure); - var timeout = GetScalarString(mapping, tags, "timeout", YamlTestAdapter.DefaultTimeout); + var timeout = GetScalarString(mapping, tags, "timeout", YamlTestFramework.DefaultTimeout); SetTestCaseProperty(test, "timeout", timeout); var workingDirectory = GetScalarString(mapping, tags, "workingDirectory", file.Directory.FullName); @@ -163,7 
+168,10 @@ private static IEnumerable TestCasesFromYamlSequenceOfSteps(string sou { var mapping = sequence.Children[i] as YamlMappingNode; var test = GetTestFromNode(source, file, mapping, area, @class, tags, i + 1); - tests.Add(test); + if (test != null) + { + tests.Add(test); + } } if (tests.Count > 0) @@ -196,7 +204,7 @@ private static void CheckInvalidTestCaseNodes(FileInfo file, YamlMappingNode map private static bool IsValidTestCaseNode(string value) { - return ";area;class;name;cli;command;script;timeout;foreach;arguments;input;expect;expect-gpt;not-expect;parallelize;simulate;skipOnFailure;tag;tags;workingDirectory;".IndexOf($";{value};") >= 0; + return ";area;class;name;cli;command;script;bash;timeout;foreach;arguments;input;expect;expect-gpt;not-expect;parallelize;simulate;skipOnFailure;tag;tags;workingDirectory;".IndexOf($";{value};") >= 0; } private static void SetTestCaseProperty(TestCase test, string propertyName, YamlMappingNode mapping, string mappingName) diff --git a/tests/testadapter/YamlTestCaseRunner.cs b/tests/testframework/YamlTestCaseRunner.cs similarity index 82% rename from tests/testadapter/YamlTestCaseRunner.cs rename to tests/testframework/YamlTestCaseRunner.cs index 168def29..86bb566f 100644 --- a/tests/testadapter/YamlTestCaseRunner.cs +++ b/tests/testframework/YamlTestCaseRunner.cs @@ -6,6 +6,7 @@ using System.Linq.Expressions; using System.Runtime.InteropServices; using System.Text; +using System.Threading; using System.Threading.Tasks; using Microsoft.VisualStudio.TestPlatform.ObjectModel; using Microsoft.VisualStudio.TestPlatform.ObjectModel.Adapter; @@ -13,47 +14,43 @@ using Newtonsoft.Json.Linq; using YamlDotNet.RepresentationModel; -namespace TestAdapterTest +namespace Azure.AI.Details.Common.CLI.TestFramework { + public class YamlTestCaseRunner { - public static TestOutcome RunAndRecordTestCase(TestCase test, IFrameworkHandle frameworkHandle) + public static IList RunAndRecordTestCase(TestCase test, IYamlTestFrameworkHost 
host) { - TestCaseStart(test, frameworkHandle); - TestCaseRun(test, frameworkHandle, out TestOutcome outcome); - TestCaseStop(test, frameworkHandle, outcome); - return outcome; + TestCaseStart(test, host); + var results = TestCaseRun(test, host); + + var outcome = TestResultHelpers.TestOutcomeFromResults(results); + TestCaseStop(test, host, outcome); + + return results; } #region private methods - private static void TestCaseStart(TestCase test, IFrameworkHandle frameworkHandle) + private static void TestCaseStart(TestCase test, IYamlTestFrameworkHost host) { Logger.Log($"YamlTestCaseRunner.TestCaseStart({test.DisplayName})"); - frameworkHandle.RecordStart(test); + host.RecordStart(test); } - private static TestOutcome TestCaseRun(TestCase test, IFrameworkHandle frameworkHandle, out TestOutcome outcome) + private static IList TestCaseRun(TestCase test, IYamlTestFrameworkHost host) { Logger.Log($"YamlTestCaseRunner.TestCaseRun({test.DisplayName})"); - + // run the test case, getting all the results, prior to recording any of those results // (not doing this in this order seems to, for some reason, cause "foreach" test cases to run 5 times!?) var results = TestCaseGetResults(test).ToList(); foreach (var result in results) { - frameworkHandle.RecordResult(result); + host.RecordResult(result); } - var failed = results.Count(x => x.Outcome == TestOutcome.Failed) > 0; - var skipped = results.Count(x => x.Outcome == TestOutcome.Skipped) > 0; - var notFound = results.Count(x => x.Outcome == TestOutcome.NotFound) > 0 || results.Count() == 0; - - return outcome = - failed ? TestOutcome.Failed - : skipped ? TestOutcome.Skipped - : notFound ? TestOutcome.NotFound - : TestOutcome.Passed; + return results; } private static IEnumerable TestCaseGetResults(TestCase test) @@ -63,6 +60,14 @@ private static IEnumerable TestCaseGetResults(TestCase test) var cli = YamlTestProperties.Get(test, "cli") ?? 
""; var command = YamlTestProperties.Get(test, "command"); var script = YamlTestProperties.Get(test, "script"); + var bash = YamlTestProperties.Get(test, "bash"); + + var scriptIsBash = !string.IsNullOrEmpty(bash); + if (scriptIsBash) script = bash; + + var isWindows = RuntimeInformation.IsOSPlatform(OSPlatform.Windows); + if (!isWindows) scriptIsBash = true; + var @foreach = YamlTestProperties.Get(test, "foreach"); var arguments = YamlTestProperties.Get(test, "arguments"); var input = YamlTestProperties.Get(test, "input"); @@ -87,12 +92,12 @@ private static IEnumerable TestCaseGetResults(TestCase test) var start = DateTime.Now; var outcome = string.IsNullOrEmpty(simulate) - ? RunTestCase(test, skipOnFailure, cli, command, script, foreachItem, arguments, input, expect, expectGpt, notExpect, workingDirectory, timeout, out string stdOut, out string stdErr, out string errorMessage, out string stackTrace, out string additional, out string debugTrace) - : SimulateTestCase(test, simulate, cli, command, script, foreachItem, arguments, input, expect, expectGpt, notExpect, workingDirectory, out stdOut, out stdErr, out errorMessage, out stackTrace, out additional, out debugTrace); + ? RunTestCase(test, skipOnFailure, cli, command, script, scriptIsBash, foreachItem, arguments, input, expect, expectGpt, notExpect, workingDirectory, timeout, out string stdOut, out string stdErr, out string errorMessage, out string stackTrace, out string additional, out string debugTrace) + : SimulateTestCase(test, simulate, cli, command, script, scriptIsBash, foreachItem, arguments, input, expect, expectGpt, notExpect, workingDirectory, out stdOut, out stdErr, out errorMessage, out stackTrace, out additional, out debugTrace); - #if DEBUG - additional += outcome == TestOutcome.Failed ? $"\nEXTRA: {ExtraDebugInfo()}" : ""; - #endif + // #if DEBUG + // additional += outcome == TestOutcome.Failed ? 
$"\nEXTRA: {ExtraDebugInfo()}" : ""; + // #endif var stop = DateTime.Now; var result = CreateTestResult(test, start, stop, stdOut, stdErr, errorMessage, stackTrace, additional, debugTrace, outcome); @@ -207,7 +212,7 @@ private static Dictionary DupAndAdd(Dictionary d return dup; } - private static TestOutcome RunTestCase(TestCase test, bool skipOnFailure, string cli, string command, string script, string @foreach, string arguments, string input, string expect, string expectGpt, string notExpect, string workingDirectory, int timeout, out string stdOut, out string stdErr, out string errorMessage, out string stackTrace, out string additional, out string debugTrace) + private static TestOutcome RunTestCase(TestCase test, bool skipOnFailure, string cli, string command, string script, bool scriptIsBash, string @foreach, string arguments, string input, string expect, string expectGpt, string notExpect, string workingDirectory, int timeout, out string stdOut, out string stdErr, out string errorMessage, out string stackTrace, out string additional, out string debugTrace) { var outcome = TestOutcome.None; @@ -223,8 +228,8 @@ private static TestOutcome RunTestCase(TestCase test, bool skipOnFailure, string try { - var isWindows = RuntimeInformation.IsOSPlatform(OSPlatform.Windows); - script = WriteTextToTempFile(script, isWindows ? "cmd" : null); + var useCmd = !scriptIsBash; + script = WriteTextToTempFile(script, useCmd ? "cmd" : null); expect = WriteTextToTempFile(expect); notExpect = WriteTextToTempFile(notExpect); @@ -233,8 +238,8 @@ private static TestOutcome RunTestCase(TestCase test, bool skipOnFailure, string kvs.AddRange(KeyValuePairsFromJson(@foreach, false)); kvs = ConvertValuesToAtArgs(kvs, ref filesToDelete); - var startArgs = GetStartInfo(out string startProcess, cli, command, script, kvs, expect, notExpect, ref filesToDelete); - stackTrace = stackTrace ?? 
$"{startProcess} {startArgs}"; + var startArgs = GetStartInfo(out string startProcess, cli, command, script, scriptIsBash, kvs, expect, notExpect, ref filesToDelete); + stackTrace = $"{startProcess} {startArgs}\n{stackTrace ?? string.Empty}"; Logger.Log($"Process.Start('{startProcess} {startArgs}')"); var startInfo = new ProcessStartInfo(startProcess, startArgs) @@ -251,8 +256,10 @@ private static TestOutcome RunTestCase(TestCase test, bool skipOnFailure, string process.StandardInput.WriteLine(input ?? string.Empty); process.StandardInput.Close(); - process.OutputDataReceived += (sender, e) => { if (e.Data != null) { sbOut.AppendLine(e.Data); sbMerged.AppendLine(e.Data); } }; - process.ErrorDataReceived += (sender, e) => { if (e.Data != null) { sbErr.AppendLine(e.Data); sbMerged.AppendLine(e.Data); } }; + var outDoneSignal = new ManualResetEvent(false); + var errDoneSignal = new ManualResetEvent(false); + process.OutputDataReceived += (sender, e) => AppendLineOrSignal(e.Data, sbOut, sbMerged, outDoneSignal); + process.ErrorDataReceived += (sender, e) => AppendLineOrSignal(e.Data, sbErr, sbMerged, errDoneSignal); process.BeginOutputReadLine(); process.BeginErrorReadLine(); @@ -263,6 +270,12 @@ private static TestOutcome RunTestCase(TestCase test, bool skipOnFailure, string ? TestOutcome.Skipped : TestOutcome.Failed; + if (exitedNotKilled) + { + outDoneSignal.WaitOne(); + errDoneSignal.WaitOne(); + } + var exitCode = exitedNotKilled ? 
process.ExitCode.ToString() : $"(did not exit; timedout; killed)"; @@ -405,7 +418,6 @@ private static string WriteTextToTempFile(string text, string extension = null) return null; } - private static string FindCacheCli(string cli) { if (_cliCache.ContainsKey(cli)) @@ -516,7 +528,7 @@ private static string PickCliOrNull(IEnumerable clis) private static void PickCliUpdateYamlDefaultsFileWarning(IEnumerable clis) { var message = string.Join(" or ", clis.Select(cli => $"`cli: {cli}`")); - message = $"PickCli: CLI not specified; please create/update {YamlTestAdapter.YamlDefaultTagsFileName} with one of: {message}"; + message = $"PickCli: CLI not specified; please create/update {YamlTestFramework.YamlDefaultTagsFileName} with one of: {message}"; Logger.LogWarning(message); Logger.TraceWarning(message); } @@ -636,7 +648,7 @@ private static bool WaitForExit(Process process, int timeout) return completed; } - private static string GetStartInfo(out string startProcess, string cli, string command, string script, List> kvs, string expect, string notExpect, ref List files) + private static string GetStartInfo(out string startProcess, string cli, string command, string script, bool scriptIsBash, List> kvs, string expect, string notExpect, ref List files) { startProcess = FindCacheCli(cli); @@ -652,13 +664,48 @@ private static string GetStartInfo(out string startProcess, string cli, string c files ??= new List(); files.Add(command); - return $"quiet run --command @{command} {GetAtArgs(expect, notExpect)}"; + return $"run --command @{command} {GetAtArgs(expect, notExpect)}"; } - var isWindows = RuntimeInformation.IsOSPlatform(OSPlatform.Windows); - return isWindows - ? $"quiet run --cmd --script {script} {GetKeyValueArgs(kvs)} {GetAtArgs(expect, notExpect)}" - : $"quiet run --process /bin/bash --pre.script -l --script {script} {GetKeyValueArgs(kvs)} {GetAtArgs(expect, notExpect)}"; + if (scriptIsBash) + { + var bash = RuntimeInformation.IsOSPlatform(OSPlatform.Windows) + ? 
EnsureFindCacheGetBashExe() + : "/bin/bash"; + return $"run --process \"{bash}\" --pre.script -l --script \"{script}\" {GetKeyValueArgs(kvs)} {GetAtArgs(expect, notExpect)}"; + } + + return $"run --cmd --script \"{script}\" {GetKeyValueArgs(kvs)} {GetAtArgs(expect, notExpect)}"; + } + + private static string EnsureFindCacheGetBashExe() + { + var gitBash = FindCacheGitBashExe(); + if (gitBash == null || gitBash == "bash.exe") + { + throw new Exception("Could not Git for Windows bash.exe in PATH!"); + } + return gitBash; + } + + private static string FindCacheGitBashExe() + { + var bashExe = "bash.exe"; + if (_cliCache.ContainsKey(bashExe)) + { + return _cliCache[bashExe]; + } + + var found = FindGitBashExe(); + _cliCache[bashExe] = found; + + return found; + } + + private static string FindGitBashExe() + { + var found = FileHelpers.FindFilesInOsPath("bash.exe"); + return found.Where(x => x.ToLower().Contains("git")).FirstOrDefault() ?? "bash.exe"; } private static string GetAtArgs(string expect, string notExpect) @@ -699,12 +746,13 @@ private static string GetKeyValueArgs(List> kvs) return args.ToString().TrimEnd(); } - private static TestOutcome SimulateTestCase(TestCase test, string simulate, string cli, string command, string script, string @foreach, string arguments, string input, string expect, string expectGpt, string notExpect, string workingDirectory, out string stdOut, out string stdErr, out string errorMessage, out string stackTrace, out string additional, out string debugTrace) + private static TestOutcome SimulateTestCase(TestCase test, string simulate, string cli, string command, string script, bool scriptIsBash, string @foreach, string arguments, string input, string expect, string expectGpt, string notExpect, string workingDirectory, out string stdOut, out string stdErr, out string errorMessage, out string stackTrace, out string additional, out string debugTrace) { var sb = new StringBuilder(); sb.AppendLine($"cli='{cli?.Replace("\n", "\\n")}'"); 
sb.AppendLine($"command='{command?.Replace("\n", "\\n")}'"); sb.AppendLine($"script='{script?.Replace("\n", "\\n")}'"); + sb.AppendLine($"scriptIsBash='{scriptIsBash}'"); sb.AppendLine($"foreach='{@foreach?.Replace("\n", "\\n")}'"); sb.AppendLine($"arguments='{arguments?.Replace("\n", "\\n")}'"); sb.AppendLine($"input='{input?.Replace("\n", "\\n")}'"); @@ -751,10 +799,10 @@ private static TestOutcome OutcomeFromString(string simulate) return outcome; } - private static void TestCaseStop(TestCase test, IFrameworkHandle frameworkHandle, TestOutcome outcome) + private static void TestCaseStop(TestCase test, IYamlTestFrameworkHost host, TestOutcome outcome) { Logger.Log($"YamlTestCaseRunner.TestCaseStop({test.DisplayName})"); - frameworkHandle.RecordEnd(test, outcome); + host.RecordEnd(test, outcome); } private static TestResult CreateTestResult(TestCase test, DateTime start, DateTime stop, string stdOut, string stdErr, string errorMessage, string stackTrace, string additional, string debugTrace, TestOutcome outcome) @@ -851,48 +899,77 @@ private static TestOutcome ExpectGptOutcome(string output, string expect, out st RedirectStandardOutput = true }; - Logger.Log($"Process.Start('{startProcess} {startArgs}')"); + Logger.Log($"ExpectGptOutcome: Process.Start('{startProcess} {startArgs}')"); var process = Process.Start(startInfo); process.StandardInput.Close(); - process.OutputDataReceived += (sender, e) => { if (e.Data != null) { sbOut.AppendLine(e.Data); sbMerged.AppendLine(e.Data); } }; - process.ErrorDataReceived += (sender, e) => { if (e.Data != null) { sbErr.AppendLine(e.Data); sbMerged.AppendLine(e.Data); } }; + var outDoneSignal = new ManualResetEvent(false); + var errDoneSignal = new ManualResetEvent(false); + process.OutputDataReceived += (sender, e) => AppendLineOrSignal(e.Data, sbOut, sbMerged, outDoneSignal); + process.ErrorDataReceived += (sender, e) => AppendLineOrSignal(e.Data, sbErr, sbMerged, errDoneSignal); process.BeginOutputReadLine(); 
process.BeginErrorReadLine(); - var exitedNotKilled = WaitForExit(process, 30000); + var exitedNotKilled = WaitForExit(process, 60000); + if (exitedNotKilled) + { + outDoneSignal.WaitOne(); + errDoneSignal.WaitOne(); + } - outcome = exitedNotKilled && process.ExitCode == 0 - ? TestOutcome.Passed - : TestOutcome.Failed; + var passed = exitedNotKilled && process.ExitCode == 0; + outcome = passed ? TestOutcome.Passed : TestOutcome.Failed; + + var timedoutOrKilled = !exitedNotKilled; + if (timedoutOrKilled) + { + var message = "ExpectGptOutcome: WARNING: Timedout or killed!"; + sbErr.AppendLine(message); + sbMerged.AppendLine(message); + Logger.LogWarning(message); + } } - catch + catch (Exception ex) { outcome = TestOutcome.Failed; - } - finally - { - gptStdOut = sbOut.ToString(); - gptStdErr = sbErr.ToString(); - gptMerged = sbMerged.ToString(); - File.Delete(questionTempFile); + var exception = $"ExpectGptOutcome: EXCEPTION: {ex.Message}"; + sbErr.AppendLine(exception); + sbMerged.AppendLine(exception); + Logger.Log(exception); } + File.Delete(questionTempFile); + gptStdOut = sbOut.ToString(); + gptStdErr = sbErr.ToString(); + gptMerged = sbMerged.ToString(); + if (outcome == TestOutcome.Passed) { Logger.Log($"ExpectGptOutcome: Checking for 'PASS' in '{gptMerged}'"); + var passed = gptMerged.Contains("PASS") || gptMerged.Contains("TRUE") || gptMerged.Contains("YES"); - var failed = gptMerged.Contains("FAIL") || gptMerged.Contains("FALSE") || gptMerged.Contains("NO"); - outcome = passed && !failed - ? TestOutcome.Passed - : TestOutcome.Failed; + outcome = passed ? TestOutcome.Passed : TestOutcome.Failed; + Logger.Log($"ExpectGptOutcome: {outcome}"); } return outcome; } + private static void AppendLineOrSignal(string? 
text, StringBuilder sb1, StringBuilder sb2, ManualResetEvent signal) + { + if (text != null) + { + sb1.AppendLine(text); + sb2.AppendLine(text); + } + else + { + signal.Set(); + } + } + #endregion private static Dictionary _cliCache = new Dictionary(); diff --git a/tests/testframework/YamlTestFramework.cs b/tests/testframework/YamlTestFramework.cs new file mode 100644 index 00000000..ee60b57a --- /dev/null +++ b/tests/testframework/YamlTestFramework.cs @@ -0,0 +1,226 @@ +using Microsoft.VisualStudio.TestPlatform.ObjectModel; +using Microsoft.VisualStudio.TestPlatform.ObjectModel.Logging; +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.IO; +using System.Linq; +using System.Reflection; +using System.Text; +using System.Threading; +using System.Threading.Tasks; +using System.Threading.Tasks.Dataflow; + +namespace Azure.AI.Details.Common.CLI.TestFramework +{ + public class YamlTestFramework + { + public static IEnumerable GetTestsFromDirectory(string source, DirectoryInfo directory) + { + Logger.Log($"YamlTestFramework.GetTestsFromDirectory('{source}', '{directory.FullName}'): ENTER"); + + directory = YamlTagHelpers.GetYamlDefaultTagsFullFileName(directory)?.Directory ?? 
directory; + var files = FindFiles(directory); + var tests = files.SelectMany(file => GetTestsFromYaml(source, file)); + + Logger.Log($"YamlTestFramework.GetTestsFromDirectory('{source}', '{directory.FullName}'): EXIT"); + return tests.ToList(); + } + + public static IEnumerable GetTestsFromYaml(string source, FileInfo file) + { + Logger.Log($"YamlTestFramework.GetTestsFromYaml('{source}', '{file.FullName}'): ENTER"); + var tests = YamlTestCaseParser.TestCasesFromYaml(source, file); + + Logger.Log($"YamlTestFramework.GetTestsFromYaml('{source}', '{file.FullName}'): EXIT"); + return tests; + } + + public static IDictionary> RunTests(IEnumerable tests, IYamlTestFrameworkHost host) + { + var resultsByTestCaseId = new Dictionary>(); + + tests = tests.ToList(); // force enumeration + var groupedByPriority = GroupTestCasesByPriority(tests); + + foreach (var priorityGroup in groupedByPriority) + { + if (priorityGroup.Count == 0) continue; + + var resultsByTestCaseIdForGroup = RunAndRecordTests(host, priorityGroup); + foreach (var resultsForTestCase in resultsByTestCaseIdForGroup) + { + var testCaseId = resultsForTestCase.Key; + var testResults = resultsForTestCase.Value; + resultsByTestCaseId[testCaseId] = testResults; + } + } + + return resultsByTestCaseId; + } + + #region private methods + + private static IDictionary> RunAndRecordTests(IYamlTestFrameworkHost host, IEnumerable tests) + { + InitRunAndRecordTestCaseMaps(tests, out var testFromIdMap, out var completionFromIdMap); + + RunAndRecordParallelizedTestCases(host, testFromIdMap, completionFromIdMap); + RunAndRecordRemainingTestCases(host, testFromIdMap, completionFromIdMap); + + return GetRunAndRecordTestResultsMap(completionFromIdMap); + } + + private static void InitRunAndRecordTestCaseMaps(IEnumerable tests, out Dictionary testFromIdMap, out Dictionary>> completionFromIdMap) + { + testFromIdMap = new Dictionary(); + completionFromIdMap = new Dictionary>>(); + foreach (var test in tests) + { + var id = 
test.Id.ToString(); + testFromIdMap[id] = test; + completionFromIdMap[id] = new TaskCompletionSource>(); + } + } + + private static IDictionary> GetRunAndRecordTestResultsMap(Dictionary>> completionFromIdMap) + { + var resultsPerTestCase = completionFromIdMap.Select(x => x.Value.Task.Result); + + var resultsMap = new Dictionary>(); + foreach (var resultsForCase in resultsPerTestCase) + { + var test = resultsForCase.FirstOrDefault()?.TestCase; + if (test == null) continue; + + var id = test.Id.ToString(); + resultsMap[id] = resultsForCase; + } + + return resultsMap; + } + + private static void RunAndRecordParallelizedTestCases(IYamlTestFrameworkHost host, Dictionary testFromIdMap, Dictionary>> completionFromIdMap) + { + var parallelTests = testFromIdMap + .Select(x => x.Value) + .Where(test => YamlTestProperties.Get(test, "parallelize") == "true") + .ToList(); + + foreach (var test in parallelTests) + { + ThreadPool.QueueUserWorkItem(state => + { + var parallelTestId = test.Id.ToString(); + RunAndRecordTestCaseSteps(host, testFromIdMap, completionFromIdMap, parallelTestId); + }); + } + + Logger.Log($"YamlTestFramework.RunAndRecordParallelizedTestCases() ==> Waiting for parallel tests to complete"); + var parallelCompletions = completionFromIdMap + .Where(x => parallelTests.Any(y => y.Id.ToString() == x.Key)) + .Select(x => x.Value.Task); + Task.WaitAll(parallelCompletions.ToArray()); + Logger.Log($"YamlTestFramework.RunAndRecordParallelizedTestCases() ==> All parallel tests complete"); + } + + private static void RunAndRecordTestCaseSteps(IYamlTestFrameworkHost host, Dictionary testFromIdMap, Dictionary>> completionFromIdMap, string firstTestId) + { + var firstTest = testFromIdMap[firstTestId]; + var firstTestResults = RunAndRecordTestCase(firstTest, host); + var firstTestOutcome = TestResultHelpers.TestOutcomeFromResults(firstTestResults); + // defer setting completion until all steps are complete + + var checkTest = firstTest; + while (true) + { + var nextStepId = 
YamlTestProperties.Get(checkTest, "nextStepId"); + if (string.IsNullOrEmpty(nextStepId)) + { + Logger.LogInfo($"YamlTestFramework.RunAndRecordTestCaseSteps() ==> No nextStepId for test '{checkTest.DisplayName}'"); + break; + } + + var stepTest = testFromIdMap.ContainsKey(nextStepId) ? testFromIdMap[nextStepId] : null; + if (stepTest == null) + { + Logger.LogError($"YamlTestFramework.RunAndRecordTestCaseSteps() ==> ERROR: nextStepId '{nextStepId}' not found for test '{checkTest.DisplayName}'"); + break; + } + + var stepCompletion = completionFromIdMap.ContainsKey(nextStepId) ? completionFromIdMap[nextStepId] : null; + if (stepCompletion == null) + { + Logger.LogError($"YamlTestFramework.RunAndRecordTestCaseSteps() ==> ERROR: nextStepId '{nextStepId}' completion not found for test '{checkTest.DisplayName}'"); + break; + } + + var stepResults = RunAndRecordTestCase(stepTest, host); + var stepOutcome = TestResultHelpers.TestOutcomeFromResults(stepResults); + Logger.Log($"YamlTestFramework.RunAndRecordTestCaseSteps() ==> Setting completion outcome for {stepTest.DisplayName} to {stepOutcome}"); + completionFromIdMap[nextStepId].SetResult(stepResults); + + checkTest = stepTest; + } + + // now that all steps are complete, set the completion outcome + completionFromIdMap[firstTestId].SetResult(firstTestResults); + Logger.Log($"YamlTestFramework.RunAndRecordTestCaseSteps() ==> Setting completion; outcome for {firstTest.DisplayName}: {firstTestOutcome}"); + } + + private static void RunAndRecordRemainingTestCases(IYamlTestFrameworkHost host, Dictionary testFromIdMap, Dictionary>> completionFromIdMap) + { + var remainingTests = completionFromIdMap + .Where(x => x.Value.Task.Status != TaskStatus.RanToCompletion) + .Select(x => testFromIdMap[x.Key]); + foreach (var test in remainingTests) + { + var outcome = RunAndRecordTestCase(test, host); + completionFromIdMap[test.Id.ToString()].SetResult(outcome); + } + } + + private static IEnumerable FindFiles(DirectoryInfo directory) + { 
+ return directory.GetFiles($"*{YamlFileExtension}", SearchOption.AllDirectories) + .Where(file => file.Name != YamlDefaultTagsFileName); + } + + private static bool IsTrait(Trait trait, string check) + { + return trait.Name == check || trait.Value == check; + } + + private static List> GroupTestCasesByPriority(IEnumerable tests) + { + Logger.Log($"YamlTestFramework.GroupTestCasesByPriority()"); + + var before = tests.Where(test => test.Traits.Count(x => IsTrait(x, "before")) > 0); + var after = tests.Where(test => test.Traits.Count(x => IsTrait(x, "after")) > 0); + var middle = tests.Where(test => !before.Contains(test) && !after.Contains(test)); + + var testsList = new List>(); + testsList.Add(before.ToList()); + testsList.Add(middle.ToList()); + testsList.Add(after.ToList()); + Logger.Log("YamlTestFramework.GroupTestCasesByPriority() ==> {string.Join('\n', tests.Select(x => x.Name))}"); + + return testsList; + } + + private static IList RunAndRecordTestCase(TestCase test, IYamlTestFrameworkHost host) + { + Logger.Log($"YamlTestFramework.TestRunAndRecord({test.DisplayName})"); + return YamlTestCaseRunner.RunAndRecordTestCase(test, host); + } + + #endregion + + #region constants + public const string YamlFileExtension = ".yaml"; + public const string FakeExecutor = "executor://ai/cli/TestFramework/v1"; + public const string YamlDefaultTagsFileName = "Azure-AI-CLI-TestFramework-Default-Tags.yaml"; + public const string DefaultTimeout = "600000"; + #endregion + } +} diff --git a/tests/testframework/YamlTestFramework.csproj b/tests/testframework/YamlTestFramework.csproj new file mode 100644 index 00000000..74d60cbe --- /dev/null +++ b/tests/testframework/YamlTestFramework.csproj @@ -0,0 +1,13 @@ + + + + net7.0 + + + + + + + + + \ No newline at end of file diff --git a/tests/testframework/YamlTestFrameworkCommon.targets b/tests/testframework/YamlTestFrameworkCommon.targets new file mode 100644 index 00000000..ae2fd904 --- /dev/null +++ 
b/tests/testframework/YamlTestFrameworkCommon.targets @@ -0,0 +1,68 @@ + + + + + net7.0 + Library + Azure.AI.CLI.TestFramework + false + + + True + 1.1.0 + + + + $(LocalBuildSDKBinPath) + bin + + + + + + x64 + $(LocalBinOutputPath)\Release\ + + + + + x64 + $(LocalBinOutputPath)\Debug\ + + + + + $(DefineConstants);UNIX + $(LocalBinOutputPath)\ReleaseUnixOS + + + + + $(DefineConstants);UNIX + $(LocalBinOutputPath)\DebugUnixOS + + + + + + 1.0.0 + $(TFAssemblyVersion) + + + + $(TFAssemblyVersion) + $(TFAssemblyVersion) + $(TFAssemblyInformationalVersion) + + + + + + + + + + + + + diff --git a/tests/testadapter/YamlTestProperties.cs b/tests/testframework/YamlTestProperties.cs similarity index 95% rename from tests/testadapter/YamlTestProperties.cs rename to tests/testframework/YamlTestProperties.cs index d73a9813..133bb02b 100644 --- a/tests/testadapter/YamlTestProperties.cs +++ b/tests/testframework/YamlTestProperties.cs @@ -7,7 +7,7 @@ using Microsoft.VisualStudio.TestPlatform.ObjectModel.Adapter; using YamlDotNet.RepresentationModel; -namespace TestAdapterTest +namespace Azure.AI.Details.Common.CLI.TestFramework { public class YamlTestProperties { @@ -38,6 +38,7 @@ private static TestProperty RegisterTestCaseProperty(string name) { "cli", RegisterTestCaseProperty("CLI") }, { "command", RegisterTestCaseProperty("Command") }, { "script", RegisterTestCaseProperty("Script") }, + { "bash", RegisterTestCaseProperty("Bash") }, { "parallelize", RegisterTestCaseProperty("Parallelize") }, { "nextStepId", RegisterTestCaseProperty("nextStepId") }, { "foreach", RegisterTestCaseProperty("ForEach") }, diff --git a/tests/testrunner/Program.cs b/tests/testrunner/Program.cs new file mode 100644 index 00000000..291755be --- /dev/null +++ b/tests/testrunner/Program.cs @@ -0,0 +1,224 @@ +// +// Copyright (c) Microsoft. All rights reserved. +// Licensed under the MIT license. See LICENSE.md file in the project root for full license information. 
+// + +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.IO; +using System.Linq; +using System.Reflection; +using System.Runtime.InteropServices; +using System.Text; +using System.Threading.Tasks; +using Azure.AI.Details.Common.CLI.TestFramework; +using Microsoft.VisualStudio.TestPlatform.ObjectModel; + +namespace Azure.AI.Details.Common.CLI.TestRunner +{ + public class Program + { + public static int Main(string[] args) + { + if (args.Length == 0) + { + return DisplayUsage(); + } + + var command = args[0]; + return command switch + { + "list" => DoCommand(args.Skip(1).ToArray(), true, false), + "run" => DoCommand(args.Skip(1).ToArray(), false, true), + _ => DisplayUsage() + }; + } + + private static int DisplayUsage() + { + Console.WriteLine("AIT - Azure AI CLI Test runner, Version 1.0.0"); + Console.WriteLine("Copyright (c) 2024 Microsoft Corporation. All Rights Reserved."); + Console.WriteLine(); + Console.WriteLine("USAGE: ait list [...]"); + Console.WriteLine(" OR: ait run [...]"); + Console.WriteLine(); + Console.WriteLine(" FILES"); + Console.WriteLine(" --file FILE"); + Console.WriteLine(" --files FILE1 [FILE2 [...]]"); + Console.WriteLine(" --files PATTERN1 [PATTERN2 [...]]"); + Console.WriteLine(); + Console.WriteLine(" TESTS"); + Console.WriteLine(" --test TEXT"); + Console.WriteLine(" --tests TEXT1 [TEXT2 [...]]"); + Console.WriteLine(); + Console.WriteLine(" FILTERING"); + Console.WriteLine(" --contains TEXT1 [TEXT2 [...]]"); + Console.WriteLine(" --remove TEXT1 [TEXT2 [...]]"); + Console.WriteLine(); + Console.WriteLine("EXAMPLES"); + Console.WriteLine(); + Console.WriteLine(" EXAMPLE 1: List tests from two files, that contain both 'nightly' and 'java', but not 'skip'"); + Console.WriteLine(); + Console.WriteLine(" ait list --files test1.yaml test2.yaml --contains nightly java --remove skip"); + Console.WriteLine(); + Console.WriteLine(" EXAMPLE 2: Run tests from files under current directory, that contain 
'setup' or 'nightly', and 'java', but not 'skip'"); + Console.WriteLine(); + Console.WriteLine(" ait run --tests setup nightly --contains java --remove skip"); + Console.WriteLine(); + Console.WriteLine(" EXAMPLE 3: Run tests from files under 'tests' directory, that contain 'test3', but not 'skip'"); + Console.WriteLine(); + Console.WriteLine(" ait run --files ../tests/**/*.yaml --contains test3 --remove skip"); + + return 1; + } + + private static int DoCommand(string[] args, bool list, bool run) + { + var tests = FindAndFilterTests(args); + if (tests == null) return 1; + + if (list) return DoListTests(tests) ? 0 : 1; + if (run) return DoRunTests(tests) ? 0 : 1; + + return 1; + } + + private static IEnumerable FindAndFilterTests(string[] args) + { + var parsedOk = ParseFilesAndFilterArgs(args, out var files, out var filters); + if (!parsedOk) return null; + + var atLeastOneFileSpecified = files.Any(); + var tests = atLeastOneFileSpecified + ? files.SelectMany(file => YamlTestFramework.GetTestsFromYaml(file.FullName, file)).ToList() + : YamlTestFramework.GetTestsFromDirectory("ait", new DirectoryInfo(".")).ToList(); + + return YamlTestCaseFilter.FilterTestCases(tests, filters); + } + + private static bool ParseFilesAndFilterArgs(string[] args, out IList files, out IList filters) + { + var filesAsList = new List(); + files = filesAsList; + filters = new List(); + + for (int i = 0; i < args.Length; i++) + { + if (args[i] == "--file" || args[i] == "--files") + { + if (i + 1 >= args.Length || args[i + 1].StartsWith("--")) + { + Console.WriteLine($"Expected a file or pattern after '{args[i]}'."); + return false; + } + + do + { + i++; + var pattern = args[i]; + var found = FindFiles(pattern); + if (found.Count() == 0) + { + Console.WriteLine($"No files found for pattern '{pattern}'."); + return false; + } + + filesAsList.AddRange(found); + } + while (i + 1 < args.Length && !args[i + 1].StartsWith("--")); + } + else if (args[i] == "--search") + { + if (i + 1 >= 
args.Length || args[i + 1].StartsWith("--")) + { + Console.WriteLine($"Expected text after '{args[i]}'."); + return false; + } + + do + { + i++; + filters.Add(args[i]); + } + while (i + 1 < args.Length && !args[i + 1].StartsWith("--")); + } + else if (args[i] == "--test" || args[i] == "--tests") + { + if (i + 1 >= args.Length || args[i + 1].StartsWith("--")) + { + Console.WriteLine($"Expected text after '{args[i]}'."); + return false; + } + + do + { + i++; + filters.Add(args[i]); + } + while (i + 1 < args.Length && !args[i + 1].StartsWith("--")); + } + else if (args[i] == "--contains") + { + if (i + 1 >= args.Length || args[i + 1].StartsWith("--")) + { + Console.WriteLine($"Expected text after '{args[i]}'."); + return false; + } + + do + { + i++; + filters.Add($"+{args[i]}"); // `+` means MUST contain text + } + while (i + 1 < args.Length && !args[i + 1].StartsWith("--")); + } + else if (args[i] == "--remove") + { + if (i + 1 >= args.Length || args[i + 1].StartsWith("--")) + { + Console.WriteLine($"Expected text after '{args[i]}'."); + return false; + } + + do + { + i++; + filters.Add($"-{args[i]}"); // `-` means MUST NOT contain text + } + while (i + 1 < args.Length && !args[i + 1].StartsWith("--")); + } + else + { + Console.WriteLine($"Invalid command line argument at '{args[i]}'."); + return false; + } + } + + return true; + } + + private static IList FindFiles(string pattern) + { + var files = FileHelpers.FindFiles(Directory.GetCurrentDirectory(), pattern, null, false, false); + return files.Select(x => new FileInfo(x)).ToList(); + } + + private static bool DoListTests(IEnumerable tests) + { + foreach (var test in tests) + { + Console.WriteLine(test.FullyQualifiedName); + } + + return true; + } + + private static bool DoRunTests(IEnumerable tests) + { + var consoleHost = new YamlTestFrameworkConsoleHost(); + var resultsByTestCaseId = YamlTestFramework.RunTests(tests, consoleHost); + return consoleHost.Finish(resultsByTestCaseId); + } + } +} diff --git 
a/tests/testrunner/Properties/AssemblyInfo.cs b/tests/testrunner/Properties/AssemblyInfo.cs new file mode 100644 index 00000000..d280765b --- /dev/null +++ b/tests/testrunner/Properties/AssemblyInfo.cs @@ -0,0 +1,36 @@ +using System.Reflection; +using System.Runtime.CompilerServices; +using System.Runtime.InteropServices; + +// General Information about an assembly is controlled through the following +// set of attributes. Change these attribute values to modify the information +// associated with an assembly. +[assembly: AssemblyTitle("YamlTestRunner")] +[assembly: AssemblyDescription("")] +[assembly: AssemblyConfiguration("")] +[assembly: AssemblyCompany("")] +[assembly: AssemblyProduct("YamlTestRunner")] +[assembly: AssemblyCopyright("Copyright © 2024")] +[assembly: AssemblyTrademark("")] +[assembly: AssemblyCulture("")] + +// Setting ComVisible to false makes the types in this assembly not visible +// to COM components. If you need to access a type in this assembly from +// COM, set the ComVisible attribute to true on that type. 
+[assembly: ComVisible(false)] + +// The following GUID is for the ID of the typelib if this project is exposed to COM +[assembly: Guid("AF47877C-87D2-449B-B52B-AD90D6FDF609")] + +// Version information for an assembly consists of the following four values: +// +// Major Version +// Minor Version +// Build Number +// Revision +// +// You can specify all the values or you can default the Build and Revision Numbers +// by using the '*' as shown below: +// [assembly: AssemblyVersion("1.0.*")] +[assembly: AssemblyVersion("1.0.0.0")] +[assembly: AssemblyFileVersion("1.0.0.0")] diff --git a/tests/testrunner/YamlTestFrameworkConsoleHost.cs b/tests/testrunner/YamlTestFrameworkConsoleHost.cs new file mode 100644 index 00000000..2494002e --- /dev/null +++ b/tests/testrunner/YamlTestFrameworkConsoleHost.cs @@ -0,0 +1,438 @@ +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Text.RegularExpressions; +using System.Xml; +using Microsoft.VisualStudio.TestPlatform.ObjectModel; +using Microsoft.VisualStudio.TestPlatform.ObjectModel.Adapter; + +namespace Azure.AI.Details.Common.CLI.TestFramework +{ + public class YamlTestFrameworkConsoleHost : IYamlTestFrameworkHost + { + public YamlTestFrameworkConsoleHost() + { + } + + public void RecordStart(TestCase testCase) + { + _startTime ??= DateTime.Now; + _testCases.Add(testCase); + SetExecutionId(testCase, Guid.NewGuid()); + + lock (this) + { + Console.ForegroundColor = ConsoleColor.DarkGray; + Console.WriteLine("Starting test: " + testCase.FullyQualifiedName); + Console.ResetColor(); + } + } + + public void RecordResult(TestResult testResult) + { + _testResults.Add(testResult); + PrintResult(testResult); + } + + public void RecordEnd(TestCase testCase, TestOutcome outcome) + { + _endTime = DateTime.Now; + } + + public bool Finish(IDictionary> resultsByTestCaseId) + { + var allResults = resultsByTestCaseId.Values.SelectMany(x => x); + var failedResults = allResults.Where(x => x.Outcome 
== TestOutcome.Failed).ToList(); + var passedResults = allResults.Where(x => x.Outcome == TestOutcome.Passed).ToList(); + var skippedResults = allResults.Where(x => x.Outcome == TestOutcome.Skipped).ToList(); + var passed = failedResults.Count == 0; + + if (failedResults.Count > 0) + { + Console.ResetColor(); + Console.WriteLine(); + Console.BackgroundColor = ConsoleColor.Red; + Console.ForegroundColor = ConsoleColor.White; + Console.Write("FAILURE SUMMARY:"); + Console.ResetColor(); + Console.WriteLine(); + failedResults.ForEach(r => PrintResult(r)); + } + else + { + Console.WriteLine(); + } + + var count = allResults.Count(); + var duration = _endTime != null && _startTime != null ? FormattedDuration((_endTime.Value - _startTime.Value).TotalMilliseconds) : "0 ms"; + Console.BackgroundColor = ConsoleColor.Blue; + Console.ForegroundColor = ConsoleColor.White; + Console.Write("TEST RESULT SUMMARY:"); + Console.ResetColor(); + Console.Write("\nTests: "); + Console.ForegroundColor = ConsoleColor.Blue; + Console.Write($"{count}"); + Console.ForegroundColor = ConsoleColor.DarkGray; + Console.WriteLine($" ({duration})"); + + var resultsFile = WriteResultFile(); + + var fi = new FileInfo(resultsFile); + Console.ResetColor(); + Console.Write("Results: "); + Console.ForegroundColor = ConsoleColor.Blue; + Console.Write(fi.FullName); + Console.ResetColor(); + Console.WriteLine("\n"); + + Console.ForegroundColor = ConsoleColor.Green; + Console.Write($"Passed: {passedResults.Count}"); + + if (failedResults.Count > 0) + { + Console.ForegroundColor = ConsoleColor.DarkGray; + Console.Write(", "); + Console.ForegroundColor = ConsoleColor.Red; + Console.Write($"Failed: {failedResults.Count}"); + } + + if (skippedResults.Count > 0) + { + Console.ForegroundColor = ConsoleColor.DarkGray; + Console.Write(", "); + Console.ForegroundColor = ConsoleColor.Yellow; + Console.Write($"Skipped: {skippedResults.Count}"); + } + + Console.ResetColor(); + Console.WriteLine("\n"); + + return passed; 
+ } + + public string WriteResultFile() + { + var assembly = typeof(YamlTestFrameworkConsoleHost).Assembly; + var assemblyPath = assembly.Location; + + _startTime ??= DateTime.Now; + _endTime ??= DateTime.Now; + + var resultFile = "test-results.trx"; + var testRunId = Guid.NewGuid().ToString(); + var testListId = "8c84fa94-04c1-424b-9868-57a2d4851a1d"; + var testType = "13cdc9d9-ddb5-4fa4-a97d-d965ccfc6d4b"; + var userName = Environment.UserName; + var machineName = Environment.MachineName; + var userAtMachine = userName.Split('\\', '/').Last() + "@" + machineName; + var testRunName = userAtMachine + " " + _endTime.Value.ToString("yyyy-MM-dd HH:mm:ss"); + + XmlWriterSettings settings = new XmlWriterSettings(); + settings.Indent = true; + settings.IndentChars = " "; + settings.NewLineChars = "\n"; + settings.NewLineHandling = NewLineHandling.Replace; + settings.OmitXmlDeclaration = false; + + var writer = XmlWriter.Create(resultFile, settings); + writer.WriteStartDocument(); + writer.WriteStartElement("", "TestRun", "http://microsoft.com/schemas/VisualStudio/TeamTest/2010"); + writer.WriteAttributeString("id", testRunId); + writer.WriteAttributeString("name", testRunName); + writer.WriteAttributeString("runUser", userName); + + writer.WriteStartElement("Times"); + writer.WriteAttributeString("creation", _endTime.Value.ToString("o")); + writer.WriteAttributeString("queuing", _endTime.Value.ToString("o")); + writer.WriteAttributeString("start", _startTime.Value.ToString("o")); + writer.WriteAttributeString("finish", _endTime.Value.ToString("o")); + writer.WriteEndElement(); + + writer.WriteStartElement("Results"); + foreach (var testResult in _testResults) + { + var executionId = GetExecutionId(testResult.TestCase).ToString(); + var stdout = testResult.Messages.FirstOrDefault(x => x.Category == TestResultMessage.StandardOutCategory)?.Text; + var stderr = testResult.Messages.FirstOrDefault(x => x.Category == TestResultMessage.StandardErrorCategory)?.Text; + var 
debugTrace = testResult.Messages.FirstOrDefault(x => x.Category == TestResultMessage.DebugTraceCategory)?.Text; + var message = testResult.Messages.FirstOrDefault(x => x.Category == TestResultMessage.AdditionalInfoCategory)?.Text; + + writer.WriteStartElement("UnitTestResult"); + writer.WriteAttributeString("executionId", executionId); + writer.WriteAttributeString("testId", testResult.TestCase.Id.ToString()); + writer.WriteAttributeString("testName", testResult.TestCase.FullyQualifiedName); + writer.WriteAttributeString("computerName", machineName); + writer.WriteAttributeString("duration", testResult.Duration.ToString()); + writer.WriteAttributeString("startTime", testResult.StartTime.DateTime.ToString("o")); + writer.WriteAttributeString("endTime", testResult.EndTime.DateTime.ToString("o")); + writer.WriteAttributeString("testType", testType); + writer.WriteAttributeString("outcome", OutcomeToString(testResult.Outcome)); + writer.WriteAttributeString("testListId", testListId); + writer.WriteAttributeString("relativeResultsDirectory", Guid.NewGuid().ToString()); + writer.WriteStartElement("Output"); + + if (!string.IsNullOrEmpty(stdout)) + { + writer.WriteStartElement("StdOut"); + writer.WriteRaw(System.Security.SecurityElement + .Escape(stdout.Replace("\u001b", string.Empty)) + .Replace("\r\n", " \n")); + writer.WriteEndElement(); + } + + if (!string.IsNullOrEmpty(stderr)) + { + writer.WriteStartElement("StdErr"); + writer.WriteRaw(System.Security.SecurityElement + .Escape(stderr.Replace("\u001b", string.Empty)) + .Replace("\r\n", " \n")); + writer.WriteEndElement(); + } + + if (!string.IsNullOrEmpty(debugTrace)) + { + writer.WriteElementString("DebugTrace", debugTrace); + } + + writer.WriteStartElement("ErrorInfo"); + writer.WriteElementString("Message", testResult.ErrorMessage); + writer.WriteElementString("StackTrace", testResult.ErrorStackTrace); + writer.WriteEndElement(); + writer.WriteStartElement("TextMessages"); + + if (!string.IsNullOrEmpty(message)) + 
{ + writer.WriteElementString("Message", message); + } + writer.WriteEndElement(); + writer.WriteEndElement(); + writer.WriteEndElement(); + } + writer.WriteEndElement(); + + writer.WriteStartElement("TestDefinitions"); + foreach (var testCase in _testCases) + { + var executionId = GetExecutionId(testCase).ToString(); + var qualifiedParts = testCase.FullyQualifiedName.Split('.'); + var className = string.Join(".", qualifiedParts.Take(qualifiedParts.Length - 1)); + var name = qualifiedParts.Last(); + writer.WriteStartElement("UnitTest"); + writer.WriteAttributeString("name", testCase.DisplayName); + writer.WriteAttributeString("storage", assemblyPath); + writer.WriteAttributeString("id", testCase.Id.ToString()); + writer.WriteStartElement("Execution"); + writer.WriteAttributeString("id", executionId); + writer.WriteEndElement(); + writer.WriteStartElement("TestMethod"); + writer.WriteAttributeString("codeBase", assemblyPath); + writer.WriteAttributeString("adapterTypeName", testCase.ExecutorUri.ToString()); + writer.WriteAttributeString("className", className); + writer.WriteAttributeString("name", name); + writer.WriteEndElement(); + writer.WriteEndElement(); + } + writer.WriteEndElement(); + + writer.WriteStartElement("TestEntries"); + foreach (var testCase in _testCases) + { + var executionId = GetExecutionId(testCase).ToString(); + writer.WriteStartElement("TestEntry"); + writer.WriteAttributeString("testId", testCase.Id.ToString()); + writer.WriteAttributeString("executionId", executionId); + writer.WriteAttributeString("testListId", testListId); + writer.WriteEndElement(); + } + writer.WriteEndElement(); + + writer.WriteStartElement("TestLists"); + writer.WriteStartElement("TestList"); + writer.WriteAttributeString("name", "Results Not in a List"); + writer.WriteAttributeString("id", testListId); + writer.WriteEndElement(); + writer.WriteStartElement("TestList"); + writer.WriteAttributeString("name", "All Loaded Results"); + writer.WriteAttributeString("id", 
"19431567-8539-422a-85d7-44ee4e166bda"); + writer.WriteEndElement(); + writer.WriteEndElement(); + + writer.WriteStartElement("ResultSummary"); + writer.WriteAttributeString("outcome", "Completed"); + + writer.WriteStartElement("Counters"); + writer.WriteAttributeString("total", _testResults.Count.ToString()); + writer.WriteAttributeString("executed", _testResults.Count(r => IsExecuted(r)).ToString()); + writer.WriteAttributeString("passed", _testResults.Count(r => IsPassed(r)).ToString()); + writer.WriteAttributeString("failed", _testResults.Count(r => IsFailed(r)).ToString()); + writer.WriteAttributeString("error", _testResults.Count(r => IsError(r)).ToString()); + writer.WriteAttributeString("timeout", _testResults.Count(r => IsTimeout(r)).ToString()); + writer.WriteAttributeString("aborted", _testResults.Count(r => IsAborted(r)).ToString()); + writer.WriteAttributeString("inconclusive", _testResults.Count(r => IsInConclusive(r)).ToString()); + writer.WriteAttributeString("passedButRunAborted", _testResults.Count(r => IsPassedButRunaborted(r)).ToString()); + writer.WriteAttributeString("notRunnable", _testResults.Count(r => IsNotRunnable(r)).ToString()); + writer.WriteAttributeString("notExecuted", _testResults.Count(r => IsNotExecuted(r)).ToString()); + writer.WriteAttributeString("disconnected", _testResults.Count(r => IsDisconnected(r)).ToString()); + writer.WriteAttributeString("warning", _testResults.Count(r => IsWarning(r)).ToString()); + writer.WriteAttributeString("completed", "0"); + writer.WriteAttributeString("inProgress", "0"); + writer.WriteAttributeString("pending", "0"); + writer.WriteEndElement(); + + writer.WriteStartElement("Output"); + writer.WriteElementString("StdOut", ""); + writer.WriteEndElement(); + + writer.WriteEndElement(); + + writer.WriteEndElement(); + writer.WriteEndDocument(); + + writer.Close(); + writer.Dispose(); + + return resultFile; + } + + private void PrintResult(TestResult testResult) + { + lock (this) + { + 
Console.ForegroundColor = ConsoleColor.DarkGray; + if (testResult.Outcome == TestOutcome.Passed) Console.ForegroundColor = ConsoleColor.Green; + if (testResult.Outcome == TestOutcome.Skipped) Console.ForegroundColor = ConsoleColor.Yellow; + if (testResult.Outcome == TestOutcome.Failed) Console.ForegroundColor = ConsoleColor.Red; + + var duration = FormattedDuration(testResult.Duration.TotalMilliseconds); + Console.WriteLine($"{testResult.Outcome} ({duration}): {testResult.TestCase.FullyQualifiedName}"); + Console.ResetColor(); + + if (testResult.Outcome == TestOutcome.Failed) + { + var hasStack = !string.IsNullOrEmpty(testResult.ErrorStackTrace); + if (hasStack) Console.WriteLine(testResult.ErrorStackTrace.Trim('\r', '\n')); + + var hasErr = !string.IsNullOrEmpty(testResult.ErrorMessage); + if (hasErr) Console.WriteLine(testResult.ErrorMessage.Trim('\r', '\n')); + + if (hasErr || hasStack) Console.WriteLine(); + } + } + } + + private static string FormattedDuration(double ms) + { + var secs = ms / 1000; + var duration = ms >= 1000 + ? 
secs.ToString("0.00") + " seconds" + : ms.ToString("0") + " ms"; + return duration; + } + + private static string OutcomeToString(TestOutcome outcome) + { + return outcome switch { + TestOutcome.None => "None", + TestOutcome.Passed => "Passed", + TestOutcome.Failed => "Failed", + TestOutcome.Skipped => "NotExecuted", + TestOutcome.NotFound => "NotFound", + _ => "None", + }; + } + + private bool IsExecuted(TestResult r) + { + return IsPassed(r) || IsFailed(r); + } + + private static bool IsPassed(TestResult r) + { + return r.Outcome == TestOutcome.Passed; + } + + private static bool IsFailed(TestResult r) + { + return r.Outcome == TestOutcome.Failed; + } + + private static bool IsError(TestResult r) + { + return false; + // return r.Outcome == TestOutcome.Error; + } + + private static bool IsTimeout(TestResult r) + { + return false; + // return r.Outcome == TestOutcome.Timeout; + } + + private static bool IsAborted(TestResult r) + { + return false; + // return r.Outcome == TestOutcome.Aborted; + } + + private static bool IsInConclusive(TestResult r) + { + return false; + // return r.Outcome == TestOutcome.Inconclusive; + } + + private static bool IsPassedButRunaborted(TestResult r) + { + return false; + // return r.Outcome == TestOutcome.PassedButRunAborted; + } + + private static bool IsNotRunnable(TestResult r) + { + return false; + // return r.Outcome == TestOutcome.NotRunnable; + } + + private static bool IsNotExecuted(TestResult r) + { + return false; + // return r.Outcome == TestOutcome.NotExecuted; + } + + private static bool IsDisconnected(TestResult r) + { + return false; + // return r.Outcome == TestOutcome.Disconnected; + } + + private static bool IsWarning(TestResult r) + { + return false; + // return r.Outcome == TestOutcome.Warning; + } + + private void SetExecutionId(TestCase testCase, Guid guid) + { + lock (_testToExecutionMap) + { + _testToExecutionMap[testCase.Id] = guid; + } + } + + private Guid GetExecutionId(TestCase testCase) + { + lock 
(_testToExecutionMap) + { + return _testToExecutionMap[testCase.Id]; + } + } + + private DateTime? _startTime; + private DateTime? _endTime; + + private List _testCases = new List(); + private Dictionary _testToExecutionMap = new Dictionary(); + private List _testResults = new List(); + } +} + diff --git a/tests/testrunner/YamlTestRunner.csproj b/tests/testrunner/YamlTestRunner.csproj new file mode 100644 index 00000000..4a17b466 --- /dev/null +++ b/tests/testrunner/YamlTestRunner.csproj @@ -0,0 +1,14 @@ + + + + net7.0 + Exe + + + + + + + + + \ No newline at end of file diff --git a/tests/testrunner/YamlTestRunnerCommon.targets b/tests/testrunner/YamlTestRunnerCommon.targets new file mode 100644 index 00000000..6755939a --- /dev/null +++ b/tests/testrunner/YamlTestRunnerCommon.targets @@ -0,0 +1,56 @@ + + + + + net7.0 + ait + false + + + 1.1.0 + + + + $(LocalBuildSDKBinPath) + bin + + + + + + x64 + $(LocalBinOutputPath)\Release\ + + + + + x64 + $(LocalBinOutputPath)\Debug\ + + + + + $(DefineConstants);UNIX + $(LocalBinOutputPath)\ReleaseUnixOS + + + + + $(DefineConstants);UNIX + $(LocalBinOutputPath)\DebugUnixOS + + + + + + 1.0.0 + $(TFAssemblyVersion) + + + + $(TFAssemblyVersion) + $(TFAssemblyVersion) + $(TFAssemblyInformationalVersion) + + + From 6ef70205bae97d3c36b6c0611f0e5e027719d9e6 Mon Sep 17 00:00:00 2001 From: Christopher Schraer <32145632+chschrae@users.noreply.github.com> Date: Tue, 6 Feb 2024 17:46:08 -0800 Subject: [PATCH 23/30] java stream with own data (#158) * updated imports on chat completions template * Added java streaming with your own data sample * updated test --------- Co-authored-by: Chris Schraer --- .../src/OpenAIChatCompletionsClass.java | 7 +- .../_.json | 15 +++ .../pom.xml | 39 +++++++ .../scripts/1-copydependencies.bat | 1 + .../scripts/2-compile.bat | 1 + .../scripts/3-run.bat | 1 + .../src/Main.java | 49 ++++++++ ...ChatCompletionsWithDataStreamingClass.java | 109 ++++++++++++++++++ tests/test.yaml | 2 +- 9 files changed, 222 
insertions(+), 2 deletions(-) create mode 100644 src/ai/.x/templates/openai-chat-streaming-with-data-java/_.json create mode 100644 src/ai/.x/templates/openai-chat-streaming-with-data-java/pom.xml create mode 100644 src/ai/.x/templates/openai-chat-streaming-with-data-java/scripts/1-copydependencies.bat create mode 100644 src/ai/.x/templates/openai-chat-streaming-with-data-java/scripts/2-compile.bat create mode 100644 src/ai/.x/templates/openai-chat-streaming-with-data-java/scripts/3-run.bat create mode 100644 src/ai/.x/templates/openai-chat-streaming-with-data-java/src/Main.java create mode 100644 src/ai/.x/templates/openai-chat-streaming-with-data-java/src/OpenAIChatCompletionsWithDataStreamingClass.java diff --git a/src/ai/.x/templates/openai-chat-java/src/OpenAIChatCompletionsClass.java b/src/ai/.x/templates/openai-chat-java/src/OpenAIChatCompletionsClass.java index deccae33..792e85f0 100644 --- a/src/ai/.x/templates/openai-chat-java/src/OpenAIChatCompletionsClass.java +++ b/src/ai/.x/templates/openai-chat-java/src/OpenAIChatCompletionsClass.java @@ -3,7 +3,12 @@ <#@ parameter type="System.String" name="ClassName" #> import com.azure.ai.openai.OpenAIClient; import com.azure.ai.openai.OpenAIClientBuilder; -import com.azure.ai.openai.models.*; +import com.azure.ai.openai.models.ChatRequestAssistantMessage; +import com.azure.ai.openai.models.ChatRequestMessage; +import com.azure.ai.openai.models.ChatRequestSystemMessage; +import com.azure.ai.openai.models.ChatRequestUserMessage; +import com.azure.ai.openai.models.ChatCompletions; +import com.azure.ai.openai.models.ChatCompletionsOptions; import com.azure.core.credential.AzureKeyCredential; import java.util.ArrayList; diff --git a/src/ai/.x/templates/openai-chat-streaming-with-data-java/_.json b/src/ai/.x/templates/openai-chat-streaming-with-data-java/_.json new file mode 100644 index 00000000..a4123b59 --- /dev/null +++ b/src/ai/.x/templates/openai-chat-streaming-with-data-java/_.json @@ -0,0 +1,15 @@ +{ + 
"_LongName": "OpenAI Chat Completions (w/ Data + AI Search)", + "_ShortName": "openai-chat-streaming-with-data", + "_Language": "Java", + "ClassName": "OpenAIChatCompletionsWithDataStreamingClass", + "AZURE_OPENAI_API_VERSION": "", + "AZURE_OPENAI_ENDPOINT": "", + "AZURE_OPENAI_KEY": "", + "AZURE_OPENAI_CHAT_DEPLOYMENT": "", + "AZURE_OPENAI_EMBEDDING_DEPLOYMENT": "", + "AZURE_OPENAI_SYSTEM_PROMPT": "You are a helpful AI assistant.", + "AZURE_AI_SEARCH_ENDPOINT": "", + "AZURE_AI_SEARCH_KEY": "", + "AZURE_AI_SEARCH_INDEX_NAME": "" +} \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-streaming-with-data-java/pom.xml b/src/ai/.x/templates/openai-chat-streaming-with-data-java/pom.xml new file mode 100644 index 00000000..59337755 --- /dev/null +++ b/src/ai/.x/templates/openai-chat-streaming-with-data-java/pom.xml @@ -0,0 +1,39 @@ + + 4.0.0 + + com.azure.ai.openai.samples + openai-chat-java-streaming + 1.0-SNAPSHOT + + + + + com.azure + azure-ai-openai + 1.0.0-beta.6 + + + + + + + org.apache.maven.plugins + maven-dependency-plugin + 3.1.2 + + + copy-dependencies + prepare-package + + copy-dependencies + + + ${project.build.directory}/lib + + + + + + + + \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-streaming-with-data-java/scripts/1-copydependencies.bat b/src/ai/.x/templates/openai-chat-streaming-with-data-java/scripts/1-copydependencies.bat new file mode 100644 index 00000000..f0b4c1c7 --- /dev/null +++ b/src/ai/.x/templates/openai-chat-streaming-with-data-java/scripts/1-copydependencies.bat @@ -0,0 +1 @@ +mvn clean package \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-streaming-with-data-java/scripts/2-compile.bat b/src/ai/.x/templates/openai-chat-streaming-with-data-java/scripts/2-compile.bat new file mode 100644 index 00000000..f0249b59 --- /dev/null +++ b/src/ai/.x/templates/openai-chat-streaming-with-data-java/scripts/2-compile.bat @@ -0,0 +1 @@ +javac -cp target/lib/* 
src/OpenAIChatCompletionsWithDataStreamingClass.java src/Main.java -d out diff --git a/src/ai/.x/templates/openai-chat-streaming-with-data-java/scripts/3-run.bat b/src/ai/.x/templates/openai-chat-streaming-with-data-java/scripts/3-run.bat new file mode 100644 index 00000000..6d301cb6 --- /dev/null +++ b/src/ai/.x/templates/openai-chat-streaming-with-data-java/scripts/3-run.bat @@ -0,0 +1 @@ +java -cp out;target/lib/* Main diff --git a/src/ai/.x/templates/openai-chat-streaming-with-data-java/src/Main.java b/src/ai/.x/templates/openai-chat-streaming-with-data-java/src/Main.java new file mode 100644 index 00000000..f4efebc5 --- /dev/null +++ b/src/ai/.x/templates/openai-chat-streaming-with-data-java/src/Main.java @@ -0,0 +1,49 @@ +<#@ template hostspecific="true" #> +<#@ output extension=".java" encoding="utf-8" #> +<#@ parameter type="System.String" name="ClassName" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_API_VERSION" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_ENDPOINT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_KEY" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_CHAT_DEPLOYMENT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_EMBEDDING_DEPLOYMENT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_SYSTEM_PROMPT" #> +<#@ parameter type="System.String" name="AZURE_AI_SEARCH_ENDPOINT" #> +<#@ parameter type="System.String" name="AZURE_AI_SEARCH_KEY" #> +<#@ parameter type="System.String" name="AZURE_AI_SEARCH_INDEX_NAME" #> +import java.util.Scanner; +import reactor.core.publisher.Flux; +import com.azure.ai.openai.models.ChatCompletions; + +public class Main { + + public static void main(String[] args) { + String openAIKey = (System.getenv("AZURE_OPENAI_KEY") != null) ? System.getenv("AZURE_OPENAI_KEY") : ""; + String openAIEndpoint = (System.getenv("AZURE_OPENAI_ENDPOINT") != null) ? 
System.getenv("AZURE_OPENAI_ENDPOINT") : ""; + String openAIChatDeployment = (System.getenv("AZURE_OPENAI_CHAT_DEPLOYMENT") != null) ? System.getenv("AZURE_OPENAI_CHAT_DEPLOYMENT") : ""; + String openAISystemPrompt = (System.getenv("AZURE_OPENAI_SYSTEM_PROMPT") != null) ? System.getenv("AZURE_OPENAI_SYSTEM_PROMPT") : "You are a helpful AI assistant."; + + String openAIApiVersion = System.getenv("AZURE_OPENAI_API_VERSION") != null ? System.getenv("AZURE_OPENAI_API_VERSION") : "<#= AZURE_OPENAI_API_VERSION #>"; + String azureSearchEmbeddingsDeploymentName = System.getenv("AZURE_OPENAI_EMBEDDING_DEPLOYMENT") != null ? System.getenv("AZURE_OPENAI_EMBEDDING_DEPLOYMENT") : "<#= AZURE_OPENAI_EMBEDDING_DEPLOYMENT #>"; + String azureSearchEndpoint = System.getenv("AZURE_AI_SEARCH_ENDPOINT") != null ? System.getenv("AZURE_AI_SEARCH_ENDPOINT") : "<#= AZURE_AI_SEARCH_ENDPOINT #>"; + String azureSearchAPIKey = System.getenv("AZURE_AI_SEARCH_KEY") != null ? System.getenv("AZURE_AI_SEARCH_KEY") : "<#= AZURE_AI_SEARCH_KEY #>"; + String azureSearchIndexName = System.getenv("AZURE_AI_SEARCH_INDEX_NAME") != null ? 
System.getenv("AZURE_AI_SEARCH_INDEX_NAME") : "<#= AZURE_AI_SEARCH_INDEX_NAME #>"; + + <#= ClassName #> chat = new <#= ClassName #>(openAIKey, openAIEndpoint, openAIChatDeployment, openAISystemPrompt, azureSearchEndpoint, azureSearchIndexName, azureSearchAPIKey, azureSearchEmbeddingsDeploymentName); + + Scanner scanner = new Scanner(System.in); + while (true) { + System.out.print("User: "); + String userPrompt = scanner.nextLine(); + if (userPrompt.isEmpty() || "exit".equals(userPrompt)) + break; + + System.out.print("\nAssistant: "); + Flux responseFlux = chat.getChatCompletionsStreamingAsync(userPrompt, update -> { + System.out.print(update.getContent()); + }); + responseFlux.blockLast(); + System.out.println("\n"); + } + scanner.close(); + } +} \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-streaming-with-data-java/src/OpenAIChatCompletionsWithDataStreamingClass.java b/src/ai/.x/templates/openai-chat-streaming-with-data-java/src/OpenAIChatCompletionsWithDataStreamingClass.java new file mode 100644 index 00000000..ec7469bd --- /dev/null +++ b/src/ai/.x/templates/openai-chat-streaming-with-data-java/src/OpenAIChatCompletionsWithDataStreamingClass.java @@ -0,0 +1,109 @@ +<#@ template hostspecific="true" #> +<#@ output extension=".java" encoding="utf-8" #> +<#@ parameter type="System.String" name="ClassName" #> +import com.azure.ai.openai.OpenAIAsyncClient; +import com.azure.ai.openai.OpenAIClientBuilder; +import com.azure.ai.openai.models.AzureCognitiveSearchChatExtensionConfiguration; +import com.azure.ai.openai.models.AzureCognitiveSearchChatExtensionParameters; +import com.azure.ai.openai.models.AzureCognitiveSearchIndexFieldMappingOptions; +import com.azure.ai.openai.models.AzureCognitiveSearchQueryType; +import com.azure.ai.openai.models.ChatChoice; +import com.azure.ai.openai.models.ChatCompletions; +import com.azure.ai.openai.models.ChatCompletionsOptions; +import com.azure.ai.openai.models.ChatRequestAssistantMessage; +import 
com.azure.ai.openai.models.ChatRequestMessage; +import com.azure.ai.openai.models.ChatRequestSystemMessage; +import com.azure.ai.openai.models.ChatRequestUserMessage; +import com.azure.ai.openai.models.ChatResponseMessage; +import com.azure.ai.openai.models.CompletionsFinishReason; +import com.azure.ai.openai.models.OnYourDataApiKeyAuthenticationOptions; +import com.azure.ai.openai.models.OnYourDataDeploymentNameVectorizationSource; +import com.azure.core.credential.AzureKeyCredential; +import reactor.core.publisher.Flux; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.function.Consumer; +import java.util.List; + +public class <#= ClassName #> { + + private OpenAIAsyncClient client; + private ChatCompletionsOptions options; + private String openAIChatDeployment; + private String openAISystemPrompt; + + public <#= ClassName #> ( + String openAIKey, + String openAIEndpoint, + String openAIChatDeployment, + String openAISystemPrompt, + String azureSearchEndpoint, + String azureSearchIndexName, + String azureSearchAPIKey, + String azureSearchEmbeddingsDeploymentName) { + + this.openAIChatDeployment = openAIChatDeployment; + this.openAISystemPrompt = openAISystemPrompt; + client = new OpenAIClientBuilder() + .endpoint(openAIEndpoint) + .credential(new AzureKeyCredential(openAIKey)) + .buildAsyncClient(); + + AzureCognitiveSearchChatExtensionConfiguration searchConfiguration = + new AzureCognitiveSearchChatExtensionConfiguration( + new AzureCognitiveSearchChatExtensionParameters(azureSearchEndpoint, azureSearchIndexName) + .setAuthentication(new OnYourDataApiKeyAuthenticationOptions(azureSearchAPIKey)) + .setQueryType(AzureCognitiveSearchQueryType.VECTOR_SIMPLE_HYBRID) + .setEmbeddingDependency(new OnYourDataDeploymentNameVectorizationSource(azureSearchEmbeddingsDeploymentName)) + ); + + List chatMessages = new ArrayList<>(); + options = new ChatCompletionsOptions(chatMessages) + .setDataSources(Arrays.asList(searchConfiguration)); + 
ClearConversation(); + options.setStream(true); + } + + public void ClearConversation(){ + List chatMessages = options.getMessages(); + chatMessages.clear(); + chatMessages.add(new ChatRequestSystemMessage(this.openAISystemPrompt)); + } + + public Flux getChatCompletionsStreamingAsync(String userPrompt, + Consumer callback) { + options.getMessages().add(new ChatRequestUserMessage(userPrompt)); + + StringBuilder responseContent = new StringBuilder(); + Flux response = client.getChatCompletionsStream(this.openAIChatDeployment, options); + + response.subscribe(chatResponse -> { + if (chatResponse.getChoices() != null) { + for (ChatChoice update : chatResponse.getChoices()) { + if (update.getDelta() == null || update.getDelta().getContent() == null) + continue; + String content = update.getDelta().getContent(); + + if (update.getFinishReason() == CompletionsFinishReason.CONTENT_FILTERED) { + content = content + "\nWARNING: Content filtered!"; + } else if (update.getFinishReason() == CompletionsFinishReason.TOKEN_LIMIT_REACHED) { + content = content + "\nERROR: Exceeded token limit!"; + } + + if (content.isEmpty()) + continue; + + if(callback != null) { + callback.accept(update.getDelta()); + } + responseContent.append(content); + } + + options.getMessages().add(new ChatRequestAssistantMessage(responseContent.toString())); + } + }); + + return response; + } +} \ No newline at end of file diff --git a/tests/test.yaml b/tests/test.yaml index 92a3747e..d7b1bac3 100644 --- a/tests/test.yaml +++ b/tests/test.yaml @@ -89,5 +89,5 @@ ^Helper +Function +Class +Library +helper-functions +C# *\r?$\n ^OpenAI +Chat +Completions +openai-chat +C#, +Go, +Java, +JavaScript, +Python *\r?$\n ^OpenAI +Chat +Completions +\(Streaming\) +openai-chat-streaming +C#, +Go, +Java, +JavaScript, +Python *\r?$\n - ^OpenAI +Chat +Completions +\(w/ +Data +\+ +AI +Search\) +openai-chat-streaming-with-data +C#, +Go, +JavaScript, +Python *\r?$\n + ^OpenAI +Chat +Completions +\(w/ +Data +\+ +AI +Search\) 
+openai-chat-streaming-with-data +C#, +Go, +Java, +JavaScript, +Python *\r?$\n ^OpenAI +Chat +Completions +\(w/ +Functions\) +openai-chat-streaming-with-functions +C#, +Go, +JavaScript, +Python *\r?$\n From f92682159ac53e6364234e770ecff9a9e725d7d1 Mon Sep 17 00:00:00 2001 From: Christopher Schraer <32145632+chschrae@users.noreply.github.com> Date: Tue, 6 Feb 2024 17:59:52 -0800 Subject: [PATCH 24/30] trimmed spaces on Readme (#168) Co-authored-by: Chris Schraer --- README.md | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/README.md b/README.md index d2b20737..ebec9ba5 100644 --- a/README.md +++ b/README.md @@ -1,9 +1,9 @@ -Status: Draft in Progress +Status: Draft in Progress Owner: Rob Chambers # Using the Azure AI CLI -The Azure `AI` Command-Line Interface (CLI) is a cross-platform command-line tool to connect to Azure AI services and execute control-plane and data-plane operations without having to write any code. The CLI allows the execution of commands through a terminal using interactive command-line prompts or via script. +The Azure `AI` Command-Line Interface (CLI) is a cross-platform command-line tool to connect to Azure AI services and execute control-plane and data-plane operations without having to write any code. The CLI allows the execution of commands through a terminal using interactive command-line prompts or via script. You can easily use the `AI` CLI to experiment with key Azure AI service features and see how they work with your use cases. Within minutes, you can setup all the required Azure resources needed, and build a customized Copilot using OpenAI's chat completions APIs and your own data. You can try it out interactively, or script larger processes to automate your own workflows and evaluations as part of your CI/CD system. 
@@ -11,7 +11,7 @@ In the future, you'll even be able to use the `AI` CLI to dynamically create cod ## **STEP 1**: Setup your development environment -You can install the Azure `AI` CLI locally on Linux, Mac, or Windows computers, or use it thru an internet browser or Docker container. +You can install the Azure `AI` CLI locally on Linux, Mac, or Windows computers, or use it thru an internet browser or Docker container. During this public preview, we recommend using the Azure `AI` CLI thru GitHub Codespaces. This will allow you to quickly get started without having to install anything locally. @@ -82,12 +82,12 @@ ai chat --system @prompt.txt --user "Tell me about Azure AI Studio" ``` USAGE: ai chat [...] - CONNECTION (see: ai help connection) + CONNECTION (see: ai help connection) --deployment DEPLOYMENT (see: ai help chat deployment) - --endpoint ENDPOINT (see: ai help chat endpoint) - --key KEY (see: ai help chat key) + --endpoint ENDPOINT (see: ai help chat endpoint) + --key KEY (see: ai help chat key) - INPUT (see: ai help chat input) + INPUT (see: ai help chat input) --interactive (see: ai help chat interactive) --system PROMPT (see: ai help chat system prompt) * --file FILE (see: ai help chat history file) @@ -187,4 +187,4 @@ ai chat --interactive --system @prompt.txt --index-name "product-info" ```bash ai chat --system @prompt.txt --index-name "product-info" --user "Which tent has the highest rainfly waterproof rating?" 
-``` \ No newline at end of file +``` From 99f934bed64164e33e10c8799749587940cabc54 Mon Sep 17 00:00:00 2001 From: Rob Chambers Date: Wed, 7 Feb 2024 04:18:42 -0800 Subject: [PATCH 25/30] update requirements and evaluate function (#167) --- requirements.txt | 8 +------- .../help/include.python.script.function_call_evaluate.py | 5 +---- 2 files changed, 2 insertions(+), 11 deletions(-) diff --git a/requirements.txt b/requirements.txt index 46183daa..1521f520 100644 --- a/requirements.txt +++ b/requirements.txt @@ -3,18 +3,12 @@ ipykernel openai>1.0 -azure-ai-ml @https://pkgs.dev.azure.com/azure-sdk/29ec6040-b234-4e31-b139-33dc4287b756/_packaging/3572dbf9-b5ef-433b-9137-fc4d7768e7cc/pypi/download/azure-ai-ml/1.13a20240110002/azure_ai_ml-1.13.0a20240110002-py3-none-any.whl -azure-ai-resources @https://pkgs.dev.azure.com/azure-sdk/29ec6040-b234-4e31-b139-33dc4287b756/_packaging/3572dbf9-b5ef-433b-9137-fc4d7768e7cc/pypi/download/azure-ai-resources/1a20240112004/azure_ai_resources-1.0.0a20240112004-py3-none-any.whl -# generative ai SDK dependencies -azure-ai-generative[evaluate,index] @https://pkgs.dev.azure.com/azure-sdk/29ec6040-b234-4e31-b139-33dc4287b756/_packaging/3572dbf9-b5ef-433b-9137-fc4d7768e7cc/pypi/download/azure-ai-generative/1a20240112004/azure_ai_generative-1.0.0a20240112004-py3-none-any.whl +azure-ai-generative[evaluate,index,promptflow]==1.0.0b6 # hardcoded the version of azureml-mlflow here for faster Docker image building speed azureml-mlflow==1.53.0 pytest -# langchain dependencies, these should be optional in the future -# langchain moved Embeddings from langchain.embeddings.base to langchain.schema.embeddings while azureml-rag is still referencing it. 
-# once azureml-rag fixes, we should remove the langchain reference from this file langchain==0.1.1 langchain-openai==0.0.2.post1 semantic-kernel \ No newline at end of file diff --git a/src/ai/.x/help/include.python.script.function_call_evaluate.py b/src/ai/.x/help/include.python.script.function_call_evaluate.py index ea3f9c86..fe6b1174 100755 --- a/src/ai/.x/help/include.python.script.function_call_evaluate.py +++ b/src/ai/.x/help/include.python.script.function_call_evaluate.py @@ -215,13 +215,10 @@ def run_evaluate_part(subscription_id, resource_group_name, project_name, run_re user_agent="ai-cli 0.0.1" ) - def dont_call_this_method(kwargs): - raise Exception("This method should not be called.") - from azure.ai.generative.evaluate import evaluate eval_results = evaluate( evaluation_name=name, - target=dont_call_this_method, + target=None, data=run_results, truth_data="truth", prediction_data="answer", From 9756f7ca6021b598eb3e86deae0c171371fddffe Mon Sep 17 00:00:00 2001 From: Ryan Hurey Date: Wed, 7 Feb 2024 13:11:20 -0800 Subject: [PATCH 26/30] Add the file that the docs say should have been added by the creation process (#171) --- .github/compliance/inventory.yml | 6 ++++++ 1 file changed, 6 insertions(+) create mode 100644 .github/compliance/inventory.yml diff --git a/.github/compliance/inventory.yml b/.github/compliance/inventory.yml new file mode 100644 index 00000000..e9b10d8e --- /dev/null +++ b/.github/compliance/inventory.yml @@ -0,0 +1,6 @@ +inventory: +- source: DirectOwners + isProduction: false + items: + - id: robch@microsoft.com + - id: cdev@microsoft.com \ No newline at end of file From 5e12aaff4947b69f44046492475fd6f36c5d370a Mon Sep 17 00:00:00 2001 From: pankopon <55108151+pankopon@users.noreply.github.com> Date: Wed, 7 Feb 2024 15:04:59 -0800 Subject: [PATCH 27/30] Add Windows x64 installer (#165) Add a package bundle installer for Azure AI CLI on Windows x64 that checks for .NET and Azure CLI dependencies and installs them if missing. 
--- .azure/pipelines/build.yaml | 69 +++++++++++++-- .azure/pipelines/scripts/set-variables.sh | 46 +++++++++- .azure/pipelines/sign-dll-exe.yaml | 60 +++++++++++++ scripts/Azure-AI-CLI-Bundle.wxs | 79 +++++++++++++++++ scripts/Azure-AI-CLI.wxs | 80 +++++++++++++++--- scripts/InstallCLI.cmd | 2 - scripts/UninstallCLI.cmd | 2 - scripts/WindowsInstaller.png | Bin 0 -> 2795 bytes scripts/WixBuildInstaller.cmd | 98 ++++++++++++++++++++++ scripts/WixRunInsignia.cmd | 75 +++++++++++++++++ 10 files changed, 487 insertions(+), 24 deletions(-) create mode 100644 .azure/pipelines/sign-dll-exe.yaml create mode 100644 scripts/Azure-AI-CLI-Bundle.wxs delete mode 100644 scripts/InstallCLI.cmd delete mode 100644 scripts/UninstallCLI.cmd create mode 100644 scripts/WindowsInstaller.png create mode 100644 scripts/WixBuildInstaller.cmd create mode 100644 scripts/WixRunInsignia.cmd diff --git a/.azure/pipelines/build.yaml b/.azure/pipelines/build.yaml index 6134df89..b25d5182 100644 --- a/.azure/pipelines/build.yaml +++ b/.azure/pipelines/build.yaml @@ -19,7 +19,7 @@ stages: name: Variables inputs: filePath: ./.azure/pipelines/scripts/set-variables.sh - arguments: '0.0.0-dev2024.$(Build.BuildId)' + arguments: '$(Build.BuildNumber) $(Build.BuildId)' displayName: 'Set up environment variables' - stage: BuildStage @@ -62,15 +62,16 @@ stages: folderPath: '$(Build.ArtifactStagingDirectory)' pattern: '$(AICLINuPkgFileName)' - task: Bash@3 - displayName: 'Create installation script' + displayName: 'Create Linux installation script' inputs: filePath: ./scripts/InstallAzureAICLIDeb-UpdateVersion.sh arguments: '$(AICLIVersion) $(Build.StagingDirectory)' - - task: PublishPipelineArtifact@1 + - task: PublishBuildArtifacts@1 displayName: Publish artifacts inputs: + pathToPublish: '$(Build.ArtifactStagingDirectory)' targetPath: '$(Build.StagingDirectory)' - artifact: 'ai-cli-artifacts' + artifactName: 'ai-cli-artifacts' - job: Publish dependsOn: [Build] @@ -93,12 +94,19 @@ stages: arguments: 
'$(Build.ArtifactStagingDirectory)/$(AICLINuPkgFileName) private/ai/$(AICLINuPkgFileName)' scriptPath: './.azure/pipelines/scripts/upload-file.sh' - task: AzureCLI@2 - displayName: Upload installation script + displayName: Upload Linux installation script inputs: azureSubscription: 'Carbon Dropper (CSSpeechStorage Drop)' scriptType: 'bash' arguments: '$(Build.ArtifactStagingDirectory)/InstallAzureAICLIDeb-$(AICLIVersion).sh private/ai/InstallAzureAICLIDeb-$(AICLIVersion).sh' scriptPath: './.azure/pipelines/scripts/upload-file.sh' + - task: AzureCLI@2 + displayName: Upload Windows installer + inputs: + azureSubscription: 'Carbon Dropper (CSSpeechStorage Drop)' + scriptType: 'bash' + arguments: '$(Build.ArtifactStagingDirectory)/Azure-AI-CLI-Setup-$(AICLIVersion)-x64.exe private/ai/Azure-AI-CLI-Setup-$(AICLIVersion)-x64.exe' + scriptPath: './.azure/pipelines/scripts/upload-file.sh' - task: GithubRelease@1 condition: and(succeeded(), eq(variables['IsRelease'], 'true')) displayName: Create GitHub release @@ -187,6 +195,57 @@ stages: docker tag acrbn.azurecr.io/azure-ai-cli:bookworm-$(AICLIVersion) acrbn.azurecr.io/azure-ai-cli:latest docker push acrbn.azurecr.io/azure-ai-cli:latest +- stage: BuildInstallerStage + dependsOn: SetupStage + pool: + name: SkymanWS2022Compliant + jobs: + - job: BuildWindowsInstaller + variables: + AICLIVersion: $[ stageDependencies.SetupStage.SetupJob.outputs['Variables.AICLIVersion']] + AICLISemVerVersion: $[ stageDependencies.SetupStage.SetupJob.outputs['Variables.AICLISemVerVersion']] + steps: + - script: call WixBuildInstaller.cmd $(AICLIVersion) $(AICLISemVerVersion) + workingDirectory: $(Build.SourcesDirectory)/scripts + failOnStderr: true + displayName: Build installation package bundle + + - script: call WixRunInsignia.cmd detach x64 $(AICLIVersion) + workingDirectory: $(Build.SourcesDirectory)/scripts + failOnStderr: true + displayName: Detach bundle bootstrap engine + + - template: sign-dll-exe.yaml + parameters: + displayName: 
Sign bundle bootstrap engine + folderPath: '$(Build.SourcesDirectory)/scripts' + pattern: 'engine-*.exe' + + - script: call WixRunInsignia.cmd attach x64 $(AICLIVersion) + workingDirectory: $(Build.SourcesDirectory)/scripts + failOnStderr: true + displayName: Re-attach bootstrap bundle engine + + - template: sign-dll-exe.yaml + parameters: + displayName: Sign installer + folderPath: '$(Build.SourcesDirectory)/scripts' + pattern: 'Azure-AI-CLI-Setup-*.exe' + + - task: CopyFiles@2 + inputs: + sourceFolder: '$(Build.SourcesDirectory)/scripts' + contents: 'Azure-AI-CLI-Setup-*.exe' + targetFolder: '$(Build.ArtifactStagingDirectory)' + displayName: Copy files + + - task: PublishBuildArtifacts@1 + inputs: + pathToPublish: '$(Build.ArtifactStagingDirectory)' + targetPath: '$(Build.StagingDirectory)' + artifactName: 'ai-cli-artifacts' + displayName: Publish artifacts + - stage: TestStage dependsOn: [SetupStage, BuildStage] condition: and(succeeded(), or(eq(variables['IsRelease'], 'true'), eq(variables['TestDevBuild'], 'true'))) diff --git a/.azure/pipelines/scripts/set-variables.sh b/.azure/pipelines/scripts/set-variables.sh index 4d6ac2fc..ebed002f 100644 --- a/.azure/pipelines/scripts/set-variables.sh +++ b/.azure/pipelines/scripts/set-variables.sh @@ -1,16 +1,56 @@ #!/bin/bash define_variable () { + echo "$1=$2" echo "##vso[task.setvariable variable=$1;isOutput=true]$2" } echo "Source branch: $BUILD_SOURCEBRANCH" -# if the user passed in a custom dev branch, use it +# Determine the product version (major.minor.build). +# ref. https://learn.microsoft.com/windows/win32/msi/productversion +# NOTE: If the major or minor version is not updated before a new year, the version number becomes ambiguous +# and it may not be possible to upgrade an old version from the previous year without manual uninstallation. +# Example: +# - last build of year N: 1.0.36599 +# - first build of year N+1: 1.0.101 -> cannot update with this, must uninstall 1.0.36599 first. 
+ +MAJOR_VERSION="1" +MINOR_VERSION="0" +BUILD_VERSION="0" + +# Parse Build.BuildNumber for build date and daily run # (max 99). if [ ! -z "$1" ]; then - DEV_VERSION="$1" + # e.g. "20240120.2" -> build year 2024, month 01, day 20, run 2 + BUILD_YEAR=$(echo "$1" | sed 's/^\([0-9]\{4\}\)[0-9]\{4\}\.[0-9]*$/\1/') + BUILD_MONTH=$(echo "$1" | sed 's/^[0-9]\{4\}\([0-9]\{2\}\)[0-9]\{2\}\.[0-9]*$/\1/') + BUILD_DAY=$(echo "$1" | sed 's/^[0-9]\{6\}\([0-9]\{2\}\)\.[0-9]*$/\1/') + BUILD_RUN=$(echo "$1" | sed 's/^[0-9]\{8\}\.\([0-9]*$\)/\1/') + + if [ ! -z "$BUILD_MONTH" -a $BUILD_MONTH -ge 1 -a $BUILD_MONTH -le 12 -a \ + ! -z "$BUILD_DAY" -a $BUILD_DAY -ge 1 -a $BUILD_DAY -le 31 -a \ + ! -z "$BUILD_RUN" -a $BUILD_RUN -ge 1 -a $BUILD_RUN -le 99 ] + then + let DayOfYear="($BUILD_MONTH - 1) * 31 + $BUILD_DAY" # estimate using max days/month + if [ $BUILD_RUN -lt 10 ]; then + BUILD_VERSION="${DayOfYear}0${BUILD_RUN}" + else + BUILD_VERSION="${DayOfYear}${BUILD_RUN}" + fi + else + >&2 echo "Ignored invalid argument: Build.BuildNumber $1" + fi +fi + +PRODUCT_VERSION="${MAJOR_VERSION}.${MINOR_VERSION}.${BUILD_VERSION}" +echo "Product version: $PRODUCT_VERSION" + +# Append Build.BuildId to version string. +if [ ! -z "$2" ]; then + BUILD_ID=$2 + DEV_VERSION="${PRODUCT_VERSION}-dev${BUILD_YEAR}.${BUILD_ID}" else - DEV_VERSION="0.0.0-dev" + DEV_VERSION="${PRODUCT_VERSION}-dev${BUILD_YEAR}" fi # If the build was triggered from a tag, use the tag as the version. Otherwise, set the version to dev. 
diff --git a/.azure/pipelines/sign-dll-exe.yaml b/.azure/pipelines/sign-dll-exe.yaml new file mode 100644 index 00000000..8e5388a2 --- /dev/null +++ b/.azure/pipelines/sign-dll-exe.yaml @@ -0,0 +1,60 @@ +parameters: + - name: displayName + type: string + default: 'Sign dlls' + - name: folderPath + type: string + - name: pattern + type: string + default: '*.dll' + - name: condition + type: boolean + default: true + +steps: +- task: EsrpCodeSigning@2 + displayName: ${{ parameters.displayName }} + inputs: + ConnectedServiceName: 'Speech SDK ESRP Signing Alternate' + FolderPath: '${{ parameters.folderPath }}' + Pattern: '${{ parameters.pattern }}' + signConfigType: inlineSignParams + inlineOperation: | + [ + { + "keyCode": "CP-230012", + "operationSetCode": "SigntoolSign", + "parameters": [ + { + "parameterName": "OpusName", + "parameterValue": "Microsoft" + }, + { + "parameterName": "OpusInfo", + "parameterValue": "http://www.microsoft.com" + }, + { + "parameterName": "PageHash", + "parameterValue": "/NPH" + }, + { + "parameterName": "FileDigest", + "parameterValue": "/fd sha256" + }, + { + "parameterName": "TimeStamp", + "parameterValue": "/tr \"http://rfc3161.gtm.corp.microsoft.com/TSS/HttpTspServer\" /td sha256" + } + ], + "toolName": "signtool.exe", + "toolVersion": "6.2.9304.0" + }, + { + "keyCode": "CP-230012", + "operationSetCode": "SigntoolVerify", + "parameters": [ ], + "toolName": "signtool.exe", + "toolVersion": "6.2.9304.0" + } + ] + condition: succeeded() diff --git a/scripts/Azure-AI-CLI-Bundle.wxs b/scripts/Azure-AI-CLI-Bundle.wxs new file mode 100644 index 00000000..36e3bfb0 --- /dev/null +++ b/scripts/Azure-AI-CLI-Bundle.wxs @@ -0,0 +1,79 @@ + + + + + + + + + + + + + + NOT WixBundleAction = 5 OR NOT AlreadyInstalledVersion >= v$(var.productVersion) + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/scripts/Azure-AI-CLI.wxs b/scripts/Azure-AI-CLI.wxs index 5282adc3..a8b9155c 100644 --- a/scripts/Azure-AI-CLI.wxs +++ 
b/scripts/Azure-AI-CLI.wxs @@ -1,7 +1,31 @@ - - + + + + + + + + @@ -11,28 +35,60 @@ - - + + + + + + + + - - - - - + + - NOT Installed - Installed + + NOT Installed OR REINSTALL OR UPGRADINGPRODUCTCODE + NOT Installed OR REINSTALL OR UPGRADINGPRODUCTCODE + Installed AND NOT REINSTALL AND NOT UPGRADINGPRODUCTCODE - + diff --git a/scripts/InstallCLI.cmd b/scripts/InstallCLI.cmd deleted file mode 100644 index f8908167..00000000 --- a/scripts/InstallCLI.cmd +++ /dev/null @@ -1,2 +0,0 @@ -@echo off -dotnet.exe tool install --global Azure.AI.CLI diff --git a/scripts/UninstallCLI.cmd b/scripts/UninstallCLI.cmd deleted file mode 100644 index 113d51cd..00000000 --- a/scripts/UninstallCLI.cmd +++ /dev/null @@ -1,2 +0,0 @@ -@echo off -dotnet.exe tool uninstall --global Azure.AI.CLI \ No newline at end of file diff --git a/scripts/WindowsInstaller.png b/scripts/WindowsInstaller.png new file mode 100644 index 0000000000000000000000000000000000000000..150a14096728fde49582457aa9bc29bba4dcda35 GIT binary patch literal 2795 zcmVVGd01AsqL_t(|+U=TcY*W`6$A9%-woRJIT=Q2=fU2sjTel8bmaqF+EiEnh{eGI8n*sg> zYy#R+nt*6Dir4Fcwh9t$Ue&q9O;Q~pE+u`X8I}_JKHaIyKJ_LRQ6a&8o@_?UJgC>J)hTq=uD*%KL zNuOUk0uV41if_KQ5Nw&SSHOeA2X6s3Nbu-+7}Vi2pO4FZy_5SRaDyQU2xdW^9h{c< zp>G1Z0lx+);MAdgAVfe_32E9livU{`D!s5rKsK-uI0-x?fjt8A>%eZ6_~3F4hAe|g z{^k&1nBc?jz{di93Iu`Uz+J%I=i=63vr4>n;tZHgAf`ZvZX^L#;IM$Zfn9(bSS!Jr z)1aH7DH!BvT^+>3+|&s1WIu5K{{7GT{eB}D430H-n@vA@^l0K*2R;Dq2WkLahuHxz zWT@`xrCW&*lcdBipE;O;bI4A2DihGt)ARb^;2=XoL&*!7&6v$*jvP6XxTgWX1C9dE z18q8dt0f-l&bGHRpy@Ci;w9O@9da|f%ahs!M59rjdFB~hE?3h1Lx&FWzylBP^2;wL zxxh!jT@tLg0!s$LVU>98lha@xwKbDY$*B@_6ClS@*9G2q;|)<>UQT&=dD8v&-g|FU z8f#ole1IYeo<9L$8-$`m{J!F|_~Azc&!n;9tEqSU$0;EX}1^*WT z+qZA`jGN3z(!H9d{h+_Ue@tH9+S#t;3~k-IRocCKci+mDD|1$_UOn!ZhaP%}0|yS^bUOLRp+f}v`;or1>7z%F zCJ{K{&6~{#Ax7(pH02uLa5zkrm6c%;~~)BKW5C^Z@AX5#aOrxVfN!yqPn{ zo!{TzPaqJOykbg{uvjcCTC@n@P9emJDYbw{AANLlSy|cEqN1W@gM))(2pBC^sYN;%;vGnkF?4nkw{X=yk={}Vqx*(#hf{F231wZBw$k(1_uWR*|B5CnENli 
z^wNKwH}d_N$}?0|RV9(5X&Nmpts@Z!r_=dA@29G&>Z{EIMwEB7FOOE^YpK!BojVai z(C>AkEOzjl1u~Z|ol98H(xocx?Q41Dkw?%pjc7E=`t|F_wFa-(i`(tSVzFScSW>GR z{IUz!?RMtOo<(EShTG^z!H#ByY11SumT1BqWZA%Kwc&I+i4F}R%QE%#^^8|sc|0C$ zHXG4sH1*11Y1p52!@z2_GI!Q2zPG%T%30rKwK#_EvVrAl9$_+>k}T+l8#kgwqZqLm z+g^N;hK7bQYZMg~F>~fj!r?GkSy?0cVMz%BV!BRiOAAFe-^|~S93iw|0j)cBU^+g; zG3ibU8k@;$=z_&Mt5)4fb@dkv3=AaZhx+>BF<#d>c<>-kKKUdYHf&(cnl*_rBS~3) zem-m0t_7G4blz|RI=Z?LKv6*fO>J$M&1Ou1Ng0AWz;s$iG7ALoL?!&8L`MjLra9>8 zxer;6AxRQ;hl8w~9F{CuLM#^J%{Sj<*REaH(jyPhpAs(MbU3iv?F>dD1pIzV?zjX0 zY!B|~@OckNGa#(EkQIZho?ecB5$_Gg&mj|46`U>?E|-frH{Ha*zyNF3tl{Fti(I^T zG1lGP{pOc+)(rIB5DgeF8cfU1#_e*^?(^aE`FQr(XX(s{G9x#@*$x>gA9`D}=?`bK zys?w_g41|#p@UqvMt3(TgM-YRGY8M~=`2{VfSQ^bo__ji4jec@I2=~HySulfOgVh| z^l7|aFF83meER990JL>{Umo6d8GWn{5TUs+PeY+D0IGGQhbwQjv&c*_=|LqixsP*YRG(xppjYipyaxtWfR z4$8~Rk!2Yn1l`@;sY8IS>l73e(A?ZiMn(oMmy3>$j(ENw3}Us}$elI~Aq0hmg-Ic9 zq;DXEKxi5kt2GhlbzP^jvXV$7LL?H2TTgyILI~R1+f#=C!!XFn$)Ta40k79fR#p}l zE?h|D@v5RQBPRy|OrJhIsdgV4PH+svz~OKZ4u_E>iC8Q~W@aX}wY4aUf+R`kx=v0` z4nhd*c6;g&V6j*@fBrnBrKQx@*Hc?tOKE9o!Ub|Y9va))kcJPfs%pZAM+zQEk~ndq z9JkvIz`Vjj!r^dY4WG})sZ*yCy@Sarp(llaSS-fvx8F`hMFqvh#S|44B?^+(VG%4E z7Qwc*wxokIoEqC~HiRgruKpc1eh!U>nO&S@8!G|G%gbZkx^;;;ot>RjR8*vn2-elr zv2fu+g25nNU0uwdKOcbUxw%Zs&ZeuckB>k4h=s+)WM^lSpPvsvW@ctW0}R8=%ozjN zw7K*LEWGvC29~YQLqR8x3`&>-z~hfUPIYxPSFT){R06K+O{1!+*zI<7T}M%rL`7yc zn~`N1e;|ObtE)B;2qdMsqw{_tATtweRwv-3wq2zr?59J7=^;XhA;LtdJX49HC_6); zkT)d=&@_!(Z@rbOsw%Rxv$^fI+qiV;5&*$)n5NcNwr}4KFfSMkqN*wuiv_paJ?zU8 zLR@X2cnTqZo)t``{NyjEkfRd446vVz9HI%E$&ATl(txhXF4`vY9$>4cX|LXQ-+h#n zlu%b!M^8@=D^{!+9qy`u1tS*^Km72$u~_V%&pr1XLI`X&A6Ba$O*2ti3e7B_P0QdH zHE`mX8MF$D3*7wt{$=c(`+cso)=NwQCZqv-_Uu8HW%ljchuv-`8jYgs`sfbMT|iS( zec*JSR1{_2!Gi~Dgb*xWKAT00rYF8vi2n;jivegXI{Y_mBF3Ng+ObZ{m`^JbQ}BX( zLIeZ?fllD#zP@YLAN9ol?2*9Uy?etzc=P7X9}6LfM56P$x(4!f9WGynCZ&xI-HOo$ zt{$*Pl#_B)Q59u~3l}cbOl9BVhTw@O9(z>?wvJpkG&B;4L=r!-1ar@xJ=;neX{3=x x8fm1FMjC0PkwzM6q>)A%X{3=x8fi=#{sV)oj6a<*5orJb002ovPDHLkV1lUwFPQ)U literal 0 HcmV?d00001 diff --git 
a/scripts/WixBuildInstaller.cmd b/scripts/WixBuildInstaller.cmd new file mode 100644 index 00000000..a151acad --- /dev/null +++ b/scripts/WixBuildInstaller.cmd @@ -0,0 +1,98 @@ +@echo off +setlocal + +REM Azure AI CLI package version (e.g. 1.0.0 or 1.0.0-preview-20231214.1) +if "%~1"=="" ( + echo Error: Azure AI CLI package version is not set. 1>&2 + exit /b 1 +) + +REM Azure AI CLI product version x.y.z (e.g. 1.0.0), ref. https://learn.microsoft.com/windows/win32/msi/productversion +REM This is the version shown in the list of installed programs. +REM If the package version changes but product version remains the same, +REM an existing installation cannot be upgraded without uninstalling it first! +if "%~2"=="" ( + echo Error: Azure AI CLI product version is not set. 1>&2 + exit /b 2 +) + +set PACKAGE_VERSION=%~1 +set PRODUCT_VERSION=%~2 +set TARGET_PLATFORM=x64 +set INSTALLER_FILE=Setup-%TARGET_PLATFORM%.exe +set PACKAGE_URL=https://csspeechstorage.blob.core.windows.net/drop/private/ai/Azure.AI.CLI.%PACKAGE_VERSION%.nupkg + +REM Dependencies +set AZURE_CLI_VERSION=2.57.0 +set AZURE_CLI_INSTALLER=azure-cli-%AZURE_CLI_VERSION%-%TARGET_PLATFORM%.msi +set AZURE_CLI_URL=https://azcliprod.blob.core.windows.net/msi/%AZURE_CLI_INSTALLER% +set DOTNET_VERSION=7.0.405 +set DOTNET_INSTALLER=dotnet-sdk-%DOTNET_VERSION%-win-%TARGET_PLATFORM%.exe +set DOTNET_URL=https://dotnetcli.azureedge.net/dotnet/Sdk/%DOTNET_VERSION%/%DOTNET_INSTALLER% + +REM Check for WiX toolset +where candle.exe >nul 2>&1 +if %ERRORLEVEL% neq 0 set PATH=%PATH%;C:\Program Files (x86)\WiX Toolset v3.11\bin;C:\Program Files (x86)\WiX Toolset v3.14\bin +where candle.exe >nul 2>&1 +if %ERRORLEVEL% neq 0 ( + echo Error: Install WiX v3.14 Toolset from https://wixtoolset.org/docs/v3/releases/v3-14-0-6526/ 1>&2 + exit /b 3 +) + +REM Check for curl.exe (https://techcommunity.microsoft.com/t5/containers/tar-and-curl-come-to-windows/ba-p/382409) +where curl.exe >nul 2>&1 +if %ERRORLEVEL% neq 0 ( + echo Error: 
curl.exe not found 1>&2 + exit /b 4 +) + +REM Download Azure CLI installer +curl.exe --output %AZURE_CLI_INSTALLER% --silent --url %AZURE_CLI_URL% +if %ERRORLEVEL% neq 0 ( + echo Error while downloading Azure CLI installer 1>&2 + exit /b 5 +) + +REM Download .NET SDK installer +curl.exe --output %DOTNET_INSTALLER% --silent --url %DOTNET_URL% +if %ERRORLEVEL% neq 0 ( + echo Error while downloading .NET SDK installer 1>&2 + exit /b 6 +) + +REM Build AI CLI installer .msi +candle.exe Azure-AI-CLI.wxs -dproductVersion=%PRODUCT_VERSION% -dpackageVersion=%PACKAGE_VERSION% -dpackageUrl=%PACKAGE_URL% -dtargetPlatform=%TARGET_PLATFORM% +if %ERRORLEVEL% neq 0 ( + set EXITCODE=%ERRORLEVEL% + echo Error from candle.exe [%EXITCODE%] 1>&2 + exit /b %EXITCODE% +) + +light.exe Azure-AI-CLI.wixobj -ext WixUIExtension -ext WixUtilExtension +if %ERRORLEVEL% neq 0 ( + set EXITCODE=%ERRORLEVEL% + echo Error from light.exe [%EXITCODE%] 1>&2 + exit /b %EXITCODE% +) + +REM Build installation bundle .exe +candle.exe Azure-AI-CLI-Bundle.wxs -ext WixBalExtension -ext WixUtilExtension ^ + -dproductVersion=%PRODUCT_VERSION% -dtargetPlatform=%TARGET_PLATFORM% ^ + -dazureCliVersion=%AZURE_CLI_VERSION% -dazureCliUrl=%AZURE_CLI_URL% ^ + -ddotNetVersion=%DOTNET_VERSION% -ddotNetUrl=%DOTNET_URL% +if %ERRORLEVEL% neq 0 ( + set EXITCODE=%ERRORLEVEL% + echo Error from candle.exe [%EXITCODE%] 1>&2 + exit /b %EXITCODE% +) + +light.exe Azure-AI-CLI-Bundle.wixobj -ext WixBalExtension -ext WixUtilExtension -out %INSTALLER_FILE% +if %ERRORLEVEL% neq 0 ( + set EXITCODE=%ERRORLEVEL% + echo Error from light.exe [%EXITCODE%] 1>&2 + exit /b %EXITCODE% +) + +:end +echo Built %INSTALLER_FILE% successfully! 
+endlocal diff --git a/scripts/WixRunInsignia.cmd b/scripts/WixRunInsignia.cmd new file mode 100644 index 00000000..d19aea9e --- /dev/null +++ b/scripts/WixRunInsignia.cmd @@ -0,0 +1,75 @@ +@echo off +setlocal + +REM Detach/attach the bundle bootstrap engine so that it can be properly signed together with the bundle exe. +REM ref. https://wixtoolset.org/docs/v3/overview/insignia/ + +if "%~1"=="" ( + echo Error: Action is not specified. 1>&2 + exit /b 1 +) + +if "%~2"=="" ( + echo Error: Target platform is not set. 1>&2 + exit /b 2 +) + +if "%~3"=="" ( + echo Error: Package version is not set. 1>&2 + exit /b 3 +) + +set ACTION=%~1 +set TARGET_PLATFORM=%~2 +set PACKAGE_VERSION=%~3 + +set UNSIGNED_BUNDLE=Setup-%TARGET_PLATFORM%.exe +set BUNDLE_ENGINE=engine-%TARGET_PLATFORM%.exe +set SIGNED_BUNDLE=Azure-AI-CLI-Setup-%PACKAGE_VERSION%-%TARGET_PLATFORM%.exe + +REM Check for WiX toolset. +where insignia.exe >nul 2>&1 +if %ERRORLEVEL% neq 0 set PATH=%PATH%;C:\Program Files (x86)\WiX Toolset v3.11\bin;C:\Program Files (x86)\WiX Toolset v3.14\bin +where insignia.exe >nul 2>&1 +if %ERRORLEVEL% neq 0 ( + echo Error: Install WiX v3.14 Toolset from https://wixtoolset.org/docs/v3/releases/v3-14-0-6526/ 1>&2 + exit /b 4 +) + +if not exist %UNSIGNED_BUNDLE% ( + echo Error: %UNSIGNED_BUNDLE% not found. 1>&2 + exit /b 5 +) + +REM Detach engine from the package bundle installer. +if "%ACTION%"=="detach" ( + insignia -ib %UNSIGNED_BUNDLE% -o %BUNDLE_ENGINE% + if %ERRORLEVEL% neq 0 ( + set EXITCODE=%ERRORLEVEL% + echo Error from insignia.exe while detaching engine [%EXITCODE%] 1>&2 + exit /b %EXITCODE% + ) + echo Detached %BUNDLE_ENGINE% from %UNSIGNED_BUNDLE% + goto end +) + +REM engine.exe is expected to be signed between these detach and attach actions. + +REM (Re)attach engine to the package bundle installer. +if "%ACTION%"=="attach" ( + if not exist %BUNDLE_ENGINE% ( + echo Error: %BUNDLE_ENGINE% not found. 
1>&2 + exit /b 6 + ) + insignia -ab %BUNDLE_ENGINE% %UNSIGNED_BUNDLE% -o %SIGNED_BUNDLE% + if %ERRORLEVEL% neq 0 ( + set EXITCODE=%ERRORLEVEL% + echo Error from insignia.exe while attaching engine [%EXITCODE%] 1>&2 + exit /b %EXITCODE% + ) + echo Attached %BUNDLE_ENGINE% to %SIGNED_BUNDLE% + goto end +) + +:end +endlocal From f569cedfee114efcb8dafc5a326624354c7a1d40 Mon Sep 17 00:00:00 2001 From: pankopon <55108151+pankopon@users.noreply.github.com> Date: Wed, 7 Feb 2024 16:45:41 -0800 Subject: [PATCH 28/30] Revert "Add Windows x64 installer (#165)" (#173) This reverts commit 5e12aaff4947b69f44046492475fd6f36c5d370a. Co-authored-by: Panu Koponen --- .azure/pipelines/build.yaml | 69 ++------------- .azure/pipelines/scripts/set-variables.sh | 46 +--------- .azure/pipelines/sign-dll-exe.yaml | 60 ------------- scripts/Azure-AI-CLI-Bundle.wxs | 79 ----------------- scripts/Azure-AI-CLI.wxs | 80 +++--------------- scripts/InstallCLI.cmd | 2 + scripts/UninstallCLI.cmd | 2 + scripts/WindowsInstaller.png | Bin 2795 -> 0 bytes scripts/WixBuildInstaller.cmd | 98 ---------------------- scripts/WixRunInsignia.cmd | 75 ----------------- 10 files changed, 24 insertions(+), 487 deletions(-) delete mode 100644 .azure/pipelines/sign-dll-exe.yaml delete mode 100644 scripts/Azure-AI-CLI-Bundle.wxs create mode 100644 scripts/InstallCLI.cmd create mode 100644 scripts/UninstallCLI.cmd delete mode 100644 scripts/WindowsInstaller.png delete mode 100644 scripts/WixBuildInstaller.cmd delete mode 100644 scripts/WixRunInsignia.cmd diff --git a/.azure/pipelines/build.yaml b/.azure/pipelines/build.yaml index b25d5182..6134df89 100644 --- a/.azure/pipelines/build.yaml +++ b/.azure/pipelines/build.yaml @@ -19,7 +19,7 @@ stages: name: Variables inputs: filePath: ./.azure/pipelines/scripts/set-variables.sh - arguments: '$(Build.BuildNumber) $(Build.BuildId)' + arguments: '0.0.0-dev2024.$(Build.BuildId)' displayName: 'Set up environment variables' - stage: BuildStage @@ -62,16 +62,15 @@ stages: 
folderPath: '$(Build.ArtifactStagingDirectory)' pattern: '$(AICLINuPkgFileName)' - task: Bash@3 - displayName: 'Create Linux installation script' + displayName: 'Create installation script' inputs: filePath: ./scripts/InstallAzureAICLIDeb-UpdateVersion.sh arguments: '$(AICLIVersion) $(Build.StagingDirectory)' - - task: PublishBuildArtifacts@1 + - task: PublishPipelineArtifact@1 displayName: Publish artifacts inputs: - pathToPublish: '$(Build.ArtifactStagingDirectory)' targetPath: '$(Build.StagingDirectory)' - artifactName: 'ai-cli-artifacts' + artifact: 'ai-cli-artifacts' - job: Publish dependsOn: [Build] @@ -94,19 +93,12 @@ stages: arguments: '$(Build.ArtifactStagingDirectory)/$(AICLINuPkgFileName) private/ai/$(AICLINuPkgFileName)' scriptPath: './.azure/pipelines/scripts/upload-file.sh' - task: AzureCLI@2 - displayName: Upload Linux installation script + displayName: Upload installation script inputs: azureSubscription: 'Carbon Dropper (CSSpeechStorage Drop)' scriptType: 'bash' arguments: '$(Build.ArtifactStagingDirectory)/InstallAzureAICLIDeb-$(AICLIVersion).sh private/ai/InstallAzureAICLIDeb-$(AICLIVersion).sh' scriptPath: './.azure/pipelines/scripts/upload-file.sh' - - task: AzureCLI@2 - displayName: Upload Windows installer - inputs: - azureSubscription: 'Carbon Dropper (CSSpeechStorage Drop)' - scriptType: 'bash' - arguments: '$(Build.ArtifactStagingDirectory)/Azure-AI-CLI-Setup-$(AICLIVersion)-x64.exe private/ai/Azure-AI-CLI-Setup-$(AICLIVersion)-x64.exe' - scriptPath: './.azure/pipelines/scripts/upload-file.sh' - task: GithubRelease@1 condition: and(succeeded(), eq(variables['IsRelease'], 'true')) displayName: Create GitHub release @@ -195,57 +187,6 @@ stages: docker tag acrbn.azurecr.io/azure-ai-cli:bookworm-$(AICLIVersion) acrbn.azurecr.io/azure-ai-cli:latest docker push acrbn.azurecr.io/azure-ai-cli:latest -- stage: BuildInstallerStage - dependsOn: SetupStage - pool: - name: SkymanWS2022Compliant - jobs: - - job: BuildWindowsInstaller - variables: - 
AICLIVersion: $[ stageDependencies.SetupStage.SetupJob.outputs['Variables.AICLIVersion']] - AICLISemVerVersion: $[ stageDependencies.SetupStage.SetupJob.outputs['Variables.AICLISemVerVersion']] - steps: - - script: call WixBuildInstaller.cmd $(AICLIVersion) $(AICLISemVerVersion) - workingDirectory: $(Build.SourcesDirectory)/scripts - failOnStderr: true - displayName: Build installation package bundle - - - script: call WixRunInsignia.cmd detach x64 $(AICLIVersion) - workingDirectory: $(Build.SourcesDirectory)/scripts - failOnStderr: true - displayName: Detach bundle bootstrap engine - - - template: sign-dll-exe.yaml - parameters: - displayName: Sign bundle bootstrap engine - folderPath: '$(Build.SourcesDirectory)/scripts' - pattern: 'engine-*.exe' - - - script: call WixRunInsignia.cmd attach x64 $(AICLIVersion) - workingDirectory: $(Build.SourcesDirectory)/scripts - failOnStderr: true - displayName: Re-attach bootstrap bundle engine - - - template: sign-dll-exe.yaml - parameters: - displayName: Sign installer - folderPath: '$(Build.SourcesDirectory)/scripts' - pattern: 'Azure-AI-CLI-Setup-*.exe' - - - task: CopyFiles@2 - inputs: - sourceFolder: '$(Build.SourcesDirectory)/scripts' - contents: 'Azure-AI-CLI-Setup-*.exe' - targetFolder: '$(Build.ArtifactStagingDirectory)' - displayName: Copy files - - - task: PublishBuildArtifacts@1 - inputs: - pathToPublish: '$(Build.ArtifactStagingDirectory)' - targetPath: '$(Build.StagingDirectory)' - artifactName: 'ai-cli-artifacts' - displayName: Publish artifacts - - stage: TestStage dependsOn: [SetupStage, BuildStage] condition: and(succeeded(), or(eq(variables['IsRelease'], 'true'), eq(variables['TestDevBuild'], 'true'))) diff --git a/.azure/pipelines/scripts/set-variables.sh b/.azure/pipelines/scripts/set-variables.sh index ebed002f..4d6ac2fc 100644 --- a/.azure/pipelines/scripts/set-variables.sh +++ b/.azure/pipelines/scripts/set-variables.sh @@ -1,56 +1,16 @@ #!/bin/bash define_variable () { - echo "$1=$2" echo 
"##vso[task.setvariable variable=$1;isOutput=true]$2" } echo "Source branch: $BUILD_SOURCEBRANCH" -# Determine the product version (major.minor.build). -# ref. https://learn.microsoft.com/windows/win32/msi/productversion -# NOTE: If the major or minor version is not updated before a new year, the version number becomes ambiguous -# and it may not be possible to upgrade an old version from the previous year without manual uninstallation. -# Example: -# - last build of year N: 1.0.36599 -# - first build of year N+1: 1.0.101 -> cannot update with this, must uninstall 1.0.36599 first. - -MAJOR_VERSION="1" -MINOR_VERSION="0" -BUILD_VERSION="0" - -# Parse Build.BuildNumber for build date and daily run # (max 99). +# if the user passed in a custom dev branch, use it if [ ! -z "$1" ]; then - # e.g. "20240120.2" -> build year 2024, month 01, day 20, run 2 - BUILD_YEAR=$(echo "$1" | sed 's/^\([0-9]\{4\}\)[0-9]\{4\}\.[0-9]*$/\1/') - BUILD_MONTH=$(echo "$1" | sed 's/^[0-9]\{4\}\([0-9]\{2\}\)[0-9]\{2\}\.[0-9]*$/\1/') - BUILD_DAY=$(echo "$1" | sed 's/^[0-9]\{6\}\([0-9]\{2\}\)\.[0-9]*$/\1/') - BUILD_RUN=$(echo "$1" | sed 's/^[0-9]\{8\}\.\([0-9]*$\)/\1/') - - if [ ! -z "$BUILD_MONTH" -a $BUILD_MONTH -ge 1 -a $BUILD_MONTH -le 12 -a \ - ! -z "$BUILD_DAY" -a $BUILD_DAY -ge 1 -a $BUILD_DAY -le 31 -a \ - ! -z "$BUILD_RUN" -a $BUILD_RUN -ge 1 -a $BUILD_RUN -le 99 ] - then - let DayOfYear="($BUILD_MONTH - 1) * 31 + $BUILD_DAY" # estimate using max days/month - if [ $BUILD_RUN -lt 10 ]; then - BUILD_VERSION="${DayOfYear}0${BUILD_RUN}" - else - BUILD_VERSION="${DayOfYear}${BUILD_RUN}" - fi - else - >&2 echo "Ignored invalid argument: Build.BuildNumber $1" - fi -fi - -PRODUCT_VERSION="${MAJOR_VERSION}.${MINOR_VERSION}.${BUILD_VERSION}" -echo "Product version: $PRODUCT_VERSION" - -# Append Build.BuildId to version string. -if [ ! 
-z "$2" ]; then - BUILD_ID=$2 - DEV_VERSION="${PRODUCT_VERSION}-dev${BUILD_YEAR}.${BUILD_ID}" + DEV_VERSION="$1" else - DEV_VERSION="${PRODUCT_VERSION}-dev${BUILD_YEAR}" + DEV_VERSION="0.0.0-dev" fi # If the build was triggered from a tag, use the tag as the version. Otherwise, set the version to dev. diff --git a/.azure/pipelines/sign-dll-exe.yaml b/.azure/pipelines/sign-dll-exe.yaml deleted file mode 100644 index 8e5388a2..00000000 --- a/.azure/pipelines/sign-dll-exe.yaml +++ /dev/null @@ -1,60 +0,0 @@ -parameters: - - name: displayName - type: string - default: 'Sign dlls' - - name: folderPath - type: string - - name: pattern - type: string - default: '*.dll' - - name: condition - type: boolean - default: true - -steps: -- task: EsrpCodeSigning@2 - displayName: ${{ parameters.displayName }} - inputs: - ConnectedServiceName: 'Speech SDK ESRP Signing Alternate' - FolderPath: '${{ parameters.folderPath }}' - Pattern: '${{ parameters.pattern }}' - signConfigType: inlineSignParams - inlineOperation: | - [ - { - "keyCode": "CP-230012", - "operationSetCode": "SigntoolSign", - "parameters": [ - { - "parameterName": "OpusName", - "parameterValue": "Microsoft" - }, - { - "parameterName": "OpusInfo", - "parameterValue": "http://www.microsoft.com" - }, - { - "parameterName": "PageHash", - "parameterValue": "/NPH" - }, - { - "parameterName": "FileDigest", - "parameterValue": "/fd sha256" - }, - { - "parameterName": "TimeStamp", - "parameterValue": "/tr \"http://rfc3161.gtm.corp.microsoft.com/TSS/HttpTspServer\" /td sha256" - } - ], - "toolName": "signtool.exe", - "toolVersion": "6.2.9304.0" - }, - { - "keyCode": "CP-230012", - "operationSetCode": "SigntoolVerify", - "parameters": [ ], - "toolName": "signtool.exe", - "toolVersion": "6.2.9304.0" - } - ] - condition: succeeded() diff --git a/scripts/Azure-AI-CLI-Bundle.wxs b/scripts/Azure-AI-CLI-Bundle.wxs deleted file mode 100644 index 36e3bfb0..00000000 --- a/scripts/Azure-AI-CLI-Bundle.wxs +++ /dev/null @@ -1,79 +0,0 @@ - - 
- - - - - - - - - - - - NOT WixBundleAction = 5 OR NOT AlreadyInstalledVersion >= v$(var.productVersion) - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/scripts/Azure-AI-CLI.wxs b/scripts/Azure-AI-CLI.wxs index a8b9155c..5282adc3 100644 --- a/scripts/Azure-AI-CLI.wxs +++ b/scripts/Azure-AI-CLI.wxs @@ -1,31 +1,7 @@ - - - - - - - - + + @@ -35,60 +11,28 @@ - - - - - - - - + + - - + + + + + - - NOT Installed OR REINSTALL OR UPGRADINGPRODUCTCODE - NOT Installed OR REINSTALL OR UPGRADINGPRODUCTCODE - Installed AND NOT REINSTALL AND NOT UPGRADINGPRODUCTCODE + NOT Installed + Installed - + diff --git a/scripts/InstallCLI.cmd b/scripts/InstallCLI.cmd new file mode 100644 index 00000000..f8908167 --- /dev/null +++ b/scripts/InstallCLI.cmd @@ -0,0 +1,2 @@ +@echo off +dotnet.exe tool install --global Azure.AI.CLI diff --git a/scripts/UninstallCLI.cmd b/scripts/UninstallCLI.cmd new file mode 100644 index 00000000..113d51cd --- /dev/null +++ b/scripts/UninstallCLI.cmd @@ -0,0 +1,2 @@ +@echo off +dotnet.exe tool uninstall --global Azure.AI.CLI \ No newline at end of file diff --git a/scripts/WindowsInstaller.png b/scripts/WindowsInstaller.png deleted file mode 100644 index 150a14096728fde49582457aa9bc29bba4dcda35..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 2795 zcmVVGd01AsqL_t(|+U=TcY*W`6$A9%-woRJIT=Q2=fU2sjTel8bmaqF+EiEnh{eGI8n*sg> zYy#R+nt*6Dir4Fcwh9t$Ue&q9O;Q~pE+u`X8I}_JKHaIyKJ_LRQ6a&8o@_?UJgC>J)hTq=uD*%KL zNuOUk0uV41if_KQ5Nw&SSHOeA2X6s3Nbu-+7}Vi2pO4FZy_5SRaDyQU2xdW^9h{c< zp>G1Z0lx+);MAdgAVfe_32E9livU{`D!s5rKsK-uI0-x?fjt8A>%eZ6_~3F4hAe|g z{^k&1nBc?jz{di93Iu`Uz+J%I=i=63vr4>n;tZHgAf`ZvZX^L#;IM$Zfn9(bSS!Jr z)1aH7DH!BvT^+>3+|&s1WIu5K{{7GT{eB}D430H-n@vA@^l0K*2R;Dq2WkLahuHxz zWT@`xrCW&*lcdBipE;O;bI4A2DihGt)ARb^;2=XoL&*!7&6v$*jvP6XxTgWX1C9dE z18q8dt0f-l&bGHRpy@Ci;w9O@9da|f%ahs!M59rjdFB~hE?3h1Lx&FWzylBP^2;wL zxxh!jT@tLg0!s$LVU>98lha@xwKbDY$*B@_6ClS@*9G2q;|)<>UQT&=dD8v&-g|FU 
z8f#ole1IYeo<9L$8-$`m{J!F|_~Azc&!n;9tEqSU$0;EX}1^*WT z+qZA`jGN3z(!H9d{h+_Ue@tH9+S#t;3~k-IRocCKci+mDD|1$_UOn!ZhaP%}0|yS^bUOLRp+f}v`;or1>7z%F zCJ{K{&6~{#Ax7(pH02uLa5zkrm6c%;~~)BKW5C^Z@AX5#aOrxVfN!yqPn{ zo!{TzPaqJOykbg{uvjcCTC@n@P9emJDYbw{AANLlSy|cEqN1W@gM))(2pBC^sYN;%;vGnkF?4nkw{X=yk={}Vqx*(#hf{F231wZBw$k(1_uWR*|B5CnENli z^wNKwH}d_N$}?0|RV9(5X&Nmpts@Z!r_=dA@29G&>Z{EIMwEB7FOOE^YpK!BojVai z(C>AkEOzjl1u~Z|ol98H(xocx?Q41Dkw?%pjc7E=`t|F_wFa-(i`(tSVzFScSW>GR z{IUz!?RMtOo<(EShTG^z!H#ByY11SumT1BqWZA%Kwc&I+i4F}R%QE%#^^8|sc|0C$ zHXG4sH1*11Y1p52!@z2_GI!Q2zPG%T%30rKwK#_EvVrAl9$_+>k}T+l8#kgwqZqLm z+g^N;hK7bQYZMg~F>~fj!r?GkSy?0cVMz%BV!BRiOAAFe-^|~S93iw|0j)cBU^+g; zG3ibU8k@;$=z_&Mt5)4fb@dkv3=AaZhx+>BF<#d>c<>-kKKUdYHf&(cnl*_rBS~3) zem-m0t_7G4blz|RI=Z?LKv6*fO>J$M&1Ou1Ng0AWz;s$iG7ALoL?!&8L`MjLra9>8 zxer;6AxRQ;hl8w~9F{CuLM#^J%{Sj<*REaH(jyPhpAs(MbU3iv?F>dD1pIzV?zjX0 zY!B|~@OckNGa#(EkQIZho?ecB5$_Gg&mj|46`U>?E|-frH{Ha*zyNF3tl{Fti(I^T zG1lGP{pOc+)(rIB5DgeF8cfU1#_e*^?(^aE`FQr(XX(s{G9x#@*$x>gA9`D}=?`bK zys?w_g41|#p@UqvMt3(TgM-YRGY8M~=`2{VfSQ^bo__ji4jec@I2=~HySulfOgVh| z^l7|aFF83meER990JL>{Umo6d8GWn{5TUs+PeY+D0IGGQhbwQjv&c*_=|LqixsP*YRG(xppjYipyaxtWfR z4$8~Rk!2Yn1l`@;sY8IS>l73e(A?ZiMn(oMmy3>$j(ENw3}Us}$elI~Aq0hmg-Ic9 zq;DXEKxi5kt2GhlbzP^jvXV$7LL?H2TTgyILI~R1+f#=C!!XFn$)Ta40k79fR#p}l zE?h|D@v5RQBPRy|OrJhIsdgV4PH+svz~OKZ4u_E>iC8Q~W@aX}wY4aUf+R`kx=v0` z4nhd*c6;g&V6j*@fBrnBrKQx@*Hc?tOKE9o!Ub|Y9va))kcJPfs%pZAM+zQEk~ndq z9JkvIz`Vjj!r^dY4WG})sZ*yCy@Sarp(llaSS-fvx8F`hMFqvh#S|44B?^+(VG%4E z7Qwc*wxokIoEqC~HiRgruKpc1eh!U>nO&S@8!G|G%gbZkx^;;;ot>RjR8*vn2-elr zv2fu+g25nNU0uwdKOcbUxw%Zs&ZeuckB>k4h=s+)WM^lSpPvsvW@ctW0}R8=%ozjN zw7K*LEWGvC29~YQLqR8x3`&>-z~hfUPIYxPSFT){R06K+O{1!+*zI<7T}M%rL`7yc zn~`N1e;|ObtE)B;2qdMsqw{_tATtweRwv-3wq2zr?59J7=^;XhA;LtdJX49HC_6); zkT)d=&@_!(Z@rbOsw%Rxv$^fI+qiV;5&*$)n5NcNwr}4KFfSMkqN*wuiv_paJ?zU8 zLR@X2cnTqZo)t``{NyjEkfRd446vVz9HI%E$&ATl(txhXF4`vY9$>4cX|LXQ-+h#n zlu%b!M^8@=D^{!+9qy`u1tS*^Km72$u~_V%&pr1XLI`X&A6Ba$O*2ti3e7B_P0QdH 
zHE`mX8MF$D3*7wt{$=c(`+cso)=NwQCZqv-_Uu8HW%ljchuv-`8jYgs`sfbMT|iS( zec*JSR1{_2!Gi~Dgb*xWKAT00rYF8vi2n;jivegXI{Y_mBF3Ng+ObZ{m`^JbQ}BX( zLIeZ?fllD#zP@YLAN9ol?2*9Uy?etzc=P7X9}6LfM56P$x(4!f9WGynCZ&xI-HOo$ zt{$*Pl#_B)Q59u~3l}cbOl9BVhTw@O9(z>?wvJpkG&B;4L=r!-1ar@xJ=;neX{3=x x8fm1FMjC0PkwzM6q>)A%X{3=x8fi=#{sV)oj6a<*5orJb002ovPDHLkV1lUwFPQ)U diff --git a/scripts/WixBuildInstaller.cmd b/scripts/WixBuildInstaller.cmd deleted file mode 100644 index a151acad..00000000 --- a/scripts/WixBuildInstaller.cmd +++ /dev/null @@ -1,98 +0,0 @@ -@echo off -setlocal - -REM Azure AI CLI package version (e.g. 1.0.0 or 1.0.0-preview-20231214.1) -if "%~1"=="" ( - echo Error: Azure AI CLI package version is not set. 1>&2 - exit /b 1 -) - -REM Azure AI CLI product version x.y.z (e.g. 1.0.0), ref. https://learn.microsoft.com/windows/win32/msi/productversion -REM This is the version shown in the list of installed programs. -REM If the package version changes but product version remains the same, -REM an existing installation cannot be upgraded without uninstalling it first! -if "%~2"=="" ( - echo Error: Azure AI CLI product version is not set. 
1>&2 - exit /b 2 -) - -set PACKAGE_VERSION=%~1 -set PRODUCT_VERSION=%~2 -set TARGET_PLATFORM=x64 -set INSTALLER_FILE=Setup-%TARGET_PLATFORM%.exe -set PACKAGE_URL=https://csspeechstorage.blob.core.windows.net/drop/private/ai/Azure.AI.CLI.%PACKAGE_VERSION%.nupkg - -REM Dependencies -set AZURE_CLI_VERSION=2.57.0 -set AZURE_CLI_INSTALLER=azure-cli-%AZURE_CLI_VERSION%-%TARGET_PLATFORM%.msi -set AZURE_CLI_URL=https://azcliprod.blob.core.windows.net/msi/%AZURE_CLI_INSTALLER% -set DOTNET_VERSION=7.0.405 -set DOTNET_INSTALLER=dotnet-sdk-%DOTNET_VERSION%-win-%TARGET_PLATFORM%.exe -set DOTNET_URL=https://dotnetcli.azureedge.net/dotnet/Sdk/%DOTNET_VERSION%/%DOTNET_INSTALLER% - -REM Check for WiX toolset -where candle.exe >nul 2>&1 -if %ERRORLEVEL% neq 0 set PATH=%PATH%;C:\Program Files (x86)\WiX Toolset v3.11\bin;C:\Program Files (x86)\WiX Toolset v3.14\bin -where candle.exe >nul 2>&1 -if %ERRORLEVEL% neq 0 ( - echo Error: Install WiX v3.14 Toolset from https://wixtoolset.org/docs/v3/releases/v3-14-0-6526/ 1>&2 - exit /b 3 -) - -REM Check for curl.exe (https://techcommunity.microsoft.com/t5/containers/tar-and-curl-come-to-windows/ba-p/382409) -where curl.exe >nul 2>&1 -if %ERRORLEVEL% neq 0 ( - echo Error: curl.exe not found 1>&2 - exit /b 4 -) - -REM Download Azure CLI installer -curl.exe --output %AZURE_CLI_INSTALLER% --silent --url %AZURE_CLI_URL% -if %ERRORLEVEL% neq 0 ( - echo Error while downloading Azure CLI installer 1>&2 - exit /b 5 -) - -REM Download .NET SDK installer -curl.exe --output %DOTNET_INSTALLER% --silent --url %DOTNET_URL% -if %ERRORLEVEL% neq 0 ( - echo Error while downloading .NET SDK installer 1>&2 - exit /b 6 -) - -REM Build AI CLI installer .msi -candle.exe Azure-AI-CLI.wxs -dproductVersion=%PRODUCT_VERSION% -dpackageVersion=%PACKAGE_VERSION% -dpackageUrl=%PACKAGE_URL% -dtargetPlatform=%TARGET_PLATFORM% -if %ERRORLEVEL% neq 0 ( - set EXITCODE=%ERRORLEVEL% - echo Error from candle.exe [%EXITCODE%] 1>&2 - exit /b %EXITCODE% -) - -light.exe 
Azure-AI-CLI.wixobj -ext WixUIExtension -ext WixUtilExtension -if %ERRORLEVEL% neq 0 ( - set EXITCODE=%ERRORLEVEL% - echo Error from light.exe [%EXITCODE%] 1>&2 - exit /b %EXITCODE% -) - -REM Build installation bundle .exe -candle.exe Azure-AI-CLI-Bundle.wxs -ext WixBalExtension -ext WixUtilExtension ^ - -dproductVersion=%PRODUCT_VERSION% -dtargetPlatform=%TARGET_PLATFORM% ^ - -dazureCliVersion=%AZURE_CLI_VERSION% -dazureCliUrl=%AZURE_CLI_URL% ^ - -ddotNetVersion=%DOTNET_VERSION% -ddotNetUrl=%DOTNET_URL% -if %ERRORLEVEL% neq 0 ( - set EXITCODE=%ERRORLEVEL% - echo Error from candle.exe [%EXITCODE%] 1>&2 - exit /b %EXITCODE% -) - -light.exe Azure-AI-CLI-Bundle.wixobj -ext WixBalExtension -ext WixUtilExtension -out %INSTALLER_FILE% -if %ERRORLEVEL% neq 0 ( - set EXITCODE=%ERRORLEVEL% - echo Error from light.exe [%EXITCODE%] 1>&2 - exit /b %EXITCODE% -) - -:end -echo Built %INSTALLER_FILE% successfully! -endlocal diff --git a/scripts/WixRunInsignia.cmd b/scripts/WixRunInsignia.cmd deleted file mode 100644 index d19aea9e..00000000 --- a/scripts/WixRunInsignia.cmd +++ /dev/null @@ -1,75 +0,0 @@ -@echo off -setlocal - -REM Detach/attach the bundle bootstrap engine so that it can be properly signed together with the bundle exe. -REM ref. https://wixtoolset.org/docs/v3/overview/insignia/ - -if "%~1"=="" ( - echo Error: Action is not specified. 1>&2 - exit /b 1 -) - -if "%~2"=="" ( - echo Error: Target platform is not set. 1>&2 - exit /b 2 -) - -if "%~3"=="" ( - echo Error: Package version is not set. 1>&2 - exit /b 3 -) - -set ACTION=%~1 -set TARGET_PLATFORM=%~2 -set PACKAGE_VERSION=%~3 - -set UNSIGNED_BUNDLE=Setup-%TARGET_PLATFORM%.exe -set BUNDLE_ENGINE=engine-%TARGET_PLATFORM%.exe -set SIGNED_BUNDLE=Azure-AI-CLI-Setup-%PACKAGE_VERSION%-%TARGET_PLATFORM%.exe - -REM Check for WiX toolset. 
-where insignia.exe >nul 2>&1 -if %ERRORLEVEL% neq 0 set PATH=%PATH%;C:\Program Files (x86)\WiX Toolset v3.11\bin;C:\Program Files (x86)\WiX Toolset v3.14\bin -where insignia.exe >nul 2>&1 -if %ERRORLEVEL% neq 0 ( - echo Error: Install WiX v3.14 Toolset from https://wixtoolset.org/docs/v3/releases/v3-14-0-6526/ 1>&2 - exit /b 4 -) - -if not exist %UNSIGNED_BUNDLE% ( - echo Error: %UNSIGNED_BUNDLE% not found. 1>&2 - exit /b 5 -) - -REM Detach engine from the package bundle installer. -if "%ACTION%"=="detach" ( - insignia -ib %UNSIGNED_BUNDLE% -o %BUNDLE_ENGINE% - if %ERRORLEVEL% neq 0 ( - set EXITCODE=%ERRORLEVEL% - echo Error from insignia.exe while detaching engine [%EXITCODE%] 1>&2 - exit /b %EXITCODE% - ) - echo Detached %BUNDLE_ENGINE% from %UNSIGNED_BUNDLE% - goto end -) - -REM engine.exe is expected to be signed between these detach and attach actions. - -REM (Re)attach engine to the package bundle installer. -if "%ACTION%"=="attach" ( - if not exist %BUNDLE_ENGINE% ( - echo Error: %BUNDLE_ENGINE% not found. 
1>&2 - exit /b 6 - ) - insignia -ab %BUNDLE_ENGINE% %UNSIGNED_BUNDLE% -o %SIGNED_BUNDLE% - if %ERRORLEVEL% neq 0 ( - set EXITCODE=%ERRORLEVEL% - echo Error from insignia.exe while attaching engine [%EXITCODE%] 1>&2 - exit /b %EXITCODE% - ) - echo Attached %BUNDLE_ENGINE% to %SIGNED_BUNDLE% - goto end -) - -:end -endlocal From 39691ada0b382f1c8a42bb3522d08439119c0d6f Mon Sep 17 00:00:00 2001 From: Rob Chambers Date: Wed, 7 Feb 2024 16:49:01 -0800 Subject: [PATCH 29/30] updating to dotnet 8.0 (#170) * updating to dotnet 8.0 * update install script * better message, set pip install in template testing to skip by default * revert dev shell --- .azure/pipelines/build.yaml | 8 +-- .github/workflows/build-package.yaml | 2 +- .gitignore | 1 + ideas/azure-ai-cli-installation-spec.md | 2 +- .../HelperFunctionsProject.csproj | 4 +- scripts/InstallAzureAICLIDeb.sh | 21 ++++--- .../HelperFunctionsProject.csproj._ | 2 +- .../OpenAIChatCompletions.csproj._ | 2 +- .../OpenAIChatCompletionsStreaming.csproj._ | 2 +- ...IChatCompletionsWithDataStreaming.csproj._ | 2 +- ...ChatCompletionsFunctionsStreaming.csproj._ | 2 +- src/ai/ai-cli.csproj | 17 +++++- src/ai/commands/dev_command.cs | 56 +++++++++++++++++-- src/ai/linker.xml | 8 +++ src/common/common.csproj | 2 +- .../FileHelperFunctions.cs | 15 +++-- .../helper_functions_extension.csproj | 2 +- .../template_extension/TemplateFactory.cs | 2 +- .../template_extension.csproj | 2 +- .../test_helper_functions_extension.csproj | 2 +- tests/test3.yaml | 41 ++++++++++++-- tests/testadapter/README.md | 24 ++++---- tests/testadapter/YamlTestAdapter.csproj | 2 +- .../testadapter/YamlTestAdapterCommon.targets | 2 +- tests/testframework/YamlTestFramework.csproj | 2 +- .../YamlTestFrameworkCommon.targets | 2 +- tests/testrunner/YamlTestRunner.csproj | 2 +- tests/testrunner/YamlTestRunnerCommon.targets | 2 +- 28 files changed, 167 insertions(+), 64 deletions(-) create mode 100644 src/ai/linker.xml diff --git a/.azure/pipelines/build.yaml 
b/.azure/pipelines/build.yaml index 6134df89..a629f4df 100644 --- a/.azure/pipelines/build.yaml +++ b/.azure/pipelines/build.yaml @@ -241,7 +241,7 @@ stages: includeNuGetOrg: false command: custom custom: tool - version: '7.0.x' + version: '8.0.x' arguments: install --ignore-failed-sources --add-source "$(System.DefaultWorkingDirectory)" @@ -274,7 +274,7 @@ stages: inputs: includeNuGetOrg: false command: build - version: '7.0.x' + version: '8.0.x' projects: '**/testadapter/YamlTestAdapter.csproj' arguments: -c $(BuildConfiguration) @@ -295,8 +295,8 @@ stages: az --version az account show cd $(TestResultsPath) - echo dotnet test --logger trx --results-directory "$(Agent.TempDirectory)" --logger:"trx;LogFileName=$(TestRunTrxFileName)" --logger:"console;verbosity=normal" --filter "$(TestFilter)" "$(LocalBinOutputPath)/$(BuildConfiguration)/net7.0/Azure.AI.CLI.TestAdapter.dll" - dotnet test --logger trx --results-directory "$(Agent.TempDirectory)" --logger:"trx;LogFileName=$(TestRunTrxFileName)" --logger:"console;verbosity=normal" --filter "$(TestFilter)" "$(LocalBinOutputPath)/$(BuildConfiguration)/net7.0/Azure.AI.CLI.TestAdapter.dll" + echo dotnet test --logger trx --results-directory "$(Agent.TempDirectory)" --logger:"trx;LogFileName=$(TestRunTrxFileName)" --logger:"console;verbosity=normal" --filter "$(TestFilter)" "$(LocalBinOutputPath)/$(BuildConfiguration)/net8.0/Azure.AI.CLI.TestAdapter.dll" + dotnet test --logger trx --results-directory "$(Agent.TempDirectory)" --logger:"trx;LogFileName=$(TestRunTrxFileName)" --logger:"console;verbosity=normal" --filter "$(TestFilter)" "$(LocalBinOutputPath)/$(BuildConfiguration)/net8.0/Azure.AI.CLI.TestAdapter.dll" # ----------------------------------------------------------------------------- # Archive and publish the test run backup artifact diff --git a/.github/workflows/build-package.yaml b/.github/workflows/build-package.yaml index 0cd920ac..d0feb462 100644 --- a/.github/workflows/build-package.yaml +++ 
b/.github/workflows/build-package.yaml @@ -18,7 +18,7 @@ jobs: - name: Set up .NET Core uses: actions/setup-dotnet@v2 with: - dotnet-version: '7.0.x' # Set the desired .NET version + dotnet-version: '8.0.x' # Set the desired .NET version - name: Set up environment variables run: | diff --git a/.gitignore b/.gitignore index 6818c46c..4b125cb9 100644 --- a/.gitignore +++ b/.gitignore @@ -3,6 +3,7 @@ **/.vscode/** **/bin/*/net6.0/* **/bin/*/net7.0/* +**/bin/*/net8.0/* **/obj/* ideas/website/node_modules/** testresults/** diff --git a/ideas/azure-ai-cli-installation-spec.md b/ideas/azure-ai-cli-installation-spec.md index 32090e24..bbbef258 100644 --- a/ideas/azure-ai-cli-installation-spec.md +++ b/ideas/azure-ai-cli-installation-spec.md @@ -12,7 +12,7 @@ Customer Requirements: - Support Debian 10, 11, and 12 - Support Ubunutu 20.04 and 22.04 - Check and install Azure CLI if not present -- Check and install dotnet 7.0 if not present +- Check and install dotnet 8.0 if not present - Check and install Python azure.ai.generative SDK if not present - Update user's shell rc file (e.g. `$HOME/.bashrc` and/or `$HOME/.zshrc`) diff --git a/ideas/cool-helper-function-scenarios/HelperFunctionsProject.csproj b/ideas/cool-helper-function-scenarios/HelperFunctionsProject.csproj index 36bc2604..faf7eac5 100644 --- a/ideas/cool-helper-function-scenarios/HelperFunctionsProject.csproj +++ b/ideas/cool-helper-function-scenarios/HelperFunctionsProject.csproj @@ -1,11 +1,11 @@ - net7.0 + net8.0 enable enable true - D:\src\ai-cli\src\ai\bin\Debug\net7.0 + D:\src\ai-cli\src\ai\bin\Debug\net8.0 diff --git a/scripts/InstallAzureAICLIDeb.sh b/scripts/InstallAzureAICLIDeb.sh index a8eb0104..f948c04e 100644 --- a/scripts/InstallAzureAICLIDeb.sh +++ b/scripts/InstallAzureAICLIDeb.sh @@ -32,7 +32,7 @@ if ! command -v az &> /dev/null; then fi fi -# Check if dotnet 7.0 is installed +# Check if dotnet 8.0 is installed if ! command -v dotnet &> /dev/null; then echo "dotnet is not installed." 
dotnet_version=0 @@ -40,11 +40,11 @@ else dotnet_version=$(dotnet --version | cut -d. -f1) fi -if [ "$dotnet_version" -eq "7" ]; then +if [ "$dotnet_version" -eq "8" ]; then dotnet_version=$(dotnet --version) echo "dotnet $dotnet_version is already installed." else - echo "Installing dotnet 7.0..." + echo "Installing dotnet 8.0..." # Update the package list sudo apt-get update @@ -60,11 +60,10 @@ else sudo dpkg -i packages-microsoft-prod.deb rm packages-microsoft-prod.deb elif [[ "$CHECK_VERSION" == "22.04" ]]; then - # We don't need to install the Microsoft package signing key for Ubuntu 22.04; in fact, if we do, `dotnet tool` doesn't work - echo "Ubuntu 22.04 detected. Skipping Microsoft package signing key installation." - # wget https://packages.microsoft.com/config/ubuntu/22.04/packages-microsoft-prod.deb -O packages-microsoft-prod.deb - # sudo dpkg -i packages-microsoft-prod.deb - # rm packages-microsoft-prod.deb + # Install the Microsoft package signing key for Ubuntu 20.04 + wget https://packages.microsoft.com/config/ubuntu/22.04/packages-microsoft-prod.deb -O packages-microsoft-prod.deb + sudo dpkg -i packages-microsoft-prod.deb + rm packages-microsoft-prod.deb else echo "Unsupported Ubuntu version: $CHECK_VERSION" exit 1 @@ -96,13 +95,13 @@ else exit 1 fi - # Install dotnet 7.0 runtime + # Install dotnet 8.0 runtime sudo apt-get update - sudo apt-get install -y dotnet-sdk-7.0 + sudo apt-get install -y dotnet-sdk-8.0 # Check if the installation was successful if [ $? -ne 0 ]; then - echo "Failed to install Dotnet 7.0." + echo "Failed to install Dotnet 8.0." 
exit 1 fi fi diff --git a/src/ai/.x/templates/helper_functions/HelperFunctionsProject.csproj._ b/src/ai/.x/templates/helper_functions/HelperFunctionsProject.csproj._ index 6c9cb517..3663e601 100644 --- a/src/ai/.x/templates/helper_functions/HelperFunctionsProject.csproj._ +++ b/src/ai/.x/templates/helper_functions/HelperFunctionsProject.csproj._ @@ -4,7 +4,7 @@ - net7.0 + net8.0 enable enable true diff --git a/src/ai/.x/templates/openai-chat-cs/OpenAIChatCompletions.csproj._ b/src/ai/.x/templates/openai-chat-cs/OpenAIChatCompletions.csproj._ index 4677a2e0..6ee2c11d 100644 --- a/src/ai/.x/templates/openai-chat-cs/OpenAIChatCompletions.csproj._ +++ b/src/ai/.x/templates/openai-chat-cs/OpenAIChatCompletions.csproj._ @@ -4,7 +4,7 @@ - net7.0 + net8.0 enable enable true diff --git a/src/ai/.x/templates/openai-chat-streaming-cs/OpenAIChatCompletionsStreaming.csproj._ b/src/ai/.x/templates/openai-chat-streaming-cs/OpenAIChatCompletionsStreaming.csproj._ index 4677a2e0..6ee2c11d 100644 --- a/src/ai/.x/templates/openai-chat-streaming-cs/OpenAIChatCompletionsStreaming.csproj._ +++ b/src/ai/.x/templates/openai-chat-streaming-cs/OpenAIChatCompletionsStreaming.csproj._ @@ -4,7 +4,7 @@ - net7.0 + net8.0 enable enable true diff --git a/src/ai/.x/templates/openai-chat-streaming-with-data-cs/OpenAIChatCompletionsWithDataStreaming.csproj._ b/src/ai/.x/templates/openai-chat-streaming-with-data-cs/OpenAIChatCompletionsWithDataStreaming.csproj._ index 4677a2e0..6ee2c11d 100644 --- a/src/ai/.x/templates/openai-chat-streaming-with-data-cs/OpenAIChatCompletionsWithDataStreaming.csproj._ +++ b/src/ai/.x/templates/openai-chat-streaming-with-data-cs/OpenAIChatCompletionsWithDataStreaming.csproj._ @@ -4,7 +4,7 @@ - net7.0 + net8.0 enable enable true diff --git a/src/ai/.x/templates/openai-chat-streaming-with-functions-cs/OpenAIChatCompletionsFunctionsStreaming.csproj._ b/src/ai/.x/templates/openai-chat-streaming-with-functions-cs/OpenAIChatCompletionsFunctionsStreaming.csproj._ index 
06e19a27..565629b9 100644 --- a/src/ai/.x/templates/openai-chat-streaming-with-functions-cs/OpenAIChatCompletionsFunctionsStreaming.csproj._ +++ b/src/ai/.x/templates/openai-chat-streaming-with-functions-cs/OpenAIChatCompletionsFunctionsStreaming.csproj._ @@ -4,7 +4,7 @@ - net7.0 + net8.0 enable enable true diff --git a/src/ai/ai-cli.csproj b/src/ai/ai-cli.csproj index 030c0714..fd4bdd5a 100644 --- a/src/ai/ai-cli.csproj +++ b/src/ai/ai-cli.csproj @@ -1,9 +1,8 @@  - net7.0 + net8.0 ai - false Exe win-x64;linux-x64 Debug;Release @@ -27,6 +26,20 @@ true + + false + + + + + true + true + true + true + true + linker.xml + + $(LocalBuildSDKBinPath) bin diff --git a/src/ai/commands/dev_command.cs b/src/ai/commands/dev_command.cs index 114a70fd..cda027cb 100644 --- a/src/ai/commands/dev_command.cs +++ b/src/ai/commands/dev_command.cs @@ -117,11 +117,24 @@ private void DoDevShell() Console.WriteLine(); var runCommand = RunCommandScriptToken.Data().GetOrDefault(_values); - var processOutput = string.IsNullOrEmpty(runCommand) - ? ProcessHelpers.RunShellCommandAsync(fileName, arguments, env, null, null, null, false).Result - : ProcessHelpers.RunShellCommandAsync(runCommand, env, null, null, null, false).Result; - var exitCode = processOutput.ExitCode; + // var processOutput = string.IsNullOrEmpty(runCommand) + // ? 
ProcessHelpers.RunShellCommandAsync(fileName, arguments, env, null, null, null, false).Result + // : ProcessHelpers.RunShellCommandAsync(runCommand, env, null, null, null, false).Result; + + // var exitCode = processOutput.ExitCode; + + UpdateFileNameArguments(runCommand, ref fileName, ref arguments, out var deleteWhenDone); + + var process = ProcessHelpers.StartProcess(fileName, arguments, env, false); + process.WaitForExit(); + + if (!string.IsNullOrEmpty(deleteWhenDone)) + { + File.Delete(deleteWhenDone); + } + + var exitCode = process.ExitCode; if (exitCode != 0) { Console.WriteLine("\n(ai dev shell) FAILED!\n"); @@ -133,6 +146,41 @@ private void DoDevShell() } } + private static void UpdateFileNameArguments(string runCommand, ref string fileName, ref string arguments, out string? deleteTempFileWhenDone) + { + deleteTempFileWhenDone = null; + + if (!string.IsNullOrEmpty(runCommand)) + { + var isSingleLine = !runCommand.Contains('\n') && !runCommand.Contains('\r'); + if (isSingleLine) + { + var parts = runCommand.Split(new char[] { ' ' }, 2); + var inPath = FileHelpers.FileExistsInOsPath(parts[0]) || (OS.IsWindows() && FileHelpers.FileExistsInOsPath(parts[0] + ".exe")); + + var filePart = parts[0]; + var argsPart = parts.Length == 2 ? parts[1] : null; + + fileName = inPath ? filePart : fileName; + arguments = inPath ? argsPart : (OS.IsLinux() + ? $"-lic \"{runCommand}\"" + : $"/c \"{runCommand}\""); + + Console.WriteLine($"Running command: {runCommand}\n"); + } + else + { + deleteTempFileWhenDone = Path.GetTempFileName() + (OS.IsWindows() ? ".cmd" : ".sh"); + File.WriteAllText(deleteTempFileWhenDone, runCommand); + + fileName = OS.IsLinux() ? "bash" : "cmd.exe"; + arguments = OS.IsLinux() ? 
$"-lic \"{deleteTempFileWhenDone}\"" : $"/c \"{deleteTempFileWhenDone}\""; + + Console.WriteLine($"Running script:\n\n{runCommand}\n"); + } + } + } + private void DisplayBanner(string which) { if (_quiet) return; diff --git a/src/ai/linker.xml b/src/ai/linker.xml new file mode 100644 index 00000000..45f2eaed --- /dev/null +++ b/src/ai/linker.xml @@ -0,0 +1,8 @@ + + + + + + + + \ No newline at end of file diff --git a/src/common/common.csproj b/src/common/common.csproj index 88483ccb..0e03114e 100644 --- a/src/common/common.csproj +++ b/src/common/common.csproj @@ -1,7 +1,7 @@ - net7.0 + net8.0 enable Azure.AI.CLI.Common diff --git a/src/extensions/helper_functions_extension/FileHelperFunctions.cs b/src/extensions/helper_functions_extension/FileHelperFunctions.cs index d19f5ec8..da22af35 100644 --- a/src/extensions/helper_functions_extension/FileHelperFunctions.cs +++ b/src/extensions/helper_functions_extension/FileHelperFunctions.cs @@ -42,15 +42,20 @@ public static bool DirectoryCreate(string directoryName) return true; } + [HelperFunctionDescription("List files; lists all files regardless of name; only in current directory")] + public static string FindAllFilesInCurrentDirectory() + { + return FindAllFilesMatchingPattern("*"); + } - [HelperFunctionDescription("List files; lists all files regardless of name")] - public static string FindAllFiles() + [HelperFunctionDescription("List files; lists all files regardless of name; searches current directory and all sub-directories")] + public static string FindAllFilesRecursively() { - return FindFilesMatchingPattern("**/*"); + return FindAllFilesMatchingPattern("**/*"); } - [HelperFunctionDescription("List files; lists files matching pattern")] - public static string FindFilesMatchingPattern([HelperFunctionParameterDescription("The pattern to search for; use '**/*.ext' to search sub-directories")] string pattern) + [HelperFunctionDescription("List files; lists all files matching pattern; searches current directory, and 
if pattern includes '**', all sub-directories")] + public static string FindAllFilesMatchingPattern([HelperFunctionParameterDescription("The pattern to search for; use '**/*.ext' to search sub-directories")] string pattern) { var files = FileHelpers.FindFiles(".", pattern); return string.Join("\n", files); diff --git a/src/extensions/helper_functions_extension/helper_functions_extension.csproj b/src/extensions/helper_functions_extension/helper_functions_extension.csproj index 3761f3cf..f7bb9259 100644 --- a/src/extensions/helper_functions_extension/helper_functions_extension.csproj +++ b/src/extensions/helper_functions_extension/helper_functions_extension.csproj @@ -2,7 +2,7 @@ Azure.AI.CLI.Extensions.HelperFunctions - net7.0 + net8.0 Azure.AI.Details.Common.CLI.Extensions.HelperFunctions enable enable diff --git a/src/extensions/template_extension/TemplateFactory.cs b/src/extensions/template_extension/TemplateFactory.cs index 29f31d0d..60cbee84 100644 --- a/src/extensions/template_extension/TemplateFactory.cs +++ b/src/extensions/template_extension/TemplateFactory.cs @@ -36,7 +36,7 @@ public static bool ListTemplates(string? templateFilter, string? 
languageFilter) var groups = GetFilteredTemplateGroups(templateFilter, languageFilter); if (groups.Count == 0) { - Console.WriteLine($"No matching templates found\n"); + ConsoleHelpers.WriteLineError($"No matching templates found\n"); groups = GetTemplateGroups(); } diff --git a/src/extensions/template_extension/template_extension.csproj b/src/extensions/template_extension/template_extension.csproj index 6c522271..8d58fd94 100644 --- a/src/extensions/template_extension/template_extension.csproj +++ b/src/extensions/template_extension/template_extension.csproj @@ -2,7 +2,7 @@ Azure.AI.CLI.Extensions.Templates - net7.0 + net8.0 Azure.AI.Details.Common.CLI.Extensions.Templates enable enable diff --git a/src/extensions/test_helper_functions_extension/test_helper_functions_extension.csproj b/src/extensions/test_helper_functions_extension/test_helper_functions_extension.csproj index 80004a38..e2bf0920 100644 --- a/src/extensions/test_helper_functions_extension/test_helper_functions_extension.csproj +++ b/src/extensions/test_helper_functions_extension/test_helper_functions_extension.csproj @@ -2,7 +2,7 @@ Azure.AI.CLI.Extensions.HelperFunctions.Test - net7.0 + net8.0 Azure.AI.Details.Common.CLI.Extensions.HelperFunctions.Test enable enable diff --git a/tests/test3.yaml b/tests/test3.yaml index dd402492..842cadc0 100644 --- a/tests/test3.yaml +++ b/tests/test3.yaml @@ -21,23 +21,47 @@ command: ai chat --question "Why is the sky blue, what's it called" --index-name @none expect: Rayleigh +- name: test ai chat built in functions + command: ai chat --interactive --built-in-functions + input: | + Create a file named "test.txt" with the following content: "Hello, World!" + What files are in the current directory? + Show me what's in the file "test.txt" + expect: | + assistant-function: CreateFileAndSaveText + assistant-function: FindAllFilesInCurrentDirectory + test.txt + Hello, World! 
+ - name: dev new environment command: ai dev new .env - class: dev new helper-functions steps: - name: generate template - command: ai dev new helper-functions + command: ai dev new helper-functions --instructions "Create a helper function named GetPersonsAge that returns ages of people; John is 55; Jane is 53; everyone else, return unknown" - name: build template bash: | cd helper-functions dotnet build - name: run template - command: ai chat --interactive --helper-functions helper-functions/bin/Debug/net7.0/HelperFunctionsProject.dll + command: ai chat --interactive --helper-functions helper-functions/bin/Debug/net8.0/HelperFunctionsProject.dll input: | What is my name? + How old is John? + How old is Jane? + How old is Bob? expect: | assistant-function: GetUsersName\({}\) = + assistant-function: GetPersonsAge\({ + John + }\) = + 55 + assistant-function: GetPersonsAge\({ + Jane + }\) = + 53 + [Uu]nknown - area: ai dev new openai-chat tests: @@ -51,7 +75,7 @@ cd openai-chat-cs dotnet build - name: run template - command: ai dev shell --run "openai-chat-cs\bin\Debug\net7.0\OpenAIChatCompletions" + command: ai dev shell --run "openai-chat-cs\bin\Debug\net8.0\OpenAIChatCompletions" input: |- Tell me a joke Tell me another joke @@ -117,6 +141,7 @@ bash: | cd openai-chat-py pip install -r requirements.txt + tag: skip - name: run template command: ai dev shell --run "cd openai-chat-py && python openai_chat_completions.py" input: |- @@ -136,7 +161,7 @@ cd openai-chat-streaming-cs dotnet build - name: run template - command: ai dev shell --run "openai-chat-streaming-cs\bin\Debug\net7.0\OpenAIChatCompletionsStreaming" + command: ai dev shell --run "openai-chat-streaming-cs\bin\Debug\net8.0\OpenAIChatCompletionsStreaming" input: |- Tell me a joke Tell me another joke @@ -202,6 +227,7 @@ bash: | cd openai-chat-streaming-py pip install -r requirements.txt + tag: skip - name: run template command: ai dev shell --run "cd openai-chat-streaming-py && python main.py" input: |- @@ 
-221,7 +247,7 @@ cd openai-chat-streaming-with-data-cs dotnet build - name: run template - command: ai dev shell --run "openai-chat-streaming-with-data-cs\bin\Debug\net7.0\OpenAIChatCompletionsWithDataStreaming" + command: ai dev shell --run "openai-chat-streaming-with-data-cs\bin\Debug\net8.0\OpenAIChatCompletionsWithDataStreaming" input: |- What parameter should i use to select my resources? tag: skip @@ -248,11 +274,13 @@ bash: | cd openai-chat-streaming-with-data-py pip install -r requirements.txt + tag: skip - name: run template command: ai dev shell --run "cd openai-chat-streaming-with-data-py && python main.py" input: |- What parameter should i use to select my resources? tag: skip + - class: dev new openai-chat-streaming-with-data (go) steps: - name: generate template @@ -279,7 +307,7 @@ cd openai-chat-streaming-with-functions-cs dotnet build - name: run template - command: ai dev shell --run "cd openai-chat-streaming-with-functions-cs && bin\Debug\net7.0\OpenAIChatCompletionsFunctionsStreaming" + command: ai dev shell --run "cd openai-chat-streaming-with-functions-cs && bin\Debug\net8.0\OpenAIChatCompletionsFunctionsStreaming" input: |- What is the date? What is the time? @@ -324,6 +352,7 @@ bash: | cd openai-chat-streaming-with-functions-py pip install -r requirements.txt + tag: skip - name: run template command: ai dev shell --run "cd openai-chat-streaming-with-functions-py && python main.py" input: |- diff --git a/tests/testadapter/README.md b/tests/testadapter/README.md index 40189c92..48d22e4e 100644 --- a/tests/testadapter/README.md +++ b/tests/testadapter/README.md @@ -26,12 +26,12 @@ From fresh clone (one step, CLI): OR ... 
[Build](#BUILD) first, then w/CLI: * DEBUG: ```dotnetcli - cd tests\testadapter\bin\Debug\net7.0 + cd tests\testadapter\bin\Debug\net8.0 dotnet test Azure.AI.CLI.TestAdapter.dll --logger:trx ``` * RELEASE: ```dotnetcli - cd tests\testadapter\bin\Release\net7.0 + cd tests\testadapter\bin\Release\net8.0 dotnet test Azure.AI.CLI.TestAdapter.dll --logger:trx --logger:console;verbosity=normal ``` @@ -39,12 +39,12 @@ OR ... [Build](#BUILD) first, then w/CLI: OR ... [Build](#BUILD) first, then w/CLI: * DEBUG: ```dotnetcli - cd tests\testadapter\bin\Debug\net7.0 + cd tests\testadapter\bin\Debug\net8.0 dotnet vstest Azure.AI.CLI.TestAdapter.dll --logger:trx ``` * RELEASE: ```dotnetcli - cd tests\testadapter\bin\Release\net7.0 + cd tests\testadapter\bin\Release\net8.0 dotnet vstest Azure.AI.CLI.TestAdapter.dll --logger:trx --logger:console;verbosity=normal ``` @@ -70,12 +70,12 @@ From fresh clone (one step, CLI): OR ... [Build](#BUILD) first, then w/CLI: * DEBUG: ```dotnetcli - cd tests\testadapter\bin\Debug\net7.0 + cd tests\testadapter\bin\Debug\net8.0 dotnet test Azure.AI.CLI.TestAdapter.dll -t ``` * RELEASE: ```dotnetcli - cd tests\testadapter\bin\Release\net7.0 + cd tests\testadapter\bin\Release\net8.0 dotnet test Azure.AI.CLI.TestAdapter.dll -t ``` @@ -83,12 +83,12 @@ OR ... [Build](#BUILD) first, then w/CLI: OR ... [Build](#BUILD) first, then w/CLI: * DEBUG: ```dotnetcli - cd tests\testadapter\bin\Debug\net7.0 + cd tests\testadapter\bin\Debug\net8.0 dotnet vstest Azure.AI.CLI.TestAdapter.dll -lt ``` * RELEASE: ```dotnetcli - cd tests\testadapter\bin\Release\net7.0 + cd tests\testadapter\bin\Release\net8.0 dotnet vstest Azure.AI.CLI.TestAdapter.dll -lt ``` @@ -110,12 +110,12 @@ OR ... 
[Build](#BUILD) first, then w/CLI: * DEBUG: ```dotnetcli - cd tests\testadapter\bin\Debug\net7.0 + cd tests\testadapter\bin\Debug\net8.0 dotnet test --filter:name~PARTIAL_NAME Azure.AI.CLI.TestAdapter.dll ``` * RELEASE: ```dotnetcli - cd tests\testadapter\bin\Release\net7.0 + cd tests\testadapter\bin\Release\net8.0 dotnet test --filter:name~PARTIAL_NAME Azure.AI.CLI.TestAdapter.dll ``` @@ -123,12 +123,12 @@ OR ... [Build](#BUILD) first, then w/CLI: OR ... [Build](#BUILD) first, then w/CLI: * DEBUG: ```dotnetcli - cd tests\testadapter\bin\Debug\net7.0 + cd tests\testadapter\bin\Debug\net8.0 dotnet vstest Azure.AI.CLI.TestAdapter.dll --logger:trx --testcasefilter:name~PARTIAL_NAME ``` * RELEASE: ```dotnetcli - cd tests\testadapter\bin\Release\net7.0 + cd tests\testadapter\bin\Release\net8.0 dotnet vstest Azure.AI.CLI.TestAdapter.dll --logger:trx --testcasefilter:name~PARTIAL_NAME ``` diff --git a/tests/testadapter/YamlTestAdapter.csproj b/tests/testadapter/YamlTestAdapter.csproj index 49849286..c359a4ce 100644 --- a/tests/testadapter/YamlTestAdapter.csproj +++ b/tests/testadapter/YamlTestAdapter.csproj @@ -1,7 +1,7 @@  - net7.0 + net8.0 diff --git a/tests/testadapter/YamlTestAdapterCommon.targets b/tests/testadapter/YamlTestAdapterCommon.targets index 0a1f5428..3da5c71a 100644 --- a/tests/testadapter/YamlTestAdapterCommon.targets +++ b/tests/testadapter/YamlTestAdapterCommon.targets @@ -2,7 +2,7 @@ - net7.0 + net8.0 Library Azure.AI.CLI.TestAdapter false diff --git a/tests/testframework/YamlTestFramework.csproj b/tests/testframework/YamlTestFramework.csproj index 74d60cbe..3c4af653 100644 --- a/tests/testframework/YamlTestFramework.csproj +++ b/tests/testframework/YamlTestFramework.csproj @@ -1,7 +1,7 @@  - net7.0 + net8.0 diff --git a/tests/testframework/YamlTestFrameworkCommon.targets b/tests/testframework/YamlTestFrameworkCommon.targets index ae2fd904..4646d02c 100644 --- a/tests/testframework/YamlTestFrameworkCommon.targets +++ 
b/tests/testframework/YamlTestFrameworkCommon.targets @@ -2,7 +2,7 @@ - net7.0 + net8.0 Library Azure.AI.CLI.TestFramework false diff --git a/tests/testrunner/YamlTestRunner.csproj b/tests/testrunner/YamlTestRunner.csproj index 4a17b466..e04b5a5f 100644 --- a/tests/testrunner/YamlTestRunner.csproj +++ b/tests/testrunner/YamlTestRunner.csproj @@ -1,7 +1,7 @@  - net7.0 + net8.0 Exe diff --git a/tests/testrunner/YamlTestRunnerCommon.targets b/tests/testrunner/YamlTestRunnerCommon.targets index 6755939a..5ae2055c 100644 --- a/tests/testrunner/YamlTestRunnerCommon.targets +++ b/tests/testrunner/YamlTestRunnerCommon.targets @@ -2,7 +2,7 @@ - net7.0 + net8.0 ait false From 772892aa57eed991c5d3501ba278bf9c87d52baf Mon Sep 17 00:00:00 2001 From: Rob Chambers Date: Wed, 7 Feb 2024 17:19:01 -0800 Subject: [PATCH 30/30] Update requirements.txt from hanchi --- requirements.txt | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/requirements.txt b/requirements.txt index 1521f520..789b7c04 100644 --- a/requirements.txt +++ b/requirements.txt @@ -3,7 +3,9 @@ ipykernel openai>1.0 -azure-ai-generative[evaluate,index,promptflow]==1.0.0b6 +azure_ai_resources @https://pkgs.dev.azure.com/azure-sdk/29ec6040-b234-4e31-b139-33dc4287b756/_packaging/3572dbf9-b5ef-433b-9137-fc4d7768e7cc/pypi/download/azure-ai-resources/1a20240207004/azure_ai_resources-1.0.0a20240207004-py3-none-any.whl +azure-ai-generative[evaluate,index,promptflow] @https://pkgs.dev.azure.com/azure-sdk/29ec6040-b234-4e31-b139-33dc4287b756/_packaging/3572dbf9-b5ef-433b-9137-fc4d7768e7cc/pypi/download/azure-ai-generative/1a20240207004/azure_ai_generative-1.0.0a20240207004-py3-none-any.whl +keyrings.alt # This is for promptflow # hardcoded the version of azureml-mlflow here for faster Docker image building speed azureml-mlflow==1.53.0 @@ -11,4 +13,4 @@ pytest langchain==0.1.1 langchain-openai==0.0.2.post1 -semantic-kernel \ No newline at end of file +semantic-kernel