diff --git a/.azure/pipelines/build.yaml b/.azure/pipelines/build.yaml index bcac4424..9284ccaa 100644 --- a/.azure/pipelines/build.yaml +++ b/.azure/pipelines/build.yaml @@ -217,8 +217,160 @@ stages: docker tag acrbn.azurecr.io/azure-ai-cli:bookworm-$(AICLIVersion) acrbn.azurecr.io/azure-ai-cli:latest docker push acrbn.azurecr.io/azure-ai-cli:latest -- stage: ManualApproval +- stage: TestStage dependsOn: [SetupStage, BuildStage] + condition: and(succeeded(), or(eq(variables['IsRelease'], 'true'), eq(variables['TestDevBuild'], 'true'))) + variables: + AICLIVersion: $[ stageDependencies.SetupStage.SetupJob.outputs['Variables.AICLIVersion']] + AICLISemVerVersion: $[ stageDependencies.SetupStage.SetupJob.outputs['Variables.AICLISemVerVersion']] + AICLINuPkgFileName: $[ stageDependencies.SetupStage.SetupJob.outputs['Variables.AICLINuPkgFileName']] + BuildConfiguration: 'ReleaseUnixOS' + LocalBinOutputPath: '$(Build.SourcesDirectory)/tests/build/bin' + LocalInputPath: '$(Build.SourcesDirectory)/tests' + TargetFolder: '$(System.DefaultWorkingDirectory)' + TestFilter: 'cli=ai&tag!=skip' + TestResultsPath: '$(Build.SourcesDirectory)/testresults' + TestRunTitle: 'Azure AI CLI $(AICLIVersion) ($(BuildConfiguration)-$(Build.BuildNumber))' + TestRunTrxFileName: '$(TestResultsPath)/ai-cli-test-results-$(BuildConfiguration)-$(Build.BuildNumber).trx' + TestBackupArtifactFile: '$(Build.ArtifactStagingDirectory)/ai-cli-test-run-backup-artifact-$(BuildConfiguration)-$(Build.BuildNumber).zip' + jobs: + - job: TestJob + steps: + + # ----------------------------------------------------------------------------- + # Download the ai CLI artifacts + # ----------------------------------------------------------------------------- + - task: DownloadPipelineArtifact@2 + displayName: Download ai-cli-artifacts + inputs: + artifact: 'ai-cli-artifacts' + targetPath: '$(Build.ArtifactStagingDirectory)/ai-cli-artifacts' + - task: CopyFiles@2 + displayName: Copy downloaded ai-cli-artifacts + inputs: 
+ Contents: '**/*' + SourceFolder: $(Build.ArtifactStagingDirectory)/ai-cli-artifacts + TargetFolder: $(TargetFolder) + FlattenFolders: true + - task: Bash@3 + displayName: List files... + inputs: + targetType: 'inline' + script: | + echo listing for $(System.DefaultWorkingDirectory) + ls -la $(System.DefaultWorkingDirectory) + echo listing for $(Build.ArtifactStagingDirectory) + ls -la $(Build.ArtifactStagingDirectory) + + # ----------------------------------------------------------------------------- + # Install the ai CLI + # ----------------------------------------------------------------------------- + - task: DotNetCoreCLI@2 + displayName: INSTALL AI - Installing ai CLI via `dotnet tool install` + inputs: + includeNuGetOrg: false + command: custom + custom: tool + version: '8.0.x' + arguments: install + --ignore-failed-sources + --add-source "$(System.DefaultWorkingDirectory)" + --global Azure.AI.CLI + --version "$(AICLIVersion)" + + # ----------------------------------------------------------------------------- + # Finish job prep (mkdir, config cli) + # ----------------------------------------------------------------------------- + - bash: | + env | sort + which dotnet + dotnet --version + echo "TestResultsPath: $(TestResultsPath)" + mkdir $(TestResultsPath) + echo "LocalInputPath: $(LocalInputPath)" + ls -la $(LocalInputPath) + ai config system --set input.path $(LocalInputPath) + ai config --find + echo "DefaultWorkingDirectory: $(System.DefaultWorkingDirectory)" + ls -la $(System.DefaultWorkingDirectory) + displayName: Finish job prep (mkdir, config cli) + workingDirectory: '$(System.DefaultWorkingDirectory)' + + # ----------------------------------------------------------------------------- + # Build the YamlTestAdapter + # ----------------------------------------------------------------------------- + - task: DotNetCoreCLI@2 + displayName: Build YamlTestAdapter + inputs: + includeNuGetOrg: false + command: build + version: '8.0.x' + projects: 
'**/testadapter/YamlTestAdapter.csproj' + arguments: + -c $(BuildConfiguration) + /p:Platform=x64 + /p:LocalBinOutputPath="$(LocalBinOutputPath)" + + # ----------------------------------------------------------------------------- + # Run the tests + # ----------------------------------------------------------------------------- + - task: AzureCLI@2 + displayName: Run ai-cli tests + continueOnError: true + inputs: + azureSubscription: 'AI_CLI_TestAdapter' + scriptType: 'bash' + scriptLocation: 'inlineScript' + inlineScript: | + az --version + az account show + cd $(TestResultsPath) + echo dotnet test --logger trx --results-directory "$(Agent.TempDirectory)" --logger:"trx;LogFileName=$(TestRunTrxFileName)" --logger:"console;verbosity=normal" --filter "$(TestFilter)" "$(LocalBinOutputPath)/$(BuildConfiguration)/net8.0/Azure.AI.CLI.TestAdapter.dll" + dotnet test --logger trx --results-directory "$(Agent.TempDirectory)" --logger:"trx;LogFileName=$(TestRunTrxFileName)" --logger:"console;verbosity=normal" --filter "$(TestFilter)" "$(LocalBinOutputPath)/$(BuildConfiguration)/net8.0/Azure.AI.CLI.TestAdapter.dll" + + # ----------------------------------------------------------------------------- + # Archive and publish the test run backup artifact + # ----------------------------------------------------------------------------- + - task: ArchiveFiles@2 + displayName: Archive ai-cli-test run backup artifact (build/bin) + continueOnError: true + inputs: + rootFolderOrFile: '$(LocalBinOutputPath)' + includeRootFolder: false + archiveFile: '$(TestBackupArtifactFile)' + replaceExistingArchive: false + + - task: ArchiveFiles@2 + displayName: Archive ai-cli-test run backup artifact (testresults) + continueOnError: true + inputs: + rootFolderOrFile: '$(TestResultsPath)' + includeRootFolder: false + archiveFile: '$(TestBackupArtifactFile)' + replaceExistingArchive: false + + - task: PublishBuildArtifacts@1 + displayName: Publish ai-cli-test run backup artifact + continueOnError: true 
+ retryCountOnTaskFailure: 5 + inputs: + parallel: true + pathToPublish: '$(TestBackupArtifactFile)' + artifactName: TestRunBackup + + # ----------------------------------------------------------------------------- + # Publish the test results + # ----------------------------------------------------------------------------- + - task: PublishTestResults@2 + displayName: Publish ai-cli test results + inputs: + testRunner: VSTest + testResultsFiles: '$(TestRunTrxFileName)' + testRunTitle: '$(TestRunTitle)' + failTaskOnFailedTests: true + +- stage: ManualApproval + dependsOn: [SetupStage, BuildStage, TestStage] condition: and(succeeded(), or(eq(stageDependencies.SetupStage.outputs['SetupJob.Variables.IsRelease'], 'true'), eq(variables['PublishDevBuild'], 'true'))) variables: AICLIVersion: $[ stageDependencies.SetupStage.SetupJob.outputs['Variables.AICLIVersion']] @@ -238,7 +390,7 @@ stages: onTimeout: reject - stage: PublishPublic - dependsOn: [SetupStage, BuildStage, ManualApproval] + dependsOn: [SetupStage, BuildStage, TestStage, ManualApproval] condition: and(succeeded(), or(eq(stageDependencies.SetupStage.outputs['SetupJob.Variables.IsRelease'], 'true'), eq(variables['PublishDevBuild'], 'true'))) variables: AICLIVersion: $[ stageDependencies.SetupStage.SetupJob.outputs['Variables.AICLIVersion']] diff --git a/.github/compliance/inventory.yml b/.github/compliance/inventory.yml new file mode 100644 index 00000000..e9b10d8e --- /dev/null +++ b/.github/compliance/inventory.yml @@ -0,0 +1,6 @@ +inventory: +- source: DirectOwners + isProduction: false + items: + - id: robch@microsoft.com + - id: cdev@microsoft.com \ No newline at end of file diff --git a/.github/workflows/build-package.yaml b/.github/workflows/build-package.yaml index 0cd920ac..d0feb462 100644 --- a/.github/workflows/build-package.yaml +++ b/.github/workflows/build-package.yaml @@ -18,7 +18,7 @@ jobs: - name: Set up .NET Core uses: actions/setup-dotnet@v2 with: - dotnet-version: '7.0.x' # Set the desired 
.NET version + dotnet-version: '8.0.x' # Set the desired .NET version - name: Set up environment variables run: | diff --git a/.gitignore b/.gitignore index ec6afab7..e6151fc2 100644 --- a/.gitignore +++ b/.gitignore @@ -3,7 +3,9 @@ **/.vscode/** **/bin/*/net6.0/* **/bin/*/net7.0/* +**/bin/*/net8.0/* **/obj/* **/Properties/launchSettings.json ideas/website/node_modules/** +testresults/** *.user diff --git a/README.md b/README.md index 8b2e4cda..ebec9ba5 100644 --- a/README.md +++ b/README.md @@ -1,9 +1,9 @@ -Status: Draft in Progress +Status: Draft in Progress Owner: Rob Chambers # Using the Azure AI CLI -The Azure `AI` Command-Line Interface (CLI) is a cross-platform command-line tool to connect to Azure AI services and execute control-plane and data-plane operations without having to write any code. The CLI allows the execution of commands through a terminal using interactive command-line prompts or via script. +The Azure `AI` Command-Line Interface (CLI) is a cross-platform command-line tool to connect to Azure AI services and execute control-plane and data-plane operations without having to write any code. The CLI allows the execution of commands through a terminal using interactive command-line prompts or via script. You can easily use the `AI` CLI to experiment with key Azure AI service features and see how they work with your use cases. Within minutes, you can setup all the required Azure resources needed, and build a customized Copilot using OpenAI's chat completions APIs and your own data. You can try it out interactively, or script larger processes to automate your own workflows and evaluations as part of your CI/CD system. @@ -11,7 +11,7 @@ In the future, you'll even be able to use the `AI` CLI to dynamically create cod ## **STEP 1**: Setup your development environment -You can install the Azure `AI` CLI locally on Linux, Mac, or Windows computers, or use it thru an internet browser or Docker container. 
+You can install the Azure `AI` CLI locally on Linux, Mac, or Windows computers, or use it thru an internet browser or Docker container. During this public preview, we recommend using the Azure `AI` CLI thru GitHub Codespaces. This will allow you to quickly get started without having to install anything locally. @@ -82,12 +82,12 @@ ai chat --system @prompt.txt --user "Tell me about Azure AI Studio" ``` USAGE: ai chat [...] - CONNECTION (see: ai help connection) + CONNECTION (see: ai help connection) --deployment DEPLOYMENT (see: ai help chat deployment) - --endpoint ENDPOINT (see: ai help chat endpoint) - --key KEY (see: ai help chat key) + --endpoint ENDPOINT (see: ai help chat endpoint) + --key KEY (see: ai help chat key) - INPUT (see: ai help chat input) + INPUT (see: ai help chat input) --interactive (see: ai help chat interactive) --system PROMPT (see: ai help chat system prompt) * --file FILE (see: ai help chat history file) @@ -118,7 +118,7 @@ ai search index update --files "../../data/3-product-info/*.md" --index-name "pr ``` AI - Azure AI CLI, Version 1.0.0 -Copyright (c) 2023 Microsoft Corporation. All Rights Reserved. +Copyright (c) 2024 Microsoft Corporation. All Rights Reserved. This PUBLIC PREVIEW version may change at any time. See: https://aka.ms/azure-ai-cli-public-preview @@ -187,4 +187,4 @@ ai chat --interactive --system @prompt.txt --index-name "product-info" ```bash ai chat --system @prompt.txt --index-name "product-info" --user "Which tent has the highest rainfly waterproof rating?" 
-``` \ No newline at end of file +``` diff --git a/ai-cli.sln b/ai-cli.sln index 7bc1ce00..9f68cb47 100644 --- a/ai-cli.sln +++ b/ai-cli.sln @@ -15,6 +15,14 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "test_helper_functions_exten EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "template_extension", "src\extensions\template_extension\template_extension.csproj", "{023B4F9C-E2B3-4CCD-A993-87E337C16EDE}" EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "YamlTestAdapter", "tests\testadapter\YamlTestAdapter.csproj", "{7C3F1355-B679-487D-904D-7E5FEBA9E75C}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "YamlTestFramework", "tests\testframework\YamlTestFramework.csproj", "{B0B3437F-1828-4A13-866F-1CF7C924015E}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "YamlTestRunner", "tests\testrunner\YamlTestRunner.csproj", "{39876475-2D98-40CF-8B08-CD423A5EB4E8}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Tests", "Tests", "{C8AFF891-D6AA-4B8F-BC21-10404DF4B355}" +EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "telemetry.aria", "src\telemetry\telemetry.aria.csproj", "{306A3CD6-91C2-450B-9995-79701CE63FE2}" EndProject Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Solution Items", "Solution Items", "{68834003-CCC2-44D7-9EA4-F9B6C65CFC99}" @@ -48,6 +56,18 @@ Global {023B4F9C-E2B3-4CCD-A993-87E337C16EDE}.Debug|Any CPU.Build.0 = Debug|Any CPU {023B4F9C-E2B3-4CCD-A993-87E337C16EDE}.Release|Any CPU.ActiveCfg = Release|Any CPU {023B4F9C-E2B3-4CCD-A993-87E337C16EDE}.Release|Any CPU.Build.0 = Release|Any CPU + {7C3F1355-B679-487D-904D-7E5FEBA9E75C}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {7C3F1355-B679-487D-904D-7E5FEBA9E75C}.Debug|Any CPU.Build.0 = Debug|Any CPU + {7C3F1355-B679-487D-904D-7E5FEBA9E75C}.Release|Any CPU.ActiveCfg = Release|Any CPU + {7C3F1355-B679-487D-904D-7E5FEBA9E75C}.Release|Any CPU.Build.0 = Release|Any CPU + 
{B0B3437F-1828-4A13-866F-1CF7C924015E}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {B0B3437F-1828-4A13-866F-1CF7C924015E}.Debug|Any CPU.Build.0 = Debug|Any CPU + {B0B3437F-1828-4A13-866F-1CF7C924015E}.Release|Any CPU.ActiveCfg = Release|Any CPU + {B0B3437F-1828-4A13-866F-1CF7C924015E}.Release|Any CPU.Build.0 = Release|Any CPU + {39876475-2D98-40CF-8B08-CD423A5EB4E8}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {39876475-2D98-40CF-8B08-CD423A5EB4E8}.Debug|Any CPU.Build.0 = Debug|Any CPU + {39876475-2D98-40CF-8B08-CD423A5EB4E8}.Release|Any CPU.ActiveCfg = Release|Any CPU + {39876475-2D98-40CF-8B08-CD423A5EB4E8}.Release|Any CPU.Build.0 = Release|Any CPU {306A3CD6-91C2-450B-9995-79701CE63FE2}.Debug|Any CPU.ActiveCfg = Debug|Any CPU {306A3CD6-91C2-450B-9995-79701CE63FE2}.Debug|Any CPU.Build.0 = Debug|Any CPU {306A3CD6-91C2-450B-9995-79701CE63FE2}.Release|Any CPU.ActiveCfg = Release|Any CPU @@ -60,6 +80,9 @@ Global {272E0B1B-6C05-428E-BF64-E30B1E5F603A} = {644B75F1-C768-4DB3-BAF2-C69A1F36DD28} {7BD6EF67-BA75-478D-9721-C1B2AB6DE3FF} = {644B75F1-C768-4DB3-BAF2-C69A1F36DD28} {023B4F9C-E2B3-4CCD-A993-87E337C16EDE} = {644B75F1-C768-4DB3-BAF2-C69A1F36DD28} + {7C3F1355-B679-487D-904D-7E5FEBA9E75C} = {C8AFF891-D6AA-4B8F-BC21-10404DF4B355} + {B0B3437F-1828-4A13-866F-1CF7C924015E} = {C8AFF891-D6AA-4B8F-BC21-10404DF4B355} + {39876475-2D98-40CF-8B08-CD423A5EB4E8} = {C8AFF891-D6AA-4B8F-BC21-10404DF4B355} EndGlobalSection GlobalSection(ExtensibilityGlobals) = postSolution SolutionGuid = {002655B1-E1E1-4F2A-8D53-C9CD55136AE2} diff --git a/ideas/azure-ai-cli-installation-spec.md b/ideas/azure-ai-cli-installation-spec.md index 32090e24..bbbef258 100644 --- a/ideas/azure-ai-cli-installation-spec.md +++ b/ideas/azure-ai-cli-installation-spec.md @@ -12,7 +12,7 @@ Customer Requirements: - Support Debian 10, 11, and 12 - Support Ubunutu 20.04 and 22.04 - Check and install Azure CLI if not present -- Check and install dotnet 7.0 if not present +- Check and install dotnet 8.0 if not present - Check 
and install Python azure.ai.generative SDK if not present - Update user's shell rc file (e.g. `$HOME/.bashrc` and/or `$HOME/.zshrc`) diff --git a/ideas/cool-helper-function-scenarios/HelperFunctionsProject.csproj b/ideas/cool-helper-function-scenarios/HelperFunctionsProject.csproj index 36bc2604..faf7eac5 100644 --- a/ideas/cool-helper-function-scenarios/HelperFunctionsProject.csproj +++ b/ideas/cool-helper-function-scenarios/HelperFunctionsProject.csproj @@ -1,11 +1,11 @@ - net7.0 + net8.0 enable enable true - D:\src\ai-cli\src\ai\bin\Debug\net7.0 + D:\src\ai-cli\src\ai\bin\Debug\net8.0 diff --git a/ideas/template-generator/system.prompt.md b/ideas/template-generator/system.prompt.md index f2db0074..08112f09 100644 --- a/ideas/template-generator/system.prompt.md +++ b/ideas/template-generator/system.prompt.md @@ -13,13 +13,13 @@ Are comprised of multiple project template files: ## `_.json` project template file: -`"_Name"` is the long descriptive name of the project template. This is required. +`"_LongName"` is the long descriptive name of the project template. This is required. 
Example: ```json { - "_Name": "OpenAI Chat Completions Class Library", + "_LongName": "OpenAI Chat Completions Class Library", "ClassName": "OpenAIChatCompletionsClass", "AICLIExtensionReferencePath": "" } diff --git a/requirements.txt b/requirements.txt index 5e3d0e43..789b7c04 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,15 +1,16 @@ # other app dependencies ipykernel -# generative ai SDK dependencies -azure-ai-generative[evaluate,index,promptflow] +openai>1.0 + +azure_ai_resources @https://pkgs.dev.azure.com/azure-sdk/29ec6040-b234-4e31-b139-33dc4287b756/_packaging/3572dbf9-b5ef-433b-9137-fc4d7768e7cc/pypi/download/azure-ai-resources/1a20240207004/azure_ai_resources-1.0.0a20240207004-py3-none-any.whl +azure-ai-generative[evaluate,index,promptflow] @https://pkgs.dev.azure.com/azure-sdk/29ec6040-b234-4e31-b139-33dc4287b756/_packaging/3572dbf9-b5ef-433b-9137-fc4d7768e7cc/pypi/download/azure-ai-generative/1a20240207004/azure_ai_generative-1.0.0a20240207004-py3-none-any.whl +keyrings.alt # This is for promptflow # hardcoded the version of azureml-mlflow here for faster Docker image building speed azureml-mlflow==1.53.0 pytest -# langchain dependencies, these should be optional in the future -# langchain moved Embeddings from langchain.embeddings.base to langchain.schema.embeddings while azureml-rag is still referencing it. -# once azureml-rag fixes, we should remove the langchain reference from this file -langchain==0.0.324 +langchain==0.1.1 +langchain-openai==0.0.2.post1 semantic-kernel diff --git a/scripts/InstallAzureAICLIDeb.sh b/scripts/InstallAzureAICLIDeb.sh index a8eb0104..f948c04e 100644 --- a/scripts/InstallAzureAICLIDeb.sh +++ b/scripts/InstallAzureAICLIDeb.sh @@ -32,7 +32,7 @@ if ! command -v az &> /dev/null; then fi fi -# Check if dotnet 7.0 is installed +# Check if dotnet 8.0 is installed if ! command -v dotnet &> /dev/null; then echo "dotnet is not installed." 
dotnet_version=0 @@ -40,11 +40,11 @@ else dotnet_version=$(dotnet --version | cut -d. -f1) fi -if [ "$dotnet_version" -eq "7" ]; then +if [ "$dotnet_version" -eq "8" ]; then dotnet_version=$(dotnet --version) echo "dotnet $dotnet_version is already installed." else - echo "Installing dotnet 7.0..." + echo "Installing dotnet 8.0..." # Update the package list sudo apt-get update @@ -60,11 +60,10 @@ else sudo dpkg -i packages-microsoft-prod.deb rm packages-microsoft-prod.deb elif [[ "$CHECK_VERSION" == "22.04" ]]; then - # We don't need to install the Microsoft package signing key for Ubuntu 22.04; in fact, if we do, `dotnet tool` doesn't work - echo "Ubuntu 22.04 detected. Skipping Microsoft package signing key installation." - # wget https://packages.microsoft.com/config/ubuntu/22.04/packages-microsoft-prod.deb -O packages-microsoft-prod.deb - # sudo dpkg -i packages-microsoft-prod.deb - # rm packages-microsoft-prod.deb + # Install the Microsoft package signing key for Ubuntu 22.04 + wget https://packages.microsoft.com/config/ubuntu/22.04/packages-microsoft-prod.deb -O packages-microsoft-prod.deb + sudo dpkg -i packages-microsoft-prod.deb + rm packages-microsoft-prod.deb else echo "Unsupported Ubuntu version: $CHECK_VERSION" exit 1 @@ -96,13 +95,13 @@ else exit 1 fi - # Install dotnet 7.0 runtime + # Install dotnet 8.0 runtime sudo apt-get update - sudo apt-get install -y dotnet-sdk-7.0 + sudo apt-get install -y dotnet-sdk-8.0 # Check if the installation was successful if [ $? -ne 0 ]; then - echo "Failed to install Dotnet 7.0." + echo "Failed to install Dotnet 8.0."
exit 1 fi fi diff --git a/src/ai/.x/help/include.python.script.connection_list.py b/src/ai/.x/help/include.python.script.connection_list.py index 928b7f29..cc056ef6 100644 --- a/src/ai/.x/help/include.python.script.connection_list.py +++ b/src/ai/.x/help/include.python.script.connection_list.py @@ -22,7 +22,7 @@ def list_connections(subscription_id, resource_group_name, project_name): "name": item.name, "type": item.type, "target": item.target, - "credentials": item.credentials.values() + "credentials": item.credentials.values() if item.credentials else None, } connections.append(connection) diff --git a/src/ai/.x/help/include.python.script.function_call_evaluate.py b/src/ai/.x/help/include.python.script.function_call_evaluate.py index 147b8a3a..fe6b1174 100755 --- a/src/ai/.x/help/include.python.script.function_call_evaluate.py +++ b/src/ai/.x/help/include.python.script.function_call_evaluate.py @@ -215,13 +215,10 @@ def run_evaluate_part(subscription_id, resource_group_name, project_name, run_re user_agent="ai-cli 0.0.1" ) - def dont_call_this_method(kwargs): - raise Exception("This method should not be called.") - from azure.ai.generative.evaluate import evaluate eval_results = evaluate( evaluation_name=name, - target=dont_call_this_method, + target=None, data=run_results, truth_data="truth", prediction_data="answer", @@ -236,7 +233,7 @@ def dont_call_this_method(kwargs): "api_version": os.getenv("OPENAI_API_VERSION"), "api_base": os.getenv("OPENAI_API_BASE"), "api_type": os.getenv("OPENAI_API_TYPE"), - "api_key": os.getenv("OPENAI_API_KEY"), + "api_key": os.getenv("AZURE_OPENAI_KEY"), "deployment_id": os.getenv("AZURE_OPENAI_EVALUATION_DEPLOYMENT") }, tracking_uri=client.tracking_uri, diff --git a/src/ai/.x/help/include.python.script.ml_index_update.py b/src/ai/.x/help/include.python.script.ml_index_update.py index d8235f2f..3d2ee8bf 100644 --- a/src/ai/.x/help/include.python.script.ml_index_update.py +++ 
b/src/ai/.x/help/include.python.script.ml_index_update.py @@ -53,7 +53,13 @@ def search_index_update( ) openaiConnection = client.get_default_aoai_connection() - openaiConnection.set_current_environment() + # openaiConnection.set_current_environment() + + # This is a workaround for build_index(), as it has nested logic depending on openai 0.x environment variables. + # This sets environment variables in openai 0.x fashion. + openaiConnection._set_current_environment_old() + # This sets environment variables in openai 1.x fashion. + openaiConnection._set_current_environment_new() searchConnection = client.connections.get("AzureAISearch") searchConnection.set_current_environment() diff --git a/src/ai/.x/templates/helper_functions/HelperFunctionsProject.csproj._ b/src/ai/.x/templates/helper_functions/HelperFunctionsProject.csproj._ index 6c9cb517..3663e601 100644 --- a/src/ai/.x/templates/helper_functions/HelperFunctionsProject.csproj._ +++ b/src/ai/.x/templates/helper_functions/HelperFunctionsProject.csproj._ @@ -4,7 +4,7 @@ - net7.0 + net8.0 enable enable true diff --git a/src/ai/.x/templates/helper_functions/_.json b/src/ai/.x/templates/helper_functions/_.json index ce746bb2..b911b99a 100644 --- a/src/ai/.x/templates/helper_functions/_.json +++ b/src/ai/.x/templates/helper_functions/_.json @@ -1,5 +1,6 @@ { - "_Name": "Helper Function Class Library", + "_LongName": "Helper Function Class Library", + "_ShortName": "helper-functions", "_Language": "C#", "ClassName": "HelperFunctionClass", "AICLIExtensionReferencePath": "" diff --git a/src/ai/.x/templates/openai-chat/OpenAIChatCompletions.csproj._ b/src/ai/.x/templates/openai-chat-cs/OpenAIChatCompletions.csproj._ similarity index 93% rename from src/ai/.x/templates/openai-chat/OpenAIChatCompletions.csproj._ rename to src/ai/.x/templates/openai-chat-cs/OpenAIChatCompletions.csproj._ index 4677a2e0..6ee2c11d 100644 --- a/src/ai/.x/templates/openai-chat/OpenAIChatCompletions.csproj._ +++ 
b/src/ai/.x/templates/openai-chat-cs/OpenAIChatCompletions.csproj._ @@ -4,7 +4,7 @@ - net7.0 + net8.0 enable enable true diff --git a/src/ai/.x/templates/openai-chat-cs/OpenAIChatCompletionsClass.cs b/src/ai/.x/templates/openai-chat-cs/OpenAIChatCompletionsClass.cs new file mode 100644 index 00000000..e352c9d0 --- /dev/null +++ b/src/ai/.x/templates/openai-chat-cs/OpenAIChatCompletionsClass.cs @@ -0,0 +1,45 @@ +<#@ template hostspecific="true" #> +<#@ output extension=".cs" encoding="utf-8" #> +<#@ parameter type="System.String" name="ClassName" #> +using Azure; +using Azure.AI.OpenAI; +using Azure.Identity; +using System; + +public class <#= ClassName #> +{ + public <#= ClassName #>(string openAIEndpoint, string openAIKey, string openAIChatDeploymentName, string openAISystemPrompt) + { + _openAISystemPrompt = openAISystemPrompt; + + _client = string.IsNullOrEmpty(openAIKey) + ? new OpenAIClient(new Uri(openAIEndpoint), new DefaultAzureCredential()) + : new OpenAIClient(new Uri(openAIEndpoint), new AzureKeyCredential(openAIKey)); + + _options = new ChatCompletionsOptions(); + _options.DeploymentName = openAIChatDeploymentName; + + ClearConversation(); + } + + public void ClearConversation() + { + _options.Messages.Clear(); + _options.Messages.Add(new ChatRequestSystemMessage(_openAISystemPrompt)); + } + + public string GetChatCompletion(string userPrompt) + { + _options.Messages.Add(new ChatRequestUserMessage(userPrompt)); + + var response = _client.GetChatCompletions(_options); + var responseContent = response.Value.Choices[0].Message.Content; + + _options.Messages.Add(new ChatRequestAssistantMessage(responseContent)); + return responseContent; + } + + private string _openAISystemPrompt; + private ChatCompletionsOptions _options; + private OpenAIClient _client; +} \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-cs/Program.cs b/src/ai/.x/templates/openai-chat-cs/Program.cs new file mode 100644 index 00000000..021f22ef --- /dev/null +++ 
b/src/ai/.x/templates/openai-chat-cs/Program.cs @@ -0,0 +1,31 @@ +<#@ template hostspecific="true" #> +<#@ output extension=".cs" encoding="utf-8" #> +<#@ parameter type="System.String" name="ClassName" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_ENDPOINT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_KEY" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_CHAT_DEPLOYMENT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_SYSTEM_PROMPT" #> +using System; + +public class Program +{ + public static void Main(string[] args) + { + var openAIEndpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? "<#= AZURE_OPENAI_ENDPOINT #>"; + var openAIKey = Environment.GetEnvironmentVariable("AZURE_OPENAI_KEY") ?? "<#= AZURE_OPENAI_KEY #>"; + var openAIChatDeploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_CHAT_DEPLOYMENT") ?? "<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>"; + var openAISystemPrompt = Environment.GetEnvironmentVariable("AZURE_OPENAI_SYSTEM_PROMPT") ?? 
"<#= AZURE_OPENAI_SYSTEM_PROMPT #>"; + + var chat = new <#= ClassName #>(openAIEndpoint, openAIKey, openAIChatDeploymentName, openAISystemPrompt); + + while (true) + { + Console.Write("User: "); + var userPrompt = Console.ReadLine(); + if (string.IsNullOrEmpty(userPrompt) || userPrompt == "exit") break; + + var response = chat.GetChatCompletion(userPrompt); + Console.WriteLine($"\nAssistant: {response}\n"); + } + } +} \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-cs/_.json b/src/ai/.x/templates/openai-chat-cs/_.json new file mode 100644 index 00000000..e3d0f633 --- /dev/null +++ b/src/ai/.x/templates/openai-chat-cs/_.json @@ -0,0 +1,10 @@ +{ + "_LongName": "OpenAI Chat Completions", + "_ShortName": "openai-chat", + "_Language": "C#", + "ClassName": "OpenAIChatCompletionsClass", + "AZURE_OPENAI_ENDPOINT": "", + "AZURE_OPENAI_KEY": "", + "AZURE_OPENAI_CHAT_DEPLOYMENT": "", + "AZURE_OPENAI_SYSTEM_PROMPT": "You are a helpful AI assistant." +} \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-go/_.json b/src/ai/.x/templates/openai-chat-go/_.json index c9e6bfff..9b771ad4 100644 --- a/src/ai/.x/templates/openai-chat-go/_.json +++ b/src/ai/.x/templates/openai-chat-go/_.json @@ -1,8 +1,10 @@ { - "_Name": "OpenAI Chat Completions in Go", + "_LongName": "OpenAI Chat Completions", + "_ShortName": "openai-chat", "_Language": "Go", - "OPENAI_ENDPOINT": "", - "OPENAI_API_KEY": "", - "AZURE_OPENAI_CHAT_DEPLOYMENT": "", + "ClassName": "OpenAIChatCompletionsExample", + "AZURE_OPENAI_ENDPOINT": "", + "AZURE_OPENAI_KEY": "", + "AZURE_OPENAI_CHAT_DEPLOYMENT": "", "AZURE_OPENAI_SYSTEM_PROMPT": "You are a helpful AI assistant." 
} \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-go/go.mod b/src/ai/.x/templates/openai-chat-go/go.mod index 5b9b4d4a..77cb86da 100644 --- a/src/ai/.x/templates/openai-chat-go/go.mod +++ b/src/ai/.x/templates/openai-chat-go/go.mod @@ -1,6 +1,6 @@ -module openai_chat_completions_hello_world.go +module openai_chat_completions_hello_world require ( - github.com/Azure/azure-sdk-for-go/sdk/ai/azopenai v0.3.0 - github.com/Azure/azure-sdk-for-go/sdk/azcore v1.9.0 + github.com/Azure/azure-sdk-for-go/sdk/ai/azopenai v0.4.1 + github.com/Azure/azure-sdk-for-go/sdk/azcore v1.9.1 ) diff --git a/src/ai/.x/templates/openai-chat-go/main.go b/src/ai/.x/templates/openai-chat-go/main.go new file mode 100644 index 00000000..8bcbe45c --- /dev/null +++ b/src/ai/.x/templates/openai-chat-go/main.go @@ -0,0 +1,71 @@ +<#@ template hostspecific="true" #> +<#@ output extension=".go" encoding="utf-8" #> +<#@ parameter type="System.String" name="ClassName" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_ENDPOINT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_KEY" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_CHAT_DEPLOYMENT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_SYSTEM_PROMPT" #> +package main + +import ( + "bufio" + "fmt" + "log" + "os" + "strings" +) + +func main() { + openAIEndpoint := os.Getenv("AZURE_OPENAI_ENDPOINT") + if openAIEndpoint == "" { + openAIEndpoint = "<#= AZURE_OPENAI_ENDPOINT #>" + } + openAIKey := os.Getenv("AZURE_OPENAI_KEY") + if openAIKey == "" { + openAIKey = "<#= AZURE_OPENAI_KEY #>" + } + openAIChatDeploymentName := os.Getenv("AZURE_OPENAI_CHAT_DEPLOYMENT") + if openAIChatDeploymentName == "" { + openAIChatDeploymentName = "<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>" + } + openAISystemPrompt := os.Getenv("OPENAI_SYSTEM_PROMPT") + if openAISystemPrompt == "" { + openAISystemPrompt = "<#= AZURE_OPENAI_SYSTEM_PROMPT #>" + } + + if openAIEndpoint == "" || openAIKey == "" || openAIChatDeploymentName 
== "" || openAISystemPrompt == "" { + fmt.Println("Please set the environment variables.") + os.Exit(1) + } + + chat, err := New<#= ClassName #>(openAIEndpoint, openAIKey, openAIChatDeploymentName, openAISystemPrompt) + if err != nil { + log.Fatalf("ERROR: %s", err) + } + + for { + fmt.Print("User: ") + input, _ := getUserInput() + if input == "exit" || input == "" { + break + } + + response, err := chat.GetChatCompletions(input) + if err != nil { + log.Fatalf("ERROR: %s", err) + } + + fmt.Printf("Assistant: %s\n\n", response) + } +} + +func getUserInput() (string, error) { + reader := bufio.NewReader(os.Stdin) + userInput, err := reader.ReadString('\n') + if err != nil { + return "", err + } + userInput = strings.TrimSuffix(userInput, "\n") + userInput = strings.TrimSuffix(userInput, "\r") + return userInput, nil +} diff --git a/src/ai/.x/templates/openai-chat-go/openai_chat_completions_hello_world.go b/src/ai/.x/templates/openai-chat-go/openai_chat_completions_hello_world.go index 469d35f5..3f129cc1 100644 --- a/src/ai/.x/templates/openai-chat-go/openai_chat_completions_hello_world.go +++ b/src/ai/.x/templates/openai-chat-go/openai_chat_completions_hello_world.go @@ -1,90 +1,60 @@ <#@ template hostspecific="true" #> <#@ output extension=".go" encoding="utf-8" #> -<#@ parameter type="System.String" name="OPENAI_ENDPOINT" #> -<#@ parameter type="System.String" name="OPENAI_API_KEY" #> -<#@ parameter type="System.String" name="AZURE_OPENAI_CHAT_DEPLOYMENT" #> -<#@ parameter type="System.String" name="AZURE_OPENAI_SYSTEM_PROMPT" #> +<#@ parameter type="System.String" name="ClassName" #> package main import ( - "bufio" - "context" - "fmt" - "log" - "os" - "strings" + "context" - "github.com/Azure/azure-sdk-for-go/sdk/ai/azopenai" - "github.com/Azure/azure-sdk-for-go/sdk/azcore/to" + "github.com/Azure/azure-sdk-for-go/sdk/ai/azopenai" + "github.com/Azure/azure-sdk-for-go/sdk/azcore" + "github.com/Azure/azure-sdk-for-go/sdk/azcore/to" ) -func main() { - azureOpenAIKey 
:= os.Getenv("OPENAI_API_KEY") - if azureOpenAIKey == "" { - azureOpenAIKey = "<#= OPENAI_API_KEY #>" - } - azureOpenAIEndpoint := os.Getenv("OPENAI_ENDPOINT") - if azureOpenAIEndpoint == "" { - azureOpenAIEndpoint = "<#= OPENAI_ENDPOINT #>" - } - modelDeploymentID := os.Getenv("AZURE_OPENAI_CHAT_DEPLOYMENT") - if modelDeploymentID == "" { - modelDeploymentID = "<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>" - } - systemPrompt := os.Getenv("OPENAI_SYSTEM_PROMPT") - if systemPrompt == "" { - systemPrompt = "<#= AZURE_OPENAI_SYSTEM_PROMPT #>" - } - - keyCredential, err := azopenai.NewKeyCredential(azureOpenAIKey) - if err != nil { - log.Fatalf("ERROR: %s", err) - } - client, err := azopenai.NewClientWithKeyCredential(azureOpenAIEndpoint, keyCredential, nil) - if err != nil { - log.Fatalf("ERROR: %s", err) - } - - options := azopenai.ChatCompletionsOptions{ - Deployment: modelDeploymentID, - Messages: []azopenai.ChatMessage{ - {Role: to.Ptr(azopenai.ChatRoleSystem), Content: to.Ptr(systemPrompt)}, - }, - } - - for { - fmt.Print("User: ") +type <#= ClassName #> struct { + client *azopenai.Client + options *azopenai.ChatCompletionsOptions +} - userPrompt, err := getUserInput() - if err != nil { - fmt.Println("Error reading input:", err) - break - } - if userPrompt == "exit" || userPrompt == "" { - break - } +func New<#= ClassName #>(openAIEndpoint string, openAIKey string, openAIChatDeploymentName string, openAISystemPrompt string) (*<#= ClassName #>, error) { + keyCredential := azcore.NewKeyCredential(openAIKey) + + client, err := azopenai.NewClientWithKeyCredential(openAIEndpoint, keyCredential, nil) + if err != nil { + return nil, err + } + + messages := []azopenai.ChatRequestMessageClassification{ + &azopenai.ChatRequestSystemMessage{ + Content: &openAISystemPrompt, + }, + } + + options := &azopenai.ChatCompletionsOptions{ + DeploymentName: &openAIChatDeploymentName, + Messages: messages, + } + + return &<#= ClassName #> { + client: client, + options: options, + }, nil +} - 
options.Messages = append(options.Messages, azopenai.ChatMessage{Role: to.Ptr(azopenai.ChatRoleUser), Content: to.Ptr(userPrompt)}) +func (chat *<#= ClassName #>) ClearConversation() { + chat.options.Messages = chat.options.Messages[:1] +} - resp, err := client.GetChatCompletions(context.TODO(), options, nil) - if err != nil { - log.Fatalf("ERROR: %s", err) - } +func (chat *<#= ClassName #>) GetChatCompletions(userPrompt string) (string, error) { + chat.options.Messages = append(chat.options.Messages, &azopenai.ChatRequestUserMessage{Content: azopenai.NewChatRequestUserMessageContent(userPrompt)}) - responseContent := *resp.Choices[0].Message.Content - options.Messages = append(options.Messages, azopenai.ChatMessage{Role: to.Ptr(azopenai.ChatRoleAssistant), Content: to.Ptr(responseContent)}) + resp, err := chat.client.GetChatCompletions(context.TODO(), *chat.options, nil) + if err != nil { + return "", err + } - fmt.Printf("\nAssistant: %s\n\n", responseContent) - } -} + responseContent := *resp.Choices[0].Message.Content + chat.options.Messages = append(chat.options.Messages, &azopenai.ChatRequestAssistantMessage{Content: to.Ptr(responseContent)}) -func getUserInput() (string, error) { - reader := bufio.NewReader(os.Stdin) - userInput, err := reader.ReadString('\n') - if err != nil { - return "", err - } - userInput = strings.TrimSuffix(userInput, "\n") - userInput = strings.TrimSuffix(userInput, "\r") - return userInput, nil + return responseContent, nil } diff --git a/src/ai/.x/templates/openai-chat-java/_.json b/src/ai/.x/templates/openai-chat-java/_.json index e11606dd..d4485593 100644 --- a/src/ai/.x/templates/openai-chat-java/_.json +++ b/src/ai/.x/templates/openai-chat-java/_.json @@ -1,8 +1,10 @@ { - "_Name": "OpenAI Chat Completions in Java", + "_LongName": "OpenAI Chat Completions", + "_ShortName": "openai-chat", "_Language": "Java", - "OPENAI_ENDPOINT": "", - "OPENAI_API_KEY": "", - "AZURE_OPENAI_CHAT_DEPLOYMENT": "", + "ClassName": 
"OpenAIChatCompletionsClass", + "AZURE_OPENAI_ENDPOINT": "", + "AZURE_OPENAI_KEY": "", + "AZURE_OPENAI_CHAT_DEPLOYMENT": "", "AZURE_OPENAI_SYSTEM_PROMPT": "You are a helpful AI assistant." } \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-java/pom.xml b/src/ai/.x/templates/openai-chat-java/pom.xml index ec38d9fa..50d91b14 100644 --- a/src/ai/.x/templates/openai-chat-java/pom.xml +++ b/src/ai/.x/templates/openai-chat-java/pom.xml @@ -10,7 +10,7 @@ com.azure azure-ai-openai - 1.0.0-beta.5 + 1.0.0-beta.6 diff --git a/src/ai/.x/templates/openai-chat-java/scripts/2-compile.bat b/src/ai/.x/templates/openai-chat-java/scripts/2-compile.bat index a0ab8417..13f0dc8c 100644 --- a/src/ai/.x/templates/openai-chat-java/scripts/2-compile.bat +++ b/src/ai/.x/templates/openai-chat-java/scripts/2-compile.bat @@ -1 +1 @@ -javac -cp target/lib/* src/OpenAIQuickstart.java -d out +javac -cp target/lib/* src/OpenAIChatCompletionsClass.java src/Main.java -d out diff --git a/src/ai/.x/templates/openai-chat-java/scripts/3-run.bat b/src/ai/.x/templates/openai-chat-java/scripts/3-run.bat index 6423520e..6d301cb6 100644 --- a/src/ai/.x/templates/openai-chat-java/scripts/3-run.bat +++ b/src/ai/.x/templates/openai-chat-java/scripts/3-run.bat @@ -1 +1 @@ -java -cp out;target/lib/* OpenAIQuickstart +java -cp out;target/lib/* Main diff --git a/src/ai/.x/templates/openai-chat-java/src/Main.java b/src/ai/.x/templates/openai-chat-java/src/Main.java new file mode 100644 index 00000000..c11db157 --- /dev/null +++ b/src/ai/.x/templates/openai-chat-java/src/Main.java @@ -0,0 +1,31 @@ +<#@ template hostspecific="true" #> +<#@ output extension=".java" encoding="utf-8" #> +<#@ parameter type="System.String" name="ClassName" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_ENDPOINT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_KEY" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_CHAT_DEPLOYMENT" #> +<#@ parameter type="System.String" 
name="AZURE_OPENAI_SYSTEM_PROMPT" #> +import java.util.Scanner; +public class Main { + public static void main(String[] args) { + String openAIKey = (System.getenv("AZURE_OPENAI_KEY") != null) ? System.getenv("AZURE_OPENAI_KEY") : ""; + String openAIEndpoint = (System.getenv("AZURE_OPENAI_ENDPOINT") != null) ? System.getenv("AZURE_OPENAI_ENDPOINT") : ""; + String openAIChatDeployment = (System.getenv("AZURE_OPENAI_CHAT_DEPLOYMENT") != null) ? System.getenv("AZURE_OPENAI_CHAT_DEPLOYMENT") : ""; + String openAISystemPrompt = (System.getenv("AZURE_OPENAI_SYSTEM_PROMPT") != null) ? System.getenv("AZURE_OPENAI_SYSTEM_PROMPT") : "You are a helpful AI assistant."; + + <#= ClassName #> chat = new <#= ClassName #>(openAIKey, openAIEndpoint, openAIChatDeployment, openAISystemPrompt); + + Scanner scanner = new Scanner(System.in); + while (true) { + System.out.print("User: "); + if (!scanner.hasNextLine()) break; + + String userPrompt = scanner.nextLine(); + if (userPrompt.isEmpty() || "exit".equals(userPrompt)) break; + + String response = chat.getChatCompletion(userPrompt); + System.out.println("\nAssistant: " + response + "\n"); + } + scanner.close(); + } +} \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-java/src/OpenAIChatCompletionsClass.java b/src/ai/.x/templates/openai-chat-java/src/OpenAIChatCompletionsClass.java new file mode 100644 index 00000000..792e85f0 --- /dev/null +++ b/src/ai/.x/templates/openai-chat-java/src/OpenAIChatCompletionsClass.java @@ -0,0 +1,52 @@ +<#@ template hostspecific="true" #> +<#@ output extension=".java" encoding="utf-8" #> +<#@ parameter type="System.String" name="ClassName" #> +import com.azure.ai.openai.OpenAIClient; +import com.azure.ai.openai.OpenAIClientBuilder; +import com.azure.ai.openai.models.ChatRequestAssistantMessage; +import com.azure.ai.openai.models.ChatRequestMessage; +import com.azure.ai.openai.models.ChatRequestSystemMessage; +import com.azure.ai.openai.models.ChatRequestUserMessage; +import 
com.azure.ai.openai.models.ChatCompletions; +import com.azure.ai.openai.models.ChatCompletionsOptions; +import com.azure.core.credential.AzureKeyCredential; + +import java.util.ArrayList; +import java.util.List; + +public class <#= ClassName #> { + + private OpenAIClient client; + private ChatCompletionsOptions options; + private String openAIChatDeployment; + private String openAISystemPrompt; + + public <#= ClassName #> (String openAIKey, String openAIEndpoint, String openAIChatDeployment, String openAISystemPrompt) { + this.openAIChatDeployment = openAIChatDeployment; + this.openAISystemPrompt = openAISystemPrompt; + client = new OpenAIClientBuilder() + .endpoint(openAIEndpoint) + .credential(new AzureKeyCredential(openAIKey)) + .buildClient(); + + List chatMessages = new ArrayList<>(); + options = new ChatCompletionsOptions(chatMessages); + ClearConversation(); + } + + public void ClearConversation(){ + List chatMessages = options.getMessages(); + chatMessages.clear(); + chatMessages.add(new ChatRequestSystemMessage(this.openAISystemPrompt)); + } + + public String getChatCompletion(String userPrompt) { + options.getMessages().add(new ChatRequestUserMessage(userPrompt)); + + ChatCompletions chatCompletions = client.getChatCompletions(this.openAIChatDeployment, options); + String responseContent = chatCompletions.getChoices().get(0).getMessage().getContent(); + options.getMessages().add(new ChatRequestAssistantMessage(responseContent.toString())); + + return responseContent; + } +} \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-java/src/OpenAIQuickstart.java b/src/ai/.x/templates/openai-chat-java/src/OpenAIQuickstart.java deleted file mode 100644 index 39087733..00000000 --- a/src/ai/.x/templates/openai-chat-java/src/OpenAIQuickstart.java +++ /dev/null @@ -1,69 +0,0 @@ -<#@ template hostspecific="true" #> -<#@ output extension=".java" encoding="utf-8" #> -<#@ parameter type="System.String" name="ClassName" #> -<#@ parameter 
type="System.String" name="OPENAI_ENDPOINT" #> -<#@ parameter type="System.String" name="OPENAI_API_KEY" #> -<#@ parameter type="System.String" name="AZURE_OPENAI_CHAT_DEPLOYMENT" #> -<#@ parameter type="System.String" name="AZURE_OPENAI_SYSTEM_PROMPT" #> -import com.azure.ai.openai.OpenAIClient; -import com.azure.ai.openai.OpenAIClientBuilder; -import com.azure.ai.openai.models.ChatChoice; -import com.azure.ai.openai.models.ChatCompletions; -import com.azure.ai.openai.models.ChatCompletionsOptions; -import com.azure.ai.openai.models.ChatMessage; -import com.azure.ai.openai.models.ChatRole; -import com.azure.ai.openai.models.CompletionsUsage; -import com.azure.core.credential.AzureKeyCredential; - -import java.util.ArrayList; -import java.util.List; -import java.util.Scanner; - -public class OpenAIQuickstart { - - private OpenAIClient client; - private ChatCompletionsOptions options; - - private String key = (System.getenv("OPENAI_API_KEY") != null) ? System.getenv("OPENAI_API_KEY") : "<#= OPENAI_API_KEY #>"; - private String endpoint = (System.getenv("OPENAI_ENDPOINT") != null) ? System.getenv("OPENAI_ENDPOINT") : "<#= OPENAI_ENDPOINT #>"; - private String deploymentName = (System.getenv("AZURE_OPENAI_CHAT_DEPLOYMENT") != null) ? System.getenv("AZURE_OPENAI_CHAT_DEPLOYMENT") : "<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>"; - private String systemPrompt = (System.getenv("AZURE_OPENAI_SYSTEM_PROMPT") != null) ? 
System.getenv("AZURE_OPENAI_SYSTEM_PROMPT") : "<#= AZURE_OPENAI_SYSTEM_PROMPT #>"; - - public OpenAIQuickstart() { - - client = new OpenAIClientBuilder() - .endpoint(endpoint) - .credential(new AzureKeyCredential(key)) - .buildClient(); - - List chatMessages = new ArrayList<>(); - chatMessages.add(new ChatMessage(ChatRole.SYSTEM, systemPrompt)); - - options = new ChatCompletionsOptions(chatMessages); - } - - public String getChatCompletion(String userPrompt) { - options.getMessages().add(new ChatMessage(ChatRole.USER, userPrompt)); - - ChatCompletions chatCompletions = client.getChatCompletions(deploymentName, options); - String responseContent = chatCompletions.getChoices().get(0).getMessage().getContent(); - options.getMessages().add(new ChatMessage(ChatRole.ASSISTANT, responseContent)); - - return responseContent; - } - - public static void main(String[] args) { - OpenAIQuickstart chat = new OpenAIQuickstart(); - - Scanner scanner = new Scanner(System.in); - while (true) { - System.out.print("User: "); - String userPrompt = scanner.nextLine(); - if (userPrompt.isEmpty() || "exit".equals(userPrompt)) break; - - String response = chat.getChatCompletion(userPrompt); - System.out.println("\nAssistant: " + response + "\n"); - } - scanner.close(); - } -} \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-js/ChatCompletion.js b/src/ai/.x/templates/openai-chat-js/ChatCompletion.js deleted file mode 100644 index 2256a152..00000000 --- a/src/ai/.x/templates/openai-chat-js/ChatCompletion.js +++ /dev/null @@ -1,48 +0,0 @@ -<#@ template hostspecific="true" #> -<#@ output extension=".js" encoding="utf-8" #> -<#@ parameter type="System.String" name="OPENAI_ENDPOINT" #> -<#@ parameter type="System.String" name="OPENAI_API_KEY" #> -<#@ parameter type="System.String" name="AZURE_OPENAI_CHAT_DEPLOYMENT" #> -<#@ parameter type="System.String" name="AZURE_OPENAI_SYSTEM_PROMPT" #> -const { OpenAIClient, AzureKeyCredential } = require("@azure/openai"); -const 
readline = require('readline'); -const rl = readline.createInterface({ - input: process.stdin, - output: process.stdout -}); - -const endpoint = process.env["OPENAI_ENDPOINT"] || "<#= OPENAI_ENDPOINT #>"; -const azureApiKey = process.env["OPENAI_API_KEY"] || "<#= OPENAI_API_KEY #>"; -const deploymentName = process.env["AZURE_OPENAI_CHAT_DEPLOYMENT"] || "<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>" ; -const systemPrompt = process.env["AZURE_OPENAI_SYSTEM_PROMPT"] || "<#= AZURE_OPENAI_SYSTEM_PROMPT #>" ; - -messages = [ - { role: "system", content: systemPrompt }, -]; - -async function main() { - - const client = new OpenAIClient(endpoint, new AzureKeyCredential(azureApiKey)); - - while (true) { - - const input = await new Promise(resolve => rl.question('User: ', resolve)); - if (input === 'exit' || input === '') break; - - messages.push({ role: "user", content: input }); - const result = await client.getChatCompletions(deploymentName, messages); - - const response_content = result.choices[0].message.content; - messages.push({ role: "assistant", content: response_content }); - - console.log(`\nAssistant: ${response_content}\n`); - } - - console.log('Bye!'); -} - -main().catch((err) => { - console.error("The sample encountered an error:", err); -}); - -module.exports = { main }; diff --git a/src/ai/.x/templates/openai-chat-js/Main.js b/src/ai/.x/templates/openai-chat-js/Main.js new file mode 100644 index 00000000..0c9933f3 --- /dev/null +++ b/src/ai/.x/templates/openai-chat-js/Main.js @@ -0,0 +1,45 @@ +<#@ template hostspecific="true" #> +<#@ output extension=".js" encoding="utf-8" #> +<#@ parameter type="System.String" name="ClassName" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_ENDPOINT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_KEY" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_CHAT_DEPLOYMENT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_SYSTEM_PROMPT" #> +const { <#= ClassName #> } = 
require("./OpenAIChatCompletionsClass"); + +const readline = require('readline'); +const rl = readline.createInterface({ + input: process.stdin, + output: process.stdout +}); + +async function main() { + + const openAIEndpoint = process.env["AZURE_OPENAI_ENDPOINT"] || "<#= AZURE_OPENAI_ENDPOINT #>"; + const openAIKey = process.env["AZURE_OPENAI_KEY"] || "<#= AZURE_OPENAI_KEY #>"; + const openAIChatDeploymentName = process.env["AZURE_OPENAI_CHAT_DEPLOYMENT"] || "<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>" ; + const openAISystemPrompt = process.env["AZURE_OPENAI_SYSTEM_PROMPT"] || "<#= AZURE_OPENAI_SYSTEM_PROMPT #>" ; + + const chat = new <#= ClassName #>(openAIEndpoint, openAIKey, openAIChatDeploymentName, openAISystemPrompt); + + while (true) { + + const input = await new Promise(resolve => rl.question('User: ', resolve)); + if (input === 'exit' || input === '') break; + + let response = await chat.getChatCompletions(input); + console.log(`\nAssistant: ${response}\n`); + } + + console.log('Bye!'); + process.exit(); +} + +main().catch((err) => { + if (err.code !== 'ERR_USE_AFTER_CLOSE') { // filter out expected error (EOF on redirected input) + console.error("The sample encountered an error:", err); + process.exit(1); + } +}); + +module.exports = { main }; diff --git a/src/ai/.x/templates/openai-chat-js/OpenAIChatCompletionsClass.js b/src/ai/.x/templates/openai-chat-js/OpenAIChatCompletionsClass.js new file mode 100644 index 00000000..00bcceda --- /dev/null +++ b/src/ai/.x/templates/openai-chat-js/OpenAIChatCompletionsClass.js @@ -0,0 +1,31 @@ +<#@ template hostspecific="true" #> +<#@ output extension=".js" encoding="utf-8" #> +<#@ parameter type="System.String" name="ClassName" #> +const { OpenAIClient, AzureKeyCredential } = require("@azure/openai"); + +class <#= ClassName #> { + constructor(openAIEndpoint, openAIKey, openAIChatDeploymentName, openAISystemPrompt) { + this.openAISystemPrompt = openAISystemPrompt; + this.openAIChatDeploymentName = openAIChatDeploymentName; 
+ this.client = new OpenAIClient(openAIEndpoint, new AzureKeyCredential(openAIKey)); + this.clearConversation(); + } + + clearConversation() { + this.messages = [ + { role: 'system', content: this.openAISystemPrompt } + ]; + } + + async getChatCompletions(userInput) { + this.messages.push({ role: 'user', content: userInput }); + + const result = await this.client.getChatCompletions(this.openAIChatDeploymentName, this.messages); + const responseContent = result.choices[0].message.content; + + this.messages.push({ role: 'assistant', content: responseContent }); + return responseContent; + } +} + +exports.<#= ClassName #> = <#= ClassName #>; \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-js/_.json b/src/ai/.x/templates/openai-chat-js/_.json index fb6365fd..8a1f1e93 100644 --- a/src/ai/.x/templates/openai-chat-js/_.json +++ b/src/ai/.x/templates/openai-chat-js/_.json @@ -1,8 +1,10 @@ { - "_Name": "OpenAI Chat Completions in JavaScript", + "_LongName": "OpenAI Chat Completions", + "_ShortName": "openai-chat", "_Language": "JavaScript", - "OPENAI_ENDPOINT": "", - "OPENAI_API_KEY": "", - "AZURE_OPENAI_CHAT_DEPLOYMENT": "", + "ClassName": "OpenAIChatCompletionsClass", + "AZURE_OPENAI_ENDPOINT": "", + "AZURE_OPENAI_KEY": "", + "AZURE_OPENAI_CHAT_DEPLOYMENT": "", "AZURE_OPENAI_SYSTEM_PROMPT": "You are a helpful AI assistant." 
} \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-js/package.json b/src/ai/.x/templates/openai-chat-js/package.json index 63b70f82..90d9afe9 100644 --- a/src/ai/.x/templates/openai-chat-js/package.json +++ b/src/ai/.x/templates/openai-chat-js/package.json @@ -2,14 +2,14 @@ "name": "openai-chat", "version": "1.0.0", "description": "", - "main": "ChatCompletions.js", + "main": "Main.js", "scripts": { "test": "echo \"Error: no test specified\" && exit 1" }, "author": "", "license": "MIT", "dependencies": { - "@azure/openai": "1.0.0-beta.8" + "@azure/openai": "1.0.0-beta.10" } } \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-py/_.json b/src/ai/.x/templates/openai-chat-py/_.json index 42d9549b..a90bb783 100644 --- a/src/ai/.x/templates/openai-chat-py/_.json +++ b/src/ai/.x/templates/openai-chat-py/_.json @@ -1,9 +1,10 @@ { - "_Name": "OpenAI Chat Completions in Python", + "_LongName": "OpenAI Chat Completions", + "_ShortName": "openai-chat", "_Language": "Python", - "OPENAI_ENDPOINT": "", - "OPENAI_API_KEY": "", - "OPENAI_API_VERSION": "2023-12-01-preview", - "AZURE_OPENAI_CHAT_DEPLOYMENT": "", + "AZURE_OPENAI_ENDPOINT": "", + "AZURE_OPENAI_KEY": "", + "AZURE_OPENAI_API_VERSION": "2023-12-01-preview", + "AZURE_OPENAI_CHAT_DEPLOYMENT": "", "AZURE_OPENAI_SYSTEM_PROMPT": "You are a helpful AI assistant." 
} \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-py/openai_chat_completions.py b/src/ai/.x/templates/openai-chat-py/openai_chat_completions.py index 810615ee..49cca028 100644 --- a/src/ai/.x/templates/openai-chat-py/openai_chat_completions.py +++ b/src/ai/.x/templates/openai-chat-py/openai_chat_completions.py @@ -1,42 +1,57 @@ <#@ template hostspecific="true" #> <#@ output extension=".py" encoding="utf-8" #> -<#@ parameter type="System.String" name="OPENAI_ENDPOINT" #> -<#@ parameter type="System.String" name="OPENAI_API_KEY" #> -<#@ parameter type="System.String" name="OPENAI_API_VERSION" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_ENDPOINT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_KEY" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_API_VERSION" #> <#@ parameter type="System.String" name="AZURE_OPENAI_CHAT_DEPLOYMENT" #> <#@ parameter type="System.String" name="AZURE_OPENAI_SYSTEM_PROMPT" #> +from openai import AzureOpenAI import os -import openai +import sys -openai.api_type = "azure" -openai.api_base = os.getenv("OPENAI_ENDPOINT") or "<#= OPENAI_ENDPOINT #>" -openai.api_key = os.getenv("OPENAI_API_KEY") or "<#= OPENAI_API_KEY #>" -openai.api_version = os.getenv("OPENAI_API_VERSION") or "<#= OPENAI_API_VERSION #>" +openai_api_version = os.getenv('AZURE_OPENAI_API_VERSION', '<#= AZURE_OPENAI_API_VERSION #>') +openai_endpoint = os.getenv('AZURE_OPENAI_ENDPOINT', '<#= AZURE_OPENAI_ENDPOINT #>') +openai_key = os.getenv('AZURE_OPENAI_KEY', '<#= AZURE_OPENAI_KEY #>') +openai_chat_deployment_name = os.getenv('AZURE_OPENAI_CHAT_DEPLOYMENT', '<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>') +openai_system_prompt = os.getenv('AZURE_OPENAI_SYSTEM_PROMPT', '<#= AZURE_OPENAI_SYSTEM_PROMPT #>') -deploymentName = os.getenv("AZURE_OPENAI_CHAT_DEPLOYMENT") or "<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>" -systemPrompt = os.getenv("AZURE_OPENAI_SYSTEM_PROMPT") or "<#= AZURE_OPENAI_SYSTEM_PROMPT #>" +client = AzureOpenAI( + 
api_key=openai_key, + api_version=openai_api_version, + azure_endpoint = openai_endpoint +) messages=[ - {"role": "system", "content": systemPrompt}, + {'role': 'system', 'content': openai_system_prompt}, ] -def getChatCompletions() -> str: - messages.append({"role": "user", "content": userPrompt}) +def get_chat_completions(user_input) -> str: + messages.append({'role': 'user', 'content': user_input}) - response = openai.ChatCompletion.create( - engine=deploymentName, + response = client.chat.completions.create( + model=openai_chat_deployment_name, messages=messages, ) - response_content = response["choices"][0]["message"]["content"] - messages.append({"role": "assistant", "content": response_content}) + response_content = response.choices[0].message.content + messages.append({'role': 'assistant', 'content': response_content}) return response_content -while True: - userPrompt = input("User: ") - if userPrompt == "" or userPrompt == "exit": - break - - response_content = getChatCompletions() - print(f"\nAssistant: {response_content}\n") \ No newline at end of file +def main(): + while True: + user_input = input('User: ') + if user_input == 'exit' or user_input == '': + break + + response_content = get_chat_completions(user_input) + print(f"\nAssistant: {response_content}\n") + +if __name__ == '__main__': + try: + main() + except EOFError: + pass + except Exception as e: + print(f"The sample encountered an error: {e}") + sys.exit(1) \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-py/requirements.txt b/src/ai/.x/templates/openai-chat-py/requirements.txt index d008bb14..7a06be70 100644 --- a/src/ai/.x/templates/openai-chat-py/requirements.txt +++ b/src/ai/.x/templates/openai-chat-py/requirements.txt @@ -1 +1 @@ -openai==0.28.1 +openai==1.0.0 diff --git a/src/ai/.x/templates/openai-chat-streaming/OpenAIChatCompletionsStreaming.csproj._ b/src/ai/.x/templates/openai-chat-streaming-cs/OpenAIChatCompletionsStreaming.csproj._ similarity index 93% 
rename from src/ai/.x/templates/openai-chat-streaming/OpenAIChatCompletionsStreaming.csproj._ rename to src/ai/.x/templates/openai-chat-streaming-cs/OpenAIChatCompletionsStreaming.csproj._ index 4677a2e0..6ee2c11d 100644 --- a/src/ai/.x/templates/openai-chat-streaming/OpenAIChatCompletionsStreaming.csproj._ +++ b/src/ai/.x/templates/openai-chat-streaming-cs/OpenAIChatCompletionsStreaming.csproj._ @@ -4,7 +4,7 @@ - net7.0 + net8.0 enable enable true diff --git a/src/ai/.x/templates/openai-chat-streaming-cs/OpenAIChatCompletionsStreamingClass.cs b/src/ai/.x/templates/openai-chat-streaming-cs/OpenAIChatCompletionsStreamingClass.cs new file mode 100644 index 00000000..fe7120a2 --- /dev/null +++ b/src/ai/.x/templates/openai-chat-streaming-cs/OpenAIChatCompletionsStreamingClass.cs @@ -0,0 +1,62 @@ +<#@ template hostspecific="true" #> +<#@ output extension=".cs" encoding="utf-8" #> +<#@ parameter type="System.String" name="ClassName" #> +using Azure; +using Azure.AI.OpenAI; +using Azure.Identity; +using System; + +public class <#= ClassName #> +{ + public <#= ClassName #>(string openAIEndpoint, string openAIKey, string openAIChatDeploymentName, string openAISystemPrompt) + { + _openAISystemPrompt = openAISystemPrompt; + + _client = string.IsNullOrEmpty(openAIKey) + ? new OpenAIClient(new Uri(openAIEndpoint), new DefaultAzureCredential()) + : new OpenAIClient(new Uri(openAIEndpoint), new AzureKeyCredential(openAIKey)); + + _options = new ChatCompletionsOptions(); + _options.DeploymentName = openAIChatDeploymentName; + + ClearConversation(); + } + + public void ClearConversation() + { + _options.Messages.Clear(); + _options.Messages.Add(new ChatRequestSystemMessage(_openAISystemPrompt)); + } + + public async Task GetChatCompletionsStreamingAsync(string userPrompt, Action? 
callback = null) + { + _options.Messages.Add(new ChatRequestUserMessage(userPrompt)); + + var responseContent = string.Empty; + var response = await _client.GetChatCompletionsStreamingAsync(_options); + await foreach (var update in response.EnumerateValues()) + { + var content = update.ContentUpdate; + if (update.FinishReason == CompletionsFinishReason.ContentFiltered) + { + content = $"{content}\nWARNING: Content filtered!"; + } + else if (update.FinishReason == CompletionsFinishReason.TokenLimitReached) + { + content = $"{content}\nERROR: Exceeded token limit!"; + } + + if (string.IsNullOrEmpty(content)) continue; + + responseContent += content; + if (callback != null) callback(update); + } + + _options.Messages.Add(new ChatRequestAssistantMessage(responseContent)); + return responseContent; + } + + private string _openAISystemPrompt; + private ChatCompletionsOptions _options; + private OpenAIClient _client; +} \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-streaming-cs/Program.cs b/src/ai/.x/templates/openai-chat-streaming-cs/Program.cs new file mode 100644 index 00000000..46c8dffe --- /dev/null +++ b/src/ai/.x/templates/openai-chat-streaming-cs/Program.cs @@ -0,0 +1,34 @@ +<#@ template hostspecific="true" #> +<#@ output extension=".cs" encoding="utf-8" #> +<#@ parameter type="System.String" name="ClassName" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_ENDPOINT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_KEY" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_CHAT_DEPLOYMENT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_SYSTEM_PROMPT" #> +using System; + +public class Program +{ + public static async Task Main(string[] args) + { + var openAIEndpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? "<#= AZURE_OPENAI_ENDPOINT #>"; + var openAIKey = Environment.GetEnvironmentVariable("AZURE_OPENAI_KEY") ?? 
"<#= AZURE_OPENAI_KEY #>"; + var openAIChatDeploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_CHAT_DEPLOYMENT") ?? "<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>"; + var openAISystemPrompt = Environment.GetEnvironmentVariable("AZURE_OPENAI_SYSTEM_PROMPT") ?? "<#= AZURE_OPENAI_SYSTEM_PROMPT #>"; + + var chat = new <#= ClassName #>(openAIEndpoint, openAIKey, openAIChatDeploymentName, openAISystemPrompt); + + while (true) + { + Console.Write("User: "); + var userPrompt = Console.ReadLine(); + if (string.IsNullOrEmpty(userPrompt) || userPrompt == "exit") break; + + Console.Write("\nAssistant: "); + var response = await chat.GetChatCompletionsStreamingAsync(userPrompt, update => + Console.Write(update.ContentUpdate) + ); + Console.WriteLine("\n"); + } + } +} \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-streaming-cs/_.json b/src/ai/.x/templates/openai-chat-streaming-cs/_.json new file mode 100644 index 00000000..227dbd09 --- /dev/null +++ b/src/ai/.x/templates/openai-chat-streaming-cs/_.json @@ -0,0 +1,10 @@ +{ + "_LongName": "OpenAI Chat Completions (Streaming)", + "_ShortName": "openai-chat-streaming", + "_Language": "C#", + "ClassName": "OpenAIChatCompletionsStreamingClass", + "AZURE_OPENAI_ENDPOINT": "", + "AZURE_OPENAI_KEY": "", + "AZURE_OPENAI_CHAT_DEPLOYMENT": "", + "AZURE_OPENAI_SYSTEM_PROMPT": "You are a helpful AI assistant." 
+} \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-streaming-go/_.json b/src/ai/.x/templates/openai-chat-streaming-go/_.json new file mode 100644 index 00000000..d3b112ec --- /dev/null +++ b/src/ai/.x/templates/openai-chat-streaming-go/_.json @@ -0,0 +1,10 @@ +{ + "_LongName": "OpenAI Chat Completions (Streaming)", + "_ShortName": "openai-chat-streaming", + "_Language": "Go", + "ClassName": "OpenAIChatCompletionsStreamingExample", + "AZURE_OPENAI_ENDPOINT": "", + "AZURE_OPENAI_KEY": "", + "AZURE_OPENAI_CHAT_DEPLOYMENT": "", + "AZURE_OPENAI_SYSTEM_PROMPT": "You are a helpful AI assistant." +} \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-streaming-go/go.mod b/src/ai/.x/templates/openai-chat-streaming-go/go.mod new file mode 100644 index 00000000..525650b3 --- /dev/null +++ b/src/ai/.x/templates/openai-chat-streaming-go/go.mod @@ -0,0 +1,6 @@ +module openai_chat_completions_streaming_hello_world + +require ( + github.com/Azure/azure-sdk-for-go/sdk/ai/azopenai v0.4.1 + github.com/Azure/azure-sdk-for-go/sdk/azcore v1.9.1 +) diff --git a/src/ai/.x/templates/openai-chat-streaming-go/main.go b/src/ai/.x/templates/openai-chat-streaming-go/main.go new file mode 100644 index 00000000..5e6deb95 --- /dev/null +++ b/src/ai/.x/templates/openai-chat-streaming-go/main.go @@ -0,0 +1,73 @@ +<#@ template hostspecific="true" #> +<#@ output extension=".go" encoding="utf-8" #> +<#@ parameter type="System.String" name="ClassName" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_ENDPOINT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_KEY" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_CHAT_DEPLOYMENT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_SYSTEM_PROMPT" #> +package main + +import ( + "bufio" + "fmt" + "log" + "os" + "strings" +) + +func main() { + openAIEndpoint := os.Getenv("AZURE_OPENAI_ENDPOINT") + if openAIEndpoint == "" { + openAIEndpoint = "<#= AZURE_OPENAI_ENDPOINT #>" + } + 
openAIKey := os.Getenv("AZURE_OPENAI_KEY") + if openAIKey == "" { + openAIKey = "<#= AZURE_OPENAI_KEY #>" + } + openAIChatDeploymentName := os.Getenv("AZURE_OPENAI_CHAT_DEPLOYMENT") + if openAIChatDeploymentName == "" { + openAIChatDeploymentName = "<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>" + } + openAISystemPrompt := os.Getenv("AZURE_OPENAI_SYSTEM_PROMPT") + if openAISystemPrompt == "" { + openAISystemPrompt = "<#= AZURE_OPENAI_SYSTEM_PROMPT #>" + } + + if openAIEndpoint == "" || openAIKey == "" || openAIChatDeploymentName == "" || openAISystemPrompt == "" { + fmt.Println("Please set the environment variables.") + os.Exit(1) + } + + chat, err := New<#= ClassName #>(openAIEndpoint, openAIKey, openAIChatDeploymentName, openAISystemPrompt) + if err != nil { + log.Fatalf("ERROR: %s", err) + } + + for { + fmt.Print("User: ") + input, _ := getUserInput() + if input == "exit" || input == "" { + break + } + + fmt.Printf("\nAssistant: ") + _, err := chat.GetChatCompletionsStream(input, func(content string) { + fmt.Printf("%s", content) + }) + if err != nil { + log.Fatalf("ERROR: %s", err) + } + fmt.Printf("\n\n") + } +} + +func getUserInput() (string, error) { + reader := bufio.NewReader(os.Stdin) + userInput, err := reader.ReadString('\n') + if err != nil { + return "", err + } + userInput = strings.TrimSuffix(userInput, "\n") + userInput = strings.TrimSuffix(userInput, "\r") + return userInput, nil +} diff --git a/src/ai/.x/templates/openai-chat-streaming-go/openai_chat_completions_streaming_hello_world.go b/src/ai/.x/templates/openai-chat-streaming-go/openai_chat_completions_streaming_hello_world.go new file mode 100644 index 00000000..a2220e83 --- /dev/null +++ b/src/ai/.x/templates/openai-chat-streaming-go/openai_chat_completions_streaming_hello_world.go @@ -0,0 +1,94 @@ +<#@ template hostspecific="true" #> +<#@ output extension=".go" encoding="utf-8" #> +<#@ parameter type="System.String" name="ClassName" #> +package main + +import ( + "context" + "errors" + "io" + 
"github.com/Azure/azure-sdk-for-go/sdk/ai/azopenai" + "github.com/Azure/azure-sdk-for-go/sdk/azcore" + "github.com/Azure/azure-sdk-for-go/sdk/azcore/to" +) + +type <#= ClassName #> struct { + client *azopenai.Client + options *azopenai.ChatCompletionsOptions +} + +func New<#= ClassName #>(openAIEndpoint string, openAIKey string, openAIChatDeploymentName string, openAISystemPrompt string) (*<#= ClassName #>, error) { + keyCredential := azcore.NewKeyCredential(openAIKey) + + client, err := azopenai.NewClientWithKeyCredential(openAIEndpoint, keyCredential, nil) + if err != nil { + return nil, err + } + + messages := []azopenai.ChatRequestMessageClassification{ + &azopenai.ChatRequestSystemMessage{ + Content: &openAISystemPrompt, + }, + } + + options := &azopenai.ChatCompletionsOptions{ + DeploymentName: &openAIChatDeploymentName, + Messages: messages, + } + + return &<#= ClassName #> { + client: client, + options: options, + }, nil +} + +func (chat *<#= ClassName #>) ClearConversation() { + chat.options.Messages = chat.options.Messages[:1] +} + +func (chat *<#= ClassName #>) GetChatCompletionsStream(userPrompt string, callback func(content string)) (string, error) { + chat.options.Messages = append(chat.options.Messages, &azopenai.ChatRequestUserMessage{Content: azopenai.NewChatRequestUserMessageContent(userPrompt)}) + + resp, err := chat.client.GetChatCompletionsStream(context.TODO(), *chat.options, nil) + if err != nil { + return "", err + } + defer resp.ChatCompletionsStream.Close() + + responseContent := "" + for { + chatCompletions, err := resp.ChatCompletionsStream.Read() + if errors.Is(err, io.EOF) { + break + } + if err != nil { + return "", err + } + + for _, choice := range chatCompletions.Choices { + + content := "" + if choice.Delta.Content != nil { + content = *choice.Delta.Content + } + + if choice.FinishReason != nil { + finishReason := *choice.FinishReason + if finishReason == azopenai.CompletionsFinishReasonTokenLimitReached { + content = content + 
"\nWARNING: Exceeded token limit!" + } + } + + if content == "" { + continue + } + + callback(content) + responseContent += content + } + } + + chat.options.Messages = append(chat.options.Messages, &azopenai.ChatRequestAssistantMessage{Content: to.Ptr(responseContent)}) + return responseContent, nil +} diff --git a/src/ai/.x/templates/openai-chat-streaming-java/_.json b/src/ai/.x/templates/openai-chat-streaming-java/_.json index acb799d8..5c2e64d0 100644 --- a/src/ai/.x/templates/openai-chat-streaming-java/_.json +++ b/src/ai/.x/templates/openai-chat-streaming-java/_.json @@ -1,8 +1,10 @@ { - "_Name": "OpenAI Chat Completions (Streaming) in Java", + "_LongName": "OpenAI Chat Completions (Streaming)", + "_ShortName": "openai-chat-streaming", "_Language": "Java", - "OPENAI_ENDPOINT": "", - "OPENAI_API_KEY": "", - "AZURE_OPENAI_CHAT_DEPLOYMENT": "", + "ClassName": "OpenAIChatCompletionsStreamingClass", + "AZURE_OPENAI_ENDPOINT": "", + "AZURE_OPENAI_KEY": "", + "AZURE_OPENAI_CHAT_DEPLOYMENT": "", "AZURE_OPENAI_SYSTEM_PROMPT": "You are a helpful AI assistant." 
} \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-streaming-java/scripts/2-compile.bat b/src/ai/.x/templates/openai-chat-streaming-java/scripts/2-compile.bat index f50d9fb1..a2fac8c8 100644 --- a/src/ai/.x/templates/openai-chat-streaming-java/scripts/2-compile.bat +++ b/src/ai/.x/templates/openai-chat-streaming-java/scripts/2-compile.bat @@ -1 +1 @@ -javac -cp target/lib/* src/OpenAIQuickstartStreaming.java -d out +javac -cp target/lib/* src/OpenAIChatCompletionsStreamingClass.java src/Main.java -d out diff --git a/src/ai/.x/templates/openai-chat-streaming-java/scripts/3-run.bat b/src/ai/.x/templates/openai-chat-streaming-java/scripts/3-run.bat index 4f50c0b2..6d301cb6 100644 --- a/src/ai/.x/templates/openai-chat-streaming-java/scripts/3-run.bat +++ b/src/ai/.x/templates/openai-chat-streaming-java/scripts/3-run.bat @@ -1 +1 @@ -java -cp out;target/lib/* OpenAIQuickstartStreaming +java -cp out;target/lib/* Main diff --git a/src/ai/.x/templates/openai-chat-streaming-java/src/Main.java b/src/ai/.x/templates/openai-chat-streaming-java/src/Main.java new file mode 100644 index 00000000..643227fd --- /dev/null +++ b/src/ai/.x/templates/openai-chat-streaming-java/src/Main.java @@ -0,0 +1,40 @@ +<#@ template hostspecific="true" #> +<#@ output extension=".java" encoding="utf-8" #> +<#@ parameter type="System.String" name="ClassName" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_ENDPOINT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_KEY" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_CHAT_DEPLOYMENT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_SYSTEM_PROMPT" #> +import java.util.Scanner; +import reactor.core.publisher.Flux; +import com.azure.ai.openai.models.ChatCompletions; + +public class Main { + + public static void main(String[] args) { + String openAIKey = (System.getenv("AZURE_OPENAI_KEY") != null) ? 
System.getenv("AZURE_OPENAI_KEY") : ""; + String openAIEndpoint = (System.getenv("AZURE_OPENAI_ENDPOINT") != null) ? System.getenv("AZURE_OPENAI_ENDPOINT") : ""; + String openAIChatDeployment = (System.getenv("AZURE_OPENAI_CHAT_DEPLOYMENT") != null) ? System.getenv("AZURE_OPENAI_CHAT_DEPLOYMENT") : ""; + String openAISystemPrompt = (System.getenv("AZURE_OPENAI_SYSTEM_PROMPT") != null) ? System.getenv("AZURE_OPENAI_SYSTEM_PROMPT") : "You are a helpful AI assistant."; + + <#= ClassName #> chat = new <#= ClassName #>(openAIKey, openAIEndpoint, openAIChatDeployment, openAISystemPrompt); + + Scanner scanner = new Scanner(System.in); + while (true) { + System.out.print("User: "); + if (!scanner.hasNextLine()) break; + + String userPrompt = scanner.nextLine(); + if (userPrompt.isEmpty() || "exit".equals(userPrompt)) + break; + + System.out.print("\nAssistant: "); + Flux responseFlux = chat.getChatCompletionsStreamingAsync(userPrompt, update -> { + System.out.print(update.getContent()); + }); + responseFlux.blockLast(); + System.out.println("\n"); + } + scanner.close(); + } +} \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-streaming-java/src/OpenAIChatCompletionsStreamingClass.java b/src/ai/.x/templates/openai-chat-streaming-java/src/OpenAIChatCompletionsStreamingClass.java new file mode 100644 index 00000000..227e29b8 --- /dev/null +++ b/src/ai/.x/templates/openai-chat-streaming-java/src/OpenAIChatCompletionsStreamingClass.java @@ -0,0 +1,77 @@ +<#@ template hostspecific="true" #> +<#@ output extension=".java" encoding="utf-8" #> +<#@ parameter type="System.String" name="ClassName" #> +import com.azure.ai.openai.OpenAIAsyncClient; +import com.azure.ai.openai.OpenAIClientBuilder; +import com.azure.ai.openai.models.*; +import com.azure.core.credential.AzureKeyCredential; +import reactor.core.publisher.Flux; + +import java.util.ArrayList; +import java.util.function.Consumer; +import java.util.List; + +public class <#= ClassName #> { + + private 
OpenAIAsyncClient client; + private ChatCompletionsOptions options; + private String openAIChatDeployment; + private String openAISystemPrompt; + + public <#= ClassName #> (String openAIKey, String openAIEndpoint, String openAIChatDeployment, String openAISystemPrompt) { + + this.openAIChatDeployment = openAIChatDeployment; + this.openAISystemPrompt = openAISystemPrompt; + client = new OpenAIClientBuilder() + .endpoint(openAIEndpoint) + .credential(new AzureKeyCredential(openAIKey)) + .buildAsyncClient(); + + List chatMessages = new ArrayList<>(); + options = new ChatCompletionsOptions(chatMessages); + ClearConversation(); + options.setStream(true); + } + + public void ClearConversation(){ + List chatMessages = options.getMessages(); + chatMessages.clear(); + chatMessages.add(new ChatRequestSystemMessage(this.openAISystemPrompt)); + } + + public Flux getChatCompletionsStreamingAsync(String userPrompt, + Consumer callback) { + options.getMessages().add(new ChatRequestUserMessage(userPrompt)); + + StringBuilder responseContent = new StringBuilder(); + Flux response = client.getChatCompletionsStream(this.openAIChatDeployment, options); + + response.subscribe(chatResponse -> { + if (chatResponse.getChoices() != null) { + for (ChatChoice update : chatResponse.getChoices()) { + if (update.getDelta() == null || update.getDelta().getContent() == null) + continue; + String content = update.getDelta().getContent(); + + if (update.getFinishReason() == CompletionsFinishReason.CONTENT_FILTERED) { + content = content + "\nWARNING: Content filtered!"; + } else if (update.getFinishReason() == CompletionsFinishReason.TOKEN_LIMIT_REACHED) { + content = content + "\nERROR: Exceeded token limit!"; + } + + if (content.isEmpty()) + continue; + + if(callback != null) { + callback.accept(update.getDelta()); + } + responseContent.append(content); + } + + options.getMessages().add(new ChatRequestAssistantMessage(responseContent.toString())); + } + }); + + return response; + } +} \ No 
newline at end of file diff --git a/src/ai/.x/templates/openai-chat-streaming-java/src/OpenAIQuickstartStreaming.java b/src/ai/.x/templates/openai-chat-streaming-java/src/OpenAIQuickstartStreaming.java deleted file mode 100644 index 88c3db27..00000000 --- a/src/ai/.x/templates/openai-chat-streaming-java/src/OpenAIQuickstartStreaming.java +++ /dev/null @@ -1,120 +0,0 @@ -<#@ template hostspecific="true" #> -<#@ output extension=".java" encoding="utf-8" #> -<#@ parameter type="System.String" name="ClassName" #> -<#@ parameter type="System.String" name="OPENAI_ENDPOINT" #> -<#@ parameter type="System.String" name="OPENAI_API_KEY" #> -<#@ parameter type="System.String" name="AZURE_OPENAI_CHAT_DEPLOYMENT" #> -<#@ parameter type="System.String" name="AZURE_OPENAI_SYSTEM_PROMPT" #> -import com.azure.ai.openai.OpenAIAsyncClient; -import com.azure.ai.openai.OpenAIClient; -import com.azure.ai.openai.OpenAIClientBuilder; -import com.azure.ai.openai.models.ChatChoice; -import com.azure.ai.openai.models.ChatCompletions; -import com.azure.ai.openai.models.ChatCompletionsOptions; -import com.azure.ai.openai.models.ChatRequestAssistantMessage; -import com.azure.ai.openai.models.ChatRequestMessage; -import com.azure.ai.openai.models.ChatRequestSystemMessage; -import com.azure.ai.openai.models.ChatRole; -import com.azure.ai.openai.models.ChatRequestUserMessage; -import com.azure.ai.openai.models.ChatResponseMessage; -import com.azure.ai.openai.models.CompletionsUsage; -import com.azure.ai.openai.models.CompletionsFinishReason; -import com.azure.core.credential.AzureKeyCredential; - -import reactor.core.publisher.Flux; - -import java.time.Duration; -import java.util.ArrayList; -import java.util.concurrent.CompletableFuture; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.atomic.AtomicInteger; -import java.util.function.Consumer; -import java.util.List; -import java.util.Scanner; - -public class OpenAIQuickstartStreaming { - - private OpenAIAsyncClient 
client; - private ChatCompletionsOptions options; - - private String key = (System.getenv("OPENAI_API_KEY") != null) ? System.getenv("OPENAI_API_KEY") - : ""; - private String endpoint = (System.getenv("OPENAI_ENDPOINT") != null) ? System.getenv("OPENAI_ENDPOINT") - : ""; - private String deploymentName = (System.getenv("AZURE_OPENAI_CHAT_DEPLOYMENT") != null) - ? System.getenv("AZURE_OPENAI_CHAT_DEPLOYMENT") - : ""; - private String systemPrompt = (System.getenv("AZURE_OPENAI_SYSTEM_PROMPT") != null) - ? System.getenv("AZURE_OPENAI_SYSTEM_PROMPT") - : "You are a helpful AI assistant."; - - public OpenAIQuickstartStreaming() { - - client = new OpenAIClientBuilder() - .endpoint(endpoint) - .credential(new AzureKeyCredential(key)) - .buildAsyncClient(); - - List chatMessages = new ArrayList<>(); - chatMessages.add(new ChatRequestSystemMessage(systemPrompt)); - - options = new ChatCompletionsOptions(chatMessages); - options.setStream(true); - } - - public Flux getChatCompletionsStreamingAsync(String userPrompt, - Consumer callback) { - options.getMessages().add(new ChatRequestUserMessage(userPrompt)); - - StringBuilder responseContent = new StringBuilder(); - Flux response = client.getChatCompletionsStream(deploymentName, options); - - response.subscribe(chatResponse -> { - if (chatResponse.getChoices() != null) { - for (ChatChoice update : chatResponse.getChoices()) { - if (update.getDelta() == null || update.getDelta().getContent() == null) - continue; - callback.accept(update.getDelta()); - String content = update.getDelta().getContent(); - - if (update.getFinishReason() == null) - continue; - if (update.getFinishReason() == CompletionsFinishReason.CONTENT_FILTERED) { - content = content + "\nWARNING: Content filtered!"; - } else if (update.getFinishReason() == CompletionsFinishReason.TOKEN_LIMIT_REACHED) { - content = content + "\nERROR: Exceeded token limit!"; - } - - if (content.isEmpty()) - continue; - - responseContent.append(content); - } - - 
options.getMessages().add(new ChatRequestAssistantMessage(responseContent.toString())); - } - }); - - return response; - } - - public static void main(String[] args) { - OpenAIQuickstartStreaming chat = new OpenAIQuickstartStreaming(); - - Scanner scanner = new Scanner(System.in); - while (true) { - System.out.print("User: "); - String userPrompt = scanner.nextLine(); - if (userPrompt.isEmpty() || "exit".equals(userPrompt)) - break; - - System.out.print("\nAssistant: "); - Flux responseFlux = chat.getChatCompletionsStreamingAsync(userPrompt, update -> { - System.out.print(update.getContent()); - }); - responseFlux.blockLast(Duration.ofSeconds(20)); - System.out.println("\n"); - } - scanner.close(); - } -} \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-streaming-js/ChatCompletionsStreaming.js b/src/ai/.x/templates/openai-chat-streaming-js/ChatCompletionsStreaming.js deleted file mode 100644 index be985d45..00000000 --- a/src/ai/.x/templates/openai-chat-streaming-js/ChatCompletionsStreaming.js +++ /dev/null @@ -1,85 +0,0 @@ -<#@ template hostspecific="true" #> -<#@ output extension=".js" encoding="utf-8" #> -<#@ parameter type="System.String" name="OPENAI_ENDPOINT" #> -<#@ parameter type="System.String" name="OPENAI_API_KEY" #> -<#@ parameter type="System.String" name="AZURE_OPENAI_CHAT_DEPLOYMENT" #> -<#@ parameter type="System.String" name="AZURE_OPENAI_SYSTEM_PROMPT" #> -const { OpenAIClient, AzureKeyCredential } = require("@azure/openai"); - -class OpenAIStreamingChatCompletions { - constructor(systemPrompt, endpoint, azureApiKey, deploymentName) { - this.systemPrompt = systemPrompt; - this.endpoint = endpoint; - this.azureApiKey = azureApiKey; - this.deploymentName = deploymentName; - this.client = new OpenAIClient(this.endpoint, new AzureKeyCredential(this.azureApiKey)); - this.clearConversation(); - } - - clearConversation() { - this.messages = [ - { role: 'system', content: this.systemPrompt } - ]; - } - - async 
getChatCompletions(userInput, callback) { - this.messages.push({ role: 'user', content: userInput }); - - const events = this.client.listChatCompletions(this.deploymentName, this.messages); - - let contentComplete = ''; - for await (const event of events) { - for (const choice of event.choices) { - - let content = choice.delta?.content; - if (choice.finishReason === 'length') { - content = `${content}\nERROR: Exceeded token limit!`; - } - - if (content != null) { - callback(content); - await new Promise(r => setTimeout(r, 50)); // delay to simulate real-time output, word by word - contentComplete += content; - } - } - } - - this.messages.push({ role: 'assistant', content: contentComplete }); - return contentComplete; - } -} - -const readline = require('readline'); -const rl = readline.createInterface({ - input: process.stdin, - output: process.stdout -}); - -async function main() { - const endpoint = process.env["OPENAI_ENDPOINT"] || "<#= OPENAI_ENDPOINT #>"; - const azureApiKey = process.env["OPENAI_API_KEY"] || "<#= OPENAI_API_KEY #>"; - const deploymentName = process.env["AZURE_OPENAI_CHAT_DEPLOYMENT"] || "<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>" ; - const systemPrompt = process.env["AZURE_OPENAI_SYSTEM_PROMPT"] || "<#= AZURE_OPENAI_SYSTEM_PROMPT #>" ; - - const streamingChatCompletions = new OpenAIStreamingChatCompletions(systemPrompt, endpoint, azureApiKey, deploymentName); - - while (true) { - - const input = await new Promise(resolve => rl.question('User: ', resolve)); - if (input === 'exit' || input === '') break; - - let response = await streamingChatCompletions.getChatCompletions(input, (content) => { - console.log(`assistant-streaming: ${content}`); - }); - - console.log(`\nAssistant: ${response}\n`); - } - - console.log('Bye!'); -} - -main().catch((err) => { - console.error("The sample encountered an error:", err); -}); - -module.exports = { main }; diff --git a/src/ai/.x/templates/openai-chat-streaming-js/Main.js 
b/src/ai/.x/templates/openai-chat-streaming-js/Main.js new file mode 100644 index 00000000..ca55cc11 --- /dev/null +++ b/src/ai/.x/templates/openai-chat-streaming-js/Main.js @@ -0,0 +1,48 @@ +<#@ template hostspecific="true" #> +<#@ output extension=".js" encoding="utf-8" #> +<#@ parameter type="System.String" name="ClassName" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_ENDPOINT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_KEY" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_CHAT_DEPLOYMENT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_SYSTEM_PROMPT" #> +const { <#= ClassName #> } = require("./OpenAIChatCompletionsStreamingClass"); + +const readline = require('readline'); +const rl = readline.createInterface({ + input: process.stdin, + output: process.stdout +}); + +async function main() { + + const openAIEndpoint = process.env["AZURE_OPENAI_ENDPOINT"] || "<#= AZURE_OPENAI_ENDPOINT #>"; + const openAIKey = process.env["AZURE_OPENAI_KEY"] || "<#= AZURE_OPENAI_KEY #>"; + const openAIChatDeploymentName = process.env["AZURE_OPENAI_CHAT_DEPLOYMENT"] || "<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>" ; + const openAISystemPrompt = process.env["AZURE_OPENAI_SYSTEM_PROMPT"] || "<#= AZURE_OPENAI_SYSTEM_PROMPT #>" ; + + const chat = new <#= ClassName #>(openAIEndpoint, openAIKey, openAIChatDeploymentName, openAISystemPrompt); + + while (true) { + + const input = await new Promise(resolve => rl.question('User: ', resolve)); + if (input === 'exit' || input === '') break; + + let response = await chat.getChatCompletions(input, (content) => { + console.log(`assistant-streaming: ${content}`); + }); + + console.log(`\nAssistant: ${response}\n`); + } + + console.log('Bye!'); + process.exit(); +} + +main().catch((err) => { + if (err.code !== 'ERR_USE_AFTER_CLOSE') { // filter out expected error (EOF on redirected input) + console.error("The sample encountered an error:", err); + process.exit(1); + } +}); + +module.exports = { main }; diff 
--git a/src/ai/.x/templates/openai-chat-streaming-js/OpenAIChatCompletionsStreamingClass.js b/src/ai/.x/templates/openai-chat-streaming-js/OpenAIChatCompletionsStreamingClass.js new file mode 100644 index 00000000..eeed181c --- /dev/null +++ b/src/ai/.x/templates/openai-chat-streaming-js/OpenAIChatCompletionsStreamingClass.js @@ -0,0 +1,49 @@ +<#@ template hostspecific="true" #> +<#@ output extension=".js" encoding="utf-8" #> +<#@ parameter type="System.String" name="ClassName" #> +const { OpenAIClient, AzureKeyCredential } = require("@azure/openai"); + +class <#= ClassName #> { + constructor(openAIEndpoint, openAIKey, openAIChatDeploymentName, openAISystemPrompt) { + this.openAISystemPrompt = openAISystemPrompt; + this.openAIChatDeploymentName = openAIChatDeploymentName; + this.client = new OpenAIClient(openAIEndpoint, new AzureKeyCredential(openAIKey)); + this.clearConversation(); + } + + clearConversation() { + this.messages = [ + { role: 'system', content: this.openAISystemPrompt } + ]; + } + + async getChatCompletions(userInput, callback) { + this.messages.push({ role: 'user', content: userInput }); + + let contentComplete = ''; + const events = await this.client.streamChatCompletions(this.openAIChatDeploymentName, this.messages); + + for await (const event of events) { + for (const choice of event.choices) { + + let content = choice.delta?.content; + if (choice.finishReason === 'length') { + content = `${content}\nERROR: Exceeded token limit!`; + } + + if (content != null) { + if(callback != null) { + callback(content); + } + await new Promise(r => setTimeout(r, 50)); // delay to simulate real-time output, word by word + contentComplete += content; + } + } + } + + this.messages.push({ role: 'assistant', content: contentComplete }); + return contentComplete; + } +} + +exports.<#= ClassName #> = <#= ClassName #>; \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-streaming-js/_.json b/src/ai/.x/templates/openai-chat-streaming-js/_.json 
index 0fa5ea1b..59568312 100644 --- a/src/ai/.x/templates/openai-chat-streaming-js/_.json +++ b/src/ai/.x/templates/openai-chat-streaming-js/_.json @@ -1,8 +1,10 @@ { - "_Name": "OpenAI Chat Completions (Streaming) in JavaScript", + "_LongName": "OpenAI Chat Completions (Streaming)", + "_ShortName": "openai-chat-streaming", "_Language": "JavaScript", - "OPENAI_ENDPOINT": "", - "OPENAI_API_KEY": "", - "AZURE_OPENAI_CHAT_DEPLOYMENT": "", + "ClassName": "OpenAIChatCompletionsStreamingClass", + "AZURE_OPENAI_ENDPOINT": "", + "AZURE_OPENAI_KEY": "", + "AZURE_OPENAI_CHAT_DEPLOYMENT": "", "AZURE_OPENAI_SYSTEM_PROMPT": "You are a helpful AI assistant." } \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-streaming-js/package.json b/src/ai/.x/templates/openai-chat-streaming-js/package.json index 1d208557..17195509 100644 --- a/src/ai/.x/templates/openai-chat-streaming-js/package.json +++ b/src/ai/.x/templates/openai-chat-streaming-js/package.json @@ -2,14 +2,14 @@ "name": "openai-chat-streaming", "version": "1.0.0", "description": "", - "main": "ChatCompletionsStreaming.js", + "main": "Main.js", "scripts": { "test": "echo \"Error: no test specified\" && exit 1" }, "author": "", "license": "MIT", "dependencies": { - "@azure/openai": "1.0.0-beta.8" + "@azure/openai": "1.0.0-beta.10" } } \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-streaming-py/_.json b/src/ai/.x/templates/openai-chat-streaming-py/_.json index fb3a3ab9..6a043ccc 100644 --- a/src/ai/.x/templates/openai-chat-streaming-py/_.json +++ b/src/ai/.x/templates/openai-chat-streaming-py/_.json @@ -1,9 +1,11 @@ { - "_Name": "OpenAI Chat Completions (Streaming) in Python", + "_LongName": "OpenAI Chat Completions (Streaming)", + "_ShortName": "openai-chat-streaming", "_Language": "Python", - "OPENAI_ENDPOINT": "", - "OPENAI_API_KEY": "", - "OPENAI_API_VERSION": "2023-12-01-preview", - "AZURE_OPENAI_CHAT_DEPLOYMENT": "", + "ClassName": "OpenAIChatCompletionsStreaming", + 
"AZURE_OPENAI_ENDPOINT": "", + "AZURE_OPENAI_KEY": "", + "AZURE_OPENAI_API_VERSION": "2023-12-01-preview", + "AZURE_OPENAI_CHAT_DEPLOYMENT": "", "AZURE_OPENAI_SYSTEM_PROMPT": "You are a helpful AI assistant." } \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-streaming-py/main.py b/src/ai/.x/templates/openai-chat-streaming-py/main.py new file mode 100644 index 00000000..574cab71 --- /dev/null +++ b/src/ai/.x/templates/openai-chat-streaming-py/main.py @@ -0,0 +1,38 @@ +<#@ template hostspecific="true" #> +<#@ output extension=".py" encoding="utf-8" #> +<#@ parameter type="System.String" name="ClassName" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_ENDPOINT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_KEY" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_API_VERSION" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_CHAT_DEPLOYMENT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_SYSTEM_PROMPT" #> +from openai_chat_completions_streaming import <#= ClassName #> +import os +import sys + +def main(): + openai_api_version = os.getenv('AZURE_OPENAI_API_VERSION', '<#= AZURE_OPENAI_API_VERSION #>') + openai_endpoint = os.getenv('AZURE_OPENAI_ENDPOINT', '<#= AZURE_OPENAI_ENDPOINT #>') + openai_key = os.getenv('AZURE_OPENAI_KEY', '<#= AZURE_OPENAI_KEY #>') + openai_chat_deployment_name = os.getenv('AZURE_OPENAI_CHAT_DEPLOYMENT', '<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>') + openai_system_prompt = os.getenv('AZURE_OPENAI_SYSTEM_PROMPT', '<#= AZURE_OPENAI_SYSTEM_PROMPT #>') + + chat = <#= ClassName #>(openai_api_version, openai_endpoint, openai_key, openai_chat_deployment_name, openai_system_prompt) + + while True: + user_input = input('User: ') + if user_input == 'exit' or user_input == '': + break + + print("\nAssistant: ", end="") + response = chat.get_chat_completions(user_input, lambda content: print(content, end="")) + print("\n") + +if __name__ == '__main__': + try: + main() + except EOFError: + 
pass + except Exception as e: + print(f"The sample encountered an error: {e}") + sys.exit(1) \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-streaming-py/openai_chat_completions_streaming.py b/src/ai/.x/templates/openai-chat-streaming-py/openai_chat_completions_streaming.py index 43b23c16..b6b47f85 100644 --- a/src/ai/.x/templates/openai-chat-streaming-py/openai_chat_completions_streaming.py +++ b/src/ai/.x/templates/openai-chat-streaming-py/openai_chat_completions_streaming.py @@ -1,56 +1,47 @@ <#@ template hostspecific="true" #> <#@ output extension=".py" encoding="utf-8" #> -<#@ parameter type="System.String" name="OPENAI_ENDPOINT" #> -<#@ parameter type="System.String" name="OPENAI_API_KEY" #> -<#@ parameter type="System.String" name="OPENAI_API_VERSION" #> -<#@ parameter type="System.String" name="AZURE_OPENAI_CHAT_DEPLOYMENT" #> -<#@ parameter type="System.String" name="AZURE_OPENAI_SYSTEM_PROMPT" #> -import os -import openai - -openai.api_type = "azure" -openai.api_base = os.getenv("OPENAI_ENDPOINT") or "<#= OPENAI_ENDPOINT #>" -openai.api_key = os.getenv("OPENAI_API_KEY") or "<#= OPENAI_API_KEY #>" -openai.api_version = os.getenv("OPENAI_API_VERSION") or "<#= OPENAI_API_VERSION #>" - -deploymentName = os.getenv("AZURE_OPENAI_CHAT_DEPLOYMENT") or "<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>" -systemPrompt = os.getenv("AZURE_OPENAI_SYSTEM_PROMPT") or "<#= AZURE_OPENAI_SYSTEM_PROMPT #>" - -messages=[ - {"role": "system", "content": systemPrompt}, -] - -def getChatStreamingCompletions() -> str: - messages.append({"role": "user", "content": userPrompt}) - - response_content = "" - response = openai.ChatCompletion.create( - engine=deploymentName, - messages=messages, - stream=True) - - for update in response: - - choices = update["choices"] if "choices" in update else [] - choice0 = choices[0] if len(choices) > 0 else {} - delta = choice0["delta"] if "delta" in choice0 else {} - - content = delta["content"] if "content" in delta else "" - 
response_content += content - print(content, end="") - - finish_reason = choice0["finish_reason"] if "finish_reason" in choice0 else "" - if finish_reason == "length": - content += f"{content}\nERROR: Exceeded max token length!" - - messages.append({"role": "assistant", "content": response_content}) - return response_content - -while True: - userPrompt = input("User: ") - if userPrompt == "" or userPrompt == "exit": - break - - print("\nAssistant: ", end="") - response_content = getChatStreamingCompletions() - print("\n") +<#@ parameter type="System.String" name="ClassName" #> +from openai import AzureOpenAI + +class <#= ClassName #>: + def __init__(self, openai_api_version, openai_endpoint, openai_key, openai_chat_deployment_name, openai_system_prompt): + self.openai_system_prompt = openai_system_prompt + self.openai_chat_deployment_name = openai_chat_deployment_name + self.client = AzureOpenAI( + api_key=openai_key, + api_version=openai_api_version, + azure_endpoint = openai_endpoint + ) + self.clear_conversation() + + def clear_conversation(self): + self.messages = [ + {'role': 'system', 'content': self.openai_system_prompt} + ] + + def get_chat_completions(self, user_input, callback): + self.messages.append({'role': 'user', 'content': user_input}) + + complete_content = '' + response = self.client.chat.completions.create( + model=self.openai_chat_deployment_name, + messages=self.messages, + stream=True) + + for chunk in response: + + choice0 = chunk.choices[0] if hasattr(chunk, 'choices') and chunk.choices else None + delta = choice0.delta if choice0 and hasattr(choice0, 'delta') else None + content = delta.content if delta and hasattr(delta, 'content') else '' + + finish_reason = choice0.finish_reason if choice0 and hasattr(choice0, 'finish_reason') else None + if finish_reason == 'length': + content += f"{content}\nERROR: Exceeded max token length!" 
+ + if content is None: continue + + complete_content += content + callback(content) + + self.messages.append({'role': 'assistant', 'content': complete_content}) + return complete_content diff --git a/src/ai/.x/templates/openai-chat-streaming-py/requirements.txt b/src/ai/.x/templates/openai-chat-streaming-py/requirements.txt index d008bb14..7a06be70 100644 --- a/src/ai/.x/templates/openai-chat-streaming-py/requirements.txt +++ b/src/ai/.x/templates/openai-chat-streaming-py/requirements.txt @@ -1 +1 @@ -openai==0.28.1 +openai==1.0.0 diff --git a/src/ai/.x/templates/openai-chat-with-azure-ai-search/OpenAIChatWithAzureAISearch.csproj._ b/src/ai/.x/templates/openai-chat-streaming-with-data-cs/OpenAIChatCompletionsWithDataStreaming.csproj._ similarity index 88% rename from src/ai/.x/templates/openai-chat-with-azure-ai-search/OpenAIChatWithAzureAISearch.csproj._ rename to src/ai/.x/templates/openai-chat-streaming-with-data-cs/OpenAIChatCompletionsWithDataStreaming.csproj._ index 51ad3f8f..6ee2c11d 100644 --- a/src/ai/.x/templates/openai-chat-with-azure-ai-search/OpenAIChatWithAzureAISearch.csproj._ +++ b/src/ai/.x/templates/openai-chat-streaming-with-data-cs/OpenAIChatCompletionsWithDataStreaming.csproj._ @@ -4,10 +4,11 @@ - net7.0 + net8.0 enable enable true + Exe diff --git a/src/ai/.x/templates/openai-chat-streaming-with-data-cs/OpenAIChatCompletionsWithDataStreamingClass.cs b/src/ai/.x/templates/openai-chat-streaming-with-data-cs/OpenAIChatCompletionsWithDataStreamingClass.cs new file mode 100644 index 00000000..0ae92a34 --- /dev/null +++ b/src/ai/.x/templates/openai-chat-streaming-with-data-cs/OpenAIChatCompletionsWithDataStreamingClass.cs @@ -0,0 +1,80 @@ +<#@ template hostspecific="true" #> +<#@ output extension=".cs" encoding="utf-8" #> +<#@ parameter type="System.String" name="ClassName" #> +using Azure; +using Azure.AI.OpenAI; +using Azure.Identity; +using System; +using System.Collections.Generic; +using System.IO; +using System.Threading.Tasks; + +public 
class <#= ClassName #> +{ + public <#= ClassName #>(string openAIEndpoint, string openAIKey, string openAIChatDeploymentName, string openAISystemPrompt, string searchEndpoint, string searchApiKey, string searchIndexName, string embeddingsEndpoint) + { + _openAISystemPrompt = openAISystemPrompt; + + _client = string.IsNullOrEmpty(openAIKey) + ? new OpenAIClient(new Uri(openAIEndpoint), new DefaultAzureCredential()) + : new OpenAIClient(new Uri(openAIEndpoint), new AzureKeyCredential(openAIKey)); + + var extensionConfig = new AzureCognitiveSearchChatExtensionConfiguration() + { + SearchEndpoint = new Uri(searchEndpoint), + Key = searchApiKey, + IndexName = searchIndexName, + QueryType = AzureCognitiveSearchQueryType.VectorSimpleHybrid, // Use VectorSimpleHybrid to get the best vector and keyword search query types. + EmbeddingEndpoint = new Uri(embeddingsEndpoint), + EmbeddingKey = openAIKey, + }; + _options = new ChatCompletionsOptions() + { + DeploymentName = openAIChatDeploymentName, + AzureExtensionsOptions = new() + { + Extensions = { extensionConfig } + } + }; + + ClearConversation(); + } + + public void ClearConversation() + { + _options.Messages.Clear(); + _options.Messages.Add(new ChatRequestSystemMessage(_openAISystemPrompt)); + } + + public async Task GetChatCompletionsStreamingAsync(string userPrompt, Action? 
callback = null) + { + _options.Messages.Add(new ChatRequestUserMessage(userPrompt)); + + var responseContent = string.Empty; + var response = await _client.GetChatCompletionsStreamingAsync(_options); + await foreach (var update in response.EnumerateValues()) + { + var content = update.ContentUpdate; + if (update.FinishReason == CompletionsFinishReason.ContentFiltered) + { + content = $"{content}\nWARNING: Content filtered!"; + } + else if (update.FinishReason == CompletionsFinishReason.TokenLimitReached) + { + content = $"{content}\nERROR: Exceeded token limit!"; + } + + if (string.IsNullOrEmpty(content)) continue; + + responseContent += content; + if (callback != null) callback(update); + } + + _options.Messages.Add(new ChatRequestAssistantMessage(responseContent)); + return responseContent; + } + + private string _openAISystemPrompt; + private ChatCompletionsOptions _options; + private OpenAIClient _client; +} \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-streaming-with-data-cs/Program.cs b/src/ai/.x/templates/openai-chat-streaming-with-data-cs/Program.cs new file mode 100644 index 00000000..34c93500 --- /dev/null +++ b/src/ai/.x/templates/openai-chat-streaming-with-data-cs/Program.cs @@ -0,0 +1,47 @@ +<#@ template hostspecific="true" #> +<#@ output extension=".cs" encoding="utf-8" #> +<#@ parameter type="System.String" name="ClassName" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_API_VERSION" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_ENDPOINT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_KEY" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_CHAT_DEPLOYMENT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_EMBEDDING_DEPLOYMENT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_SYSTEM_PROMPT" #> +<#@ parameter type="System.String" name="AZURE_AI_SEARCH_ENDPOINT" #> +<#@ parameter type="System.String" name="AZURE_AI_SEARCH_KEY" #> +<#@ parameter type="System.String" 
name="AZURE_AI_SEARCH_INDEX_NAME" #> +using System; + +public class Program +{ + public static async Task Main(string[] args) + { + var openAIEndpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? "<#= AZURE_OPENAI_ENDPOINT #>"; + var openAIKey = Environment.GetEnvironmentVariable("AZURE_OPENAI_KEY") ?? "<#= AZURE_OPENAI_KEY #>"; + var openAIChatDeploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_CHAT_DEPLOYMENT") ?? "<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>"; + var openAISystemPrompt = Environment.GetEnvironmentVariable("AZURE_OPENAI_SYSTEM_PROMPT") ?? "<#= AZURE_OPENAI_SYSTEM_PROMPT #>"; + + var openAIApiVersion = Environment.GetEnvironmentVariable("AZURE_OPENAI_API_VERSION") ?? "<#= AZURE_OPENAI_API_VERSION #>"; + var openAIEmbeddingsDeploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_EMBEDDING_DEPLOYMENT") ?? "<#= AZURE_OPENAI_EMBEDDING_DEPLOYMENT #>"; + var openAIEmbeddingsEndpoint = $"{openAIEndpoint.Trim('/')}/openai/deployments/{openAIEmbeddingsDeploymentName}/embeddings?api-version={openAIApiVersion}"; + + var searchEndpoint = Environment.GetEnvironmentVariable("AZURE_AI_SEARCH_ENDPOINT") ?? "<#= AZURE_AI_SEARCH_ENDPOINT #>"; + var searchApiKey = Environment.GetEnvironmentVariable("AZURE_AI_SEARCH_KEY") ?? "<#= AZURE_AI_SEARCH_KEY #>"; + var searchIndexName = Environment.GetEnvironmentVariable("AZURE_AI_SEARCH_INDEX_NAME") ?? 
"<#= AZURE_AI_SEARCH_INDEX_NAME #>"; + + var chat = new <#= ClassName #>(openAIEndpoint, openAIKey, openAIChatDeploymentName, openAISystemPrompt, searchEndpoint, searchApiKey, searchIndexName, openAIEmbeddingsEndpoint); + + while (true) + { + Console.Write("User: "); + var userPrompt = Console.ReadLine(); + if (string.IsNullOrEmpty(userPrompt) || userPrompt == "exit") break; + + Console.Write("\nAssistant: "); + var response = await chat.GetChatCompletionsStreamingAsync(userPrompt, update => + Console.Write(update.ContentUpdate) + ); + Console.WriteLine("\n"); + } + } +} \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-streaming-with-data-cs/_.json b/src/ai/.x/templates/openai-chat-streaming-with-data-cs/_.json new file mode 100644 index 00000000..f767eab9 --- /dev/null +++ b/src/ai/.x/templates/openai-chat-streaming-with-data-cs/_.json @@ -0,0 +1,15 @@ +{ + "_LongName": "OpenAI Chat Completions (w/ Data + AI Search)", + "_ShortName": "openai-chat-streaming-with-data", + "_Language": "C#", + "ClassName": "OpenAIChatCompletionsWithDataStreamingClass", + "AZURE_OPENAI_API_VERSION": "", + "AZURE_OPENAI_ENDPOINT": "", + "AZURE_OPENAI_KEY": "", + "AZURE_OPENAI_CHAT_DEPLOYMENT": "", + "AZURE_OPENAI_EMBEDDING_DEPLOYMENT": "", + "AZURE_OPENAI_SYSTEM_PROMPT": "You are a helpful AI assistant.", + "AZURE_AI_SEARCH_ENDPOINT": "", + "AZURE_AI_SEARCH_KEY": "", + "AZURE_AI_SEARCH_INDEX_NAME": "" +} \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-streaming-with-data-go/_.json b/src/ai/.x/templates/openai-chat-streaming-with-data-go/_.json new file mode 100644 index 00000000..b938bc70 --- /dev/null +++ b/src/ai/.x/templates/openai-chat-streaming-with-data-go/_.json @@ -0,0 +1,15 @@ +{ + "_LongName": "OpenAI Chat Completions (w/ Data + AI Search)", + "_ShortName": "openai-chat-streaming-with-data", + "_Language": "Go", + "ClassName": "OpenAIChatCompletionsWithDataStreamingExample", + "AZURE_OPENAI_API_VERSION": "", + 
"AZURE_OPENAI_ENDPOINT": "", + "AZURE_OPENAI_KEY": "", + "AZURE_OPENAI_CHAT_DEPLOYMENT": "", + "AZURE_OPENAI_EMBEDDING_DEPLOYMENT": "", + "AZURE_OPENAI_SYSTEM_PROMPT": "You are a helpful AI assistant.", + "AZURE_AI_SEARCH_ENDPOINT": "", + "AZURE_AI_SEARCH_KEY": "", + "AZURE_AI_SEARCH_INDEX_NAME": "" +} \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-streaming-with-data-go/go.mod b/src/ai/.x/templates/openai-chat-streaming-with-data-go/go.mod new file mode 100644 index 00000000..1d0b37af --- /dev/null +++ b/src/ai/.x/templates/openai-chat-streaming-with-data-go/go.mod @@ -0,0 +1,6 @@ +module openai_chat_completions_streaming_with_data_hello_world + +require ( + github.com/Azure/azure-sdk-for-go/sdk/ai/azopenai v0.4.1 + github.com/Azure/azure-sdk-for-go/sdk/azcore v1.9.1 +) diff --git a/src/ai/.x/templates/openai-chat-streaming-with-data-go/main.go b/src/ai/.x/templates/openai-chat-streaming-with-data-go/main.go new file mode 100644 index 00000000..c522b6b3 --- /dev/null +++ b/src/ai/.x/templates/openai-chat-streaming-with-data-go/main.go @@ -0,0 +1,105 @@ +<#@ template hostspecific="true" #> +<#@ output extension=".go" encoding="utf-8" #> +<#@ parameter type="System.String" name="ClassName" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_API_VERSION" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_ENDPOINT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_KEY" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_CHAT_DEPLOYMENT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_EMBEDDING_DEPLOYMENT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_SYSTEM_PROMPT" #> +<#@ parameter type="System.String" name="AZURE_AI_SEARCH_ENDPOINT" #> +<#@ parameter type="System.String" name="AZURE_AI_SEARCH_KEY" #> +<#@ parameter type="System.String" name="AZURE_AI_SEARCH_INDEX_NAME" #> +package main + +import ( + "bufio" + "fmt" + "log" + "os" + "strings" +) + +func main() { + openAIEndpoint := 
os.Getenv("AZURE_OPENAI_ENDPOINT") + if openAIEndpoint == "" { + openAIEndpoint = "<#= AZURE_OPENAI_ENDPOINT #>" + } + openAIKey := os.Getenv("AZURE_OPENAI_KEY") + if openAIKey == "" { + openAIKey = "<#= AZURE_OPENAI_KEY #>" + } + openAIChatDeploymentName := os.Getenv("AZURE_OPENAI_CHAT_DEPLOYMENT") + if openAIChatDeploymentName == "" { + openAIChatDeploymentName = "<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>" + } + openAISystemPrompt := os.Getenv("AZURE_OPENAI_SYSTEM_PROMPT") + if openAISystemPrompt == "" { + openAISystemPrompt = "<#= AZURE_OPENAI_SYSTEM_PROMPT #>" + } + + openAIApiVersion := os.Getenv("AZURE_OPENAI_API_VERSION") + if openAIApiVersion == "" { + openAIApiVersion = "<#= AZURE_OPENAI_API_VERSION #>" + } + + openAIEmbeddingsDeploymentName := os.Getenv("AZURE_OPENAI_EMBEDDING_DEPLOYMENT") + if openAIEmbeddingsDeploymentName == "" { + openAIEmbeddingsDeploymentName = "<#= AZURE_OPENAI_EMBEDDING_DEPLOYMENT #>" + } + + openAIEndpoint = strings.TrimSuffix(openAIEndpoint, "/") + + azureSearchEndpoint := os.Getenv("AZURE_AI_SEARCH_ENDPOINT") + if azureSearchEndpoint == "" { + azureSearchEndpoint = "<#= AZURE_AI_SEARCH_ENDPOINT #>" + } + + azureSearchApiKey := os.Getenv("AZURE_AI_SEARCH_KEY") + if azureSearchApiKey == "" { + azureSearchApiKey = "<#= AZURE_AI_SEARCH_KEY #>" + } + + azureSearchIndexName := os.Getenv("AZURE_AI_SEARCH_INDEX_NAME") + if azureSearchIndexName == "" { + azureSearchIndexName = "<#= AZURE_AI_SEARCH_INDEX_NAME #>" + } + + if openAIEndpoint == "" || openAIKey == "" || openAIChatDeploymentName == "" || openAISystemPrompt == "" { + fmt.Println("Please set the environment variables.") + os.Exit(1) + } + + chat, err := New<#= ClassName #>(openAIEndpoint, openAIKey, openAIChatDeploymentName, openAISystemPrompt, azureSearchEndpoint, azureSearchApiKey, azureSearchIndexName, openAIEmbeddingsDeploymentName) + if err != nil { + log.Fatalf("ERROR: %s", err) + } + + for { + fmt.Print("User: ") + input, _ := getUserInput() + if input == "exit" || input == "" { + 
break + } + + fmt.Printf("\nAssistant: ") + _, err := chat.GetChatCompletionsStream(input, func(content string) { + fmt.Printf("%s", content) + }) + if err != nil { + log.Fatalf("ERROR: %s", err) + } + fmt.Printf("\n\n") + } +} + +func getUserInput() (string, error) { + reader := bufio.NewReader(os.Stdin) + userInput, err := reader.ReadString('\n') + if err != nil { + return "", err + } + userInput = strings.TrimSuffix(userInput, "\n") + userInput = strings.TrimSuffix(userInput, "\r") + return userInput, nil +} diff --git a/src/ai/.x/templates/openai-chat-streaming-with-data-go/openai_chat_completions_streaming_with_data_hello_world.go b/src/ai/.x/templates/openai-chat-streaming-with-data-go/openai_chat_completions_streaming_with_data_hello_world.go new file mode 100644 index 00000000..c606f996 --- /dev/null +++ b/src/ai/.x/templates/openai-chat-streaming-with-data-go/openai_chat_completions_streaming_with_data_hello_world.go @@ -0,0 +1,122 @@ +<#@ template hostspecific="true" #> +<#@ output extension=".go" encoding="utf-8" #> +<#@ parameter type="System.String" name="ClassName" #> +package main + +import ( + "context" + "errors" + "io" + + "github.com/Azure/azure-sdk-for-go/sdk/ai/azopenai" + "github.com/Azure/azure-sdk-for-go/sdk/azcore" + "github.com/Azure/azure-sdk-for-go/sdk/azcore/to" +) + +type <#= ClassName #> struct { + client *azopenai.Client + options *azopenai.ChatCompletionsOptions +} + +func New<#= ClassName #>( + openAIEndpoint string, + openAIKey string, + openAIChatDeploymentName string, + openAISystemPrompt string, + azureSearchEndpoint string, + azureSearchApiKey string, + azureSearchIndexName string, + openAIEmbeddingsDeploymentName string, + ) (*<#= ClassName #>, error) { + keyCredential := azcore.NewKeyCredential(openAIKey) + + client, err := azopenai.NewClientWithKeyCredential(openAIEndpoint, keyCredential, nil) + if err != nil { + return nil, err + } + + messages := []azopenai.ChatRequestMessageClassification{ + 
&azopenai.ChatRequestSystemMessage{ + Content: &openAISystemPrompt, + }, + } + + options := &azopenai.ChatCompletionsOptions{ + DeploymentName: &openAIChatDeploymentName, + Messages: messages, + AzureExtensionsOptions: []azopenai.AzureChatExtensionConfigurationClassification{ + &azopenai.AzureCognitiveSearchChatExtensionConfiguration{ + Parameters: &azopenai.AzureCognitiveSearchChatExtensionParameters{ + Endpoint: &azureSearchEndpoint, + IndexName: &azureSearchIndexName, + Authentication: &azopenai.OnYourDataAPIKeyAuthenticationOptions{ + Key: &azureSearchApiKey, + }, + QueryType: to.Ptr(azopenai.AzureCognitiveSearchQueryTypeVectorSimpleHybrid), + EmbeddingDependency: &azopenai.OnYourDataDeploymentNameVectorizationSource{ + DeploymentName: &openAIEmbeddingsDeploymentName, + Type: to.Ptr(azopenai.OnYourDataVectorizationSourceTypeDeploymentName), + }, + }, + }, + }, + } + + return &<#= ClassName #>{ + client: client, + options: options, + }, nil + } + +func (chat *<#= ClassName #>) ClearConversation() { + chat.options.Messages = chat.options.Messages[:1] +} + +func (chat *<#= ClassName #>) GetChatCompletionsStream(userPrompt string, callback func(content string)) (string, error) { + chat.options.Messages = append(chat.options.Messages, &azopenai.ChatRequestUserMessage{Content: azopenai.NewChatRequestUserMessageContent(userPrompt)}) + + resp, err := chat.client.GetChatCompletionsStream(context.TODO(), *chat.options, nil) + if err != nil { + return "", err + } + defer resp.ChatCompletionsStream.Close() + + responseContent := "" + for { + chatCompletions, err := resp.ChatCompletionsStream.Read() + if errors.Is(err, io.EOF) { + break + } + if err != nil { + return "", err + } + + for _, choice := range chatCompletions.Choices { + + content := "" + if choice.Delta.Content != nil { + content = *choice.Delta.Content + } + + if choice.FinishReason != nil { + finishReason := *choice.FinishReason + if finishReason == 
azopenai.CompletionsFinishReasonTokenLimitReached { + content = content + "\nWARNING: Exceeded token limit!" + } + } + + if content == "" { + continue + } + + if callback != nil { + callback(content) + } + responseContent += content + } + } + + chat.options.Messages = append(chat.options.Messages, &azopenai.ChatRequestAssistantMessage{Content: to.Ptr(responseContent)}) + return responseContent, nil +} + diff --git a/src/ai/.x/templates/openai-chat-streaming-with-data-java/_.json b/src/ai/.x/templates/openai-chat-streaming-with-data-java/_.json new file mode 100644 index 00000000..a4123b59 --- /dev/null +++ b/src/ai/.x/templates/openai-chat-streaming-with-data-java/_.json @@ -0,0 +1,15 @@ +{ + "_LongName": "OpenAI Chat Completions (w/ Data + AI Search)", + "_ShortName": "openai-chat-streaming-with-data", + "_Language": "Java", + "ClassName": "OpenAIChatCompletionsWithDataStreamingClass", + "AZURE_OPENAI_API_VERSION": "", + "AZURE_OPENAI_ENDPOINT": "", + "AZURE_OPENAI_KEY": "", + "AZURE_OPENAI_CHAT_DEPLOYMENT": "", + "AZURE_OPENAI_EMBEDDING_DEPLOYMENT": "", + "AZURE_OPENAI_SYSTEM_PROMPT": "You are a helpful AI assistant.", + "AZURE_AI_SEARCH_ENDPOINT": "", + "AZURE_AI_SEARCH_KEY": "", + "AZURE_AI_SEARCH_INDEX_NAME": "" +} \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-streaming-with-data-java/pom.xml b/src/ai/.x/templates/openai-chat-streaming-with-data-java/pom.xml new file mode 100644 index 00000000..59337755 --- /dev/null +++ b/src/ai/.x/templates/openai-chat-streaming-with-data-java/pom.xml @@ -0,0 +1,39 @@ + + 4.0.0 + + com.azure.ai.openai.samples + openai-chat-java-streaming + 1.0-SNAPSHOT + + + + + com.azure + azure-ai-openai + 1.0.0-beta.6 + + + + + + + org.apache.maven.plugins + maven-dependency-plugin + 3.1.2 + + + copy-dependencies + prepare-package + + copy-dependencies + + + ${project.build.directory}/lib + + + + + + + + \ No newline at end of file diff --git 
a/src/ai/.x/templates/openai-chat-streaming-with-data-java/scripts/1-copydependencies.bat b/src/ai/.x/templates/openai-chat-streaming-with-data-java/scripts/1-copydependencies.bat new file mode 100644 index 00000000..f0b4c1c7 --- /dev/null +++ b/src/ai/.x/templates/openai-chat-streaming-with-data-java/scripts/1-copydependencies.bat @@ -0,0 +1 @@ +mvn clean package \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-streaming-with-data-java/scripts/2-compile.bat b/src/ai/.x/templates/openai-chat-streaming-with-data-java/scripts/2-compile.bat new file mode 100644 index 00000000..f0249b59 --- /dev/null +++ b/src/ai/.x/templates/openai-chat-streaming-with-data-java/scripts/2-compile.bat @@ -0,0 +1 @@ +javac -cp target/lib/* src/OpenAIChatCompletionsWithDataStreamingClass.java src/Main.java -d out diff --git a/src/ai/.x/templates/openai-chat-streaming-with-data-java/scripts/3-run.bat b/src/ai/.x/templates/openai-chat-streaming-with-data-java/scripts/3-run.bat new file mode 100644 index 00000000..6d301cb6 --- /dev/null +++ b/src/ai/.x/templates/openai-chat-streaming-with-data-java/scripts/3-run.bat @@ -0,0 +1 @@ +java -cp out;target/lib/* Main diff --git a/src/ai/.x/templates/openai-chat-streaming-with-data-java/src/Main.java b/src/ai/.x/templates/openai-chat-streaming-with-data-java/src/Main.java new file mode 100644 index 00000000..f4efebc5 --- /dev/null +++ b/src/ai/.x/templates/openai-chat-streaming-with-data-java/src/Main.java @@ -0,0 +1,49 @@ +<#@ template hostspecific="true" #> +<#@ output extension=".java" encoding="utf-8" #> +<#@ parameter type="System.String" name="ClassName" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_API_VERSION" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_ENDPOINT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_KEY" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_CHAT_DEPLOYMENT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_EMBEDDING_DEPLOYMENT" #> +<#@ 
parameter type="System.String" name="AZURE_OPENAI_SYSTEM_PROMPT" #> +<#@ parameter type="System.String" name="AZURE_AI_SEARCH_ENDPOINT" #> +<#@ parameter type="System.String" name="AZURE_AI_SEARCH_KEY" #> +<#@ parameter type="System.String" name="AZURE_AI_SEARCH_INDEX_NAME" #> +import java.util.Scanner; +import reactor.core.publisher.Flux; +import com.azure.ai.openai.models.ChatCompletions; + +public class Main { + + public static void main(String[] args) { + String openAIKey = (System.getenv("AZURE_OPENAI_KEY") != null) ? System.getenv("AZURE_OPENAI_KEY") : ""; + String openAIEndpoint = (System.getenv("AZURE_OPENAI_ENDPOINT") != null) ? System.getenv("AZURE_OPENAI_ENDPOINT") : ""; + String openAIChatDeployment = (System.getenv("AZURE_OPENAI_CHAT_DEPLOYMENT") != null) ? System.getenv("AZURE_OPENAI_CHAT_DEPLOYMENT") : ""; + String openAISystemPrompt = (System.getenv("AZURE_OPENAI_SYSTEM_PROMPT") != null) ? System.getenv("AZURE_OPENAI_SYSTEM_PROMPT") : "You are a helpful AI assistant."; + + String openAIApiVersion = System.getenv("AZURE_OPENAI_API_VERSION") != null ? System.getenv("AZURE_OPENAI_API_VERSION") : "<#= AZURE_OPENAI_API_VERSION #>"; + String azureSearchEmbeddingsDeploymentName = System.getenv("AZURE_OPENAI_EMBEDDING_DEPLOYMENT") != null ? System.getenv("AZURE_OPENAI_EMBEDDING_DEPLOYMENT") : "<#= AZURE_OPENAI_EMBEDDING_DEPLOYMENT #>"; + String azureSearchEndpoint = System.getenv("AZURE_AI_SEARCH_ENDPOINT") != null ? System.getenv("AZURE_AI_SEARCH_ENDPOINT") : "<#= AZURE_AI_SEARCH_ENDPOINT #>"; + String azureSearchAPIKey = System.getenv("AZURE_AI_SEARCH_KEY") != null ? System.getenv("AZURE_AI_SEARCH_KEY") : "<#= AZURE_AI_SEARCH_KEY #>"; + String azureSearchIndexName = System.getenv("AZURE_AI_SEARCH_INDEX_NAME") != null ? 
System.getenv("AZURE_AI_SEARCH_INDEX_NAME") : "<#= AZURE_AI_SEARCH_INDEX_NAME #>"; + + <#= ClassName #> chat = new <#= ClassName #>(openAIKey, openAIEndpoint, openAIChatDeployment, openAISystemPrompt, azureSearchEndpoint, azureSearchIndexName, azureSearchAPIKey, azureSearchEmbeddingsDeploymentName); + + Scanner scanner = new Scanner(System.in); + while (true) { + System.out.print("User: "); + String userPrompt = scanner.nextLine(); + if (userPrompt.isEmpty() || "exit".equals(userPrompt)) + break; + + System.out.print("\nAssistant: "); + Flux responseFlux = chat.getChatCompletionsStreamingAsync(userPrompt, update -> { + System.out.print(update.getContent()); + }); + responseFlux.blockLast(); + System.out.println("\n"); + } + scanner.close(); + } +} \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-streaming-with-data-java/src/OpenAIChatCompletionsWithDataStreamingClass.java b/src/ai/.x/templates/openai-chat-streaming-with-data-java/src/OpenAIChatCompletionsWithDataStreamingClass.java new file mode 100644 index 00000000..ec7469bd --- /dev/null +++ b/src/ai/.x/templates/openai-chat-streaming-with-data-java/src/OpenAIChatCompletionsWithDataStreamingClass.java @@ -0,0 +1,109 @@ +<#@ template hostspecific="true" #> +<#@ output extension=".java" encoding="utf-8" #> +<#@ parameter type="System.String" name="ClassName" #> +import com.azure.ai.openai.OpenAIAsyncClient; +import com.azure.ai.openai.OpenAIClientBuilder; +import com.azure.ai.openai.models.AzureCognitiveSearchChatExtensionConfiguration; +import com.azure.ai.openai.models.AzureCognitiveSearchChatExtensionParameters; +import com.azure.ai.openai.models.AzureCognitiveSearchIndexFieldMappingOptions; +import com.azure.ai.openai.models.AzureCognitiveSearchQueryType; +import com.azure.ai.openai.models.ChatChoice; +import com.azure.ai.openai.models.ChatCompletions; +import com.azure.ai.openai.models.ChatCompletionsOptions; +import com.azure.ai.openai.models.ChatRequestAssistantMessage; +import 
com.azure.ai.openai.models.ChatRequestMessage; +import com.azure.ai.openai.models.ChatRequestSystemMessage; +import com.azure.ai.openai.models.ChatRequestUserMessage; +import com.azure.ai.openai.models.ChatResponseMessage; +import com.azure.ai.openai.models.CompletionsFinishReason; +import com.azure.ai.openai.models.OnYourDataApiKeyAuthenticationOptions; +import com.azure.ai.openai.models.OnYourDataDeploymentNameVectorizationSource; +import com.azure.core.credential.AzureKeyCredential; +import reactor.core.publisher.Flux; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.function.Consumer; +import java.util.List; + +public class <#= ClassName #> { + + private OpenAIAsyncClient client; + private ChatCompletionsOptions options; + private String openAIChatDeployment; + private String openAISystemPrompt; + + public <#= ClassName #> ( + String openAIKey, + String openAIEndpoint, + String openAIChatDeployment, + String openAISystemPrompt, + String azureSearchEndpoint, + String azureSearchIndexName, + String azureSearchAPIKey, + String azureSearchEmbeddingsDeploymentName) { + + this.openAIChatDeployment = openAIChatDeployment; + this.openAISystemPrompt = openAISystemPrompt; + client = new OpenAIClientBuilder() + .endpoint(openAIEndpoint) + .credential(new AzureKeyCredential(openAIKey)) + .buildAsyncClient(); + + AzureCognitiveSearchChatExtensionConfiguration searchConfiguration = + new AzureCognitiveSearchChatExtensionConfiguration( + new AzureCognitiveSearchChatExtensionParameters(azureSearchEndpoint, azureSearchIndexName) + .setAuthentication(new OnYourDataApiKeyAuthenticationOptions(azureSearchAPIKey)) + .setQueryType(AzureCognitiveSearchQueryType.VECTOR_SIMPLE_HYBRID) + .setEmbeddingDependency(new OnYourDataDeploymentNameVectorizationSource(azureSearchEmbeddingsDeploymentName)) + ); + + List chatMessages = new ArrayList<>(); + options = new ChatCompletionsOptions(chatMessages) + .setDataSources(Arrays.asList(searchConfiguration)); + 
ClearConversation(); + options.setStream(true); + } + + public void ClearConversation(){ + List chatMessages = options.getMessages(); + chatMessages.clear(); + chatMessages.add(new ChatRequestSystemMessage(this.openAISystemPrompt)); + } + + public Flux getChatCompletionsStreamingAsync(String userPrompt, + Consumer callback) { + options.getMessages().add(new ChatRequestUserMessage(userPrompt)); + + StringBuilder responseContent = new StringBuilder(); + Flux response = client.getChatCompletionsStream(this.openAIChatDeployment, options) + // process each streamed update; the caller's subscription drives the stream exactly once + .doOnNext(chatResponse -> { + if (chatResponse.getChoices() != null) { + for (ChatChoice update : chatResponse.getChoices()) { + if (update.getDelta() == null || update.getDelta().getContent() == null) + continue; + String content = update.getDelta().getContent(); + + if (update.getFinishReason() == CompletionsFinishReason.CONTENT_FILTERED) { + content = content + "\nWARNING: Content filtered!"; + } else if (update.getFinishReason() == CompletionsFinishReason.TOKEN_LIMIT_REACHED) { + content = content + "\nERROR: Exceeded token limit!"; + } + + if (content.isEmpty()) + continue; + + if(callback != null) { + callback.accept(update.getDelta()); + } + responseContent.append(content); + } + } + }) + .doOnComplete(() -> + options.getMessages().add(new ChatRequestAssistantMessage(responseContent.toString()))); + + return response; + } +} \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-streaming-with-data-js/Main.js b/src/ai/.x/templates/openai-chat-streaming-with-data-js/Main.js new file mode 100644 index 00000000..2123e1fe --- /dev/null +++ b/src/ai/.x/templates/openai-chat-streaming-with-data-js/Main.js @@ -0,0 +1,59 @@ +<#@ template hostspecific="true" #> +<#@ output extension=".js" encoding="utf-8" #> +<#@ parameter type="System.String" name="ClassName" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_ENDPOINT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_KEY" #> +<#@ parameter 
type="System.String" name="AZURE_OPENAI_API_VERSION" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_CHAT_DEPLOYMENT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_SYSTEM_PROMPT" #> +<#@ parameter type="System.String" name="AZURE_AI_SEARCH_ENDPOINT" #> +<#@ parameter type="System.String" name="AZURE_AI_SEARCH_KEY" #> +<#@ parameter type="System.String" name="AZURE_AI_SEARCH_INDEX_NAME" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_EMBEDDING_DEPLOYMENT" #> +const { <#= ClassName #> } = require("./OpenAIChatCompletionsStreamingWithDataClass"); + +const readline = require('readline'); +const rl = readline.createInterface({ + input: process.stdin, + output: process.stdout +}); + +async function main() { + + const openAIEndpoint = process.env["AZURE_OPENAI_ENDPOINT"] || "<#= AZURE_OPENAI_ENDPOINT #>"; + const openAIKey = process.env["AZURE_OPENAI_KEY"] || "<#= AZURE_OPENAI_KEY #>"; + const openAIChatDeploymentName = process.env["AZURE_OPENAI_CHAT_DEPLOYMENT"] || "<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>" ; + const openAISystemPrompt = process.env["AZURE_OPENAI_SYSTEM_PROMPT"] || "<#= AZURE_OPENAI_SYSTEM_PROMPT #>" ; + const openAIAPIVersion = process.env["AZURE_OPENAI_API_VERSION"] || "<#= AZURE_OPENAI_API_VERSION #>" ; + const searchEndpoint = process.env["AZURE_AI_SEARCH_ENDPOINT"] || "<#= AZURE_AI_SEARCH_ENDPOINT #>" ; + const searchAPIKey = process.env["AZURE_AI_SEARCH_KEY"] || "<#= AZURE_AI_SEARCH_KEY #>" ; + const searchIndexName = process.env["AZURE_AI_SEARCH_INDEX_NAME"] || "<#= AZURE_AI_SEARCH_INDEX_NAME #>" ; + const openAIEmbeddingsDeploymentName = process.env["AZURE_OPENAI_EMBEDDING_DEPLOYMENT"] || "<#= AZURE_OPENAI_EMBEDDING_DEPLOYMENT #>" ; + const openAIEmbeddingsEndpoint = `${openAIEndpoint.replace(/\/+$/, '')}/openai/deployments/${openAIEmbeddingsDeploymentName}/embeddings?api-version=${openAIAPIVersion}`; + + const chat = new <#= ClassName #>(openAIEndpoint, openAIKey, openAIChatDeploymentName, openAISystemPrompt, 
searchEndpoint, searchAPIKey, searchIndexName, openAIEmbeddingsEndpoint); + + while (true) { + + const input = await new Promise(resolve => rl.question('User: ', resolve)); + if (input === 'exit' || input === '') break; + + let response = await chat.getChatCompletions(input, (content) => { + console.log(`assistant-streaming: ${content}`); + }); + + console.log(`\nAssistant: ${response}\n`); + } + + console.log('Bye!'); + process.exit(); +} + +main().catch((err) => { + if (err.code !== 'ERR_USE_AFTER_CLOSE') { // filter out expected error (EOF on redirected input) + console.error("The sample encountered an error:", err); + process.exit(1); + } +}); + +module.exports = { main }; diff --git a/src/ai/.x/templates/openai-chat-streaming-with-data-js/OpenAIChatCompletionsStreamingWithDataClass.js b/src/ai/.x/templates/openai-chat-streaming-with-data-js/OpenAIChatCompletionsStreamingWithDataClass.js new file mode 100644 index 00000000..0d4446a6 --- /dev/null +++ b/src/ai/.x/templates/openai-chat-streaming-with-data-js/OpenAIChatCompletionsStreamingWithDataClass.js @@ -0,0 +1,66 @@ +<#@ template hostspecific="true" #> +<#@ output extension=".js" encoding="utf-8" #> +<#@ parameter type="System.String" name="ClassName" #> +const { OpenAIClient, AzureKeyCredential } = require("@azure/openai"); + +class <#= ClassName #> { + constructor(openAIEndpoint, openAIKey, openAIChatDeploymentName, openAISystemPrompt, searchEndpoint, searchAPIKey, searchIndexName, openAIEmbeddingsEndpoint) { + this.openAISystemPrompt = openAISystemPrompt; + this.openAIChatDeploymentName = openAIChatDeploymentName; + this.client = new OpenAIClient(openAIEndpoint, new AzureKeyCredential(openAIKey)); + + this.azureExtensionOptions = { + azureExtensionOptions: { + extensions: [ + { + type: "AzureCognitiveSearch", + endpoint: searchEndpoint, + key: searchAPIKey, + indexName: searchIndexName, + embeddingEndpoint: openAIEmbeddingsEndpoint, + embeddingKey: openAIKey, + queryType: "vectorSimpleHybrid" + }, + ], + 
} + } + + this.clearConversation(); + } + + clearConversation() { + this.messages = [ + { role: 'system', content: this.openAISystemPrompt } + ]; + } + + async getChatCompletions(userInput, callback) { + this.messages.push({ role: 'user', content: userInput }); + + let contentComplete = ''; + const events = await this.client.streamChatCompletions(this.openAIChatDeploymentName, this.messages, this.azureExtensionOptions); + + for await (const event of events) { + for (const choice of event.choices) { + + let content = choice.delta?.content; + if (choice.finishReason === 'length') { + content = `${content}\nERROR: Exceeded token limit!`; + } + + if (content != null) { + if(callback != null) { + callback(content); + } + await new Promise(r => setTimeout(r, 50)); // delay to simulate real-time output, word by word + contentComplete += content; + } + } + } + + this.messages.push({ role: 'assistant', content: contentComplete }); + return contentComplete; + } +} + +exports.<#= ClassName #> = <#= ClassName #>; \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-streaming-with-data-js/_.json b/src/ai/.x/templates/openai-chat-streaming-with-data-js/_.json new file mode 100644 index 00000000..a44ddf53 --- /dev/null +++ b/src/ai/.x/templates/openai-chat-streaming-with-data-js/_.json @@ -0,0 +1,16 @@ +{ + "_LongName": "OpenAI Chat Completions (w/ Data + AI Search)", + "_ShortName": "openai-chat-streaming-with-data", + "_Language": "JavaScript", + "ClassName": "OpenAIChatCompletionsStreamingWithDataClass", + "AZURE_OPENAI_ENDPOINT": "", + "AZURE_OPENAI_KEY": "", + "AZURE_OPENAI_API_VERSION": "", + "AZURE_OPENAI_CHAT_DEPLOYMENT": "", + "AZURE_OPENAI_EMBEDDING_DEPLOYMENT": "", + "AZURE_OPENAI_SYSTEM_PROMPT": "You are a helpful AI assistant.", + "AZURE_AI_SEARCH_ENDPOINT": "", + "AZURE_AI_SEARCH_KEY": "", + "AZURE_AI_SEARCH_INDEX_NAME": "", + "OPENAI_API_VERSION": "" +} \ No newline at end of file diff --git 
a/src/ai/.x/templates/openai-chat-streaming-with-data-js/package.json b/src/ai/.x/templates/openai-chat-streaming-with-data-js/package.json new file mode 100644 index 00000000..17195509 --- /dev/null +++ b/src/ai/.x/templates/openai-chat-streaming-with-data-js/package.json @@ -0,0 +1,15 @@ +{ + "name": "openai-chat-streaming", + "version": "1.0.0", + "description": "", + "main": "Main.js", + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "author": "", + "license": "MIT", + "dependencies": { + "@azure/openai": "1.0.0-beta.10" + } + } + \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-streaming-with-data-py/_.json b/src/ai/.x/templates/openai-chat-streaming-with-data-py/_.json new file mode 100644 index 00000000..a4cfdfd5 --- /dev/null +++ b/src/ai/.x/templates/openai-chat-streaming-with-data-py/_.json @@ -0,0 +1,16 @@ +{ + "_LongName": "OpenAI Chat Completions (w/ Data + AI Search)", + "_ShortName": "openai-chat-streaming-with-data", + "_Language": "Python", + "ClassName": "OpenAIChatCompletionsStreamingWithData", + "AZURE_OPENAI_ENDPOINT": "", + "AZURE_OPENAI_KEY": "", + "AZURE_OPENAI_API_VERSION": "", + "AZURE_OPENAI_CHAT_DEPLOYMENT": "", + "AZURE_OPENAI_EMBEDDING_DEPLOYMENT": "", + "AZURE_OPENAI_SYSTEM_PROMPT": "You are a helpful AI assistant.", + "AZURE_AI_SEARCH_ENDPOINT": "", + "AZURE_AI_SEARCH_KEY": "", + "AZURE_AI_SEARCH_INDEX_NAME": "", + "OPENAI_API_VERSION": "" +} diff --git a/src/ai/.x/templates/openai-chat-streaming-with-data-py/main.py b/src/ai/.x/templates/openai-chat-streaming-with-data-py/main.py new file mode 100644 index 00000000..210ba757 --- /dev/null +++ b/src/ai/.x/templates/openai-chat-streaming-with-data-py/main.py @@ -0,0 +1,47 @@ +<#@ template hostspecific="true" #> +<#@ output extension=".py" encoding="utf-8" #> +<#@ parameter type="System.String" name="ClassName" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_ENDPOINT" #> +<#@ parameter type="System.String" 
name="AZURE_OPENAI_KEY" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_API_VERSION" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_CHAT_DEPLOYMENT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_SYSTEM_PROMPT" #> +<#@ parameter type="System.String" name="AZURE_AI_SEARCH_ENDPOINT" #> +<#@ parameter type="System.String" name="AZURE_AI_SEARCH_KEY" #> +<#@ parameter type="System.String" name="AZURE_AI_SEARCH_INDEX_NAME" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_EMBEDDING_DEPLOYMENT" #> +from openai_chat_completions_with_data_streaming import <#= ClassName #> +import os +import sys + +def main(): + openai_api_version = os.getenv('AZURE_OPENAI_API_VERSION', '<#= AZURE_OPENAI_API_VERSION #>') + openai_endpoint = os.getenv('AZURE_OPENAI_ENDPOINT', '<#= AZURE_OPENAI_ENDPOINT #>') + openai_key = os.getenv('AZURE_OPENAI_KEY', '<#= AZURE_OPENAI_KEY #>') + openai_chat_deployment_name = os.getenv('AZURE_OPENAI_CHAT_DEPLOYMENT', '<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>') + openai_system_prompt = os.getenv('AZURE_OPENAI_SYSTEM_PROMPT', '<#= AZURE_OPENAI_SYSTEM_PROMPT #>') + search_endpoint =os.getenv('AZURE_AI_SEARCH_ENDPOINT', '<#= AZURE_AI_SEARCH_ENDPOINT #>') + search_api_key = os.getenv('AZURE_AI_SEARCH_KEY', '<#= AZURE_AI_SEARCH_KEY #>') + search_index_name = os.getenv('AZURE_AI_SEARCH_INDEX_NAME', '<#= AZURE_AI_SEARCH_INDEX_NAME #>') + openai_embeddings_deployment_name = os.getenv('AZURE_OPENAI_EMBEDDING_DEPLOYMENT', '<#= AZURE_OPENAI_EMBEDDING_DEPLOYMENT #>') + openai_embeddings_endpoint = f"{openai_endpoint.rstrip('/')}/openai/deployments/{openai_embeddings_deployment_name}/embeddings?api-version={openai_api_version}" + + chat = <#= ClassName #>(openai_api_version, openai_endpoint, openai_key, openai_chat_deployment_name, openai_system_prompt, search_endpoint, search_api_key, search_index_name, openai_embeddings_endpoint) + + while True: + user_input = input('User: ') + if user_input == 'exit' or user_input == '': + break + + 
print("\nAssistant: ", end="") + response = chat.get_chat_completions(user_input, lambda content: print(content, end="")) + print("\n") + +if __name__ == '__main__': + try: + main() + except EOFError: + pass + except Exception as e: + print(f"The sample encountered an error: {e}") + sys.exit(1) \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-streaming-with-data-py/openai_chat_completions_with_data_streaming.py b/src/ai/.x/templates/openai-chat-streaming-with-data-py/openai_chat_completions_with_data_streaming.py new file mode 100644 index 00000000..f4e98c9c --- /dev/null +++ b/src/ai/.x/templates/openai-chat-streaming-with-data-py/openai_chat_completions_with_data_streaming.py @@ -0,0 +1,64 @@ +<#@ template hostspecific="true" #> +<#@ output extension=".py" encoding="utf-8" #> +<#@ parameter type="System.String" name="ClassName" #> +from openai import AzureOpenAI + +class <#= ClassName #>: + def __init__(self, openai_api_version, openai_endpoint, openai_key, openai_chat_deployment_name, openai_system_prompt, search_endpoint, search_api_key, search_index_name, openai_embeddings_endpoint): + self.openai_system_prompt = openai_system_prompt + self.openai_chat_deployment_name = openai_chat_deployment_name + self.client = AzureOpenAI( + api_key=openai_key, + api_version=openai_api_version, + base_url = f"{openai_endpoint.rstrip('/')}/openai/deployments/{openai_chat_deployment_name}/extensions" + ) + self.extra_body={ + "dataSources": [ + { + "type": "AzureCognitiveSearch", + "parameters": { + "endpoint": search_endpoint, + "key": search_api_key, + "indexName": search_index_name, + "embeddingEndpoint": openai_embeddings_endpoint, + "embeddingKey": openai_key, + "queryType": "vectorSimpleHybrid" + } + } + ] + } + + self.clear_conversation() + + def clear_conversation(self): + self.messages = [ + {'role': 'system', 'content': self.openai_system_prompt} + ] + + def get_chat_completions(self, user_input, callback): + self.messages.append({'role': 
'user', 'content': user_input}) + + complete_content = '' + response = self.client.chat.completions.create( + model=self.openai_chat_deployment_name, + messages=self.messages, + extra_body=self.extra_body, + stream=True) + + for chunk in response: + + choice0 = chunk.choices[0] if hasattr(chunk, 'choices') and chunk.choices else None + delta = choice0.delta if choice0 and hasattr(choice0, 'delta') else None + content = delta.content if delta and hasattr(delta, 'content') else '' + + finish_reason = choice0.finish_reason if choice0 and hasattr(choice0, 'finish_reason') else None + if finish_reason == 'length': + content += f"{content}\nERROR: Exceeded max token length!" + + if content is None: continue + + complete_content += content + callback(content) + + self.messages.append({'role': 'assistant', 'content': complete_content}) + return complete_content diff --git a/src/ai/.x/templates/openai-chat-streaming-with-data-py/requirements.txt b/src/ai/.x/templates/openai-chat-streaming-with-data-py/requirements.txt new file mode 100644 index 00000000..7a06be70 --- /dev/null +++ b/src/ai/.x/templates/openai-chat-streaming-with-data-py/requirements.txt @@ -0,0 +1 @@ +openai==1.0.0 diff --git a/src/ai/.x/templates/openai-chat-streaming-with-functions-cs/FunctionCallContext.cs b/src/ai/.x/templates/openai-chat-streaming-with-functions-cs/FunctionCallContext.cs new file mode 100644 index 00000000..0b5031ce --- /dev/null +++ b/src/ai/.x/templates/openai-chat-streaming-with-functions-cs/FunctionCallContext.cs @@ -0,0 +1,59 @@ +using Azure.AI.OpenAI; +using System; +using System.Collections.Generic; + +public class FunctionCallContext +{ + private FunctionFactory _functionFactory; + private IList _messages; + private string _functionName = ""; + private string _functionArguments = ""; + + public FunctionCallContext(FunctionFactory functionFactory, IList messages) + { + _functionFactory = functionFactory; + _messages = messages; + } + + + public bool 
CheckForUpdate(StreamingChatCompletionsUpdate update) + { + var updated = false; + + var name = update?.FunctionName; + if (name != null) + { + _functionName = name; + updated = true; + } + + var args = update?.FunctionArgumentsUpdate; + if (args != null) + { + _functionArguments += args; + updated = true; + } + + return updated; + } + + public string? TryCallFunction() + { + var ok = _functionFactory.TryCallFunction(_functionName, _functionArguments, out var result); + if (!ok) return null; + + Console.WriteLine($"\rassistant-function: {_functionName}({_functionArguments}) => {result}"); + Console.Write("\nAssistant: "); + + _messages.Add(new ChatRequestAssistantMessage("") { FunctionCall = new FunctionCall(_functionName, _functionArguments) }); + _messages.Add(new ChatRequestFunctionMessage(_functionName, result)); + + return result; + } + + public void Clear() + { + _functionName = ""; + _functionArguments = ""; + } +} \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-streaming-with-functions-cs/FunctionFactory.cs b/src/ai/.x/templates/openai-chat-streaming-with-functions-cs/FunctionFactory.cs new file mode 100644 index 00000000..79b572e2 --- /dev/null +++ b/src/ai/.x/templates/openai-chat-streaming-with-functions-cs/FunctionFactory.cs @@ -0,0 +1,384 @@ +using System.Reflection; +using Azure.AI.OpenAI; +using System.Collections; +using Newtonsoft.Json.Linq; +using Newtonsoft.Json; + +public class FunctionFactory +{ + public FunctionFactory() + { + } + + public FunctionFactory(Assembly assembly) + { + AddFunctions(assembly); + } + + public FunctionFactory(Type type1, params Type[] types) + { + AddFunctions(type1, types); + } + + public FunctionFactory(IEnumerable types) + { + AddFunctions(types); + } + + public FunctionFactory(Type type) + { + AddFunctions(type); + } + + public void AddFunctions(Assembly assembly) + { + AddFunctions(assembly.GetTypes()); + } + + public void AddFunctions(Type type1, params Type[] types) + { + 
AddFunctions(new List { type1 }); + AddFunctions(types); + } + + public void AddFunctions(IEnumerable types) + { + foreach (var type in types) + { + AddFunctions(type); + } + } + + public void AddFunctions(Type type) + { + var methods = type.GetMethods(BindingFlags.Static | BindingFlags.Public); + foreach (var method in methods) + { + AddFunction(method); + } + } + + public void AddFunction(MethodInfo method) + { + var attributes = method.GetCustomAttributes(typeof(HelperFunctionDescriptionAttribute), false); + if (attributes.Length > 0) + { + var funcDescriptionAttrib = attributes[0] as HelperFunctionDescriptionAttribute; + var funcDescription = funcDescriptionAttrib!.Description; + + string json = GetMethodParametersJsonSchema(method); + _functions.TryAdd(method, new FunctionDefinition(method.Name) + { + Description = funcDescription, + Parameters = new BinaryData(json) + }); + } + } + + public IEnumerable GetFunctionDefinitions() + { + return _functions.Values; + } + + public bool TryCallFunction(string functionName, string functionArguments, out string? result) + { + result = null; + if (!string.IsNullOrEmpty(functionName) && !string.IsNullOrEmpty(functionArguments)) + { + var function = _functions.FirstOrDefault(x => x.Value.Name == functionName); + if (function.Key != null) + { + result = CallFunction(function.Key, function.Value, functionArguments); + return true; + } + } + return false; + } + + // operator to add to FunctionFactories together + public static FunctionFactory operator +(FunctionFactory a, FunctionFactory b) + { + var newFactory = new FunctionFactory(); + a._functions.ToList().ForEach(x => newFactory._functions.Add(x.Key, x.Value)); + b._functions.ToList().ForEach(x => newFactory._functions.Add(x.Key, x.Value)); + return newFactory; + } + + private static string? 
CallFunction(MethodInfo methodInfo, FunctionDefinition functionDefinition, string argumentsAsJson) + { + var jObject = JObject.Parse(argumentsAsJson); + var arguments = new List(); + + var parameters = methodInfo.GetParameters(); + foreach (var parameter in parameters) + { + var parameterName = parameter.Name; + if (parameterName == null) continue; + + var parameterValue = jObject[parameterName]?.ToString(); + if (parameterValue == null) continue; + + var parsed = ParseParameterValue(parameterValue, parameter.ParameterType); + arguments.Add(parsed); + } + + var args = arguments.ToArray(); + var result = CallFunction(methodInfo, args); + return ConvertFunctionResultToString(result); + } + + private static object? CallFunction(MethodInfo methodInfo, object[] args) + { + var t = methodInfo.ReturnType; + return t == typeof(Task) + ? CallVoidAsyncFunction(methodInfo, args) + : t.IsGenericType && t.GetGenericTypeDefinition() == typeof(Task<>) + ? CallAsyncFunction(methodInfo, args) + : t.Name != "Void" + ? CallSyncFunction(methodInfo, args) + : CallVoidFunction(methodInfo, args); + } + + private static object? CallVoidAsyncFunction(MethodInfo methodInfo, object[] args) + { + var task = methodInfo.Invoke(null, args) as Task; + task!.Wait(); + return true; + } + + private static object? CallAsyncFunction(MethodInfo methodInfo, object[] args) + { + var task = methodInfo.Invoke(null, args) as Task; + task!.Wait(); + return task.GetType().GetProperty("Result")?.GetValue(task); + } + + private static object? CallSyncFunction(MethodInfo methodInfo, object[] args) + { + return methodInfo.Invoke(null, args); + } + + private static object? CallVoidFunction(MethodInfo methodInfo, object[] args) + { + methodInfo.Invoke(null, args); + return true; + } + + private static string? ConvertFunctionResultToString(object? 
result) + { + if (result is IEnumerable enumerable && !(result is string)) + { + var array = new JArray(); + foreach (var item in enumerable) + { + var str = item.ToString(); + array.Add(str); + } + return array.ToString(); + } + return result?.ToString(); + } + + private static object ParseParameterValue(string parameterValue, Type parameterType) + { + if (IsArrayType(parameterType)) + { + Type elementType = parameterType.GetElementType()!; + return CreateGenericCollectionFromJsonArray(parameterValue, typeof(Array), elementType); + } + + if (IsTuppleType(parameterType)) + { + Type elementType = parameterType.GetGenericArguments()[0]; + return CreateTuppleTypeFromJsonArray(parameterValue, elementType); + } + + if (IsGenericListOrEquivalentType(parameterType)) + { + Type elementType = parameterType.GetGenericArguments()[0]; + return CreateGenericCollectionFromJsonArray(parameterValue, typeof(List<>), elementType); + } + + switch (Type.GetTypeCode(parameterType)) + { + case TypeCode.Boolean: return bool.Parse(parameterValue!); + case TypeCode.Byte: return byte.Parse(parameterValue!); + case TypeCode.Decimal: return decimal.Parse(parameterValue!); + case TypeCode.Double: return double.Parse(parameterValue!); + case TypeCode.Single: return float.Parse(parameterValue!); + case TypeCode.Int16: return short.Parse(parameterValue!); + case TypeCode.Int32: return int.Parse(parameterValue!); + case TypeCode.Int64: return long.Parse(parameterValue!); + case TypeCode.SByte: return sbyte.Parse(parameterValue!); + case TypeCode.UInt16: return ushort.Parse(parameterValue!); + case TypeCode.UInt32: return uint.Parse(parameterValue!); + case TypeCode.UInt64: return ulong.Parse(parameterValue!); + case TypeCode.String: return parameterValue!; + default: return Convert.ChangeType(parameterValue!, parameterType); + } + } + + private static object CreateGenericCollectionFromJsonArray(string parameterValue, Type collectionType, Type elementType) + { + var array = 
JArray.Parse(parameterValue); + + if (collectionType == typeof(Array)) + { + var collection = Array.CreateInstance(elementType, array.Count); + for (int i = 0; i < array.Count; i++) + { + var parsed = ParseParameterValue(array[i].ToString(), elementType); + if (parsed != null) collection.SetValue(parsed, i); + } + return collection; + } + else if (collectionType == typeof(List<>)) + { + var collection = Activator.CreateInstance(collectionType.MakeGenericType(elementType)); + var list = collection as IList; + foreach (var item in array) + { + var parsed = ParseParameterValue(item.ToString(), elementType); + if (parsed != null) list!.Add(parsed); + } + return collection!; + } + + return array; + } + + private static object CreateTuppleTypeFromJsonArray(string parameterValue, Type elementType) + { + var list = new List(); + + var array = JArray.Parse(parameterValue); + foreach (var item in array) + { + var parsed = ParseParameterValue(item.ToString(), elementType); + if (parsed != null) list!.Add(parsed); + } + + var collection = list.Count() switch + { + 1 => Activator.CreateInstance(typeof(Tuple<>).MakeGenericType(elementType), list[0]), + 2 => Activator.CreateInstance(typeof(Tuple<,>).MakeGenericType(elementType, elementType), list[0], list[1]), + 3 => Activator.CreateInstance(typeof(Tuple<,,>).MakeGenericType(elementType, elementType, elementType), list[0], list[1], list[2]), + 4 => Activator.CreateInstance(typeof(Tuple<,,,>).MakeGenericType(elementType, elementType, elementType, elementType), list[0], list[1], list[2], list[3]), + 5 => Activator.CreateInstance(typeof(Tuple<,,,,>).MakeGenericType(elementType, elementType, elementType, elementType, elementType), list[0], list[1], list[2], list[3], list[4]), + 6 => Activator.CreateInstance(typeof(Tuple<,,,,,>).MakeGenericType(elementType, elementType, elementType, elementType, elementType, elementType), list[0], list[1], list[2], list[3], list[4], list[5]), + 7 => 
Activator.CreateInstance(typeof(Tuple<,,,,,,>).MakeGenericType(elementType, elementType, elementType, elementType, elementType, elementType, elementType), list[0], list[1], list[2], list[3], list[4], list[5], list[6]), + _ => throw new Exception("Tuples with more than 7 elements are not supported") + }; + return collection!; + } + + private static string GetMethodParametersJsonSchema(MethodInfo method) + { + var schema = new JObject(); + schema["type"] = "object"; + + var properties = new JObject(); + schema["properties"] = properties; + + var required = new JArray(); + foreach (var parameter in method.GetParameters()) + { + if (parameter.Name == null) continue; + + properties[parameter.Name] = GetJsonSchemaForParameterWithDescription(parameter); + if (!parameter.IsOptional) + { + required.Add(parameter.Name); + } + } + + schema["required"] = required; + + return schema.ToString(Formatting.None); + } + + private static JToken GetJsonSchemaForParameterWithDescription(ParameterInfo parameter) + { + var schema = GetJsonSchemaForType(parameter.ParameterType); + schema["description"] = GetParameterDescription(parameter); + return schema; + } + + private static string GetParameterDescription(ParameterInfo parameter) + { + var attributes = parameter.GetCustomAttributes(typeof(HelperFunctionParameterDescriptionAttribute), false); + var paramDescriptionAttrib = attributes.Length > 0 ? (attributes[0] as HelperFunctionParameterDescriptionAttribute) : null; + return paramDescriptionAttrib?.Description ?? $"The {parameter.Name} parameter"; + } + + private static JObject GetJsonSchemaForType(Type t) + { + return IsJsonArrayEquivalentType(t) + ? 
GetJsonArraySchemaFromType(t) + : GetJsonPrimativeSchemaFromType(t); + } + + private static JObject GetJsonArraySchemaFromType(Type containerType) + { + var schema = new JObject(); + schema["type"] = "array"; + schema["items"] = GetJsonArrayItemSchemaFromType(containerType); + return schema; + } + + private static JObject GetJsonArrayItemSchemaFromType(Type containerType) + { + var itemType = containerType.IsArray + ? containerType.GetElementType()! + : containerType.GetGenericArguments()[0]; + return GetJsonSchemaForType(itemType); + } + + private static JObject GetJsonPrimativeSchemaFromType(Type primativeType) + { + var schema = new JObject(); + schema["type"] = GetJsonTypeFromPrimitiveType(primativeType); + return schema; + } + + private static string GetJsonTypeFromPrimitiveType(Type primativeType) + { + return Type.GetTypeCode(primativeType) switch + { + TypeCode.Boolean => "boolean", + TypeCode.Byte or TypeCode.SByte or TypeCode.Int16 or TypeCode.Int32 or TypeCode.Int64 or + TypeCode.UInt16 or TypeCode.UInt32 or TypeCode.UInt64 => "integer", + TypeCode.Decimal or TypeCode.Double or TypeCode.Single => "number", + TypeCode.String => "string", + _ => "string" + }; + } + + private static bool IsJsonArrayEquivalentType(Type t) + { + return IsArrayType(t) || IsTuppleType(t) || IsGenericListOrEquivalentType(t); + } + + private static bool IsArrayType(Type t) + { + return t.IsArray; + } + + private static bool IsTuppleType(Type parameterType) + { + return parameterType.IsGenericType && parameterType.GetGenericTypeDefinition().Name.StartsWith("Tuple"); + } + + private static bool IsGenericListOrEquivalentType(Type t) + { + return t.IsGenericType && + (t.GetGenericTypeDefinition() == typeof(List<>) || + t.GetGenericTypeDefinition() == typeof(ICollection<>) || + t.GetGenericTypeDefinition() == typeof(IEnumerable<>) || + t.GetGenericTypeDefinition() == typeof(IList<>) || + t.GetGenericTypeDefinition() == typeof(IReadOnlyCollection<>) || + t.GetGenericTypeDefinition() == 
typeof(IReadOnlyList<>)); + } + + private Dictionary _functions = new(); +} diff --git a/src/ai/.x/templates/openai-chat-streaming-with-functions-cs/HelperFunctionDescriptionAttribute.cs b/src/ai/.x/templates/openai-chat-streaming-with-functions-cs/HelperFunctionDescriptionAttribute.cs new file mode 100644 index 00000000..c6678de8 --- /dev/null +++ b/src/ai/.x/templates/openai-chat-streaming-with-functions-cs/HelperFunctionDescriptionAttribute.cs @@ -0,0 +1,13 @@ +public class HelperFunctionDescriptionAttribute : Attribute +{ + public HelperFunctionDescriptionAttribute() + { + } + + public HelperFunctionDescriptionAttribute(string description) + { + Description = description; + } + + public string? Description { get; set; } +} diff --git a/src/ai/.x/templates/openai-chat-streaming-with-functions-cs/HelperFunctionParameterDescriptionAttribute.cs b/src/ai/.x/templates/openai-chat-streaming-with-functions-cs/HelperFunctionParameterDescriptionAttribute.cs new file mode 100644 index 00000000..36e672a4 --- /dev/null +++ b/src/ai/.x/templates/openai-chat-streaming-with-functions-cs/HelperFunctionParameterDescriptionAttribute.cs @@ -0,0 +1,13 @@ +public class HelperFunctionParameterDescriptionAttribute : Attribute +{ + public HelperFunctionParameterDescriptionAttribute() + { + } + + public HelperFunctionParameterDescriptionAttribute(string? description = null) + { + Description = description; + } + + public string? 
Description { get; set; } +} diff --git a/src/ai/.x/templates/openai-chat-streaming-with-functions-cs/OpenAIChatCompletionsCustomFunctions.cs b/src/ai/.x/templates/openai-chat-streaming-with-functions-cs/OpenAIChatCompletionsCustomFunctions.cs new file mode 100644 index 00000000..55b7daad --- /dev/null +++ b/src/ai/.x/templates/openai-chat-streaming-with-functions-cs/OpenAIChatCompletionsCustomFunctions.cs @@ -0,0 +1,24 @@ +using System; + +public class OpenAIChatCompletionsCustomFunctions +{ + [HelperFunctionDescription("Gets the current weather for a location.")] + public static string GetCurrentWeather(string location) + { + return $"The weather in {location} is 72 degrees and sunny."; + } + + [HelperFunctionDescription("Gets the current date.")] + public static string GetCurrentDate() + { + var date = DateTime.Now; + return $"{date.Year}-{date.Month}-{date.Day}"; + } + + [HelperFunctionDescription("Gets the current time.")] + public static string GetCurrentTime() + { + var date = DateTime.Now; + return $"{date.Hour}:{date.Minute}:{date.Second}"; + } +} diff --git a/src/ai/.x/templates/openai-chat-streaming-with-functions-cs/OpenAIChatCompletionsFunctionsStreaming.csproj._ b/src/ai/.x/templates/openai-chat-streaming-with-functions-cs/OpenAIChatCompletionsFunctionsStreaming.csproj._ new file mode 100644 index 00000000..565629b9 --- /dev/null +++ b/src/ai/.x/templates/openai-chat-streaming-with-functions-cs/OpenAIChatCompletionsFunctionsStreaming.csproj._ @@ -0,0 +1,21 @@ +<#@ template hostspecific="true" #> +<#@ output extension=".csproj" encoding="utf-8" #> +<#@ parameter name="AICLIExtensionReferencePath" type="System.String" #> + + + + net8.0 + enable + enable + true + Exe + + + + + + + + + + \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-streaming-with-functions-cs/OpenAIChatCompletionsFunctionsStreamingClass.cs b/src/ai/.x/templates/openai-chat-streaming-with-functions-cs/OpenAIChatCompletionsFunctionsStreamingClass.cs new file mode 
100644 index 00000000..a804e0c5 --- /dev/null +++ b/src/ai/.x/templates/openai-chat-streaming-with-functions-cs/OpenAIChatCompletionsFunctionsStreamingClass.cs @@ -0,0 +1,83 @@ +<#@ template hostspecific="true" #> +<#@ output extension=".cs" encoding="utf-8" #> +<#@ parameter type="System.String" name="ClassName" #> +using Azure; +using Azure.AI.OpenAI; +using Azure.Identity; +using System; + +public class <#= ClassName #> +{ + public <#= ClassName #>(string openAIEndpoint, string openAIKey, string openAIChatDeploymentName, string openAISystemPrompt, FunctionFactory factory) + { + _openAISystemPrompt = openAISystemPrompt; + _functionFactory = factory; + + _client = string.IsNullOrEmpty(openAIKey) + ? new OpenAIClient(new Uri(openAIEndpoint), new DefaultAzureCredential()) + : new OpenAIClient(new Uri(openAIEndpoint), new AzureKeyCredential(openAIKey)); + + _options = new ChatCompletionsOptions(); + _options.DeploymentName = openAIChatDeploymentName; + + foreach (var function in _functionFactory.GetFunctionDefinitions()) + { + _options.Functions.Add(function); + // _options.Tools.Add(new ChatCompletionsFunctionToolDefinition(function)); + } + + _functionCallContext = new FunctionCallContext(_functionFactory, _options.Messages); + ClearConversation(); + } + + public void ClearConversation() + { + _options.Messages.Clear(); + _options.Messages.Add(new ChatRequestSystemMessage(_openAISystemPrompt)); + } + + public async Task GetChatCompletionsStreamingAsync(string userPrompt, Action? 
callback = null) + { + _options.Messages.Add(new ChatRequestUserMessage(userPrompt)); + + var responseContent = string.Empty; + while (true) + { + var response = await _client.GetChatCompletionsStreamingAsync(_options); + await foreach (var update in response.EnumerateValues()) + { + _functionCallContext.CheckForUpdate(update); + + var content = update.ContentUpdate; + if (update.FinishReason == CompletionsFinishReason.ContentFiltered) + { + content = $"{content}\nWARNING: Content filtered!"; + } + else if (update.FinishReason == CompletionsFinishReason.TokenLimitReached) + { + content = $"{content}\nERROR: Exceeded token limit!"; + } + + if (string.IsNullOrEmpty(content)) continue; + + responseContent += content; + if (callback != null) callback(update); + } + + if (_functionCallContext.TryCallFunction() != null) + { + _functionCallContext.Clear(); + continue; + } + + _options.Messages.Add(new ChatRequestAssistantMessage(responseContent)); + return responseContent; + } + } + + private string _openAISystemPrompt; + private FunctionFactory _functionFactory; + private FunctionCallContext _functionCallContext; + private ChatCompletionsOptions _options; + private OpenAIClient _client; +} \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-streaming-with-functions-cs/Program.cs b/src/ai/.x/templates/openai-chat-streaming-with-functions-cs/Program.cs new file mode 100644 index 00000000..769c8e1b --- /dev/null +++ b/src/ai/.x/templates/openai-chat-streaming-with-functions-cs/Program.cs @@ -0,0 +1,37 @@ +<#@ template hostspecific="true" #> +<#@ output extension=".cs" encoding="utf-8" #> +<#@ parameter type="System.String" name="ClassName" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_ENDPOINT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_KEY" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_CHAT_DEPLOYMENT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_SYSTEM_PROMPT" #> +using System; + +public class 
Program +{ + public static async Task Main(string[] args) + { + var openAIEndpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? "<#= AZURE_OPENAI_ENDPOINT #>"; + var openAIKey = Environment.GetEnvironmentVariable("AZURE_OPENAI_KEY") ?? "<#= AZURE_OPENAI_KEY #>"; + var openAIChatDeploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_CHAT_DEPLOYMENT") ?? "<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>"; + var openAISystemPrompt = Environment.GetEnvironmentVariable("AZURE_OPENAI_SYSTEM_PROMPT") ?? "<#= AZURE_OPENAI_SYSTEM_PROMPT #>"; + + var factory = new FunctionFactory(); + factory.AddFunctions(typeof(OpenAIChatCompletionsCustomFunctions)); + + var chat = new <#= ClassName #>(openAIEndpoint, openAIKey, openAIChatDeploymentName, openAISystemPrompt, factory); + + while (true) + { + Console.Write("User: "); + var userPrompt = Console.ReadLine(); + if (string.IsNullOrEmpty(userPrompt) || userPrompt == "exit") break; + + Console.Write("\nAssistant: "); + var response = await chat.GetChatCompletionsStreamingAsync(userPrompt, update => + Console.Write(update.ContentUpdate) + ); + Console.WriteLine("\n"); + } + } +} \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-streaming-with-functions-cs/_.json b/src/ai/.x/templates/openai-chat-streaming-with-functions-cs/_.json new file mode 100644 index 00000000..46e4cd04 --- /dev/null +++ b/src/ai/.x/templates/openai-chat-streaming-with-functions-cs/_.json @@ -0,0 +1,10 @@ +{ + "_LongName": "OpenAI Chat Completions (w/ Functions)", + "_ShortName": "openai-chat-streaming-with-functions", + "_Language": "C#", + "ClassName": "OpenAIChatCompletionsFunctionsStreamingClass", + "AZURE_OPENAI_ENDPOINT": "", + "AZURE_OPENAI_KEY": "", + "AZURE_OPENAI_CHAT_DEPLOYMENT": "", + "AZURE_OPENAI_SYSTEM_PROMPT": "You are a helpful AI assistant." 
+} \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-streaming-with-functions-go/_.json b/src/ai/.x/templates/openai-chat-streaming-with-functions-go/_.json new file mode 100644 index 00000000..9399b730 --- /dev/null +++ b/src/ai/.x/templates/openai-chat-streaming-with-functions-go/_.json @@ -0,0 +1,10 @@ +{ + "_LongName": "OpenAI Chat Completions (w/ Functions)", + "_ShortName": "openai-chat-streaming-with-functions", + "_Language": "Go", + "ClassName": "OpenAIChatCompletionsFunctionsStreamingExample", + "AZURE_OPENAI_ENDPOINT": "", + "AZURE_OPENAI_KEY": "", + "AZURE_OPENAI_CHAT_DEPLOYMENT": "", + "AZURE_OPENAI_SYSTEM_PROMPT": "You are a helpful AI assistant." +} \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-streaming-with-functions-go/function_call_context.go b/src/ai/.x/templates/openai-chat-streaming-with-functions-go/function_call_context.go new file mode 100644 index 00000000..0f69a4c4 --- /dev/null +++ b/src/ai/.x/templates/openai-chat-streaming-with-functions-go/function_call_context.go @@ -0,0 +1,66 @@ +package main + +import ( + "fmt" + + "github.com/Azure/azure-sdk-for-go/sdk/ai/azopenai" + "github.com/Azure/azure-sdk-for-go/sdk/azcore/to" +) + +type FunctionCallContext struct { + functionFactory *FunctionFactory + options *azopenai.ChatCompletionsOptions + functionName string + functionArguments string +} + +func NewFunctionCallContext(functionFactory *FunctionFactory, options *azopenai.ChatCompletionsOptions) *FunctionCallContext { + return &FunctionCallContext{ + functionFactory: functionFactory, + options: options, + functionName: "", + functionArguments: "", + } +} + +func (fcc *FunctionCallContext) CheckForUpdate(choice azopenai.ChatChoice) bool { + updated := false + + if choice.Delta != nil && choice.Delta.FunctionCall != nil { + name := choice.Delta.FunctionCall.Name + if name != nil && *name != "" { + fcc.functionName = *name + updated = true + } + } + + if choice.Delta != nil && 
choice.Delta.FunctionCall != nil { + args := choice.Delta.FunctionCall.Arguments + if args != nil && *args != "" { + fcc.functionArguments = *args + updated = true + } + } + + return updated +} + +func (fcc *FunctionCallContext) TryCallFunction() string { + result := fcc.functionFactory.TryCallFunction(fcc.functionName, fcc.functionArguments) + if result == "" { + return "" + } + + fmt.Printf("\rassistant-function: %s(%s) => %s\n", fcc.functionName, fcc.functionArguments, result) + fmt.Printf("\nAssistant: ") + + fcc.options.Messages = append(fcc.options.Messages, azopenai.ChatMessage{Role: to.Ptr(azopenai.ChatRoleAssistant), Content: to.Ptr(""), FunctionCall: &azopenai.ChatMessageFunctionCall{Name: to.Ptr(fcc.functionName), Arguments: to.Ptr(fcc.functionArguments)}}) + fcc.options.Messages = append(fcc.options.Messages, azopenai.ChatMessage{Role: to.Ptr(azopenai.ChatRoleFunction), Content: to.Ptr(result), Name: to.Ptr(fcc.functionName)}) + + return result +} + +func (fcc *FunctionCallContext) Clear() { + fcc.functionName = "" + fcc.functionArguments = "" +} diff --git a/src/ai/.x/templates/openai-chat-streaming-with-functions-go/function_factory.go b/src/ai/.x/templates/openai-chat-streaming-with-functions-go/function_factory.go new file mode 100644 index 00000000..0a952fb8 --- /dev/null +++ b/src/ai/.x/templates/openai-chat-streaming-with-functions-go/function_factory.go @@ -0,0 +1,41 @@ +package main + +import ( + "github.com/Azure/azure-sdk-for-go/sdk/ai/azopenai" +) + +type FunctionInfo struct { + Schema azopenai.FunctionDefinition + Function func(string) string +} + +type FunctionFactory struct { + functions map[string]FunctionInfo +} + +func NewFunctionFactory() *FunctionFactory { + return &FunctionFactory{ + functions: make(map[string]FunctionInfo), + } +} + +func (ff *FunctionFactory) AddFunction(schema azopenai.FunctionDefinition, fun func(string) string) { + ff.functions[*schema.Name] = FunctionInfo{Schema: schema, Function: fun} +} + +func (ff 
*FunctionFactory) GetFunctionSchemas() []azopenai.FunctionDefinition { + schemas := []azopenai.FunctionDefinition{} + for _, functionInfo := range ff.functions { + schemas = append(schemas, functionInfo.Schema) + } + return schemas +} + +func (ff *FunctionFactory) TryCallFunction(functionName string, functionArguments string) string { + functionInfo, exists := ff.functions[functionName] + if !exists { + return "" + } + + return functionInfo.Function(functionArguments) +} diff --git a/src/ai/.x/templates/openai-chat-streaming-with-functions-go/go.mod b/src/ai/.x/templates/openai-chat-streaming-with-functions-go/go.mod new file mode 100644 index 00000000..34f300c4 --- /dev/null +++ b/src/ai/.x/templates/openai-chat-streaming-with-functions-go/go.mod @@ -0,0 +1,6 @@ +module openai_chat_completions_functions_streaming_hello_world + +require ( + github.com/Azure/azure-sdk-for-go/sdk/ai/azopenai v0.3.0 + github.com/Azure/azure-sdk-for-go/sdk/azcore v1.9.0 +) diff --git a/src/ai/.x/templates/openai-chat-streaming-with-functions-go/main.go b/src/ai/.x/templates/openai-chat-streaming-with-functions-go/main.go new file mode 100644 index 00000000..c20008d4 --- /dev/null +++ b/src/ai/.x/templates/openai-chat-streaming-with-functions-go/main.go @@ -0,0 +1,74 @@ +<#@ template hostspecific="true" #> +<#@ output extension=".go" encoding="utf-8" #> +<#@ parameter type="System.String" name="ClassName" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_ENDPOINT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_KEY" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_CHAT_DEPLOYMENT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_SYSTEM_PROMPT" #> +package main + +import ( + "bufio" + "fmt" + "log" + "os" + "strings" +) + +func main() { + openAIEndpoint := os.Getenv("AZURE_OPENAI_ENDPOINT") + if openAIEndpoint == "" { + openAIEndpoint = "<#= AZURE_OPENAI_ENDPOINT #>" + } + openAIKey := os.Getenv("AZURE_OPENAI_KEY") + if openAIKey == "" { + openAIKey = 
"<#= AZURE_OPENAI_KEY #>" + } + openAIChatDeploymentName := os.Getenv("AZURE_OPENAI_CHAT_DEPLOYMENT") + if openAIChatDeploymentName == "" { + openAIChatDeploymentName = "<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>" + } + openAISystemPrompt := os.Getenv("AZURE_OPENAI_SYSTEM_PROMPT") + if openAISystemPrompt == "" { + openAISystemPrompt = "<#= AZURE_OPENAI_SYSTEM_PROMPT #>" + } + + if openAIEndpoint == "" || openAIKey == "" || openAIChatDeploymentName == "" || openAISystemPrompt == "" { + fmt.Println("Please set the environment variables.") + os.Exit(1) + } + + factory := NewFunctionFactoryWithCustomFunctions() + chat, err := New<#= ClassName #>(openAIEndpoint, openAIKey, openAIChatDeploymentName, openAISystemPrompt, factory) + if err != nil { + log.Fatalf("ERROR: %s", err) + } + + for { + fmt.Print("User: ") + input, _ := getUserInput() + if input == "exit" || input == "" { + break + } + + fmt.Printf("\nAssistant: ") + _, err := chat.GetChatCompletionsStream(input, func(content string) { + fmt.Printf("%s", content) + }) + if err != nil { + log.Fatalf("ERROR: %s", err) + } + fmt.Printf("\n\n") + } +} + +func getUserInput() (string, error) { + reader := bufio.NewReader(os.Stdin) + userInput, err := reader.ReadString('\n') + if err != nil { + return "", err + } + userInput = strings.TrimSuffix(userInput, "\n") + userInput = strings.TrimSuffix(userInput, "\r") + return userInput, nil +} diff --git a/src/ai/.x/templates/openai-chat-streaming-with-functions-go/openai_chat_completions_custom_functions.go b/src/ai/.x/templates/openai-chat-streaming-with-functions-go/openai_chat_completions_custom_functions.go new file mode 100644 index 00000000..cf6e8500 --- /dev/null +++ b/src/ai/.x/templates/openai-chat-streaming-with-functions-go/openai_chat_completions_custom_functions.go @@ -0,0 +1,66 @@ +package main + +import ( + "encoding/json" + "fmt" + "time" + + "github.com/Azure/azure-sdk-for-go/sdk/ai/azopenai" + "github.com/Azure/azure-sdk-for-go/sdk/azcore/to" +) + +func 
GetCurrentWeather(functionArguments string) string { + var args map[string]string + json.Unmarshal([]byte(functionArguments), &args) + location, _ := args["location"] + return fmt.Sprintf("The weather in %s is 72 degrees and sunny.", location) +} + +var GetCurrentWeatherSchema = azopenai.FunctionDefinition{ + Name: to.Ptr("get_current_weather"), + Description: to.Ptr("Get the current weather in a given location"), + Parameters: map[string]any{ + "type": "object", + "properties": map[string]any{ + "location": map[string]any{ + "type": "string", + "description": "The city and state, e.g. San Francisco, CA", + }, + }, + "required": []string{"location"}, + }, +} + +func GetCurrentDate(_ string) string { + return time.Now().Format("2006-01-02") +} + +var GetCurrentDateSchema = azopenai.FunctionDefinition{ + Name: to.Ptr("get_current_date"), + Description: to.Ptr("Get the current date"), + Parameters: map[string]any{ + "type": "object", + "properties": map[string]any{}, + }, +} + +func GetCurrentTime(_ string) string { + return time.Now().Format("15:04:05") +} + +var GetCurrentTimeSchema = azopenai.FunctionDefinition{ + Name: to.Ptr("get_current_time"), + Description: to.Ptr("Get the current time"), + Parameters: map[string]any{ + "type": "object", + "properties": map[string]any{}, + }, +} + +func NewFunctionFactoryWithCustomFunctions() *FunctionFactory { + factory := NewFunctionFactory() + factory.AddFunction(GetCurrentWeatherSchema, GetCurrentWeather) + factory.AddFunction(GetCurrentDateSchema, GetCurrentDate) + factory.AddFunction(GetCurrentTimeSchema, GetCurrentTime) + return factory +} diff --git a/src/ai/.x/templates/openai-chat-streaming-with-functions-go/openai_chat_completions_functions_streaming_hello_world.go b/src/ai/.x/templates/openai-chat-streaming-with-functions-go/openai_chat_completions_functions_streaming_hello_world.go new file mode 100644 index 00000000..4513f056 --- /dev/null +++ 
b/src/ai/.x/templates/openai-chat-streaming-with-functions-go/openai_chat_completions_functions_streaming_hello_world.go @@ -0,0 +1,113 @@ +<#@ template hostspecific="true" #> +<#@ output extension=".go" encoding="utf-8" #> +<#@ parameter type="System.String" name="ClassName" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_ENDPOINT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_KEY" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_CHAT_DEPLOYMENT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_SYSTEM_PROMPT" #> +package main + +import ( + "context" + "errors" + "io" + + "github.com/Azure/azure-sdk-for-go/sdk/ai/azopenai" + "github.com/Azure/azure-sdk-for-go/sdk/azcore/to" +) + +type <#= ClassName #> struct { + client *azopenai.Client + options *azopenai.ChatCompletionsOptions + functionFactory *FunctionFactory + functionCallContext *FunctionCallContext +} + +func New<#= ClassName #>(openAIEndpoint string, openAIKey string, openAIChatDeploymentName string, openAISystemPrompt string, functionFactory *FunctionFactory) (*<#= ClassName #>, error) { + keyCredential, err := azopenai.NewKeyCredential(openAIKey) + if err != nil { + return nil, err + } + client, err := azopenai.NewClientWithKeyCredential(openAIEndpoint, keyCredential, nil) + if err != nil { + return nil, err + } + + messages := []azopenai.ChatMessage{ + {Role: to.Ptr(azopenai.ChatRoleSystem), Content: to.Ptr(openAISystemPrompt)}, + } + + options := &azopenai.ChatCompletionsOptions{ + Deployment: openAIChatDeploymentName, + Messages: messages, + FunctionCall: &azopenai.ChatCompletionsOptionsFunctionCall{ + Value: to.Ptr("auto"), + }, + Functions: functionFactory.GetFunctionSchemas(), + } + + return &<#= ClassName #>{ + client: client, + options: options, + functionCallContext: NewFunctionCallContext(functionFactory, options), + }, nil +} + +func (chat *<#= ClassName #>) ClearConversation() { + chat.options.Messages = chat.options.Messages[:1] +} + +func (chat *<#= 
ClassName #>) GetChatCompletionsStream(userPrompt string, callback func(content string)) (string, error) { + chat.options.Messages = append(chat.options.Messages, azopenai.ChatMessage{Role: to.Ptr(azopenai.ChatRoleUser), Content: to.Ptr(userPrompt)}) + + responseContent := "" + for { + resp, err := chat.client.GetChatCompletionsStream(context.TODO(), *chat.options, nil) + if err != nil { + return "", err + } + defer resp.ChatCompletionsStream.Close() + + for { + chatCompletions, err := resp.ChatCompletionsStream.Read() + if errors.Is(err, io.EOF) { + break + } + if err != nil { + return "", err + } + + for _, choice := range chatCompletions.Choices { + + chat.functionCallContext.CheckForUpdate(choice) + + content := "" + if choice.Delta.Content != nil { + content = *choice.Delta.Content + } + + if choice.FinishReason != nil { + finishReason := *choice.FinishReason + if finishReason == azopenai.CompletionsFinishReasonLength { + content = content + "\nWARNING: Exceeded token limit!" + } + } + + if content == "" { + continue + } + + callback(content) + responseContent += content + } + } + + if chat.functionCallContext.TryCallFunction() != "" { + chat.functionCallContext.Clear() + continue + } + + chat.options.Messages = append(chat.options.Messages, azopenai.ChatMessage{Role: to.Ptr(azopenai.ChatRoleAssistant), Content: to.Ptr(responseContent)}) + return responseContent, nil + } +} diff --git a/src/ai/.x/templates/openai-functions-streaming-js/FunctionCallContext.js b/src/ai/.x/templates/openai-chat-streaming-with-functions-js/FunctionCallContext.js similarity index 100% rename from src/ai/.x/templates/openai-functions-streaming-js/FunctionCallContext.js rename to src/ai/.x/templates/openai-chat-streaming-with-functions-js/FunctionCallContext.js diff --git a/src/ai/.x/templates/openai-functions-streaming-js/FunctionFactory.js b/src/ai/.x/templates/openai-chat-streaming-with-functions-js/FunctionFactory.js similarity index 100% rename from 
src/ai/.x/templates/openai-functions-streaming-js/FunctionFactory.js rename to src/ai/.x/templates/openai-chat-streaming-with-functions-js/FunctionFactory.js diff --git a/src/ai/.x/templates/openai-chat-streaming-with-functions-js/Main.js b/src/ai/.x/templates/openai-chat-streaming-with-functions-js/Main.js new file mode 100644 index 00000000..20c1cab3 --- /dev/null +++ b/src/ai/.x/templates/openai-chat-streaming-with-functions-js/Main.js @@ -0,0 +1,48 @@ +<#@ template hostspecific="true" #> +<#@ output extension=".js" encoding="utf-8" #> +<#@ parameter type="System.String" name="ClassName" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_ENDPOINT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_KEY" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_CHAT_DEPLOYMENT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_SYSTEM_PROMPT" #> +const { factory } = require("./OpenAIChatCompletionsCustomFunctions"); +const { <#= ClassName #> } = require("./OpenAIChatCompletionsFunctionsStreamingClass"); + +const readline = require('readline'); +const rl = readline.createInterface({ + input: process.stdin, + output: process.stdout +}); + +async function main() { + + const openAIEndpoint = process.env["AZURE_OPENAI_ENDPOINT"] || "<#= AZURE_OPENAI_ENDPOINT #>"; + const openAIKey = process.env["AZURE_OPENAI_KEY"] || "<#= AZURE_OPENAI_KEY #>"; + const openAIChatDeploymentName = process.env["AZURE_OPENAI_CHAT_DEPLOYMENT"] || "<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>" ; + const openAISystemPrompt = process.env["AZURE_OPENAI_SYSTEM_PROMPT"] || "<#= AZURE_OPENAI_SYSTEM_PROMPT #>" ; + + const chat = new <#= ClassName #>(openAIEndpoint, openAIKey, openAIChatDeploymentName, openAISystemPrompt, factory); + + while (true) { + + const input = await new Promise(resolve => rl.question('User: ', resolve)); + if (input === 'exit' || input === '') break; + + let response = await chat.getChatCompletions(input, (content) => { + console.log(`assistant-streaming: 
${content}`); + }); + + console.log(`\nAssistant: ${response}\n`); + } + + console.log('Bye!'); +} + +main().catch((err) => { + if (err.code !== 'ERR_USE_AFTER_CLOSE') { // filter out expected error (EOF on redirected input) + console.error("The sample encountered an error:", err); + process.exit(1); + } +}); + +module.exports = { main }; diff --git a/src/ai/.x/templates/openai-functions-streaming-js/ChatCompletionsCustomFunctions.js b/src/ai/.x/templates/openai-chat-streaming-with-functions-js/OpenAIChatCompletionsCustomFunctions.js similarity index 60% rename from src/ai/.x/templates/openai-functions-streaming-js/ChatCompletionsCustomFunctions.js rename to src/ai/.x/templates/openai-chat-streaming-with-functions-js/OpenAIChatCompletionsCustomFunctions.js index ad3c3e8d..15ed3234 100644 --- a/src/ai/.x/templates/openai-functions-streaming-js/ChatCompletionsCustomFunctions.js +++ b/src/ai/.x/templates/openai-chat-streaming-with-functions-js/OpenAIChatCompletionsCustomFunctions.js @@ -1,3 +1,6 @@ +const { FunctionFactory } = require("./FunctionFactory"); +let factory = new FunctionFactory(); + function getCurrentWeather(function_arguments) { const location = JSON.parse(function_arguments).location; return `The weather in ${location} is 72 degrees and sunny.`; @@ -22,6 +25,8 @@ const getCurrentWeatherSchema = { }, }; +factory.addFunction(getCurrentWeatherSchema, getCurrentWeather); + function getCurrentDate() { const date = new Date(); return `${date.getFullYear()}-${date.getMonth() + 1}-${date.getDate()}`; @@ -36,7 +41,22 @@ const getCurrentDateSchema = { }, }; -exports.getCurrentWeather = getCurrentWeather; -exports.getCurrentWeatherSchema = getCurrentWeatherSchema; -exports.getCurrentDate = getCurrentDate; -exports.getCurrentDateSchema = getCurrentDateSchema; +factory.addFunction(getCurrentDateSchema, getCurrentDate); + +function getCurrentTime() { + const date = new Date(); + return `${date.getHours()}:${date.getMinutes()}:${date.getSeconds()}`; +} + +const 
getCurrentTimeSchema = { + name: "get_current_time", + description: "Get the current time", + parameters: { + type: "object", + properties: {}, + }, +}; + +factory.addFunction(getCurrentTimeSchema, getCurrentTime); + +exports.factory = factory; \ No newline at end of file diff --git a/src/ai/.x/templates/openai-webpage/src/ChatCompletionsFunctionsStreaming.js b/src/ai/.x/templates/openai-chat-streaming-with-functions-js/OpenAIChatCompletionsFunctionsStreamingClass.js similarity index 64% rename from src/ai/.x/templates/openai-webpage/src/ChatCompletionsFunctionsStreaming.js rename to src/ai/.x/templates/openai-chat-streaming-with-functions-js/OpenAIChatCompletionsFunctionsStreamingClass.js index b379f063..907d6899 100644 --- a/src/ai/.x/templates/openai-webpage/src/ChatCompletionsFunctionsStreaming.js +++ b/src/ai/.x/templates/openai-chat-streaming-with-functions-js/OpenAIChatCompletionsFunctionsStreamingClass.js @@ -1,21 +1,21 @@ +<#@ template hostspecific="true" #> +<#@ output extension=".js" encoding="utf-8" #> +<#@ parameter type="System.String" name="ClassName" #> const { OpenAIClient, AzureKeyCredential } = require("@azure/openai"); -const { FunctionFactory } = require("./FunctionFactory"); const { FunctionCallContext } = require("./FunctionCallContext"); -class ChatCompletionsFunctionsStreaming { - constructor(systemPrompt, endpoint, azureApiKey, deploymentName, functionFactory) { - this.systemPrompt = systemPrompt; - this.endpoint = endpoint; - this.azureApiKey = azureApiKey; - this.deploymentName = deploymentName; - this.client = new OpenAIClient(this.endpoint, new AzureKeyCredential(this.azureApiKey)); - this.functionFactory = functionFactory || new FunctionFactory(); +class <#= ClassName #> { + constructor(openAIEndpoint, openAIKey, openAIChatDeploymentName, openAISystemPrompt, functionFactory) { + this.openAISystemPrompt = openAISystemPrompt; + this.openAIChatDeploymentName = openAIChatDeploymentName; + this.client = new OpenAIClient(openAIEndpoint, new 
AzureKeyCredential(openAIKey)); + this.functionFactory = functionFactory; this.clearConversation(); } clearConversation() { this.messages = [ - { role: 'system', content: this.systemPrompt } + { role: 'system', content: this.openAISystemPrompt } ]; this.functionCallContext = new FunctionCallContext(this.functionFactory, this.messages); } @@ -23,9 +23,9 @@ class ChatCompletionsFunctionsStreaming { async getChatCompletions(userInput, callback) { this.messages.push({ role: 'user', content: userInput }); - let contentComplete = ""; + let contentComplete = ''; while (true) { - const events = this.client.listChatCompletions(this.deploymentName, this.messages, { + const events = await this.client.streamChatCompletions(this.openAIChatDeploymentName, this.messages, { functions: this.functionFactory.getFunctionSchemas(), }); @@ -58,4 +58,4 @@ class ChatCompletionsFunctionsStreaming { } } -exports.ChatCompletionsFunctionsStreaming = ChatCompletionsFunctionsStreaming; \ No newline at end of file +exports.<#= ClassName #> = <#= ClassName #>; \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-streaming-with-functions-js/_.json b/src/ai/.x/templates/openai-chat-streaming-with-functions-js/_.json new file mode 100644 index 00000000..92a27fd3 --- /dev/null +++ b/src/ai/.x/templates/openai-chat-streaming-with-functions-js/_.json @@ -0,0 +1,10 @@ +{ + "_LongName": "OpenAI Chat Completions (w/ Functions)", + "_ShortName": "openai-chat-streaming-with-functions", + "_Language": "JavaScript", + "ClassName": "OpenAIChatCompletionsFunctionsStreamingClass", + "AZURE_OPENAI_ENDPOINT": "", + "AZURE_OPENAI_KEY": "", + "AZURE_OPENAI_CHAT_DEPLOYMENT": "", + "AZURE_OPENAI_SYSTEM_PROMPT": "You are a helpful AI assistant." 
+} \ No newline at end of file diff --git a/src/ai/.x/templates/openai-functions-streaming-js/package.json b/src/ai/.x/templates/openai-chat-streaming-with-functions-js/package.json similarity index 87% rename from src/ai/.x/templates/openai-functions-streaming-js/package.json rename to src/ai/.x/templates/openai-chat-streaming-with-functions-js/package.json index 9532927e..5eae9dd5 100644 --- a/src/ai/.x/templates/openai-functions-streaming-js/package.json +++ b/src/ai/.x/templates/openai-chat-streaming-with-functions-js/package.json @@ -9,7 +9,7 @@ "author": "", "license": "MIT", "dependencies": { - "@azure/openai": "1.0.0-beta.8" + "@azure/openai": "1.0.0-beta.10" } } \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-streaming-with-functions-py/_.json b/src/ai/.x/templates/openai-chat-streaming-with-functions-py/_.json new file mode 100644 index 00000000..dc8b30d6 --- /dev/null +++ b/src/ai/.x/templates/openai-chat-streaming-with-functions-py/_.json @@ -0,0 +1,10 @@ +{ + "_LongName": "OpenAI Chat Completions (w/ Functions)", + "_ShortName": "openai-chat-streaming-with-functions", + "_Language": "Python", + "AZURE_OPENAI_ENDPOINT": "", + "AZURE_OPENAI_KEY": "", + "AZURE_OPENAI_API_VERSION": "2023-12-01-preview", + "AZURE_OPENAI_CHAT_DEPLOYMENT": "", + "AZURE_OPENAI_SYSTEM_PROMPT": "You are a helpful AI assistant." 
+} \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-streaming-with-functions-py/function_call_context.py b/src/ai/.x/templates/openai-chat-streaming-with-functions-py/function_call_context.py new file mode 100644 index 00000000..5948fbfa --- /dev/null +++ b/src/ai/.x/templates/openai-chat-streaming-with-functions-py/function_call_context.py @@ -0,0 +1,42 @@ +import json +import logging + +class FunctionCallContext: + def __init__(self, function_factory, messages): + self.function_factory = function_factory + self.messages = messages + self.function_name = '' + self.function_arguments = '' + + def check_for_update(self, choice): + updated = False + + delta = choice.delta if choice and hasattr(choice, 'delta') else {} + name = delta.function_call.name if delta and hasattr(delta, 'function_call') and delta.function_call and hasattr(delta.function_call, 'name') else None + if name is not None: + self.function_name = name + updated = True + + args = delta.function_call.arguments if delta and hasattr(delta, 'function_call') and delta.function_call and hasattr(delta.function_call, 'arguments') else None + if args is not None: + self.function_arguments = f'{self.function_arguments}{args}' + updated = True + + return updated + + def try_call_function(self): + + dict = json.loads(self.function_arguments) if self.function_arguments != '' else None + if dict is None: return None + + result = self.function_factory.try_call_function(self.function_name, dict) + if result is None: return None + + self.messages.append({'role': 'assistant', 'content': None, 'function_call': {'name': self.function_name, 'arguments': self.function_arguments}}) + self.messages.append({'role': 'function', 'content': result, 'name': self.function_name}) + + return result + + def clear(self): + self.function_name = '' + self.function_arguments = '' \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-streaming-with-functions-py/function_factory.py 
b/src/ai/.x/templates/openai-chat-streaming-with-functions-py/function_factory.py new file mode 100644 index 00000000..50ffb085 --- /dev/null +++ b/src/ai/.x/templates/openai-chat-streaming-with-functions-py/function_factory.py @@ -0,0 +1,16 @@ +class FunctionFactory: + def __init__(self): + self.functions = {} + + def add_function(self, schema, func): + self.functions[schema['name']] = {'schema': schema, 'function': func} + + def get_function_schemas(self): + return [value['schema'] for value in self.functions.values()] + + def try_call_function(self, function_name, function_arguments): + function_info = self.functions.get(function_name) + if function_info is None: + return None + + return function_info['function'](function_arguments) \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-streaming-with-functions-py/main.py b/src/ai/.x/templates/openai-chat-streaming-with-functions-py/main.py new file mode 100644 index 00000000..112f102f --- /dev/null +++ b/src/ai/.x/templates/openai-chat-streaming-with-functions-py/main.py @@ -0,0 +1,38 @@ +<#@ template hostspecific="true" #> +<#@ output extension=".py" encoding="utf-8" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_ENDPOINT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_KEY" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_API_VERSION" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_CHAT_DEPLOYMENT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_SYSTEM_PROMPT" #> +from openai_chat_completions_custom_functions import factory +from openai_chat_completions_functions_streaming import OpenAIChatCompletionsFunctionsStreaming +import os +import sys + +def main(): + openai_api_version = os.getenv('AZURE_OPENAI_API_VERSION', '<#= AZURE_OPENAI_API_VERSION #>') + openai_endpoint = os.getenv('AZURE_OPENAI_ENDPOINT', '<#= AZURE_OPENAI_ENDPOINT #>') + openai_key = os.getenv('AZURE_OPENAI_KEY', '<#= AZURE_OPENAI_KEY #>') + openai_chat_deployment_name = 
os.getenv('AZURE_OPENAI_CHAT_DEPLOYMENT', '<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>') + openai_system_prompt = os.getenv('AZURE_OPENAI_SYSTEM_PROMPT', '<#= AZURE_OPENAI_SYSTEM_PROMPT #>') + + chat = OpenAIChatCompletionsFunctionsStreaming(openai_api_version, openai_endpoint, openai_key, openai_chat_deployment_name, openai_system_prompt, factory) + + while True: + user_input = input('User: ') + if user_input == 'exit' or user_input == '': + break + + print("\nAssistant: ", end="") + response = chat.get_chat_completions(user_input, lambda content: print(content, end="")) + print("\n") + +if __name__ == '__main__': + try: + main() + except EOFError: + pass + except Exception as e: + print(f"The sample encountered an error: {e}") + sys.exit(1) \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-streaming-with-functions-py/openai_chat_completions_custom_functions.py b/src/ai/.x/templates/openai-chat-streaming-with-functions-py/openai_chat_completions_custom_functions.py new file mode 100644 index 00000000..91283bdb --- /dev/null +++ b/src/ai/.x/templates/openai-chat-streaming-with-functions-py/openai_chat_completions_custom_functions.py @@ -0,0 +1,66 @@ +from function_factory import FunctionFactory +factory = FunctionFactory() + +def ignore_args_decorator(func): + def wrapper(*args, **kwargs): + return func() + return wrapper + +@ignore_args_decorator +def get_current_date(): + from datetime import date + today = date.today() + return f'{today.year}-{today.month}-{today.day}' + +get_current_date_schema = { + 'name': 'get_current_date', + 'description': 'Get the current date', + 'parameters': { + 'type': 'object', + 'properties': {}, + }, +} + +factory.add_function(get_current_date_schema, get_current_date) + +@ignore_args_decorator +def get_current_time(): + from datetime import datetime + now = datetime.now() + return f'{now.hour}:{now.minute}' + +get_current_time_schema = { + 'name': 'get_current_time', + 'description': 'Get the current time', + 
'parameters': { + 'type': 'object', + 'properties': {}, + }, +} + +factory.add_function(get_current_time_schema, get_current_time) + +def get_current_weather(function_arguments): + location = function_arguments.get('location') + return f'The weather in {location} is 72 degrees and sunny.' + +get_current_weather_schema = { + 'name': 'get_current_weather', + 'description': 'Get the current weather in a given location', + 'parameters': { + 'type': 'object', + 'properties': { + 'location': { + 'type': 'string', + 'description': 'The city and state, e.g. San Francisco, CA', + }, + 'unit': { + 'type': 'string', + 'enum': ['celsius', 'fahrenheit'], + }, + }, + 'required': ['location'], + }, +} + +factory.add_function(get_current_weather_schema, get_current_weather) diff --git a/src/ai/.x/templates/openai-chat-streaming-with-functions-py/openai_chat_completions_functions_streaming.py b/src/ai/.x/templates/openai-chat-streaming-with-functions-py/openai_chat_completions_functions_streaming.py new file mode 100644 index 00000000..104a2c4e --- /dev/null +++ b/src/ai/.x/templates/openai-chat-streaming-with-functions-py/openai_chat_completions_functions_streaming.py @@ -0,0 +1,58 @@ +from openai import AzureOpenAI +from function_call_context import FunctionCallContext + +class OpenAIChatCompletionsFunctionsStreaming: + def __init__(self, openai_api_version, openai_endpoint, openai_key, openai_chat_deployment_name, openai_system_prompt, function_factory): + self.openai_system_prompt = openai_system_prompt + self.openai_chat_deployment_name = openai_chat_deployment_name + self.function_factory = function_factory + self.client = AzureOpenAI( + api_key=openai_key, + api_version=openai_api_version, + azure_endpoint = openai_endpoint + ) + self.clear_conversation() + + def clear_conversation(self): + self.messages = [ + {'role': 'system', 'content': self.openai_system_prompt} + ] + self.function_call_context = FunctionCallContext(self.function_factory, self.messages) + + def 
get_chat_completions(self, user_input, callback): + self.messages.append({'role': 'user', 'content': user_input}) + + complete_content = '' + functions = self.function_factory.get_function_schemas() + + while True: + response = self.client.chat.completions.create( + model=self.openai_chat_deployment_name, + messages=self.messages, + stream=True, + functions=functions, + function_call='auto') + + for chunk in response: + + choice0 = chunk.choices[0] if hasattr(chunk, 'choices') and chunk.choices else None + self.function_call_context.check_for_update(choice0) + + delta = choice0.delta if choice0 and hasattr(choice0, 'delta') else None + content = delta.content if delta and hasattr(delta, 'content') else '' + + finish_reason = choice0.finish_reason if choice0 and hasattr(choice0, 'finish_reason') else None + if finish_reason == 'length': + content = f"{content}\nERROR: Exceeded max token length!" + + if content is None: continue + + complete_content += content + callback(content) + + if self.function_call_context.try_call_function() is not None: + self.function_call_context.clear() + continue + + self.messages.append({'role': 'assistant', 'content': complete_content}) + return complete_content diff --git a/src/ai/.x/templates/openai-chat-streaming-with-functions-py/requirements.txt b/src/ai/.x/templates/openai-chat-streaming-with-functions-py/requirements.txt new file mode 100644 index 00000000..7a06be70 --- /dev/null +++ b/src/ai/.x/templates/openai-chat-streaming-with-functions-py/requirements.txt @@ -0,0 +1 @@ +openai==1.0.0 diff --git a/src/ai/.x/templates/openai-chat-streaming/OpenAIChatCompletionsStreamingClass.cs b/src/ai/.x/templates/openai-chat-streaming/OpenAIChatCompletionsStreamingClass.cs deleted file mode 100644 index 3eff8a12..00000000 --- a/src/ai/.x/templates/openai-chat-streaming/OpenAIChatCompletionsStreamingClass.cs +++ /dev/null @@ -1,80 +0,0 @@ -<#@ template hostspecific="true" #> -<#@ output extension=".cs" encoding="utf-8" #> -<#@ parameter 
type="System.String" name="ClassName" #> -<#@ parameter type="System.String" name="OPENAI_ENDPOINT" #> -<#@ parameter type="System.String" name="OPENAI_API_KEY" #> -<#@ parameter type="System.String" name="AZURE_OPENAI_CHAT_DEPLOYMENT" #> -<#@ parameter type="System.String" name="AZURE_OPENAI_SYSTEM_PROMPT" #> -using Azure; -using Azure.AI.OpenAI; -using Azure.Identity; -using System; - -public class <#= ClassName #> -{ - private OpenAIClient client; - private ChatCompletionsOptions options; - - public <#= ClassName #>() - { - var key = Environment.GetEnvironmentVariable("OPENAI_API_KEY") ?? "<#= OPENAI_API_KEY #>"; - var endpoint = Environment.GetEnvironmentVariable("OPENAI_ENDPOINT") ?? "<#= OPENAI_ENDPOINT #>"; - var deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_CHAT_DEPLOYMENT") ?? "<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>"; - var systemPrompt = Environment.GetEnvironmentVariable("AZURE_OPENAI_SYSTEM_PROMPT") ?? "<#= AZURE_OPENAI_SYSTEM_PROMPT #>"; - - client = string.IsNullOrEmpty(key) - ? 
new OpenAIClient(new Uri(endpoint), new DefaultAzureCredential()) - : new OpenAIClient(new Uri(endpoint), new AzureKeyCredential(key)); - - options = new ChatCompletionsOptions(); - options.DeploymentName = deploymentName; - options.Messages.Add(new ChatRequestSystemMessage(systemPrompt)); - } - - public async Task GetChatCompletionsStreamingAsync(string userPrompt, Action callback = null) - { - options.Messages.Add(new ChatRequestUserMessage(userPrompt)); - - var responseContent = string.Empty; - var response = await client.GetChatCompletionsStreamingAsync(options); - await foreach (var update in response.EnumerateValues()) - { - callback(update); - - var content = update.ContentUpdate; - if (update.FinishReason == CompletionsFinishReason.ContentFiltered) - { - content = $"{content}\nWARNING: Content filtered!"; - } - else if (update.FinishReason == CompletionsFinishReason.TokenLimitReached) - { - content = $"{content}\nERROR: Exceeded token limit!"; - } - - if (string.IsNullOrEmpty(content)) continue; - - responseContent += content; - } - - options.Messages.Add(new ChatRequestAssistantMessage(responseContent)); - return responseContent; - } - - public static async Task Main(string[] args) - { - var chat = new <#= ClassName #>(); - - while (true) - { - Console.Write("User: "); - var userPrompt = Console.ReadLine(); - if (string.IsNullOrEmpty(userPrompt) || userPrompt == "exit") break; - - Console.Write("\nAssistant: "); - var response = await chat.GetChatCompletionsStreamingAsync(userPrompt, update => - Console.Write(update.ContentUpdate) - ); - Console.WriteLine("\n"); - } - } -} diff --git a/src/ai/.x/templates/openai-chat-streaming/_.json b/src/ai/.x/templates/openai-chat-streaming/_.json deleted file mode 100644 index 252f0afd..00000000 --- a/src/ai/.x/templates/openai-chat-streaming/_.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "_Name": "OpenAI Chat Completions (Streaming) in C#", - "_Language": "C#", - "ClassName": "OpenAIHelloWorldStreamingClass", - 
"OPENAI_ENDPOINT": "", - "OPENAI_API_KEY": "", - "AZURE_OPENAI_CHAT_DEPLOYMENT": "", - "AZURE_OPENAI_SYSTEM_PROMPT": "You are a helpful AI assistant." -} \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-with-azure-ai-search/OpenAIChatWithAzureAISearchClass.cs b/src/ai/.x/templates/openai-chat-with-azure-ai-search/OpenAIChatWithAzureAISearchClass.cs deleted file mode 100644 index e6847165..00000000 --- a/src/ai/.x/templates/openai-chat-with-azure-ai-search/OpenAIChatWithAzureAISearchClass.cs +++ /dev/null @@ -1,63 +0,0 @@ -<#@ template hostspecific="true" #> -<#@ output extension=".cs" encoding="utf-8" #> -<#@ parameter type="System.String" name="ClassName" #> -<#@ parameter type="System.String" name="OpenAIEndpoint" #> -<#@ parameter type="System.String" name="OpenAIDeploymentName" #> -<#@ parameter type="System.String" name="SearchEndpoint" #> -<#@ parameter type="System.String" name="SearchApiKey" #> -<#@ parameter type="System.String" name="SearchIndexName" #> -using Azure; -using Azure.AI.OpenAI; -using Azure.Identity; -using System; -using System.Collections.Generic; -using System.IO; -using System.Threading.Tasks; - -public class <#= ClassName #> -{ - private static string _openAIEndpoint = "<#= OpenAIEndpoint #>"; - private static string _openAIDeploymentName = "<#= OpenAIDeploymentName #>"; - private static string _searchEndpoint = "<#= SearchEndpoint #>"; - private static string _searchApiKey = "<#= SearchApiKey #>"; - private static string _searchIndexName = "<#= SearchIndexName #>"; - - public async Task ChatUsingYourOwnData() - { - var client = new OpenAIClient(new Uri(_openAIEndpoint), new DefaultAzureCredential()); - - var contosoExtensionConfig = new AzureCognitiveSearchChatExtensionConfiguration() - { - SearchEndpoint = new Uri(_searchEndpoint), - Key = _searchApiKey, - IndexName = _searchIndexName, - }; - - ChatCompletionsOptions chatCompletionsOptions = new() - { - DeploymentName = _openAIDeploymentName, - Messages = - { - 
new ChatRequestSystemMessage("You are a helpful assistant that answers questions about the Contoso product database."), - new ChatRequestUserMessage("What are the best-selling Contoso products this month?") - }, - - AzureExtensionsOptions = new() - { - Extensions = { contosoExtensionConfig } - } - }; - - Response response = await client.GetChatCompletionsAsync(chatCompletionsOptions); - var message = response.Value.Choices[0].Message; - - Console.WriteLine($"{message.Role}: {message.Content}"); - - Console.WriteLine("Citations and other information:"); - - foreach (var contextMessage in message.AzureExtensionsContext.Messages) - { - Console.WriteLine($"{contextMessage.Role}: {contextMessage.Content}"); - } - } -} \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat-with-azure-ai-search/_.json b/src/ai/.x/templates/openai-chat-with-azure-ai-search/_.json deleted file mode 100644 index f06ecb0a..00000000 --- a/src/ai/.x/templates/openai-chat-with-azure-ai-search/_.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "_Name": "OpenAI Chat w/ Azure AI Search Class Library", - "_Language": "C#", - "AICLIExtensionReferencePath": "", - "ClassName": "OpenAIChatWithAzureAISearchClass", - "OpenAIEndpoint": "https://myaccount.openai.azure.com/", - "SearchEndpoint": "https://your-contoso-search-resource.search.windows.net", - "SearchApiKey": "your-search-api-key", - "SearchIndexName": "contoso-products-index", - "OpenAIDeploymentName": "gpt-35-turbo-0613" -} \ No newline at end of file diff --git a/src/ai/.x/templates/openai-chat/OpenAIChatCompletionsClass.cs b/src/ai/.x/templates/openai-chat/OpenAIChatCompletionsClass.cs deleted file mode 100644 index 060ae8ce..00000000 --- a/src/ai/.x/templates/openai-chat/OpenAIChatCompletionsClass.cs +++ /dev/null @@ -1,59 +0,0 @@ -<#@ template hostspecific="true" #> -<#@ output extension=".cs" encoding="utf-8" #> -<#@ parameter type="System.String" name="ClassName" #> -<#@ parameter type="System.String" name="OPENAI_ENDPOINT" #> 
-<#@ parameter type="System.String" name="OPENAI_API_KEY" #> -<#@ parameter type="System.String" name="AZURE_OPENAI_CHAT_DEPLOYMENT" #> -<#@ parameter type="System.String" name="AZURE_OPENAI_SYSTEM_PROMPT" #> -using Azure; -using Azure.AI.OpenAI; -using Azure.Identity; -using System; - -public class <#= ClassName #> -{ - private OpenAIClient client; - private ChatCompletionsOptions options; - - public <#= ClassName #>() - { - var key = Environment.GetEnvironmentVariable("OPENAI_API_KEY") ?? "<#= OPENAI_API_KEY #>"; - var endpoint = Environment.GetEnvironmentVariable("OPENAI_ENDPOINT") ?? "<#= OPENAI_ENDPOINT #>"; - var deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_CHAT_DEPLOYMENT") ?? "<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>"; - var systemPrompt = Environment.GetEnvironmentVariable("AZURE_OPENAI_SYSTEM_PROMPT") ?? "<#= AZURE_OPENAI_SYSTEM_PROMPT #>"; - - client = string.IsNullOrEmpty(key) - ? new OpenAIClient(new Uri(endpoint), new DefaultAzureCredential()) - : new OpenAIClient(new Uri(endpoint), new AzureKeyCredential(key)); - - options = new ChatCompletionsOptions(); - options.DeploymentName = deploymentName; - options.Messages.Add(new ChatRequestSystemMessage(systemPrompt)); - } - - public string GetChatCompletion(string userPrompt) - { - options.Messages.Add(new ChatRequestUserMessage(userPrompt)); - - var response = client.GetChatCompletions(options); - var responseContent = response.Value.Choices[0].Message.Content; - options.Messages.Add(new ChatRequestAssistantMessage(responseContent)); - - return responseContent; - } - - public static void Main(string[] args) - { - var chat = new OpenAIHelloWorldClass(); - - while (true) - { - Console.Write("User: "); - var userPrompt = Console.ReadLine(); - if (string.IsNullOrEmpty(userPrompt) || userPrompt == "exit") break; - - var response = chat.GetChatCompletion(userPrompt); - Console.WriteLine($"\nAssistant: {response}\n"); - } - } -} \ No newline at end of file diff --git 
a/src/ai/.x/templates/openai-chat/_.json b/src/ai/.x/templates/openai-chat/_.json deleted file mode 100644 index a27cd1e1..00000000 --- a/src/ai/.x/templates/openai-chat/_.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "_Name": "OpenAI Chat Completions in C#", - "_Language": "C#", - "ClassName": "OpenAIHelloWorldClass", - "OPENAI_ENDPOINT": "", - "OPENAI_API_KEY": "", - "AZURE_OPENAI_CHAT_DEPLOYMENT": "", - "AZURE_OPENAI_SYSTEM_PROMPT": "You are a helpful AI assistant." -} \ No newline at end of file diff --git a/src/ai/.x/templates/openai-functions-streaming-js/Main.js b/src/ai/.x/templates/openai-functions-streaming-js/Main.js deleted file mode 100644 index 786a26af..00000000 --- a/src/ai/.x/templates/openai-functions-streaming-js/Main.js +++ /dev/null @@ -1,51 +0,0 @@ -<#@ template hostspecific="true" #> -<#@ output extension=".js" encoding="utf-8" #> -<#@ parameter type="System.String" name="OPENAI_ENDPOINT" #> -<#@ parameter type="System.String" name="OPENAI_API_KEY" #> -<#@ parameter type="System.String" name="AZURE_OPENAI_CHAT_DEPLOYMENT" #> -<#@ parameter type="System.String" name="AZURE_OPENAI_SYSTEM_PROMPT" #> -const customFunctions = require("./ChatCompletionsCustomFunctions"); -const { getCurrentWeatherSchema, getCurrentWeather } = customFunctions; -const { getCurrentDateSchema, getCurrentDate } = customFunctions; -const { FunctionFactory } = require("./FunctionFactory"); -const { ChatCompletionsFunctionsStreaming } = require("./ChatCompletionsFunctionsStreaming"); - -const readline = require('readline'); -const rl = readline.createInterface({ - input: process.stdin, - output: process.stdout -}); - -async function main() { - - let factory = new FunctionFactory(); - factory.addFunction(getCurrentWeatherSchema, getCurrentWeather); - factory.addFunction(getCurrentDateSchema, getCurrentDate); - - const endpoint = process.env["OPENAI_ENDPOINT"] || "<#= OPENAI_ENDPOINT #>"; - const azureApiKey = process.env["OPENAI_API_KEY"] || "<#= OPENAI_API_KEY #>"; - const 
deploymentName = process.env["AZURE_OPENAI_CHAT_DEPLOYMENT"] || "<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>" ; - const systemPrompt = process.env["AZURE_OPENAI_SYSTEM_PROMPT"] || "<#= AZURE_OPENAI_SYSTEM_PROMPT #>" ; - - const streamingChatCompletions = new ChatCompletionsFunctionsStreaming(systemPrompt, endpoint, azureApiKey, deploymentName, factory); - - while (true) { - - const input = await new Promise(resolve => rl.question('User: ', resolve)); - if (input === 'exit' || input === '') break; - - let response = await streamingChatCompletions.getChatCompletions(input, (content) => { - console.log(`assistant-streaming: ${content}`); - }); - - console.log(`\nAssistant: ${response}\n`); - } - - console.log('Bye!'); -} - -main().catch((err) => { - console.error("The sample encountered an error:", err); -}); - -module.exports = { main }; diff --git a/src/ai/.x/templates/openai-functions-streaming-js/_.json b/src/ai/.x/templates/openai-functions-streaming-js/_.json deleted file mode 100644 index 7223f71b..00000000 --- a/src/ai/.x/templates/openai-functions-streaming-js/_.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "_Name": "OpenAI Chat Completions (Functions) in JavaScript", - "_Language": "JavaScript", - "OPENAI_ENDPOINT": "", - "OPENAI_API_KEY": "", - "AZURE_OPENAI_CHAT_DEPLOYMENT": "", - "AZURE_OPENAI_SYSTEM_PROMPT": "You are a helpful AI assistant." 
-} \ No newline at end of file diff --git a/src/ai/.x/templates/openai-webpage/.env b/src/ai/.x/templates/openai-webpage-js/.env similarity index 61% rename from src/ai/.x/templates/openai-webpage/.env rename to src/ai/.x/templates/openai-webpage-js/.env index 191f56b3..bd323058 100644 --- a/src/ai/.x/templates/openai-webpage/.env +++ b/src/ai/.x/templates/openai-webpage-js/.env @@ -1,10 +1,10 @@ <#@ template hostspecific="true" #> <#@ output extension=".env" encoding="utf-8" #> -<#@ parameter type="System.String" name="OPENAI_ENDPOINT" #> -<#@ parameter type="System.String" name="OPENAI_API_KEY" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_ENDPOINT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_KEY" #> <#@ parameter type="System.String" name="AZURE_OPENAI_CHAT_DEPLOYMENT" #> <#@ parameter type="System.String" name="AZURE_OPENAI_SYSTEM_PROMPT" #> AZURE_OPENAI_CHAT_DEPLOYMENT=<#= AZURE_OPENAI_CHAT_DEPLOYMENT #> -OPENAI_API_KEY=<#= OPENAI_API_KEY #> -OPENAI_ENDPOINT=<#= OPENAI_ENDPOINT #> +AZURE_OPENAI_KEY=<#= AZURE_OPENAI_KEY #> +AZURE_OPENAI_ENDPOINT=<#= AZURE_OPENAI_ENDPOINT #> AZURE_OPENAI_SYSTEM_PROMPT=<#= AZURE_OPENAI_SYSTEM_PROMPT #> diff --git a/src/ai/.x/templates/openai-webpage/README.md b/src/ai/.x/templates/openai-webpage-js/README.md similarity index 100% rename from src/ai/.x/templates/openai-webpage/README.md rename to src/ai/.x/templates/openai-webpage-js/README.md diff --git a/src/ai/.x/templates/openai-webpage-js/_.json b/src/ai/.x/templates/openai-webpage-js/_.json new file mode 100644 index 00000000..93bdd911 --- /dev/null +++ b/src/ai/.x/templates/openai-webpage-js/_.json @@ -0,0 +1,10 @@ +{ + "_LongName": "OpenAI Webpage", + "_ShortName": "openai-webpage", + "_Language": "JavaScript", + "ClassName": "OpenAIChatCompletionsStreamingClass", + "AZURE_OPENAI_ENDPOINT": "", + "AZURE_OPENAI_KEY": "", + "AZURE_OPENAI_CHAT_DEPLOYMENT": "", + "AZURE_OPENAI_SYSTEM_PROMPT": "You are a helpful AI assistant." 
+} \ No newline at end of file diff --git a/src/ai/.x/templates/openai-webpage/ai.png b/src/ai/.x/templates/openai-webpage-js/ai.png similarity index 100% rename from src/ai/.x/templates/openai-webpage/ai.png rename to src/ai/.x/templates/openai-webpage-js/ai.png diff --git a/src/ai/.x/templates/openai-webpage/index.html b/src/ai/.x/templates/openai-webpage-js/index.html similarity index 100% rename from src/ai/.x/templates/openai-webpage/index.html rename to src/ai/.x/templates/openai-webpage-js/index.html diff --git a/src/ai/.x/templates/openai-webpage/package.json b/src/ai/.x/templates/openai-webpage-js/package.json similarity index 92% rename from src/ai/.x/templates/openai-webpage/package.json rename to src/ai/.x/templates/openai-webpage-js/package.json index 7d113850..89463238 100644 --- a/src/ai/.x/templates/openai-webpage/package.json +++ b/src/ai/.x/templates/openai-webpage-js/package.json @@ -9,7 +9,7 @@ "author": "", "license": "MIT", "dependencies": { - "@azure/openai": "1.0.0-beta.8", + "@azure/openai": "1.0.0-beta.10", "highlight.js": "^11.7.2", "marked": "^4.0.10" }, diff --git a/src/ai/.x/templates/openai-webpage-js/src/OpenAIChatCompletionsStreamingClass.js b/src/ai/.x/templates/openai-webpage-js/src/OpenAIChatCompletionsStreamingClass.js new file mode 100644 index 00000000..8b204001 --- /dev/null +++ b/src/ai/.x/templates/openai-webpage-js/src/OpenAIChatCompletionsStreamingClass.js @@ -0,0 +1,47 @@ +<#@ template hostspecific="true" #> +<#@ output extension=".js" encoding="utf-8" #> +<#@ parameter type="System.String" name="ClassName" #> +const { OpenAIClient, AzureKeyCredential } = require("@azure/openai"); + +class <#= ClassName #> { + constructor(openAIEndpoint, openAIKey, openAIChatDeploymentName, openAISystemPrompt) { + this.openAISystemPrompt = openAISystemPrompt; + this.openAIChatDeploymentName = openAIChatDeploymentName; + this.client = new OpenAIClient(openAIEndpoint, new AzureKeyCredential(openAIKey)); + this.clearConversation(); + } + + 
clearConversation() { + this.messages = [ + { role: 'system', content: this.openAISystemPrompt } + ]; + } + + async getChatCompletions(userInput, callback) { + this.messages.push({ role: 'user', content: userInput }); + + let contentComplete = ''; + const events = await this.client.streamChatCompletions(this.openAIChatDeploymentName, this.messages); + + for await (const event of events) { + for (const choice of event.choices) { + + let content = choice.delta?.content; + if (choice.finishReason === 'length') { + content = `${content}\nERROR: Exceeded token limit!`; + } + + if (content != null) { + callback(content); + await new Promise(r => setTimeout(r, 50)); // delay to simulate real-time output, word by word + contentComplete += content; + } + } + } + + this.messages.push({ role: 'assistant', content: contentComplete }); + return contentComplete; + } +} + +exports.<#= ClassName #> = <#= ClassName #>; \ No newline at end of file diff --git a/src/ai/.x/templates/openai-webpage/src/script.js b/src/ai/.x/templates/openai-webpage-js/src/script.js similarity index 83% rename from src/ai/.x/templates/openai-webpage/src/script.js rename to src/ai/.x/templates/openai-webpage-js/src/script.js index 477ce271..07a5c993 100644 --- a/src/ai/.x/templates/openai-webpage/src/script.js +++ b/src/ai/.x/templates/openai-webpage-js/src/script.js @@ -1,34 +1,34 @@ +<#@ template hostspecific="true" #> +<#@ output extension=".js" encoding="utf-8" #> +<#@ parameter type="System.String" name="ClassName" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_ENDPOINT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_KEY" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_CHAT_DEPLOYMENT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_SYSTEM_PROMPT" #> const marked = require("marked"); const hljs = require("highlight.js"); -const customFunctions = require("./ChatCompletionsCustomFunctions"); -const { getCurrentDateSchema, getCurrentDate } = customFunctions; 
-const { FunctionFactory } = require("./FunctionFactory"); - -const { ChatCompletionsFunctionsStreaming } = require('./ChatCompletionsFunctionsStreaming'); +const { <#= ClassName #> } = require('./OpenAIChatCompletionsStreamingClass'); let streamingChatCompletions; function streamingChatCompletionsInit() { - let factory = new FunctionFactory(); - factory.addFunction(getCurrentDateSchema, getCurrentDate); - - const endpoint = process.env.OPENAI_ENDPOINT; - const azureApiKey = process.env.OPENAI_API_KEY; - const deploymentName = process.env.AZURE_OPENAI_CHAT_DEPLOYMENT; - const systemPrompt = "You are a helpful AI assistant."; + const openAIEndpoint = process.env.AZURE_OPENAI_ENDPOINT || "<#= AZURE_OPENAI_ENDPOINT #>"; + const openAIKey = process.env.AZURE_OPENAI_KEY || "<#= AZURE_OPENAI_KEY #>"; + const openAIChatDeploymentName = process.env.AZURE_OPENAI_CHAT_DEPLOYMENT || "<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>" ; + const openAISystemPrompt = process.env.AZURE_OPENAI_SYSTEM_PROMPT || "<#= AZURE_OPENAI_SYSTEM_PROMPT #>"; - if (!endpoint || endpoint.startsWith('(openAIEndpoint, openAIKey, openAIChatDeploymentName, openAISystemPrompt); } function streamingChatCompletionsClear() { diff --git a/src/ai/.x/templates/openai-webpage/style.css b/src/ai/.x/templates/openai-webpage-js/style.css similarity index 100% rename from src/ai/.x/templates/openai-webpage/style.css rename to src/ai/.x/templates/openai-webpage-js/style.css diff --git a/src/ai/.x/templates/openai-webpage/webpack.config.js b/src/ai/.x/templates/openai-webpage-js/webpack.config.js similarity index 100% rename from src/ai/.x/templates/openai-webpage/webpack.config.js rename to src/ai/.x/templates/openai-webpage-js/webpack.config.js diff --git a/src/ai/.x/templates/openai-webpage-ts/.env b/src/ai/.x/templates/openai-webpage-ts/.env new file mode 100644 index 00000000..bd323058 --- /dev/null +++ b/src/ai/.x/templates/openai-webpage-ts/.env @@ -0,0 +1,10 @@ +<#@ template hostspecific="true" #> +<#@ output 
extension=".env" encoding="utf-8" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_ENDPOINT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_KEY" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_CHAT_DEPLOYMENT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_SYSTEM_PROMPT" #> +AZURE_OPENAI_CHAT_DEPLOYMENT=<#= AZURE_OPENAI_CHAT_DEPLOYMENT #> +AZURE_OPENAI_KEY=<#= AZURE_OPENAI_KEY #> +AZURE_OPENAI_ENDPOINT=<#= AZURE_OPENAI_ENDPOINT #> +AZURE_OPENAI_SYSTEM_PROMPT=<#= AZURE_OPENAI_SYSTEM_PROMPT #> diff --git a/src/ai/.x/templates/openai-webpage-ts/README.md b/src/ai/.x/templates/openai-webpage-ts/README.md new file mode 100644 index 00000000..8fee923d --- /dev/null +++ b/src/ai/.x/templates/openai-webpage-ts/README.md @@ -0,0 +1,35 @@ +# `ai` chat website + +This is a simple website chat interface that uses OpenAI's API to generate text responses to user input. + +User input is typed into a text box and added to the conversation as a message inside a chat panel. The panel scrolls up and the computer responds with streaming text output into another message in the chat panel. There is a left nav that has a "new chat" button and has a spot for future expansion w/ a list of historical chats. + +## Setup + +To build the website, run the following commands: + +```bash +npm install +npx webpack +``` + +To run the website, launch `index.html` in your browser. + +These setup steps are also represented in tasks.json and launch.json, so that you can build and run the website from within VS Code. + +## Project structure + +| Category | File | Description +| --- | --- | --- +| **SOURCE CODE** | ai.png | Logo/icon for the website. +| | index.html | HTML file with controls and layout. +| | style.css | CSS file with layout and styling. +| | src/script.js | Main JS file with HTML to JS interactions. +| | src/ChatCompletionsStreaming.js | Main JS file with JS to OpenAI interactions. 
+| | | +| **VS CODE** | .vscode/tasks.json | VS Code tasks to build and run the website. +| | .vscode/launch.json | VS Code launch configuration to run the website. +| | | +| **BUILD + PACKAGING** | .env | Contains the API keys, endpoints, etc. +| | package.json | Contains the dependencies. +| | webpack.config.js | The webpack config file. \ No newline at end of file diff --git a/src/ai/.x/templates/openai-webpage-ts/_.json b/src/ai/.x/templates/openai-webpage-ts/_.json new file mode 100644 index 00000000..c022f699 --- /dev/null +++ b/src/ai/.x/templates/openai-webpage-ts/_.json @@ -0,0 +1,10 @@ +{ + "_LongName": "OpenAI Webpage", + "_ShortName": "openai-webpage", + "_Language": "TypeScript", + "ClassName": "OpenAIChatCompletionsStreamingClass", + "AZURE_OPENAI_ENDPOINT": "", + "AZURE_OPENAI_KEY": "", + "AZURE_OPENAI_CHAT_DEPLOYMENT": "", + "AZURE_OPENAI_SYSTEM_PROMPT": "You are a helpful AI assistant." +} \ No newline at end of file diff --git a/src/ai/.x/templates/openai-webpage-ts/ai.png b/src/ai/.x/templates/openai-webpage-ts/ai.png new file mode 100644 index 00000000..4ba344c9 Binary files /dev/null and b/src/ai/.x/templates/openai-webpage-ts/ai.png differ diff --git a/src/ai/.x/templates/openai-webpage-ts/index.html b/src/ai/.x/templates/openai-webpage-ts/index.html new file mode 100644 index 00000000..e27151ba --- /dev/null +++ b/src/ai/.x/templates/openai-webpage-ts/index.html @@ -0,0 +1,62 @@ + + + + + + + + + Chat Interface + + + + +
+ +
+ +
+ +
+ +
+ +
+ + + + +
+
+ + + +
+ +
+
+
+ + +
+ + +
+ +
+ +
+ + + + + + + \ No newline at end of file diff --git a/src/ai/.x/templates/openai-webpage-ts/package.json b/src/ai/.x/templates/openai-webpage-ts/package.json new file mode 100644 index 00000000..7e60f6a1 --- /dev/null +++ b/src/ai/.x/templates/openai-webpage-ts/package.json @@ -0,0 +1,25 @@ +{ + "name": "chat-interface", + "version": "1.0.0", + "description": "Chat Interface with OpenAI", + "main": "script.ts", + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "author": "", + "license": "MIT", + "dependencies": { + "@azure/openai": "1.0.0-beta.10", + "highlight.js": "^11.7.2", + "marked": "^4.0.10" + }, + "keywords": [], + "devDependencies": { + "@types/node": "^20.11.1", + "dotenv-webpack": "^7.0.3", + "ts-loader": "^9.5.1", + "typescript": "^5.3.3", + "webpack": "^5.89.0", + "webpack-cli": "^5.1.4" + } +} diff --git a/src/ai/.x/templates/openai-webpage-ts/src/OpenAIChatCompletionsStreamingClass.ts b/src/ai/.x/templates/openai-webpage-ts/src/OpenAIChatCompletionsStreamingClass.ts new file mode 100644 index 00000000..aca48988 --- /dev/null +++ b/src/ai/.x/templates/openai-webpage-ts/src/OpenAIChatCompletionsStreamingClass.ts @@ -0,0 +1,50 @@ +<#@ template hostspecific="true" #> +<#@ output extension=".ts" encoding="utf-8" #> +<#@ parameter type="System.String" name="ClassName" #> +import { OpenAIClient, AzureKeyCredential, ChatRequestMessage } from "@azure/openai"; + +export class <#= ClassName #> { + private openAISystemPrompt: string; + private openAIChatDeploymentName: string; + private client: OpenAIClient; + private messages: ChatRequestMessage[] = []; + + constructor(openAIEndpoint: string, openAIKey: string, openAIChatDeploymentName: string, openAISystemPrompt: string) { + this.openAISystemPrompt = openAISystemPrompt; + this.openAIChatDeploymentName = openAIChatDeploymentName; + this.client = new OpenAIClient(openAIEndpoint, new AzureKeyCredential(openAIKey)); + this.clearConversation(); + } + + clearConversation(): void { + 
this.messages = [ + { role: 'system', content: this.openAISystemPrompt } + ]; + } + + async getChatCompletions(userInput: string, callback: (content: string) => void): Promise { + this.messages.push({ role: 'user', content: userInput }); + + let contentComplete = ''; + const events = await this.client.streamChatCompletions(this.openAIChatDeploymentName, this.messages); + + for await (const event of events) { + for (const choice of event.choices) { + + let content = choice.delta?.content; + if (choice.finishReason === 'length') { + content = `${content}\nERROR: Exceeded token limit!`; + } + + if (content != null) { + callback(content); + await new Promise(r => setTimeout(r, 50)); // delay to simulate real-time output, word by word + contentComplete += content; + } + } + } + + this.messages.push({ role: 'assistant', content: contentComplete }); + return contentComplete; + } +} \ No newline at end of file diff --git a/src/ai/.x/templates/openai-webpage-ts/src/script.ts b/src/ai/.x/templates/openai-webpage-ts/src/script.ts new file mode 100644 index 00000000..4ac8b45c --- /dev/null +++ b/src/ai/.x/templates/openai-webpage-ts/src/script.ts @@ -0,0 +1,298 @@ +<#@ template hostspecific="true" #> +<#@ output extension=".ts" encoding="utf-8" #> +<#@ parameter type="System.String" name="ClassName" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_ENDPOINT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_KEY" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_CHAT_DEPLOYMENT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_SYSTEM_PROMPT" #> +import { marked } from "marked" +import hljs from "highlight.js"; + +import { <#= ClassName #> } from './OpenAIChatCompletionsStreamingClass'; +let streamingChatCompletions: <#= ClassName #> | undefined; + +function streamingChatCompletionsInit(): void { + + const openAIEndpoint = process.env.AZURE_OPENAI_ENDPOINT || "<#= AZURE_OPENAI_ENDPOINT #>"; + const openAIKey = process.env.AZURE_OPENAI_KEY || "<#= 
AZURE_OPENAI_KEY #>"; + const openAIChatDeploymentName = process.env.AZURE_OPENAI_CHAT_DEPLOYMENT || "<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>" ; + const openAISystemPrompt = process.env.AZURE_OPENAI_SYSTEM_PROMPT || "<#= AZURE_OPENAI_SYSTEM_PROMPT #>"; + + if (!openAIEndpoint || openAIEndpoint.startsWith('(openAIEndpoint, openAIKey, openAIChatDeploymentName, openAISystemPrompt); +} + +function streamingChatCompletionsClear(): void { + streamingChatCompletions!.clearConversation(); +} + +async function streamingChatCompletionsProcessInput(userInput: string): Promise { + const blackVerticalRectangle = '\u25AE'; // Black vertical rectangle ('▮') to simulate an insertion point + + let newMessage = chatPanelAppendMessage('computer', blackVerticalRectangle); + let completeResponse = ""; + + let computerResponse = await streamingChatCompletions!.getChatCompletions(userInput, function (response: string) { + let atBottomBeforeUpdate = chatPanelIsScrollAtBottom(); + + completeResponse += response; + let withEnding = `${completeResponse}${blackVerticalRectangle}`; + let asHtml = markdownToHtml(withEnding); + + if (asHtml !== undefined) { + newMessage.innerHTML = asHtml; + + if (atBottomBeforeUpdate) { + chatPanelScrollToBottom(); + } + } + }); + + newMessage.innerHTML = markdownToHtml(computerResponse) || computerResponse.replace(/\n/g, '
'); + chatPanelScrollToBottom(); +} + +function chatPanelGetElement(): HTMLElement | null { + return document.getElementById("chatPanel"); +} + +function chatPanelAppendMessage(sender: any, message: string) { + logoHide(); + + let messageContent = document.createElement("p"); + messageContent.className = "message-content"; + messageContent.innerHTML = message; + + let messageAuthor = document.createElement("p"); + messageAuthor.className = "message-author"; + messageAuthor.innerHTML = sender == "user" ? "You" : "Assistant"; + + let divContainingBoth = document.createElement("div"); + divContainingBoth.className = sender === "user" ? "user" : "computer"; + divContainingBoth.appendChild(messageAuthor); + divContainingBoth.appendChild(messageContent); + + let chatPanel = chatPanelGetElement(); + chatPanel?.appendChild(divContainingBoth); + chatPanelScrollToBottom(); + + return messageContent; +} + +function chatPanelIsScrollAtBottom(): boolean { + let chatPanel = chatPanelGetElement(); + let atBottom = chatPanel + ? Math.abs(chatPanel.scrollHeight - chatPanel.clientHeight - chatPanel.scrollTop) < 1 + : true; + return atBottom; +} + +function chatPanelScrollToBottom() { + let chatPanel = chatPanelGetElement(); + if (chatPanel) { + chatPanel.scrollTop = chatPanel.scrollHeight; + } +} + +function chatPanelClear() { + let chatPanel = chatPanelGetElement(); + if (chatPanel) { + chatPanel.innerHTML = ''; + } +} + +function logoGetElement() { + return document.getElementById("logo"); +} + +function logoShow() { + let logo = logoGetElement(); + if (logo) { + logo.style.display = "block"; + } +} + +function logoHide() { + let logo = logoGetElement(); + if (logo) { + logo.style.display = "none"; + } +} + +function markdownInit() { + marked.setOptions({ + highlight: (code: string, lang: string) => { + let hl = lang === undefined || lang === '' + ? hljs.highlightAuto(code).value + : hljs.highlight(lang, code).value; + return `
${hl}
`; + } + }); +} + +function markdownToHtml(markdownText: string) { + try { + return marked.parse(markdownText); + } + catch (error) { + return undefined; + } +} + +function themeInit() { + let currentTheme = localStorage.getItem('theme'); + if (currentTheme === 'dark') { + themeSetDark(); + } + else if (currentTheme === 'light') { + themeSetLight(); + } + toggleThemeButtonInit(); +} + +function themeIsLight() { + return document.body.classList.contains("light-theme"); +} + +function themeIsDark() { + return !themeIsLight(); +} + +function toggleTheme() { + if (themeIsLight()) { + themeSetDark(); + } else { + themeSetLight(); + } +} + +function themeSetLight() { + if (!themeIsLight()) { + document.body.classList.add("light-theme"); + localStorage.setItem('theme', 'light'); + + let iconElement = toggleThemeButtonGetElement()!.children[0]; + iconElement.classList.remove("fa-toggle-on"); + iconElement.classList.add("fa-toggle-off"); + } +} + +function themeSetDark() { + if (!themeIsDark()) { + document.body.classList.remove("light-theme"); + localStorage.setItem('theme', 'dark'); + + let iconElement = toggleThemeButtonGetElement()!.children[0]; + iconElement.classList.remove("fa-toggle-off"); + iconElement.classList.add("fa-toggle-on"); + } +} + +function toggleThemeButtonGetElement() { + return document.getElementById("toggleThemeButton"); +} + +function toggleThemeButtonInit() { + let buttonElement = toggleThemeButtonGetElement(); + buttonElement!.addEventListener("click", toggleTheme); + buttonElement!.addEventListener('keydown', toggleThemeButtonHandleKeyDown()); +} + +function toggleThemeButtonHandleKeyDown() { + return function (event: KeyboardEvent) { + if (event.code === 'Enter' || event.code === 'Space') { + toggleTheme(); + } + }; +} + +function userInputTextAreaGetElement() : HTMLTextAreaElement | null { + return document.getElementById("userInput") as HTMLTextAreaElement | null; +} + +function userInputTextAreaInit() { + let inputElement = 
userInputTextAreaGetElement(); + inputElement!.addEventListener("keydown", userInputTextAreaHandleKeyDown()); + inputElement!.addEventListener("input", userInputTextAreaUpdateHeight); +} + +function userInputTextAreaFocus() { + let inputElement = userInputTextAreaGetElement(); + inputElement!.focus(); +} + +function userInputTextAreaClear() { + userInputTextAreaGetElement()!.value = ''; + userInputTextAreaUpdateHeight(); +} + +function userInputTextAreaUpdateHeight() { + let userInput = userInputTextAreaGetElement()!; + let inputElement = userInputTextAreaGetElement(); + inputElement!.style.height = 'auto'; + inputElement!.style.height = (userInput.scrollHeight) + 'px'; +} + +function userInputTextAreaHandleKeyDown() { + return function (event: KeyboardEvent) { + if (event.key === "Enter") { + if (!event.shiftKey) { + event.preventDefault(); + sendMessage(); + } + } + }; +} + +function varsInit() { + document.addEventListener('DOMContentLoaded', varsUpdateHeightsAndWidths); + window.addEventListener('resize', varsUpdateHeightsAndWidths); +} + +function varsUpdateHeightsAndWidths() { + let headerHeight = (document.querySelector('#header') as HTMLElement).offsetHeight; + let userInputHeight = (document.querySelector('#userInputPanel') as HTMLElement).offsetHeight; + document.documentElement.style.setProperty('--header-height', headerHeight + 'px'); + document.documentElement.style.setProperty('--input-height', userInputHeight + 'px'); +} + +function newChat() { + chatPanelClear(); + logoShow(); + userInputTextAreaFocus(); + streamingChatCompletionsClear(); +} + +function sendMessage() { + let inputElement = userInputTextAreaGetElement(); + let inputValue = inputElement!.value; + + let notEmpty = inputValue.trim() !== ''; + if (notEmpty) { + let html = markdownToHtml(inputValue) || inputValue.replace(/\n/g, '
'); + chatPanelAppendMessage('user', html); + userInputTextAreaClear(); + varsUpdateHeightsAndWidths(); + streamingChatCompletionsProcessInput(inputValue); + } +} + +themeInit(); +markdownInit(); +userInputTextAreaInit(); +varsInit(); +streamingChatCompletionsInit(); +userInputTextAreaFocus(); + +(window as any).sendMessage = sendMessage; +(window as any).toggleTheme = toggleTheme; +(window as any).newChat = newChat; diff --git a/src/ai/.x/templates/openai-webpage-ts/style.css b/src/ai/.x/templates/openai-webpage-ts/style.css new file mode 100644 index 00000000..c3fbe4ba --- /dev/null +++ b/src/ai/.x/templates/openai-webpage-ts/style.css @@ -0,0 +1,367 @@ +:root { + --header-height: 0px; + --input-height: 0px; + --send-button-width: 36px; + --left-side-width: 250px; + --right-side-width: 0px; + --right-side-max-width: 768px; + --max-textarea-height: 200px; + --logo-size: 0.75in; + --logo-icon-size: 1.5em; + --border-radius: 10px; +} + +body { + background-color: #111; + color: #f2f2f2; + font-size: medium; + font-family: system-ui; + height: 100vh; + margin: 0px; + overflow: hidden; + max-height: 100vh; +} + +#header { + color: #222; +} + +body.light-theme #header { + color: #f2f2f2; +} + +#logo { + display: block; + margin-left: auto; + margin-right: auto; + margin-top: calc((100vh - var(--header-height) - var(--input-height) - 80px - var(--logo-size)) / 100 * 33); + filter: grayscale(50%); + width: var(--logo-size); + height: var(--logo-size); +} + +#logoIcon { + margin-bottom: calc(var(--logo-icon-size) / 4); + margin-right: calc(var(--logo-icon-size) / 4); + filter: grayscale(50%); + width: var(--logo-icon-size); + height: var(--logo-icon-size); +} + +#leftSide { + background-color: #000; + color: #f2f2f2; + width: var(--left-side-width); + max-width: var(--left-side-width); + height: 100vh; + max-height: 100vh; + overflow-y: auto; +} + +#newChatButton { + border: none; + cursor: pointer; + border-radius: var(--border-radius); + /* background-co lor: #557CB4; 
*/ + width: calc(var(--left-side-width) - 16px); + margin-top: 16px; + margin-left: auto; + margin-right: auto; +} + +#rightSide { + width: 100%; + margin-left: auto; + margin-right: auto; + max-width: var(--right-side-max-width); +} + +#rightSideInside { + margin-left: auto; + margin-right: auto; + max-width: var(--right-side-max-width); +} + +#toggleThemeButton { + position: fixed; + top: 10px; + right: 0px; + cursor: pointer; + color: #fff; +} + +#chatPanel { + height: 100%; + max-height: calc(100vh - var(--header-height) - var(--input-height) - 32px); + overflow-y: auto; +} + +#sendButton { + border: none; + cursor: pointer; + font-size: 1em; + border-radius: var(--border-radius); + background-color: #557CB4; + width: var(--send-button-width); + padding: 0px; +} + +#userInputPanel { + display: flex; + max-width: 768px; +} + +#userInput { + margin-right: 15px; + width: 100%; + max-height: var(--max-textarea-height); + border-radius: var(--border-radius); + border-width: 2px; +} + +textarea { + resize: none; + background-color: #111; + color: #f2f2f2; +} + +body.light-theme textarea { + background-color: #fff; + color: #111; +} + +textarea.w3-border { + border-color: #333 !important; +} + +body.light-theme textarea.w3-border { + border-color: #ddd !important; +} + +textarea.w3-border:focus-visible { + border-color: #555 !important; + outline: none; +} + +body.light-theme textarea.w3-border:focus-visible { + border-color: #bbb !important; + outline: none; +} + +.user { + color: #d8d8d8; + background-color: #111; + border-radius: var(--border-radius); +} + +.computer { + color: #d8d8d8; + background-color: #111; + border-radius: var(--border-radius); +} + +div.user { + margin-bottom: 8px; + margin-right: 0px; + text-align: left; +} + +div.computer { + margin-bottom: 8px; + margin-right: 0px; + text-align: left; +} + +.message-author { + font-weight: bold; + padding-top: calc(var(--border-radius) / 2); + padding-left: var(--border-radius); + padding-right: 
var(--border-radius); +} + +p.message-author, p.message-author p { + margin: 0px; +} + +.message-content { + padding-left: var(--border-radius); + padding-bottom: calc(var(--border-radius) / 2); + padding-right: var(--border-radius); +} + +p.message-content, p.message-content p { + margin-top: 0px; + margin-left: 0px; + margin-right: 0px; +} + +.light-theme { + background-color: #fff; +} + +body.light-theme #toggleThemeButton { + color: #888; +} + +body.light-theme .user { + background-color: #fdfdfd; + color: #111; +} + +body.light-theme .computer { + background-color: #fdfdfd; + color: #111; +} + +#userInput::-webkit-scrollbar { + display: none; +} +#userInput { + -ms-overflow-style: none; + scrollbar-width: none; +} + +::-webkit-scrollbar { + height: 1rem; + width: .5rem; + background-color: #111; +} + +body.light-theme ::-webkit-scrollbar { + background-color: #fdfdfd; +} + +::-webkit-scrollbar:horizontal { + height: .5rem; + width: 1rem +} + +::-webkit-scrollbar:vertical { + height: .5rem; + width: 1rem +} + +::-webkit-scrollbar-track { + background-color: transparent; + border-radius: 9999px; +} + +::-webkit-scrollbar-thumb { + background-color: #0a0a0a; + border-color: rgba(255,255,255,var(--tw-border-opacity)); + border-radius: 9999px; + border-width: 1px; +} + +body.light-theme ::-webkit-scrollbar-thumb { + background-color: #fafafa; +} + +::-webkit-scrollbar-thumb:hover { + background-color: rgba(217,217,227,var(--tw-bg-opacity)) +} + + +.hljs { + margin: 0px; + padding: 16px; + padding-right: 0px; + border-radius: var(--border-radius); + overflow-x: auto; + max-width: 90vw; +} + +/* + +Atom One Dark by Daniel Gamage +Original One Dark Syntax theme from https://github.com/atom/one-dark-syntax + +base: #282c34 +mono-1: #abb2bf +mono-2: #818896 +mono-3: #5c6370 +hue-1: #56b6c2 +hue-2: #61aeee +hue-3: #c678dd +hue-4: #98c379 +hue-5: #e06c75 +hue-5-2: #be5046 +hue-6: #d19a66 +hue-6-2: #e6c07b + +*/ + +.hljs { + color: #abb2bf; + background: #282c34; + } + + 
.hljs-comment, + .hljs-quote { + color: #5c6370; + font-style: italic; + } + + .hljs-doctag, + .hljs-keyword, + .hljs-formula { + color: #c678dd; + } + + .hljs-section, + .hljs-name, + .hljs-selector-tag, + .hljs-deletion, + .hljs-subst { + color: #e06c75; + } + + .hljs-literal { + color: #56b6c2; + } + + .hljs-string, + .hljs-regexp, + .hljs-addition, + .hljs-attribute, + .hljs-meta .hljs-string { + color: #98c379; + } + + .hljs-attr, + .hljs-variable, + .hljs-template-variable, + .hljs-type, + .hljs-selector-class, + .hljs-selector-attr, + .hljs-selector-pseudo, + .hljs-number { + color: #d19a66; + } + + .hljs-symbol, + .hljs-bullet, + .hljs-link, + .hljs-meta, + .hljs-selector-id, + .hljs-title { + color: #61aeee; + } + + .hljs-built_in, + .hljs-title.class_, + .hljs-class .hljs-title { + color: #e6c07b; + } + + .hljs-emphasis { + font-style: italic; + } + + .hljs-strong { + font-weight: bold; + } + + .hljs-link { + text-decoration: underline; + } \ No newline at end of file diff --git a/src/ai/.x/templates/openai-webpage-ts/tsconfig.json b/src/ai/.x/templates/openai-webpage-ts/tsconfig.json new file mode 100644 index 00000000..464e3ae2 --- /dev/null +++ b/src/ai/.x/templates/openai-webpage-ts/tsconfig.json @@ -0,0 +1,16 @@ +{ + "compilerOptions": { + "moduleResolution": "node", + "esModuleInterop": true, + "outDir": "./dist/", + "sourceMap": true, + "strict": true, + "module": "es6", + "target": "es5", + "allowJs": true, + "typeRoots": ["./node_modules/@types", "./types"] + }, + "include": [ + "./src/**/*" + ] +} diff --git a/src/ai/.x/templates/openai-webpage-ts/types/marked.d.ts b/src/ai/.x/templates/openai-webpage-ts/types/marked.d.ts new file mode 100644 index 00000000..6c35e540 --- /dev/null +++ b/src/ai/.x/templates/openai-webpage-ts/types/marked.d.ts @@ -0,0 +1 @@ +declare module 'marked'; diff --git a/src/ai/.x/templates/openai-webpage-ts/webpack.config.js b/src/ai/.x/templates/openai-webpage-ts/webpack.config.js new file mode 100644 index 
00000000..82f4687f --- /dev/null +++ b/src/ai/.x/templates/openai-webpage-ts/webpack.config.js @@ -0,0 +1,32 @@ +const path = require('path'); +const webpack = require('webpack'); +const Dotenv = require('dotenv-webpack'); + +module.exports = { + entry: './src/script.ts', + output: { + filename: 'main.js', + path: path.resolve(__dirname, 'dist'), + }, + plugins: [ + new Dotenv(), + new webpack.DefinePlugin({ + 'process.env.ENDPOINT': JSON.stringify(process.env.ENDPOINT), + 'process.env.AZURE_API_KEY': JSON.stringify(process.env.AZURE_API_KEY), + 'process.env.DEPLOYMENT_NAME': JSON.stringify(process.env.DEPLOYMENT_NAME), + 'process.env.SYSTEM_PROMPT': JSON.stringify(process.env.SYSTEM_PROMPT), + }), + ], + resolve: { + extensions: [ '.tsx', '.ts', '.js' ], + }, + module: { + rules: [ + { + test: /\.tsx?$/, + use: 'ts-loader', + exclude: /node_modules/, + }, + ], + }, +}; diff --git a/src/ai/.x/templates/openai-webpage-with-functions-ts/.env b/src/ai/.x/templates/openai-webpage-with-functions-ts/.env new file mode 100644 index 00000000..bd323058 --- /dev/null +++ b/src/ai/.x/templates/openai-webpage-with-functions-ts/.env @@ -0,0 +1,10 @@ +<#@ template hostspecific="true" #> +<#@ output extension=".env" encoding="utf-8" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_ENDPOINT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_KEY" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_CHAT_DEPLOYMENT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_SYSTEM_PROMPT" #> +AZURE_OPENAI_CHAT_DEPLOYMENT=<#= AZURE_OPENAI_CHAT_DEPLOYMENT #> +AZURE_OPENAI_KEY=<#= AZURE_OPENAI_KEY #> +AZURE_OPENAI_ENDPOINT=<#= AZURE_OPENAI_ENDPOINT #> +AZURE_OPENAI_SYSTEM_PROMPT=<#= AZURE_OPENAI_SYSTEM_PROMPT #> diff --git a/src/ai/.x/templates/openai-webpage-with-functions-ts/README.md b/src/ai/.x/templates/openai-webpage-with-functions-ts/README.md new file mode 100644 index 00000000..8fee923d --- /dev/null +++ 
b/src/ai/.x/templates/openai-webpage-with-functions-ts/README.md @@ -0,0 +1,35 @@ +# `ai` chat website + +This is a simple website chat interface that uses OpenAI's API to generate text responses to user input. + +User input is typed into a text box and added to the conversation as a message inside a chat panel. The panel scrolls up and the computer responds with streaming text output into another message in the chat panel. There is a left nav that has a "new chat" button and has a spot for future expansion w/ a list of historical chats. + +## Setup + +To build the website, run the following commands: + +```bash +npm install +npx webpack +``` + +To run the website, launch `index.html` in your browser. + +These setup steps are also represented in tasks.json and launch.json, so that you can build and run the website from within VS Code. + +## Project structure + +| Category | File | Description +| --- | --- | --- +| **SOURCE CODE** | ai.png | Logo/icon for the website. +| | index.html | HTML file with controls and layout. +| | style.css | CSS file with layout and styling. +| | src/script.js | Main JS file with HTML to JS interactions. +| | src/ChatCompletionsStreaming.js | Main JS file with JS to OpenAI interactions. +| | | +| **VS CODE** | .vscode/tasks.json | VS Code tasks to build and run the website. +| | .vscode/launch.json | VS Code launch configuration to run the website. +| | | +| **BUILD + PACKAGING** | .env | Contains the API keys, endpoints, etc. +| | package.json | Contains the dependencies. +| | webpack.config.js | The webpack config file. 
\ No newline at end of file diff --git a/src/ai/.x/templates/openai-webpage-with-functions-ts/_.json b/src/ai/.x/templates/openai-webpage-with-functions-ts/_.json new file mode 100644 index 00000000..8b9f1e93 --- /dev/null +++ b/src/ai/.x/templates/openai-webpage-with-functions-ts/_.json @@ -0,0 +1,10 @@ +{ + "_LongName": "OpenAI Webpage (w/ Functions)", + "_ShortName": "openai-webpage-with-functions", + "_Language": "TypeScript", + "ClassName": "OpenAIChatCompletionsFunctionsStreamingClass", + "AZURE_OPENAI_ENDPOINT": "", + "AZURE_OPENAI_KEY": "", + "AZURE_OPENAI_CHAT_DEPLOYMENT": "", + "AZURE_OPENAI_SYSTEM_PROMPT": "You are a helpful AI assistant." +} \ No newline at end of file diff --git a/src/ai/.x/templates/openai-webpage-with-functions-ts/ai.png b/src/ai/.x/templates/openai-webpage-with-functions-ts/ai.png new file mode 100644 index 00000000..4ba344c9 Binary files /dev/null and b/src/ai/.x/templates/openai-webpage-with-functions-ts/ai.png differ diff --git a/src/ai/.x/templates/openai-webpage-with-functions-ts/index.html b/src/ai/.x/templates/openai-webpage-with-functions-ts/index.html new file mode 100644 index 00000000..e27151ba --- /dev/null +++ b/src/ai/.x/templates/openai-webpage-with-functions-ts/index.html @@ -0,0 +1,62 @@ + + + + + + + + + Chat Interface + + + + +
+ +
+ +
+ +
+ +
+ +
+ + + + +
+
+ + + +
+ +
+
+
+ + +
+ + +
+ +
+ +
+ + + + + + + \ No newline at end of file diff --git a/src/ai/.x/templates/openai-webpage-with-functions-ts/package.json b/src/ai/.x/templates/openai-webpage-with-functions-ts/package.json new file mode 100644 index 00000000..7e60f6a1 --- /dev/null +++ b/src/ai/.x/templates/openai-webpage-with-functions-ts/package.json @@ -0,0 +1,25 @@ +{ + "name": "chat-interface", + "version": "1.0.0", + "description": "Chat Interface with OpenAI", + "main": "script.ts", + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "author": "", + "license": "MIT", + "dependencies": { + "@azure/openai": "1.0.0-beta.10", + "highlight.js": "^11.7.2", + "marked": "^4.0.10" + }, + "keywords": [], + "devDependencies": { + "@types/node": "^20.11.1", + "dotenv-webpack": "^7.0.3", + "ts-loader": "^9.5.1", + "typescript": "^5.3.3", + "webpack": "^5.89.0", + "webpack-cli": "^5.1.4" + } +} diff --git a/src/ai/.x/templates/openai-webpage-with-functions-ts/src/FunctionCallContext.ts b/src/ai/.x/templates/openai-webpage-with-functions-ts/src/FunctionCallContext.ts new file mode 100644 index 00000000..7cef905b --- /dev/null +++ b/src/ai/.x/templates/openai-webpage-with-functions-ts/src/FunctionCallContext.ts @@ -0,0 +1,53 @@ +import { ChatChoice, ChatRequestMessage } from "@azure/openai"; +import { FunctionFactory } from "./FunctionFactory"; + +export class FunctionCallContext { + private function_factory: FunctionFactory; + private messages: ChatRequestMessage[]; + private function_name: string; + private function_arguments: string; + + constructor(function_factory: FunctionFactory, messages: ChatRequestMessage[]) { + this.function_factory = function_factory; + this.messages = messages; + this.function_name = ""; + this.function_arguments = ""; + } + + checkForUpdate(choice: ChatChoice): boolean { + let updated = false; + + const name = choice.delta?.functionCall?.name; + if (name !== undefined) { + this.function_name = name; + updated = true; + } + + const args = 
choice.delta?.functionCall?.arguments; + if (args !== undefined) { + this.function_arguments = `${this.function_arguments}${args}`; + updated = true; + } + + return updated; + } + + tryCallFunction(): string | undefined { + let result = this.function_factory.tryCallFunction(this.function_name, this.function_arguments); + if (result === undefined) { + return undefined; + } + + console.log(`assistant-function: ${this.function_name}(${this.function_arguments}) => ${result}`); + + this.messages.push({ role: 'assistant', content: '', functionCall: { name: this.function_name, arguments: this.function_arguments } }); + this.messages.push({ role: 'function', content: result, name: this.function_name }); + + return result; + } + + clear(): void { + this.function_name = ""; + this.function_arguments = ""; + } +} diff --git a/src/ai/.x/templates/openai-webpage-with-functions-ts/src/FunctionFactory.ts b/src/ai/.x/templates/openai-webpage-with-functions-ts/src/FunctionFactory.ts new file mode 100644 index 00000000..b2807021 --- /dev/null +++ b/src/ai/.x/templates/openai-webpage-with-functions-ts/src/FunctionFactory.ts @@ -0,0 +1,24 @@ +export class FunctionFactory { + private functions: { [key: string]: { schema: any, function: any } }; + + constructor() { + this.functions = {}; + } + + addFunction(schema: any, fun: any): void { + this.functions[schema.name] = { schema: schema, function: fun }; + } + + getFunctionSchemas(): any[] { + return Object.values(this.functions).map(value => value.schema); + } + + tryCallFunction(function_name: string, function_arguments: string) { + const function_info = this.functions[function_name]; + if (function_info === undefined) { + return undefined; + } + + return function_info.function(function_arguments); + } +} diff --git a/src/ai/.x/templates/openai-webpage-with-functions-ts/src/OpenAIChatCompletionsCustomFunctions.ts b/src/ai/.x/templates/openai-webpage-with-functions-ts/src/OpenAIChatCompletionsCustomFunctions.ts new file mode 100644 
index 00000000..273b75dc --- /dev/null +++ b/src/ai/.x/templates/openai-webpage-with-functions-ts/src/OpenAIChatCompletionsCustomFunctions.ts @@ -0,0 +1,60 @@ +import { FunctionFactory } from './FunctionFactory'; +export let factory = new FunctionFactory(); + +function getCurrentWeather(function_arguments: string): string { + const location = JSON.parse(function_arguments).location; + return `The weather in ${location} is 72 degrees and sunny.`; + }; + +const getCurrentWeatherSchema = { + name: "get_current_weather", + description: "Get the current weather in a given location", + parameters: { + type: "object", + properties: { + location: { + type: "string", + description: "The city and state, e.g. San Francisco, CA", + }, + unit: { + type: "string", + enum: ["celsius", "fahrenheit"], + }, + }, + required: ["location"], + }, +}; + +factory.addFunction(getCurrentWeatherSchema, getCurrentWeather); + +function getCurrentDate(): string { + const date = new Date(); + return `${date.getFullYear()}-${date.getMonth() + 1}-${date.getDate()}`; +} + +const getCurrentDateSchema = { + name: "get_current_date", + description: "Get the current date", + parameters: { + type: "object", + properties: {}, + }, +}; + +factory.addFunction(getCurrentDateSchema, getCurrentDate); + +function getCurrentTime(): string { + const date = new Date(); + return `${date.getHours()}:${date.getMinutes()}:${date.getSeconds()}`; +} + +const getCurrentTimeSchema = { + name: "get_current_time", + description: "Get the current time", + parameters: { + type: "object", + properties: {}, + }, +}; + +factory.addFunction(getCurrentTimeSchema, getCurrentTime); \ No newline at end of file diff --git a/src/ai/.x/templates/openai-webpage-with-functions-ts/src/OpenAIChatCompletionsFunctionsStreamingClass.ts b/src/ai/.x/templates/openai-webpage-with-functions-ts/src/OpenAIChatCompletionsFunctionsStreamingClass.ts new file mode 100644 index 00000000..4e0324d3 --- /dev/null +++ 
b/src/ai/.x/templates/openai-webpage-with-functions-ts/src/OpenAIChatCompletionsFunctionsStreamingClass.ts @@ -0,0 +1,67 @@ +<#@ template hostspecific="true" #> +<#@ output extension=".ts" encoding="utf-8" #> +<#@ parameter type="System.String" name="ClassName" #> +import { OpenAIClient, AzureKeyCredential, ChatRequestMessage } from "@azure/openai"; +import { FunctionCallContext } from "./FunctionCallContext" +import { FunctionFactory } from "./FunctionFactory" + +export class <#= ClassName #> { + private openAISystemPrompt: string; + private openAIChatDeploymentName: string; + private client: OpenAIClient; + private messages: ChatRequestMessage[] = []; + private functionCallContext: FunctionCallContext | undefined; + private functionFactory: FunctionFactory; + + constructor(openAIEndpoint: string, openAIKey: string, openAIChatDeploymentName: string, openAISystemPrompt: string, functionFactory: FunctionFactory) { + this.openAISystemPrompt = openAISystemPrompt; + this.openAIChatDeploymentName = openAIChatDeploymentName; + this.client = new OpenAIClient(openAIEndpoint, new AzureKeyCredential(openAIKey)); + this.functionFactory = functionFactory; + this.clearConversation(); + } + + clearConversation(): void { + this.messages = [ + { role: 'system', content: this.openAISystemPrompt } + ]; + this.functionCallContext = new FunctionCallContext(this.functionFactory, this.messages); + } + + async getChatCompletions(userInput: string, callback: (content: string) => void): Promise { + this.messages.push({ role: 'user', content: userInput }); + + let contentComplete = ''; + while (true) { + const events = await this.client.streamChatCompletions(this.openAIChatDeploymentName, this.messages, { + functions: this.functionFactory.getFunctionSchemas(), + }); + + for await (const event of events) { + for (const choice of event.choices) { + + this.functionCallContext!.checkForUpdate(choice); + + let content = choice.delta?.content; + if (choice.finishReason === 'length') { + content = 
`${content}\nERROR: Exceeded token limit!`; + } + + if (content != null) { + callback(content); + await new Promise(r => setTimeout(r, 50)); // delay to simulate real-time output, word by word + contentComplete += content; + } + } + } + + if (this.functionCallContext!.tryCallFunction() !== undefined) { + this.functionCallContext!.clear(); + continue; + } + + this.messages.push({ role: 'assistant', content: contentComplete }); + return contentComplete; + } + } +} \ No newline at end of file diff --git a/src/ai/.x/templates/openai-webpage-with-functions-ts/src/script.ts b/src/ai/.x/templates/openai-webpage-with-functions-ts/src/script.ts new file mode 100644 index 00000000..0ba30e6e --- /dev/null +++ b/src/ai/.x/templates/openai-webpage-with-functions-ts/src/script.ts @@ -0,0 +1,300 @@ +<#@ template hostspecific="true" #> +<#@ output extension=".ts" encoding="utf-8" #> +<#@ parameter type="System.String" name="ClassName" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_ENDPOINT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_KEY" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_CHAT_DEPLOYMENT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_SYSTEM_PROMPT" #> +import { marked } from "marked" +import hljs from "highlight.js"; + +import { factory } from './OpenAIChatCompletionsCustomFunctions'; + +import { <#= ClassName #> } from './OpenAIChatCompletionsFunctionsStreamingClass'; +let streamingChatCompletions: <#= ClassName #> | undefined; + +function streamingChatCompletionsInit(): void { + + const openAIEndpoint = process.env.AZURE_OPENAI_ENDPOINT || "<#= AZURE_OPENAI_ENDPOINT #>"; + const openAIKey = process.env.AZURE_OPENAI_KEY || "<#= AZURE_OPENAI_KEY #>"; + const openAIChatDeploymentName = process.env.AZURE_OPENAI_CHAT_DEPLOYMENT || "<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>" ; + const openAISystemPrompt = process.env.AZURE_OPENAI_SYSTEM_PROMPT || "<#= AZURE_OPENAI_SYSTEM_PROMPT #>"; + + if (!openAIEndpoint || 
openAIEndpoint.startsWith('(openAIEndpoint, openAIKey, openAIChatDeploymentName, openAISystemPrompt, factory); +} + +function streamingChatCompletionsClear(): void { + streamingChatCompletions!.clearConversation(); +} + +async function streamingChatCompletionsProcessInput(userInput: string): Promise { + const blackVerticalRectangle = '\u25AE'; // Black vertical rectangle ('▮') to simulate an insertion point + + let newMessage = chatPanelAppendMessage('computer', blackVerticalRectangle); + let completeResponse = ""; + + let computerResponse = await streamingChatCompletions!.getChatCompletions(userInput, function (response: string) { + let atBottomBeforeUpdate = chatPanelIsScrollAtBottom(); + + completeResponse += response; + let withEnding = `${completeResponse}${blackVerticalRectangle}`; + let asHtml = markdownToHtml(withEnding); + + if (asHtml !== undefined) { + newMessage.innerHTML = asHtml; + + if (atBottomBeforeUpdate) { + chatPanelScrollToBottom(); + } + } + }); + + newMessage.innerHTML = markdownToHtml(computerResponse) || computerResponse.replace(/\n/g, '
'); + chatPanelScrollToBottom(); +} + +function chatPanelGetElement(): HTMLElement | null { + return document.getElementById("chatPanel"); +} + +function chatPanelAppendMessage(sender: any, message: string) { + logoHide(); + + let messageContent = document.createElement("p"); + messageContent.className = "message-content"; + messageContent.innerHTML = message; + + let messageAuthor = document.createElement("p"); + messageAuthor.className = "message-author"; + messageAuthor.innerHTML = sender == "user" ? "You" : "Assistant"; + + let divContainingBoth = document.createElement("div"); + divContainingBoth.className = sender === "user" ? "user" : "computer"; + divContainingBoth.appendChild(messageAuthor); + divContainingBoth.appendChild(messageContent); + + let chatPanel = chatPanelGetElement(); + chatPanel?.appendChild(divContainingBoth); + chatPanelScrollToBottom(); + + return messageContent; +} + +function chatPanelIsScrollAtBottom(): boolean { + let chatPanel = chatPanelGetElement(); + let atBottom = chatPanel + ? Math.abs(chatPanel.scrollHeight - chatPanel.clientHeight - chatPanel.scrollTop) < 1 + : true; + return atBottom; +} + +function chatPanelScrollToBottom() { + let chatPanel = chatPanelGetElement(); + if (chatPanel) { + chatPanel.scrollTop = chatPanel.scrollHeight; + } +} + +function chatPanelClear() { + let chatPanel = chatPanelGetElement(); + if (chatPanel) { + chatPanel.innerHTML = ''; + } +} + +function logoGetElement() { + return document.getElementById("logo"); +} + +function logoShow() { + let logo = logoGetElement(); + if (logo) { + logo.style.display = "block"; + } +} + +function logoHide() { + let logo = logoGetElement(); + if (logo) { + logo.style.display = "none"; + } +} + +function markdownInit() { + marked.setOptions({ + highlight: (code: string, lang: string) => { + let hl = lang === undefined || lang === '' + ? hljs.highlightAuto(code).value + : hljs.highlight(lang, code).value; + return `
${hl}
`; + } + }); +} + +function markdownToHtml(markdownText: string) { + try { + return marked.parse(markdownText); + } + catch (error) { + return undefined; + } +} + +function themeInit() { + let currentTheme = localStorage.getItem('theme'); + if (currentTheme === 'dark') { + themeSetDark(); + } + else if (currentTheme === 'light') { + themeSetLight(); + } + toggleThemeButtonInit(); +} + +function themeIsLight() { + return document.body.classList.contains("light-theme"); +} + +function themeIsDark() { + return !themeIsLight(); +} + +function toggleTheme() { + if (themeIsLight()) { + themeSetDark(); + } else { + themeSetLight(); + } +} + +function themeSetLight() { + if (!themeIsLight()) { + document.body.classList.add("light-theme"); + localStorage.setItem('theme', 'light'); + + let iconElement = toggleThemeButtonGetElement()!.children[0]; + iconElement.classList.remove("fa-toggle-on"); + iconElement.classList.add("fa-toggle-off"); + } +} + +function themeSetDark() { + if (!themeIsDark()) { + document.body.classList.remove("light-theme"); + localStorage.setItem('theme', 'dark'); + + let iconElement = toggleThemeButtonGetElement()!.children[0]; + iconElement.classList.remove("fa-toggle-off"); + iconElement.classList.add("fa-toggle-on"); + } +} + +function toggleThemeButtonGetElement() { + return document.getElementById("toggleThemeButton"); +} + +function toggleThemeButtonInit() { + let buttonElement = toggleThemeButtonGetElement(); + buttonElement!.addEventListener("click", toggleTheme); + buttonElement!.addEventListener('keydown', toggleThemeButtonHandleKeyDown()); +} + +function toggleThemeButtonHandleKeyDown() { + return function (event: KeyboardEvent) { + if (event.code === 'Enter' || event.code === 'Space') { + toggleTheme(); + } + }; +} + +function userInputTextAreaGetElement() : HTMLTextAreaElement | null { + return document.getElementById("userInput") as HTMLTextAreaElement | null; +} + +function userInputTextAreaInit() { + let inputElement = 
userInputTextAreaGetElement(); + inputElement!.addEventListener("keydown", userInputTextAreaHandleKeyDown()); + inputElement!.addEventListener("input", userInputTextAreaUpdateHeight); +} + +function userInputTextAreaFocus() { + let inputElement = userInputTextAreaGetElement(); + inputElement!.focus(); +} + +function userInputTextAreaClear() { + userInputTextAreaGetElement()!.value = ''; + userInputTextAreaUpdateHeight(); +} + +function userInputTextAreaUpdateHeight() { + let userInput = userInputTextAreaGetElement()!; + let inputElement = userInputTextAreaGetElement(); + inputElement!.style.height = 'auto'; + inputElement!.style.height = (userInput.scrollHeight) + 'px'; +} + +function userInputTextAreaHandleKeyDown() { + return function (event: KeyboardEvent) { + if (event.key === "Enter") { + if (!event.shiftKey) { + event.preventDefault(); + sendMessage(); + } + } + }; +} + +function varsInit() { + document.addEventListener('DOMContentLoaded', varsUpdateHeightsAndWidths); + window.addEventListener('resize', varsUpdateHeightsAndWidths); +} + +function varsUpdateHeightsAndWidths() { + let headerHeight = (document.querySelector('#header') as HTMLElement).offsetHeight; + let userInputHeight = (document.querySelector('#userInputPanel') as HTMLElement).offsetHeight; + document.documentElement.style.setProperty('--header-height', headerHeight + 'px'); + document.documentElement.style.setProperty('--input-height', userInputHeight + 'px'); +} + +function newChat() { + chatPanelClear(); + logoShow(); + userInputTextAreaFocus(); + streamingChatCompletionsClear(); +} + +function sendMessage() { + let inputElement = userInputTextAreaGetElement(); + let inputValue = inputElement!.value; + + let notEmpty = inputValue.trim() !== ''; + if (notEmpty) { + let html = markdownToHtml(inputValue) || inputValue.replace(/\n/g, '
'); + chatPanelAppendMessage('user', html); + userInputTextAreaClear(); + varsUpdateHeightsAndWidths(); + streamingChatCompletionsProcessInput(inputValue); + } +} + +themeInit(); +markdownInit(); +userInputTextAreaInit(); +varsInit(); +streamingChatCompletionsInit(); +userInputTextAreaFocus(); + +(window as any).sendMessage = sendMessage; +(window as any).toggleTheme = toggleTheme; +(window as any).newChat = newChat; diff --git a/src/ai/.x/templates/openai-webpage-with-functions-ts/style.css b/src/ai/.x/templates/openai-webpage-with-functions-ts/style.css new file mode 100644 index 00000000..2b1dd145 --- /dev/null +++ b/src/ai/.x/templates/openai-webpage-with-functions-ts/style.css @@ -0,0 +1,367 @@ +:root { + --header-height: 0px; + --input-height: 0px; + --send-button-width: 36px; + --left-side-width: 250px; + --right-side-width: 0px; + --right-side-max-width: 768px; + --max-textarea-height: 200px; + --logo-size: 0.75in; + --logo-icon-size: 1.5em; + --border-radius: 10px; +} + +body { + background-color: #111; + color: #f2f2f2; + font-size: medium; + font-family: system-ui; + height: 100vh; + margin: 0px; + overflow: hidden; + max-height: 100vh; +} + +#header { + color: #222; +} + +body.light-theme #header { + color: #f2f2f2; +} + +#logo { + display: block; + margin-left: auto; + margin-right: auto; + margin-top: calc((100vh - var(--header-height) - var(--input-height) - 80px - var(--logo-size)) / 100 * 33); + filter: grayscale(50%); + width: var(--logo-size); + height: var(--logo-size); +} + +#logoIcon { + margin-bottom: calc(var(--logo-icon-size) / 4); + margin-right: calc(var(--logo-icon-size) / 4); + filter: grayscale(50%); + width: var(--logo-icon-size); + height: var(--logo-icon-size); +} + +#leftSide { + background-color: #000; + color: #f2f2f2; + width: var(--left-side-width); + max-width: var(--left-side-width); + height: 100vh; + max-height: 100vh; + overflow-y: auto; +} + +#newChatButton { + border: none; + cursor: pointer; + border-radius: 
var(--border-radius); + /* background-co lor: #557CB4; */ + width: calc(var(--left-side-width) - 16px); + margin-top: 16px; + margin-left: auto; + margin-right: auto; +} + +#rightSide { + width: 100%; + margin-left: auto; + margin-right: auto; + max-width: var(--right-side-max-width); +} + +#rightSideInside { + margin-left: auto; + margin-right: auto; + max-width: var(--right-side-max-width); +} + +#toggleThemeButton { + position: fixed; + top: 10px; + right: 0px; + cursor: pointer; + color: #fff; +} + +#chatPanel { + height: 100%; + max-height: calc(100vh - var(--header-height) - var(--input-height) - 32px); + overflow-y: auto; +} + +#sendButton { + border: none; + cursor: pointer; + font-size: 1em; + border-radius: var(--border-radius); + background-color: #557CB4; + width: var(--send-button-width); + padding: 0px; +} + +#userInputPanel { + display: flex; + max-width: 768px; +} + +#userInput { + margin-right: 15px; + width: 100%; + max-height: var(--max-textarea-height); + border-radius: var(--border-radius); + border-width: 2px; +} + +textarea { + resize: none; + background-color: #111; + color: #f2f2f2; +} + +body.light-theme textarea { + background-color: #fff; + color: #111; +} + +textarea.w3-border { + border-color: #333 !important; +} + +body.light-theme textarea.w3-border { + border-color: #ddd !important; +} + +textarea.w3-border:focus-visible { + border-color: #555 !important; + outline: none; +} + +body.light-theme textarea.w3-border:focus-visible { + border-color: #bbb !important; + outline: none; +} + +.user { + color: #d8d8d8; + background-color: #111; + border-radius: var(--border-radius); +} + +.computer { + color: #d8d8d8; + background-color: #111; + border-radius: var(--border-radius); +} + +div.user { + margin-bottom: 8px; + margin-right: 0px; + text-align: left; +} + +div.computer { + margin-bottom: 8px; + margin-right: 0px; + text-align: left; +} + +.message-author { + font-weight: bold; + padding-top: calc(var(--border-radius) / 2); + 
padding-left: var(--border-radius); + padding-right: var(--border-radius); +} + +p.message-author, p.message-author p { + margin: 0px; +} + +.message-content { + padding-left: var(--border-radius); + padding-bottom: calc(var(--border-radius) / 2); + padding-right: var(--border-radius); +} + +p.message-content, p.message-content p { + margin-top: 0px; + margin-left: 0px; + margin-right: 0px; +} + +.light-theme { + background-color: #fff; +} + +body.light-theme #toggleThemeButton { + color: #888; +} + +body.light-theme .user { + background-color: #fdfdfd; + color: #111; +} + +body.light-theme .computer { + background-color: #fdfdfd; + color: #111; +} + +#userInput::-webkit-scrollbar { + display: none; +} +#userInput { + -ms-overflow-style: none; + scrollbar-width: none; +} + +::-webkit-scrollbar { + height: 1rem; + width: .5rem; + background-color: #111; +} + +body.light-theme ::-webkit-scrollbar { + background-color: #fdfdfd; +} + +::-webkit-scrollbar:horizontal { + height: .5rem; + width: 1rem +} + +::-webkit-scrollbar:vertical { + height: .5rem; + width: 1rem +} + +::-webkit-scrollbar-track { + background-color: transparent; + border-radius: 9999px; +} + +::-webkit-scrollbar-thumb { + background-color: #0a0a0a; + border-color: rgba(255,255,255,var(--tw-border-opacity)); + border-radius: 9999px; + border-width: 1px; +} + +body.light-theme ::-webkit-scrollbar-thumb { + background-color: #fafafa; +} + +::-webkit-scrollbar-thumb:hover { + background-color: rgba(217,217,227,var(--tw-bg-opacity)) +} + + +.hljs { + margin: 0px; + padding: 16px; + padding-right: 0px; + border-radius: var(--border-radius); + overflow-x: auto; + max-width: 90vw; +} + +/* + +Atom One Dark by Daniel Gamage +Original One Dark Syntax theme from https://github.com/atom/one-dark-syntax + +base: #282c34 +mono-1: #abb2bf +mono-2: #818896 +mono-3: #5c6370 +hue-1: #56b6c2 +hue-2: #61aeee +hue-3: #c678dd +hue-4: #98c379 +hue-5: #e06c75 +hue-5-2: #be5046 +hue-6: #d19a66 +hue-6-2: #e6c07b + +*/ + +.hljs 
{ + color: #abb2bf; + background: #282c34; + } + + .hljs-comment, + .hljs-quote { + color: #5c6370; + font-style: italic; + } + + .hljs-doctag, + .hljs-keyword, + .hljs-formula { + color: #c678dd; + } + + .hljs-section, + .hljs-name, + .hljs-selector-tag, + .hljs-deletion, + .hljs-subst { + color: #e06c75; + } + + .hljs-literal { + color: #56b6c2; + } + + .hljs-string, + .hljs-regexp, + .hljs-addition, + .hljs-attribute, + .hljs-meta .hljs-string { + color: #98c379; + } + + .hljs-attr, + .hljs-variable, + .hljs-template-variable, + .hljs-type, + .hljs-selector-class, + .hljs-selector-attr, + .hljs-selector-pseudo, + .hljs-number { + color: #d19a66; + } + + .hljs-symbol, + .hljs-bullet, + .hljs-link, + .hljs-meta, + .hljs-selector-id, + .hljs-title { + color: #61aeee; + } + + .hljs-built_in, + .hljs-title.class_, + .hljs-class .hljs-title { + color: #e6c07b; + } + + .hljs-emphasis { + font-style: italic; + } + + .hljs-strong { + font-weight: bold; + } + + .hljs-link { + text-decoration: underline; + } diff --git a/src/ai/.x/templates/openai-webpage-with-functions-ts/tsconfig.json b/src/ai/.x/templates/openai-webpage-with-functions-ts/tsconfig.json new file mode 100644 index 00000000..464e3ae2 --- /dev/null +++ b/src/ai/.x/templates/openai-webpage-with-functions-ts/tsconfig.json @@ -0,0 +1,16 @@ +{ + "compilerOptions": { + "moduleResolution": "node", + "esModuleInterop": true, + "outDir": "./dist/", + "sourceMap": true, + "strict": true, + "module": "es6", + "target": "es5", + "allowJs": true, + "typeRoots": ["./node_modules/@types", "./types"] + }, + "include": [ + "./src/**/*" + ] +} diff --git a/src/ai/.x/templates/openai-webpage-with-functions-ts/types/marked.d.ts b/src/ai/.x/templates/openai-webpage-with-functions-ts/types/marked.d.ts new file mode 100644 index 00000000..6c35e540 --- /dev/null +++ b/src/ai/.x/templates/openai-webpage-with-functions-ts/types/marked.d.ts @@ -0,0 +1 @@ +declare module 'marked'; diff --git 
a/src/ai/.x/templates/openai-webpage-with-functions-ts/webpack.config.js b/src/ai/.x/templates/openai-webpage-with-functions-ts/webpack.config.js new file mode 100644 index 00000000..82f4687f --- /dev/null +++ b/src/ai/.x/templates/openai-webpage-with-functions-ts/webpack.config.js @@ -0,0 +1,32 @@ +const path = require('path'); +const webpack = require('webpack'); +const Dotenv = require('dotenv-webpack'); + +module.exports = { + entry: './src/script.ts', + output: { + filename: 'main.js', + path: path.resolve(__dirname, 'dist'), + }, + plugins: [ + new Dotenv(), + new webpack.DefinePlugin({ + 'process.env.ENDPOINT': JSON.stringify(process.env.ENDPOINT), + 'process.env.AZURE_API_KEY': JSON.stringify(process.env.AZURE_API_KEY), + 'process.env.DEPLOYMENT_NAME': JSON.stringify(process.env.DEPLOYMENT_NAME), + 'process.env.SYSTEM_PROMPT': JSON.stringify(process.env.SYSTEM_PROMPT), + }), + ], + resolve: { + extensions: [ '.tsx', '.ts', '.js' ], + }, + module: { + rules: [ + { + test: /\.tsx?$/, + use: 'ts-loader', + exclude: /node_modules/, + }, + ], + }, +}; diff --git a/src/ai/.x/templates/openai-webpage-with-functions/.env b/src/ai/.x/templates/openai-webpage-with-functions/.env new file mode 100644 index 00000000..bd323058 --- /dev/null +++ b/src/ai/.x/templates/openai-webpage-with-functions/.env @@ -0,0 +1,10 @@ +<#@ template hostspecific="true" #> +<#@ output extension=".env" encoding="utf-8" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_ENDPOINT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_KEY" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_CHAT_DEPLOYMENT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_SYSTEM_PROMPT" #> +AZURE_OPENAI_CHAT_DEPLOYMENT=<#= AZURE_OPENAI_CHAT_DEPLOYMENT #> +AZURE_OPENAI_KEY=<#= AZURE_OPENAI_KEY #> +AZURE_OPENAI_ENDPOINT=<#= AZURE_OPENAI_ENDPOINT #> +AZURE_OPENAI_SYSTEM_PROMPT=<#= AZURE_OPENAI_SYSTEM_PROMPT #> diff --git a/src/ai/.x/templates/openai-webpage-with-functions/README.md 
b/src/ai/.x/templates/openai-webpage-with-functions/README.md new file mode 100644 index 00000000..8fee923d --- /dev/null +++ b/src/ai/.x/templates/openai-webpage-with-functions/README.md @@ -0,0 +1,35 @@ +# `ai` chat website + +This is a simple website chat interface that uses OpenAI's API to generate text responses to user input. + +User input is typed into a text box and added to the conversation as a message inside a chat panel. The panel scrolls up and the computer responds with streaming text output into another message in the chat panel. There is a left nav that has a "new chat" button and has a spot for future expansion w/ a list of historical chats. + +## Setup + +To build the website, run the following commands: + +```bash +npm install +npx webpack +``` + +To run the website, launch `index.html` in your browser. + +These setup steps are also represented in tasks.json and launch.json, so that you can build and run the website from within VS Code. + +## Project structure + +| Category | File | Description +| --- | --- | --- +| **SOURCE CODE** | ai.png | Logo/icon for the website. +| | index.html | HTML file with controls and layout. +| | style.css | CSS file with layout and styling. +| | src/script.js | Main JS file with HTML to JS interactions. +| | src/ChatCompletionsStreaming.js | Main JS file with JS to OpenAI interactions. +| | | +| **VS CODE** | .vscode/tasks.json | VS Code tasks to build and run the website. +| | .vscode/launch.json | VS Code launch configuration to run the website. +| | | +| **BUILD + PACKAGING** | .env | Contains the API keys, endpoints, etc. +| | package.json | Contains the dependencies. +| | webpack.config.js | The webpack config file. 
\ No newline at end of file diff --git a/src/ai/.x/templates/openai-webpage-with-functions/_.json b/src/ai/.x/templates/openai-webpage-with-functions/_.json new file mode 100644 index 00000000..b5280310 --- /dev/null +++ b/src/ai/.x/templates/openai-webpage-with-functions/_.json @@ -0,0 +1,10 @@ +{ + "_LongName": "OpenAI Webpage (w/ Functions)", + "_ShortName": "openai-webpage-with-functions", + "_Language": "JavaScript", + "ClassName": "OpenAIChatCompletionsFunctionsStreamingClass", + "AZURE_OPENAI_ENDPOINT": "", + "AZURE_OPENAI_KEY": "", + "AZURE_OPENAI_CHAT_DEPLOYMENT": "", + "AZURE_OPENAI_SYSTEM_PROMPT": "You are a helpful AI assistant." +} \ No newline at end of file diff --git a/src/ai/.x/templates/openai-webpage-with-functions/ai.png b/src/ai/.x/templates/openai-webpage-with-functions/ai.png new file mode 100644 index 00000000..4ba344c9 Binary files /dev/null and b/src/ai/.x/templates/openai-webpage-with-functions/ai.png differ diff --git a/src/ai/.x/templates/openai-webpage-with-functions/index.html b/src/ai/.x/templates/openai-webpage-with-functions/index.html new file mode 100644 index 00000000..e27151ba --- /dev/null +++ b/src/ai/.x/templates/openai-webpage-with-functions/index.html @@ -0,0 +1,62 @@ + + + + + + + + + Chat Interface + + + + +
+ +
+ +
+ +
+ +
+ +
+ + + + +
+
+ + + +
+ +
+
+
+ + +
+ + +
+ +
+ +
+ + + + + + + \ No newline at end of file diff --git a/src/ai/.x/templates/openai-webpage-with-functions/package.json b/src/ai/.x/templates/openai-webpage-with-functions/package.json new file mode 100644 index 00000000..89463238 --- /dev/null +++ b/src/ai/.x/templates/openai-webpage-with-functions/package.json @@ -0,0 +1,22 @@ +{ + "name": "chat-interface", + "version": "1.0.0", + "description": "Chat Interface with OpenAI", + "main": "script.js", + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "author": "", + "license": "MIT", + "dependencies": { + "@azure/openai": "1.0.0-beta.10", + "highlight.js": "^11.7.2", + "marked": "^4.0.10" + }, + "keywords": [], + "devDependencies": { + "dotenv-webpack": "^7.0.3", + "webpack": "^5.89.0", + "webpack-cli": "^5.1.4" + } +} diff --git a/src/ai/.x/templates/openai-webpage/src/FunctionCallContext.js b/src/ai/.x/templates/openai-webpage-with-functions/src/FunctionCallContext.js similarity index 100% rename from src/ai/.x/templates/openai-webpage/src/FunctionCallContext.js rename to src/ai/.x/templates/openai-webpage-with-functions/src/FunctionCallContext.js diff --git a/src/ai/.x/templates/openai-webpage/src/FunctionFactory.js b/src/ai/.x/templates/openai-webpage-with-functions/src/FunctionFactory.js similarity index 100% rename from src/ai/.x/templates/openai-webpage/src/FunctionFactory.js rename to src/ai/.x/templates/openai-webpage-with-functions/src/FunctionFactory.js diff --git a/src/ai/.x/templates/openai-webpage-with-functions/src/OpenAIChatCompletionsCustomFunctions.js b/src/ai/.x/templates/openai-webpage-with-functions/src/OpenAIChatCompletionsCustomFunctions.js new file mode 100644 index 00000000..15ed3234 --- /dev/null +++ b/src/ai/.x/templates/openai-webpage-with-functions/src/OpenAIChatCompletionsCustomFunctions.js @@ -0,0 +1,62 @@ +const { FunctionFactory } = require("./FunctionFactory"); +let factory = new FunctionFactory(); + +function getCurrentWeather(function_arguments) { + const 
location = JSON.parse(function_arguments).location; + return `The weather in ${location} is 72 degrees and sunny.`; + }; + +const getCurrentWeatherSchema = { + name: "get_current_weather", + description: "Get the current weather in a given location", + parameters: { + type: "object", + properties: { + location: { + type: "string", + description: "The city and state, e.g. San Francisco, CA", + }, + unit: { + type: "string", + enum: ["celsius", "fahrenheit"], + }, + }, + required: ["location"], + }, +}; + +factory.addFunction(getCurrentWeatherSchema, getCurrentWeather); + +function getCurrentDate() { + const date = new Date(); + return `${date.getFullYear()}-${date.getMonth() + 1}-${date.getDate()}`; +} + +const getCurrentDateSchema = { + name: "get_current_date", + description: "Get the current date", + parameters: { + type: "object", + properties: {}, + }, +}; + +factory.addFunction(getCurrentDateSchema, getCurrentDate); + +function getCurrentTime() { + const date = new Date(); + return `${date.getHours()}:${date.getMinutes()}:${date.getSeconds()}`; +} + +const getCurrentTimeSchema = { + name: "get_current_time", + description: "Get the current time", + parameters: { + type: "object", + properties: {}, + }, +}; + +factory.addFunction(getCurrentTimeSchema, getCurrentTime); + +exports.factory = factory; \ No newline at end of file diff --git a/src/ai/.x/templates/openai-functions-streaming-js/ChatCompletionsFunctionsStreaming.js b/src/ai/.x/templates/openai-webpage-with-functions/src/OpenAIChatCompletionsFunctionsStreamingClass.js similarity index 64% rename from src/ai/.x/templates/openai-functions-streaming-js/ChatCompletionsFunctionsStreaming.js rename to src/ai/.x/templates/openai-webpage-with-functions/src/OpenAIChatCompletionsFunctionsStreamingClass.js index b379f063..907d6899 100644 --- a/src/ai/.x/templates/openai-functions-streaming-js/ChatCompletionsFunctionsStreaming.js +++ 
b/src/ai/.x/templates/openai-webpage-with-functions/src/OpenAIChatCompletionsFunctionsStreamingClass.js @@ -1,21 +1,21 @@ +<#@ template hostspecific="true" #> +<#@ output extension=".js" encoding="utf-8" #> +<#@ parameter type="System.String" name="ClassName" #> const { OpenAIClient, AzureKeyCredential } = require("@azure/openai"); -const { FunctionFactory } = require("./FunctionFactory"); const { FunctionCallContext } = require("./FunctionCallContext"); -class ChatCompletionsFunctionsStreaming { - constructor(systemPrompt, endpoint, azureApiKey, deploymentName, functionFactory) { - this.systemPrompt = systemPrompt; - this.endpoint = endpoint; - this.azureApiKey = azureApiKey; - this.deploymentName = deploymentName; - this.client = new OpenAIClient(this.endpoint, new AzureKeyCredential(this.azureApiKey)); - this.functionFactory = functionFactory || new FunctionFactory(); +class <#= ClassName #> { + constructor(openAIEndpoint, openAIKey, openAIChatDeploymentName, openAISystemPrompt, functionFactory) { + this.openAISystemPrompt = openAISystemPrompt; + this.openAIChatDeploymentName = openAIChatDeploymentName; + this.client = new OpenAIClient(openAIEndpoint, new AzureKeyCredential(openAIKey)); + this.functionFactory = functionFactory; this.clearConversation(); } clearConversation() { this.messages = [ - { role: 'system', content: this.systemPrompt } + { role: 'system', content: this.openAISystemPrompt } ]; this.functionCallContext = new FunctionCallContext(this.functionFactory, this.messages); } @@ -23,9 +23,9 @@ class ChatCompletionsFunctionsStreaming { async getChatCompletions(userInput, callback) { this.messages.push({ role: 'user', content: userInput }); - let contentComplete = ""; + let contentComplete = ''; while (true) { - const events = this.client.listChatCompletions(this.deploymentName, this.messages, { + const events = await this.client.streamChatCompletions(this.openAIChatDeploymentName, this.messages, { functions: this.functionFactory.getFunctionSchemas(), 
}); @@ -58,4 +58,4 @@ class ChatCompletionsFunctionsStreaming { } } -exports.ChatCompletionsFunctionsStreaming = ChatCompletionsFunctionsStreaming; \ No newline at end of file +exports.<#= ClassName #> = <#= ClassName #>; \ No newline at end of file diff --git a/src/ai/.x/templates/openai-webpage-with-functions/src/script.js b/src/ai/.x/templates/openai-webpage-with-functions/src/script.js new file mode 100644 index 00000000..dc82572b --- /dev/null +++ b/src/ai/.x/templates/openai-webpage-with-functions/src/script.js @@ -0,0 +1,289 @@ +<#@ template hostspecific="true" #> +<#@ output extension=".js" encoding="utf-8" #> +<#@ parameter type="System.String" name="ClassName" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_ENDPOINT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_KEY" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_CHAT_DEPLOYMENT" #> +<#@ parameter type="System.String" name="AZURE_OPENAI_SYSTEM_PROMPT" #> +const marked = require("marked"); +const hljs = require("highlight.js"); + +const { factory } = require("./OpenAIChatCompletionsCustomFunctions"); + +const { <#= ClassName #> } = require('./OpenAIChatCompletionsFunctionsStreamingClass'); +let streamingChatCompletions; + +function streamingChatCompletionsInit() { + + const openAIEndpoint = process.env.AZURE_OPENAI_ENDPOINT || "<#= AZURE_OPENAI_ENDPOINT #>"; + const openAIKey = process.env.AZURE_OPENAI_KEY || "<#= AZURE_OPENAI_KEY #>"; + const openAIChatDeploymentName = process.env.AZURE_OPENAI_CHAT_DEPLOYMENT || "<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>" ; + const openAISystemPrompt = process.env.AZURE_OPENAI_SYSTEM_PROMPT || "<#= AZURE_OPENAI_SYSTEM_PROMPT #>"; + + if (!openAIEndpoint || openAIEndpoint.startsWith('(openAIEndpoint, openAIKey, openAIChatDeploymentName, openAISystemPrompt, factory); +} + +function streamingChatCompletionsClear() { + streamingChatCompletions.clearConversation(); +} + +async function streamingChatCompletionsProcessInput(userInput) { + const 
blackVerticalRectangle = '\u25AE'; // Black vertical rectangle ('▮') to simulate an insertion point + + let newMessage = chatPanelAppendMessage('computer', blackVerticalRectangle); + let completeResponse = ""; + + let computerResponse = await streamingChatCompletions.getChatCompletions(userInput, function (response) { + let atBottomBeforeUpdate = chatPanelIsScrollAtBottom(); + + completeResponse += response; + let withEnding = `${completeResponse}${blackVerticalRectangle}`; + let asHtml = markdownToHtml(withEnding); + + if (asHtml !== undefined) { + newMessage.innerHTML = asHtml; + + if (atBottomBeforeUpdate) { + chatPanelScrollToBottom(); + } + } + }); + + newMessage.innerHTML = markdownToHtml(computerResponse) || computerResponse.replace(/\n/g, '
'); + chatPanel.scrollTop = chatPanel.scrollHeight; +} + +function chatPanelGetElement() { + return document.getElementById("chatPanel"); +} + +function chatPanelAppendMessage(sender, message) { + logoHide(); + + let messageContent = document.createElement("p"); + messageContent.className = "message-content"; + messageContent.innerHTML = message; + + let messageAuthor = document.createElement("p"); + messageAuthor.className = "message-author"; + messageAuthor.innerHTML = sender == "user" ? "You" : "Assistant"; + + let divContainingBoth = document.createElement("div"); + divContainingBoth.className = sender === "user" ? "user" : "computer"; + divContainingBoth.appendChild(messageAuthor); + divContainingBoth.appendChild(messageContent); + + let chatPanel = chatPanelGetElement(); + chatPanel.appendChild(divContainingBoth); + chatPanelScrollToBottom(); + + return messageContent; +} + +function chatPanelIsScrollAtBottom() { + let chatPanel = chatPanelGetElement(); + let atBottom = Math.abs(chatPanel.scrollHeight - chatPanel.clientHeight - chatPanel.scrollTop) < 1; + return atBottom; +} + +function chatPanelScrollToBottom() { + let chatPanel = chatPanelGetElement(); + chatPanel.scrollTop = chatPanel.scrollHeight; +} + +function chatPanelClear() { + let chatPanel = chatPanelGetElement(); + chatPanel.innerHTML = ''; +} + +function logoGetElement() { + return document.getElementById("logo"); +} + +function logoShow() { + let logo = logoGetElement(); + logo.style.display = "block"; +} + +function logoHide() { + let logo = logoGetElement(); + logo.style.display = "none"; +} + +function markdownInit() { + marked.setOptions({ + highlight: function (code, lang) { + let hl = lang === undefined || lang === '' + ? hljs.highlightAuto(code).value + : hljs.highlight(lang, code).value; + return `
${hl}
`; + } + }); +} + +function markdownToHtml(markdownText) { + try { + return marked.parse(markdownText); + } + catch (error) { + return undefined; + } +} + +function themeInit() { + let currentTheme = localStorage.getItem('theme'); + if (currentTheme === 'dark') { + themeSetDark(); + } + else if (currentTheme === 'light') { + themeSetLight(); + } + toggleThemeButtonInit(); +} + +function themeIsLight() { + return document.body.classList.contains("light-theme"); +} + +function themeIsDark() { + return !themeIsLight(); +} + +function toggleTheme() { + if (themeIsLight()) { + themeSetDark(); + } else { + themeSetLight(); + } +} + +function themeSetLight() { + if (!themeIsLight()) { + document.body.classList.add("light-theme"); + localStorage.setItem('theme', 'light'); + + let iconElement = toggleThemeButtonGetElement().children[0]; + iconElement.classList.remove("fa-toggle-on"); + iconElement.classList.add("fa-toggle-off"); + } +} + +function themeSetDark() { + if (!themeIsDark()) { + document.body.classList.remove("light-theme"); + localStorage.setItem('theme', 'dark'); + + let iconElement = toggleThemeButtonGetElement().children[0]; + iconElement.classList.remove("fa-toggle-off"); + iconElement.classList.add("fa-toggle-on"); + } +} + +function toggleThemeButtonGetElement() { + return document.getElementById("toggleThemeButton"); +} + +function toggleThemeButtonInit() { + let buttonElement = toggleThemeButtonGetElement(); + buttonElement.addEventListener("click", toggleTheme); + buttonElement.addEventListener('keydown', toggleThemeButtonHandleKeyDown()); +} + +function toggleThemeButtonHandleKeyDown() { + return function (event) { + if (event.code === 'Enter' || event.code === 'Space') { + toggleTheme(); + } + }; +} + +function userInputTextAreaGetElement() { + return document.getElementById("userInput"); +} + +function userInputTextAreaInit() { + let inputElement = userInputTextAreaGetElement(); + inputElement.addEventListener("keydown", 
userInputTextAreaHandleKeyDown()); + inputElement.addEventListener("input", userInputTextAreaUpdateHeight); +} + +function userInputTextAreaFocus() { + let inputElement = userInputTextAreaGetElement(); + inputElement.focus(); +} + +function userInputTextAreaClear() { + userInputTextAreaGetElement().value = ''; + userInputTextAreaUpdateHeight(); +} + +function userInputTextAreaUpdateHeight() { + let inputElement = userInputTextAreaGetElement(); + inputElement.style.height = 'auto'; + inputElement.style.height = (userInput.scrollHeight) + 'px'; +} + +function userInputTextAreaHandleKeyDown() { + return function (event) { + if (event.key === "Enter") { + if (!event.shiftKey) { + event.preventDefault(); + sendMessage(); + } + } + }; +} + +function varsInit() { + document.addEventListener('DOMContentLoaded', varsUpdateHeightsAndWidths); + window.addEventListener('resize', varsUpdateHeightsAndWidths); +} + +function varsUpdateHeightsAndWidths() { + let headerHeight = document.querySelector('#header').offsetHeight; + let userInputHeight = document.querySelector('#userInputPanel').offsetHeight; + document.documentElement.style.setProperty('--header-height', headerHeight + 'px'); + document.documentElement.style.setProperty('--input-height', userInputHeight + 'px'); +} + +function newChat() { + chatPanelClear(); + logoShow(); + userInputTextAreaFocus(); + streamingChatCompletionsClear(); +} + +function sendMessage() { + let inputElement = userInputTextAreaGetElement(); + let inputValue = inputElement.value; + + let notEmpty = inputValue.trim() !== ''; + if (notEmpty) { + let html = markdownToHtml(inputValue) || inputValue.replace(/\n/g, '
'); + chatPanelAppendMessage('user', html); + userInputTextAreaClear(); + varsUpdateHeightsAndWidths(); + streamingChatCompletionsProcessInput(inputValue); + } +} + +themeInit(); +markdownInit(); +userInputTextAreaInit(); +varsInit(); +streamingChatCompletionsInit(); +userInputTextAreaFocus(); + +window.sendMessage = sendMessage; +window.toggleTheme = toggleTheme; +window.newChat = newChat; diff --git a/src/ai/.x/templates/openai-webpage-with-functions/style.css b/src/ai/.x/templates/openai-webpage-with-functions/style.css new file mode 100644 index 00000000..2b1dd145 --- /dev/null +++ b/src/ai/.x/templates/openai-webpage-with-functions/style.css @@ -0,0 +1,367 @@ +:root { + --header-height: 0px; + --input-height: 0px; + --send-button-width: 36px; + --left-side-width: 250px; + --right-side-width: 0px; + --right-side-max-width: 768px; + --max-textarea-height: 200px; + --logo-size: 0.75in; + --logo-icon-size: 1.5em; + --border-radius: 10px; +} + +body { + background-color: #111; + color: #f2f2f2; + font-size: medium; + font-family: system-ui; + height: 100vh; + margin: 0px; + overflow: hidden; + max-height: 100vh; +} + +#header { + color: #222; +} + +body.light-theme #header { + color: #f2f2f2; +} + +#logo { + display: block; + margin-left: auto; + margin-right: auto; + margin-top: calc((100vh - var(--header-height) - var(--input-height) - 80px - var(--logo-size)) / 100 * 33); + filter: grayscale(50%); + width: var(--logo-size); + height: var(--logo-size); +} + +#logoIcon { + margin-bottom: calc(var(--logo-icon-size) / 4); + margin-right: calc(var(--logo-icon-size) / 4); + filter: grayscale(50%); + width: var(--logo-icon-size); + height: var(--logo-icon-size); +} + +#leftSide { + background-color: #000; + color: #f2f2f2; + width: var(--left-side-width); + max-width: var(--left-side-width); + height: 100vh; + max-height: 100vh; + overflow-y: auto; +} + +#newChatButton { + border: none; + cursor: pointer; + border-radius: var(--border-radius); + /* background-co lor: 
#557CB4; */ + width: calc(var(--left-side-width) - 16px); + margin-top: 16px; + margin-left: auto; + margin-right: auto; +} + +#rightSide { + width: 100%; + margin-left: auto; + margin-right: auto; + max-width: var(--right-side-max-width); +} + +#rightSideInside { + margin-left: auto; + margin-right: auto; + max-width: var(--right-side-max-width); +} + +#toggleThemeButton { + position: fixed; + top: 10px; + right: 0px; + cursor: pointer; + color: #fff; +} + +#chatPanel { + height: 100%; + max-height: calc(100vh - var(--header-height) - var(--input-height) - 32px); + overflow-y: auto; +} + +#sendButton { + border: none; + cursor: pointer; + font-size: 1em; + border-radius: var(--border-radius); + background-color: #557CB4; + width: var(--send-button-width); + padding: 0px; +} + +#userInputPanel { + display: flex; + max-width: 768px; +} + +#userInput { + margin-right: 15px; + width: 100%; + max-height: var(--max-textarea-height); + border-radius: var(--border-radius); + border-width: 2px; +} + +textarea { + resize: none; + background-color: #111; + color: #f2f2f2; +} + +body.light-theme textarea { + background-color: #fff; + color: #111; +} + +textarea.w3-border { + border-color: #333 !important; +} + +body.light-theme textarea.w3-border { + border-color: #ddd !important; +} + +textarea.w3-border:focus-visible { + border-color: #555 !important; + outline: none; +} + +body.light-theme textarea.w3-border:focus-visible { + border-color: #bbb !important; + outline: none; +} + +.user { + color: #d8d8d8; + background-color: #111; + border-radius: var(--border-radius); +} + +.computer { + color: #d8d8d8; + background-color: #111; + border-radius: var(--border-radius); +} + +div.user { + margin-bottom: 8px; + margin-right: 0px; + text-align: left; +} + +div.computer { + margin-bottom: 8px; + margin-right: 0px; + text-align: left; +} + +.message-author { + font-weight: bold; + padding-top: calc(var(--border-radius) / 2); + padding-left: var(--border-radius); + padding-right: 
var(--border-radius); +} + +p.message-author, p.message-author p { + margin: 0px; +} + +.message-content { + padding-left: var(--border-radius); + padding-bottom: calc(var(--border-radius) / 2); + padding-right: var(--border-radius); +} + +p.message-content, p.message-content p { + margin-top: 0px; + margin-left: 0px; + margin-right: 0px; +} + +.light-theme { + background-color: #fff; +} + +body.light-theme #toggleThemeButton { + color: #888; +} + +body.light-theme .user { + background-color: #fdfdfd; + color: #111; +} + +body.light-theme .computer { + background-color: #fdfdfd; + color: #111; +} + +#userInput::-webkit-scrollbar { + display: none; +} +#userInput { + -ms-overflow-style: none; + scrollbar-width: none; +} + +::-webkit-scrollbar { + height: 1rem; + width: .5rem; + background-color: #111; +} + +body.light-theme ::-webkit-scrollbar { + background-color: #fdfdfd; +} + +::-webkit-scrollbar:horizontal { + height: .5rem; + width: 1rem +} + +::-webkit-scrollbar:vertical { + height: .5rem; + width: 1rem +} + +::-webkit-scrollbar-track { + background-color: transparent; + border-radius: 9999px; +} + +::-webkit-scrollbar-thumb { + background-color: #0a0a0a; + border-color: rgba(255,255,255,var(--tw-border-opacity)); + border-radius: 9999px; + border-width: 1px; +} + +body.light-theme ::-webkit-scrollbar-thumb { + background-color: #fafafa; +} + +::-webkit-scrollbar-thumb:hover { + background-color: rgba(217,217,227,var(--tw-bg-opacity)) +} + + +.hljs { + margin: 0px; + padding: 16px; + padding-right: 0px; + border-radius: var(--border-radius); + overflow-x: auto; + max-width: 90vw; +} + +/* + +Atom One Dark by Daniel Gamage +Original One Dark Syntax theme from https://github.com/atom/one-dark-syntax + +base: #282c34 +mono-1: #abb2bf +mono-2: #818896 +mono-3: #5c6370 +hue-1: #56b6c2 +hue-2: #61aeee +hue-3: #c678dd +hue-4: #98c379 +hue-5: #e06c75 +hue-5-2: #be5046 +hue-6: #d19a66 +hue-6-2: #e6c07b + +*/ + +.hljs { + color: #abb2bf; + background: #282c34; + } + + 
.hljs-comment, + .hljs-quote { + color: #5c6370; + font-style: italic; + } + + .hljs-doctag, + .hljs-keyword, + .hljs-formula { + color: #c678dd; + } + + .hljs-section, + .hljs-name, + .hljs-selector-tag, + .hljs-deletion, + .hljs-subst { + color: #e06c75; + } + + .hljs-literal { + color: #56b6c2; + } + + .hljs-string, + .hljs-regexp, + .hljs-addition, + .hljs-attribute, + .hljs-meta .hljs-string { + color: #98c379; + } + + .hljs-attr, + .hljs-variable, + .hljs-template-variable, + .hljs-type, + .hljs-selector-class, + .hljs-selector-attr, + .hljs-selector-pseudo, + .hljs-number { + color: #d19a66; + } + + .hljs-symbol, + .hljs-bullet, + .hljs-link, + .hljs-meta, + .hljs-selector-id, + .hljs-title { + color: #61aeee; + } + + .hljs-built_in, + .hljs-title.class_, + .hljs-class .hljs-title { + color: #e6c07b; + } + + .hljs-emphasis { + font-style: italic; + } + + .hljs-strong { + font-weight: bold; + } + + .hljs-link { + text-decoration: underline; + } diff --git a/src/ai/.x/templates/openai-webpage-with-functions/webpack.config.js b/src/ai/.x/templates/openai-webpage-with-functions/webpack.config.js new file mode 100644 index 00000000..b3b87bf1 --- /dev/null +++ b/src/ai/.x/templates/openai-webpage-with-functions/webpack.config.js @@ -0,0 +1,20 @@ +const path = require('path'); +const webpack = require('webpack'); +const Dotenv = require('dotenv-webpack'); + +module.exports = { + entry: './src/script.js', + output: { + filename: 'main.js', + path: path.resolve(__dirname, 'dist'), + }, + plugins: [ + new Dotenv(), + new webpack.DefinePlugin({ + 'process.env.ENDPOINT': JSON.stringify(process.env.ENDPOINT), + 'process.env.AZURE_API_KEY': JSON.stringify(process.env.AZURE_API_KEY), + 'process.env.DEPLOYMENT_NAME': JSON.stringify(process.env.DEPLOYMENT_NAME), + 'process.env.SYSTEM_PROMPT': JSON.stringify(process.env.SYSTEM_PROMPT), + }), + ], +}; \ No newline at end of file diff --git a/src/ai/.x/templates/openai-webpage/.vscode/launch.json 
b/src/ai/.x/templates/openai-webpage/.vscode/launch.json deleted file mode 100644 index 30fc6258..00000000 --- a/src/ai/.x/templates/openai-webpage/.vscode/launch.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "version": "0.2.0", - "configurations": [ - { - "type": "chrome", - "request": "launch", - "name": "Launch Chrome", - "file": "${workspaceFolder}/index.html" - } - ] -} \ No newline at end of file diff --git a/src/ai/.x/templates/openai-webpage/.vscode/tasks.json b/src/ai/.x/templates/openai-webpage/.vscode/tasks.json deleted file mode 100644 index d5460be9..00000000 --- a/src/ai/.x/templates/openai-webpage/.vscode/tasks.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - "version": "2.0.0", - "tasks": [ - { - "label": "install dependencies", - "type": "shell", - "command": "npm install", - "problemMatcher": [] - }, - { - "label": "build", - "type": "shell", - "command": "npx webpack", - "problemMatcher": [] - } - ] -} \ No newline at end of file diff --git a/src/ai/.x/templates/openai-webpage/_.json b/src/ai/.x/templates/openai-webpage/_.json deleted file mode 100644 index b996236c..00000000 --- a/src/ai/.x/templates/openai-webpage/_.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "_Name": "OpenAI Webpage (Streaming + Functions)", - "_Language": "Javascript", - "OPENAI_ENDPOINT": "", - "OPENAI_API_KEY": "", - "AZURE_OPENAI_CHAT_DEPLOYMENT": "", - "AZURE_OPENAI_SYSTEM_PROMPT": "You are a helpful AI assistant." 
-} \ No newline at end of file diff --git a/src/ai/.x/templates/openai-webpage/src/ChatCompletionsCustomFunctions.js b/src/ai/.x/templates/openai-webpage/src/ChatCompletionsCustomFunctions.js deleted file mode 100644 index 1776c03e..00000000 --- a/src/ai/.x/templates/openai-webpage/src/ChatCompletionsCustomFunctions.js +++ /dev/null @@ -1,16 +0,0 @@ -function getCurrentDate() { - const date = new Date(); - return `${date.getFullYear()}-${date.getMonth() + 1}-${date.getDate()}`; -} - -const getCurrentDateSchema = { - name: "get_current_date", - description: "Get the current date", - parameters: { - type: "object", - properties: {}, - }, -}; - -exports.getCurrentDate = getCurrentDate; -exports.getCurrentDateSchema = getCurrentDateSchema; diff --git a/src/ai/Program_AI.cs b/src/ai/Program_AI.cs index f987de40..b71b31c9 100644 --- a/src/ai/Program_AI.cs +++ b/src/ai/Program_AI.cs @@ -4,27 +4,84 @@ // using System; +using System.Diagnostics; using System.Linq; using System.Threading.Tasks; using Azure.AI.Details.Common.CLI.Telemetry; +using Azure.AI.Details.Common.CLI.Telemetry.Events; namespace Azure.AI.Details.Common.CLI { public class AiProgram { - static async Task Main(string[] args) + static async Task Main(string[] args) { - IProgramData data = new AiProgramData(); - int exitCode = Program.Main(data, args); + IProgramData data = null; + Stopwatch stopwatch = new Stopwatch(); + int exitCode = int.MinValue; - if (data.Telemetry != null) + try { - await data.Telemetry.DisposeAsync() - .ConfigureAwait(false); + bool isDebug = args.Length > 0 && args[0] == "debug"; + if (isDebug) + { + Console.WriteLine($"StopWatch: Started at {DateTime.Now}"); + } + + stopwatch.Start(); + + data = new AiProgramData(); + exitCode = Program.Main(data, args); + stopwatch.Stop(); + + if (isDebug) + { + Console.WriteLine($"StopWatch: Stopped at {DateTime.Now} ({GetStopWatchElapsedAsString(stopwatch.Elapsed)})"); + } + + if (data.Telemetry != null) + { + await data.Telemetry.DisposeAsync() 
+ .ConfigureAwait(false); + } + + return exitCode; + } + catch (Exception) + { + exitCode = -1; + throw; } + finally + { + if (data?.Telemetry != null) + { + await data.Telemetry.LogEventAsync(new ExitedTelemetryEvent() + { + ExitCode = exitCode, + Elapsed = stopwatch.Elapsed + }).ConfigureAwait(false); + + await data.Telemetry.DisposeAsync() + .ConfigureAwait(false); + } + } + } + + static string GetStopWatchElapsedAsString(TimeSpan elapsed) + { + var elapsedMilliseconds = elapsed.TotalMilliseconds; + var elapsedSeconds = elapsed.TotalSeconds; + var elapsedMinutes = elapsed.TotalMinutes; + var elapsedHours = elapsed.TotalHours; + + var elapsedString = elapsedSeconds < 1 ? $"{elapsedMilliseconds} ms" + : elapsedMinutes < 1 ? $"{elapsedSeconds:0.00} sec" + : elapsedHours < 1 ? $"{elapsedMinutes:0.00} min" + : $"{elapsedHours:0.00} hr"; - Environment.Exit(exitCode); + return elapsedString; } } @@ -62,11 +119,11 @@ public AiProgramData() #endregion #region help command data - public string HelpCommandTokens => "wizard;dev;init;config;chat;flow;speech;vision;language;search;service;tool;samples;code;eval;run"; + public string HelpCommandTokens => "wizard;dev;init;config;chat;flow;speech;vision;language;search;service;tool;samples;eval;run"; #endregion #region config command data - public string ConfigScopeTokens => $"wizard;dev;init;chat;flow;speech;vision;language;search;service;tool;samples;code;eval;run;*"; + public string ConfigScopeTokens => $"wizard;dev;init;chat;flow;speech;vision;language;search;service;tool;samples;eval;run;*"; #endregion #region zip option data @@ -127,8 +184,6 @@ public bool DispatchRunCommand(ICommandValues values) "search" => (new SearchCommand(values)).RunCommand(), "service" => (new ServiceCommand(values)).RunCommand(), "tool" => (new ToolCommand(values)).RunCommand(), - "samples" => (new SamplesCommand(values)).RunCommand(), - "code" => (new CodeCommand(values)).RunCommand(), "eval" => (new EvalCommand(values)).RunCommand(), "wizard" => 
(new ScenarioWizardCommand(values)).RunCommand(), "dev" => (new DevCommand(values)).RunCommand(), @@ -156,8 +211,6 @@ public bool DispatchParseCommand(INamedValueTokens tokens, ICommandValues values "search" => SearchCommandParser.ParseCommand(tokens, values), "service" => ServiceCommandParser.ParseCommand(tokens, values), "tool" => ToolCommandParser.ParseCommand(tokens, values), - "samples" => SamplesCommandParser.ParseCommand(tokens, values), - "code" => CodeCommandParser.ParseCommand(tokens, values), "wizard" => ScenarioWizardCommandParser.ParseCommand(tokens, values), "dev" => DevCommandParser.ParseCommand(tokens, values), "run" => RunJobCommandParser.ParseCommand(tokens, values), @@ -181,9 +234,6 @@ public bool DispatchParseCommandValues(INamedValueTokens tokens, ICommandValues "search" => SearchCommandParser.ParseCommandValues(tokens, values), "service" => ServiceCommandParser.ParseCommandValues(tokens, values), "tool" => ToolCommandParser.ParseCommandValues(tokens, values), - "samples" => SamplesCommandParser.ParseCommandValues(tokens, values), - "code" => CodeCommandParser.ParseCommandValues(tokens, values), - "complete" => CompleteCommandParser.ParseCommandValues(tokens, values), "wizard" => ScenarioWizardCommandParser.ParseCommandValues(tokens, values), "dev" => DevCommandParser.ParseCommandValues(tokens, values), "run" => RunJobCommandParser.ParseCommandValues(tokens, values), diff --git a/src/ai/ai-cli.csproj b/src/ai/ai-cli.csproj index 3dae7d80..3f6fb4cd 100644 --- a/src/ai/ai-cli.csproj +++ b/src/ai/ai-cli.csproj @@ -1,9 +1,8 @@  - net7.0 + net8.0 ai - false Exe win-x64;linux-x64 Debug;Release @@ -27,6 +26,20 @@ true + + false + + + + + true + true + true + true + true + linker.xml + + $(LocalBuildSDKBinPath) bin diff --git a/src/ai/commands/code_command.cs b/src/ai/commands/code_command.cs deleted file mode 100644 index 91c77aa4..00000000 --- a/src/ai/commands/code_command.cs +++ /dev/null @@ -1,67 +0,0 @@ -// -// Copyright (c) Microsoft. 
All rights reserved. -// Licensed under the MIT license. See LICENSE.md file in the project root for full license information. -// - -using System; -using System.Collections.Generic; -using System.Diagnostics; -using System.IO; -using System.Linq; -using System.Net; -using System.Text; -using System.Text.RegularExpressions; -using System.Text.Json; -using System.Text.Json.Serialization; -using System.Threading; -using System.Threading.Tasks; -using Newtonsoft.Json.Linq; - -namespace Azure.AI.Details.Common.CLI -{ - public class CodeCommand : Command - { - internal CodeCommand(ICommandValues values) - { - _values = values.ReplaceValues(); - _quiet = _values.GetOrDefault("x.quiet", false); - _verbose = _values.GetOrDefault("x.verbose", true); - } - - internal bool RunCommand() - { - try - { - RunCodeCommand(); - } - catch (WebException ex) - { - ConsoleHelpers.WriteLineError($"\n ERROR: {ex.Message}"); - JsonHelpers.PrintJson(HttpHelpers.ReadWriteJson(ex.Response, _values, "code")); - } - - return _values.GetOrDefault("passed", true); - } - - private bool RunCodeCommand() - { - DoCommand(_values.GetCommand()); - return _values.GetOrDefault("passed", true); - } - - private void DoCommand(string command) - { - CheckPath(); - - switch (command) - { - default: - _values.AddThrowError("WARNING:", $"'{command.Replace('.', ' ')}' NOT YET IMPLEMENTED!!"); - break; - } - } - - private bool _quiet = false; - private bool _verbose = false; - } -} diff --git a/src/ai/commands/complete_command.cs b/src/ai/commands/complete_command.cs deleted file mode 100644 index a8020294..00000000 --- a/src/ai/commands/complete_command.cs +++ /dev/null @@ -1,199 +0,0 @@ -// -// Copyright (c) Microsoft. All rights reserved. -// Licensed under the MIT license. See LICENSE.md file in the project root for full license information. 
-// - -using System; -using System.Diagnostics; -using System.IO; -using System.Linq; -using System.Text; -using System.Threading; -using System.Threading.Tasks; -using System.Collections.Generic; -using Newtonsoft.Json; -using Newtonsoft.Json.Linq; -using System.Net; -using Azure.AI.OpenAI; - -namespace Azure.AI.Details.Common.CLI -{ - public class CompleteCommand : Command - { - internal CompleteCommand(ICommandValues values) - { - _values = values.ReplaceValues(); - } - - internal bool RunCommand() - { - Complete(); - return _values.GetOrDefault("passed", true); - } - - private void Complete() - { - StartCommand(); - - var kind = _values["complete.input.type"]; - switch (kind) - { - case "": - case null: - case "interactive": - // SynthesizeInteractive(false); - // break; - - case "interactive+": - CompleteInteractively(true); - break; - - // TODO: Add support for other input types - } - - StopCommand(); - DisposeAfterStop(); - DeleteTemporaryFiles(); - } - - private void CompleteInteractively(bool repeatedly = false) - { - var client = CreateOpenAIClient(out var deployment); - var options = CreateCompletionOptions(); - - while (true) - { - Console.Write("[complete] >>> "); - var text = ConsoleHelpers.ReadLineOrDefault("", "exit"); - - if (text.ToLower() == "") break; - if (text.ToLower() == "stop") break; - if (text.ToLower() == "quit") break; - if (text.ToLower() == "exit") break; - - var task = GetCompletionsAsync(client, deployment, options, text); - WaitForStopOrCancel(task); - - if (!repeatedly) break; - if (_canceledEvent.WaitOne(0)) break; - } - } - - private async Task> GetCompletionsAsync(OpenAIClient client, string deployment, CompletionsOptions options, string text) - { - options.Prompts.Clear(); - options.Prompts.Add(text); - options.DeploymentName = deployment; - var response = await client.GetCompletionsAsync(options); - - Console.WriteLine(); - Console.WriteLine(response.Value.Choices[0].Text); - Console.WriteLine(); - - return response; - } - - 
private CompletionsOptions CreateCompletionOptions() - { - var options = new CompletionsOptions(); - // messages.ToList().ForEach(m => options.Messages.Add(m)); - - // options.MaxTokens = TryParse(maxTokens, _defaultMaxTokens); - // options.Temperature = TryParse(temperature, _defaultTemperature); - // options.FrequencyPenalty = TryParse(frequencyPenalty, _defaultFrequencyPenalty); - // options.PresencePenalty = TryParse(presencePenalty, _defaultPresencePenalty); - - // if (!string.IsNullOrEmpty(stop)) - // { - // var stops = stop.Split('\n', StringSplitOptions.RemoveEmptyEntries).ToList(); - // stops.ForEach(s => options.StopSequences.Add(s)); - // } - - return options; - } - - private OpenAIClient CreateOpenAIClient(out string deployment) - { - var key = _values["service.config.key"]; - var host = _values["service.config.host"]; - var region = _values["service.config.region"]; - var endpoint = ConfigEndpointUriToken.Data().GetOrDefault(_values); - var tokenValue = _values["service.config.token.value"]; - - deployment = ConfigDeploymentToken.Data().GetOrDefault(_values); - - if (string.IsNullOrEmpty(endpoint) && string.IsNullOrEmpty(region) && string.IsNullOrEmpty(host)) - { - _values.AddThrowError("ERROR:", $"Creating OpenAIClient; requires one of: region, endpoint, or host."); - } - else if (!string.IsNullOrEmpty(region) && string.IsNullOrEmpty(tokenValue) && string.IsNullOrEmpty(key)) - { - _values.AddThrowError("ERROR:", $"Creating OpenAIClient; use of region requires one of: key or token."); - } - else if (string.IsNullOrEmpty(deployment)) - { - _values.AddThrowError("ERROR:", $"Creating OpenAIClient; requires deployment."); - } - - if (!string.IsNullOrEmpty(endpoint)) - { - return new OpenAIClient( - new Uri(endpoint!), - new AzureKeyCredential(key!)); - } - else if (!string.IsNullOrEmpty(host)) - { - _values.AddThrowError("ERROR:", $"Creating OpenAIClient; Not-yet-implemented create from host."); - return null; - } - else // if 
(!string.IsNullOrEmpty(region)) - { - _values.AddThrowError("ERROR:", $"Creating OpenAIClient; Not-yet-implemented create from region."); - return null; - } - } - - private void WaitForStopOrCancel(Task> task) - { - var interval = 100; - - while (!task.Wait(interval)) - { - if (_stopEvent.WaitOne(0)) break; - if (_canceledEvent.WaitOne(0)) break; - } - } - - private void StartCommand() - { - CheckPath(); - // CheckCompleteInput(); - - // _display = new DisplayHelper(_values); - - // _output = new OutputHelper(_values); - // _output.StartOutput(); - - // var id = _values["complete.input.id"]; - // _output.EnsureOutputAll("complete.input.id", id); - // _output.EnsureOutputEach("complete.input.id", id); - - _lock = new SpinLock(); - _lock.StartLock(); - } - - private void StopCommand() - { - _lock.StopLock(5000); - _stopEvent.Set(); - - // _output.CheckOutput(); - // _output.StopOutput(); - } - - private SpinLock _lock = null; - - // OutputHelper _output = null; - // DisplayHelper _display = null; - } -} diff --git a/src/ai/commands/dev_command.cs b/src/ai/commands/dev_command.cs index 59ca3525..cda027cb 100644 --- a/src/ai/commands/dev_command.cs +++ b/src/ai/commands/dev_command.cs @@ -45,7 +45,7 @@ private bool RunDevCommand() private void DoCommand(string command) { - CheckPath(); + StartCommand(); switch (command) { @@ -57,15 +57,20 @@ private void DoCommand(string command) _values.AddThrowError("WARNING:", $"'{command.Replace('.', ' ')}' NOT YET IMPLEMENTED!!"); break; } + + StopCommand(); + DisposeAfterStop(); + DeleteTemporaryFiles(); } private void DoNew() { var newWhat = string.Join(" ", ArgXToken.GetArgs(_values)); + var language = ProgrammingLanguageToken.Data().GetOrDefault(_values); switch (newWhat) { case ".env": DoNewEnv(); break; - default: DoNewTemplate(newWhat); break; + default: DoNewTemplate(newWhat, language); break; } } @@ -80,23 +85,21 @@ private void DoNewEnv() ConfigEnvironmentHelpers.PrintEnvironment(env); } - private void 
DoNewTemplate(string templateName) + private void DoNewTemplate(string templateName, string language) { - var filesInDirAlready = FileHelpers.FindFiles(".", "*").Count() > 0; - var outputDirectory = !filesInDirAlready ? "." : templateName; + var outputDirectory = templateName + ProgrammingLanguageToken.GetSuffix(language); var instructions = InstructionsToken.Data().GetOrDefault(_values); - if (!TemplateFactory.GenerateTemplateFiles(templateName, instructions, outputDirectory, _quiet, _verbose)) - { - _values.AddThrowError("WARNING:", $"Template '{templateName}' not found", - "", - "TRY:", $"{Program.Name} dev new list"); - } + var found = TemplateFactory.GenerateTemplateFiles(templateName, language, instructions, outputDirectory, _quiet, _verbose); + CheckGenerateTemplateFileWarnings(templateName, language, found); } private void DoNewList() { - TemplateFactory.ListTemplates(); + var newWhat = string.Join(" ", ArgXToken.GetArgs(_values)); + var language = ProgrammingLanguageToken.Data().GetOrDefault(_values); + + TemplateFactory.ListTemplates(newWhat, language); } private void DoDevShell() @@ -113,16 +116,29 @@ private void DoDevShell() ConfigEnvironmentHelpers.SetEnvironment(env); Console.WriteLine(); - var runCommand = RunCommandToken.Data().GetOrDefault(_values); - UpdateFileNameArguments(runCommand, ref fileName, ref arguments); + var runCommand = RunCommandScriptToken.Data().GetOrDefault(_values); + + // var processOutput = string.IsNullOrEmpty(runCommand) + // ? 
ProcessHelpers.RunShellCommandAsync(fileName, arguments, env, null, null, null, false).Result + // : ProcessHelpers.RunShellCommandAsync(runCommand, env, null, null, null, false).Result; + + // var exitCode = processOutput.ExitCode; + + UpdateFileNameArguments(runCommand, ref fileName, ref arguments, out var deleteWhenDone); var process = ProcessHelpers.StartProcess(fileName, arguments, env, false); process.WaitForExit(); - if (process.ExitCode != 0) + if (!string.IsNullOrEmpty(deleteWhenDone)) + { + File.Delete(deleteWhenDone); + } + + var exitCode = process.ExitCode; + if (exitCode != 0) { Console.WriteLine("\n(ai dev shell) FAILED!\n"); - _values.AddThrowError("ERROR:", $"Shell exited with code {process.ExitCode}"); + _values.AddThrowError("ERROR:", $"Shell exited with code {exitCode}"); } else { @@ -130,22 +146,38 @@ private void DoDevShell() } } - private static void UpdateFileNameArguments(string runCommand, ref string fileName, ref string arguments) + private static void UpdateFileNameArguments(string runCommand, ref string fileName, ref string arguments, out string? deleteTempFileWhenDone) { + deleteTempFileWhenDone = null; + if (!string.IsNullOrEmpty(runCommand)) { - var parts = runCommand.Split(new char[] { ' ' }, 2); - var inPath = FileHelpers.FileExistsInOsPath(parts[0]) || (OS.IsWindows() && FileHelpers.FileExistsInOsPath(parts[0] + ".exe")); + var isSingleLine = !runCommand.Contains('\n') && !runCommand.Contains('\r'); + if (isSingleLine) + { + var parts = runCommand.Split(new char[] { ' ' }, 2); + var inPath = FileHelpers.FileExistsInOsPath(parts[0]) || (OS.IsWindows() && FileHelpers.FileExistsInOsPath(parts[0] + ".exe")); + + var filePart = parts[0]; + var argsPart = parts.Length == 2 ? parts[1] : null; - var filePart = parts[0]; - var argsPart = parts.Length == 2 ? parts[1] : null; + fileName = inPath ? filePart : fileName; + arguments = inPath ? argsPart : (OS.IsLinux() + ? 
$"-lic \"{runCommand}\"" + : $"/c \"{runCommand}\""); - fileName = inPath ? filePart : fileName; - arguments = inPath ? argsPart : (OS.IsLinux() - ? $"-lic \"{runCommand}\"" - : $"/c \"{runCommand}\""); + Console.WriteLine($"Running command: {runCommand}\n"); + } + else + { + deleteTempFileWhenDone = Path.GetTempFileName() + (OS.IsWindows() ? ".cmd" : ".sh"); + File.WriteAllText(deleteTempFileWhenDone, runCommand); - Console.WriteLine($"Running command: {runCommand}\n"); + fileName = OS.IsLinux() ? "bash" : "cmd.exe"; + arguments = OS.IsLinux() ? $"-lic \"{deleteTempFileWhenDone}\"" : $"/c \"{deleteTempFileWhenDone}\""; + + Console.WriteLine($"Running script:\n\n{runCommand}\n"); + } } } @@ -161,6 +193,76 @@ private void DisplayBanner(string which) } } + private void CheckGenerateTemplateFileWarnings(string templateName, string language, object check) + { + if (check != null && check is TemplateFactory.Group) + { + var group = check as TemplateFactory.Group; + var groupHasZeroLanguages = string.IsNullOrEmpty(group.Languages); + var groupHasMultipleLanguages = group.Languages.Contains(','); + var groupHasOneLanguage = !groupHasZeroLanguages && !groupHasMultipleLanguages; + + var languageSupplied = !string.IsNullOrEmpty(language); + if (languageSupplied) + { + if (groupHasZeroLanguages || groupHasOneLanguage) + { + _values.AddThrowError("WARNING:", $"Template '{templateName}' does not support language '{language}'.", + "", + "TRY:", $"{Program.Name} dev new {templateName}"); + } + else + { + _values.AddThrowError("WARNING:", $"Template '{templateName}' doesn't support language '{language}'.", + "", + "TRY:", $"{Program.Name} dev new {templateName} --LANGUAGE", + "", + "WHERE:", $"LANGUAGE is one of {group.Languages}"); + } + } + else + { + _values.AddThrowError("WARNING:", $"Template '{templateName}' supports multiple languages.", + "", + "TRY:", $"{Program.Name} dev new {templateName} --LANGUAGE", + "", + "WHERE:", $"LANGUAGE is one of {group.Languages}"); + } + } + 
if (check == null) + { + _values.AddThrowError("WARNING:", $"Template '{templateName}' not found.", + "", + "TRY:", $"{Program.Name} dev new list"); + } + } + + private void StartCommand() + { + CheckPath(); + LogHelpers.EnsureStartLogFile(_values); + + // _display = new DisplayHelper(_values); + + // _output = new OutputHelper(_values); + // _output.StartOutput(); + + _lock = new SpinLock(); + _lock.StartLock(); + } + + private void StopCommand() + { + _lock.StopLock(5000); + + // LogHelpers.EnsureStopLogFile(_values); + // _output.CheckOutput(); + // _output.StopOutput(); + + _stopEvent.Set(); + } + + private SpinLock _lock = null; private readonly bool _quiet; private readonly bool _verbose; } diff --git a/src/ai/commands/parsers/code_command_parser.cs b/src/ai/commands/parsers/code_command_parser.cs deleted file mode 100644 index 967eb3ec..00000000 --- a/src/ai/commands/parsers/code_command_parser.cs +++ /dev/null @@ -1,78 +0,0 @@ -// -// Copyright (c) Microsoft. All rights reserved. -// Licensed under the MIT license. See LICENSE.md file in the project root for full license information. 
-// - -using System; -using System.Collections.Generic; -using System.Linq; - -namespace Azure.AI.Details.Common.CLI -{ - class CodeCommandParser : CommandParser - { - public static bool ParseCommand(INamedValueTokens tokens, ICommandValues values) - { - return ParseCommands(_commands, _partialCommands, tokens, values, x => GetCommandParsers(x)); - } - - public static bool ParseCommandValues(INamedValueTokens tokens, ICommandValues values) - { - return ParseCommandValues("code", GetCommandParsers(values), tokens, values); - } - - private static readonly (string name, bool valuesRequired)[] _commands = { - ("code", true) - }; - - private static readonly string[] _partialCommands = { - "code" - }; - - private static IEnumerable GetCommandParsers(ICommandValues values) - { - var commandName = values.GetCommand(); - foreach (var command in _commands) - { - if (commandName == command.name) - { - return _codePlaceHolderParsers; - } - } - - return null; - } - - #region private data - - public class CommonCodeNamedValueTokenParsers : NamedValueTokenParserList - { - public CommonCodeNamedValueTokenParsers() : base( - - new NamedValueTokenParser(null, "x.command", "11", "1"), - - new ExpectOutputTokenParser(), - new DiagnosticLogTokenParser(), - new CommonNamedValueTokenParsers(), - - new NamedValueTokenParser("--ini", "ini.file", "10", "1", "@"), - - new NamedValueTokenParser(null, "x.command.expand.file.name", "11111", "1"), - - ConfigEndpointUriToken.Parser(), - ConfigDeploymentToken.Parser() - - ) - { - } - } - - private static INamedValueTokenParser[] _codePlaceHolderParsers = { - - new CommonCodeNamedValueTokenParsers() - - }; - - #endregion - } -} diff --git a/src/ai/commands/parsers/complete_command_parser.cs b/src/ai/commands/parsers/complete_command_parser.cs deleted file mode 100644 index 9e193375..00000000 --- a/src/ai/commands/parsers/complete_command_parser.cs +++ /dev/null @@ -1,48 +0,0 @@ -// -// Copyright (c) Microsoft. All rights reserved. 
-// Licensed under the MIT license. See LICENSE.md file in the project root for full license information. -// - -using System; -using System.Collections.Generic; -using System.Linq; - -namespace Azure.AI.Details.Common.CLI -{ - class CompleteCommandParser : CommandParser - { - public static bool ParseCommand(INamedValueTokens tokens, ICommandValues values) - { - return ParseCommand("complete", completeCommandParsers, tokens, values); - } - - public static bool ParseCommandValues(INamedValueTokens tokens, ICommandValues values) - { - return ParseCommandValues("complete", completeCommandParsers, tokens, values); - } - - #region private data - - private static INamedValueTokenParser[] completeCommandParsers = { - - new NamedValueTokenParser(null, "x.command", "11", "1", "complete"), - - new ExpectOutputTokenParser(), - new DiagnosticLogTokenParser(), - new CommonNamedValueTokenParsers(), - - new NamedValueTokenParser("--ini", "ini.file", "10", "1", "@"), - - new NamedValueTokenParser(null, "x.command.expand.file.name", "11111", "1"), - - ConfigEndpointUriToken.Parser(), - ConfigDeploymentToken.Parser(), - - new NamedValueTokenParser("--interactive", "complete.input.interactive", "001", "0", null, null, "interactive", "complete.input.type"), - new NamedValueTokenParser("--interactive+", "complete.input.interactive+", "001", "0", null, null, "interactive+", "complete.input.type"), - new NamedValueTokenParser(null, "complete.input.type", "111", "1", "interactive;interactive+;text;ssml;text.file;ssml.file"), - }; - - #endregion - } -} diff --git a/src/ai/commands/parsers/dev_command_parser.cs b/src/ai/commands/parsers/dev_command_parser.cs index 31f124dd..5ddc0ab4 100644 --- a/src/ai/commands/parsers/dev_command_parser.cs +++ b/src/ai/commands/parsers/dev_command_parser.cs @@ -79,11 +79,12 @@ public CommonDevNamedValueTokenParsers() : base( new CommonDevNamedValueTokenParsers(), ArgXToken.Parser(), InstructionsToken.Parser(), + ProgrammingLanguageToken.Parser(), }; 
private static INamedValueTokenParser[] _devShellParsers = { new CommonDevNamedValueTokenParsers(), - RunCommandToken.Parser() + RunCommandScriptToken.Parser() }; } } diff --git a/src/ai/commands/parsers/samples_command_parser.cs b/src/ai/commands/parsers/samples_command_parser.cs deleted file mode 100644 index bdbeb248..00000000 --- a/src/ai/commands/parsers/samples_command_parser.cs +++ /dev/null @@ -1,78 +0,0 @@ -// -// Copyright (c) Microsoft. All rights reserved. -// Licensed under the MIT license. See LICENSE.md file in the project root for full license information. -// - -using System; -using System.Collections.Generic; -using System.Linq; - -namespace Azure.AI.Details.Common.CLI -{ - class SamplesCommandParser : CommandParser - { - public static bool ParseCommand(INamedValueTokens tokens, ICommandValues values) - { - return ParseCommands(_commands, _partialCommands, tokens, values, x => GetCommandParsers(x)); - } - - public static bool ParseCommandValues(INamedValueTokens tokens, ICommandValues values) - { - return ParseCommandValues("samples", GetCommandParsers(values), tokens, values); - } - - private static readonly (string name, bool valuesRequired)[] _commands = { - ("samples", true) - }; - - private static readonly string[] _partialCommands = { - "samples" - }; - - private static IEnumerable GetCommandParsers(ICommandValues values) - { - var commandName = values.GetCommand(); - foreach (var command in _commands) - { - if (commandName == command.name) - { - return _samplesPlaceHolderParsers; - } - } - - return null; - } - - #region private data - - public class CommonSamplesNamedValueTokenParsers : NamedValueTokenParserList - { - public CommonSamplesNamedValueTokenParsers() : base( - - new NamedValueTokenParser(null, "x.command", "11", "1"), - - new ExpectOutputTokenParser(), - new DiagnosticLogTokenParser(), - new CommonNamedValueTokenParsers(), - - new NamedValueTokenParser("--ini", "ini.file", "10", "1", "@"), - - new NamedValueTokenParser(null, 
"x.command.expand.file.name", "11111", "1"), - - ConfigEndpointUriToken.Parser(), - ConfigDeploymentToken.Parser() - - ) - { - } - } - - private static INamedValueTokenParser[] _samplesPlaceHolderParsers = { - - new CommonSamplesNamedValueTokenParsers() - - }; - - #endregion - } -} diff --git a/src/ai/commands/samples_command.cs b/src/ai/commands/samples_command.cs deleted file mode 100644 index bbb6574a..00000000 --- a/src/ai/commands/samples_command.cs +++ /dev/null @@ -1,67 +0,0 @@ -// -// Copyright (c) Microsoft. All rights reserved. -// Licensed under the MIT license. See LICENSE.md file in the project root for full license information. -// - -using System; -using System.Collections.Generic; -using System.Diagnostics; -using System.IO; -using System.Linq; -using System.Net; -using System.Text; -using System.Text.RegularExpressions; -using System.Text.Json; -using System.Text.Json.Serialization; -using System.Threading; -using System.Threading.Tasks; -using Newtonsoft.Json.Linq; - -namespace Azure.AI.Details.Common.CLI -{ - public class SamplesCommand : Command - { - internal SamplesCommand(ICommandValues values) - { - _values = values.ReplaceValues(); - _quiet = _values.GetOrDefault("x.quiet", false); - _verbose = _values.GetOrDefault("x.verbose", true); - } - - internal bool RunCommand() - { - try - { - RunSamplesCommand(); - } - catch (WebException ex) - { - ConsoleHelpers.WriteLineError($"\n ERROR: {ex.Message}"); - JsonHelpers.PrintJson(HttpHelpers.ReadWriteJson(ex.Response, _values, "samples")); - } - - return _values.GetOrDefault("passed", true); - } - - private bool RunSamplesCommand() - { - DoCommand(_values.GetCommand()); - return _values.GetOrDefault("passed", true); - } - - private void DoCommand(string command) - { - CheckPath(); - - switch (command) - { - default: - _values.AddThrowError("WARNING:", $"'{command.Replace('.', ' ')}' NOT YET IMPLEMENTED!!"); - break; - } - } - - private bool _quiet = false; - private bool _verbose = false; - } -} 
diff --git a/src/ai/commands/service_command.cs b/src/ai/commands/service_command.cs index 8f952e99..b5409ad5 100644 --- a/src/ai/commands/service_command.cs +++ b/src/ai/commands/service_command.cs @@ -52,7 +52,6 @@ private bool RunServiceCommand() private void DoCommand(string command) { StartCommand(); - CheckPath(); switch (command) { diff --git a/src/ai/helpers/config_environment_helpers.cs b/src/ai/helpers/config_environment_helpers.cs index e89529e9..f2d0efe1 100644 --- a/src/ai/helpers/config_environment_helpers.cs +++ b/src/ai/helpers/config_environment_helpers.cs @@ -22,6 +22,10 @@ public static Dictionary GetEnvironment(INamedValues values) env.Add("AZURE_AI_PROJECT_NAME", ReadConfig(values, "project")); env.Add("AZURE_AI_RESOURCE_NAME", ReadConfig(values, "resource")); + env.Add("AZURE_OPENAI_ENDPOINT", ReadConfig(values, "chat.endpoint")); + env.Add("AZURE_OPENAI_KEY", ReadConfig(values, "chat.key")); + env.Add("AZURE_OPENAI_API_VERSION", ChatCommand.GetOpenAIClientVersionNumber()); + env.Add("AZURE_OPENAI_CHAT_DEPLOYMENT", ReadConfig(values, "chat.deployment")); env.Add("AZURE_OPENAI_EVALUATION_DEPLOYMENT", ReadConfig(values, "chat.evaluation.model.deployment.name") ?? 
ReadConfig(values, "chat.deployment")); env.Add("AZURE_OPENAI_EMBEDDING_DEPLOYMENT", ReadConfig(values, "search.embedding.model.deployment.name")); diff --git a/src/ai/linker.xml b/src/ai/linker.xml new file mode 100644 index 00000000..45f2eaed --- /dev/null +++ b/src/ai/linker.xml @@ -0,0 +1,8 @@ + + + + + + + + \ No newline at end of file diff --git a/src/common/Program.cs b/src/common/Program.cs index 2ed739ac..1e3c4484 100644 --- a/src/common/Program.cs +++ b/src/common/Program.cs @@ -16,58 +16,46 @@ public class Program public static int Main(IProgramData data, string[] mainArgs) { - int exitCode = int.MinValue; + _data = data; - try + var _ = _data.Telemetry.LogEventAsync(new LaunchedTelemetryEvent()); + + var screen = ConsoleGui.Screen.Current; + Console.OutputEncoding = Encoding.UTF8; + Console.CancelKeyPress += (s, e) => { - _data = data; + e.Cancel = true; + screen.SetCursorVisible(true); + screen.ResetColors(); + Console.WriteLine(" received... terminating ... "); + Environment.Exit(1); + }; - var _ = _data.Telemetry.LogEventAsync(new LaunchedTelemetryEvent()); + ICommandValues values = new CommandValues(); + INamedValueTokens tokens = new CmdLineTokenSource(mainArgs, values); - var screen = ConsoleGui.Screen.Current; - Console.OutputEncoding = Encoding.UTF8; - Console.CancelKeyPress += (s, e) => - { - e.Cancel = true; - screen.SetCursorVisible(true); - screen.ResetColors(); - Console.WriteLine(" received... terminating ... "); - Environment.Exit(1); - }; - - ICommandValues values = new CommandValues(); - INamedValueTokens tokens = new CmdLineTokenSource(mainArgs, values); - - exitCode = ParseCommand(tokens, values); - if (exitCode == 0 && !values.DisplayHelpRequested()) - { - DisplayBanner(values); - DisplayParsedValues(values); - exitCode = RunCommand(values) ? 
0 : 1; - } + int exitCode = ParseCommand(tokens, values); + if (exitCode == 0 && !values.DisplayHelpRequested()) + { + DisplayBanner(values); + DisplayParsedValues(values); + exitCode = RunCommand(values) ? 0 : 1; + } - if (values.GetOrDefault("x.pause", false)) - { - Console.Write("Press ENTER to exit... "); - Console.ReadLine(); - } + if (values.GetOrDefault("x.pause", false)) + { + Console.Write("Press ENTER to exit... "); + Console.ReadLine(); + } - var dumpArgs = string.Join(" ", mainArgs); - DebugDumpCommandLineArgs(dumpArgs); + var dumpArgs = string.Join(" ", mainArgs); + DebugDumpCommandLineArgs(dumpArgs); - if (OS.IsLinux()) Console.WriteLine(); + if (OS.IsLinux()) Console.WriteLine(); - AI.DBG_TRACE_INFO($"Command line was: {dumpArgs}"); - AI.DBG_TRACE_INFO($"Exit code: {exitCode}"); - return exitCode; - } - finally - { - var _ = _data?.Telemetry.LogEventAsync(new ExitedTelemetryEvent() - { - ExitCode = exitCode - }); - } + AI.DBG_TRACE_INFO($"Command line was: {dumpArgs}"); + AI.DBG_TRACE_INFO($"Exit code: {exitCode}"); + return exitCode; } public static int RunInternal(params string[] mainArgs) @@ -136,7 +124,7 @@ private static void DisplayBanner(ICommandValues values) if (values.GetOrDefault("x.cls", false)) Console.Clear(); Console.WriteLine(GetDisplayBannerText()); - Console.WriteLine("Copyright (c) 2023 Microsoft Corporation. All Rights Reserved."); + Console.WriteLine("Copyright (c) 2024 Microsoft Corporation. 
All Rights Reserved."); Console.WriteLine(""); var warning = Program.WarningBanner; @@ -347,46 +335,46 @@ private static bool RunCommand(ICommandValues values) private static IProgramData _data; - public static string Name => _data.Name; + public static string Name => _data?.Name; - public static string DisplayName => _data.DisplayName; + public static string DisplayName => _data?.DisplayName; - public static string WarningBanner => _data.WarningBanner; + public static string WarningBanner => _data?.WarningBanner; - public static string TelemetryUserAgent => _data.TelemetryUserAgent; + public static string TelemetryUserAgent => _data?.TelemetryUserAgent; - public static string Exe => _data.Exe; + public static string Exe => _data?.Exe; - public static string Dll => _data.Dll; + public static string Dll => _data?.Dll; - public static Type ResourceAssemblyType => _data.ResourceAssemblyType; + public static Type ResourceAssemblyType => _data?.ResourceAssemblyType; - public static Assembly ResourceAssembly => _data.ResourceAssemblyType.Assembly; + public static Assembly ResourceAssembly => _data?.ResourceAssemblyType.Assembly; - public static Type BindingAssemblySdkType => _data.BindingAssemblySdkType; + public static Type BindingAssemblySdkType => _data?.BindingAssemblySdkType; - public static string SERVICE_RESOURCE_DISPLAY_NAME_ALL_CAPS => _data.SERVICE_RESOURCE_DISPLAY_NAME_ALL_CAPS; + public static string SERVICE_RESOURCE_DISPLAY_NAME_ALL_CAPS => _data?.SERVICE_RESOURCE_DISPLAY_NAME_ALL_CAPS; - public static string CognitiveServiceResourceKind => _data.CognitiveServiceResourceKind; + public static string CognitiveServiceResourceKind => _data?.CognitiveServiceResourceKind; - public static string CognitiveServiceResourceSku => _data.CognitiveServiceResourceSku; + public static string CognitiveServiceResourceSku => _data?.CognitiveServiceResourceSku; - public static bool InitConfigsEndpoint => _data.InitConfigsEndpoint; + public static bool InitConfigsEndpoint => 
_data != null && _data.InitConfigsEndpoint; - public static bool InitConfigsSubscription => _data.InitConfigsSubscription; + public static bool InitConfigsSubscription => _data != null && _data.InitConfigsSubscription; - public static string HelpCommandTokens => _data.HelpCommandTokens; + public static string HelpCommandTokens => _data?.HelpCommandTokens; - public static string ConfigScopeTokens => _data.ConfigScopeTokens; + public static string ConfigScopeTokens => _data?.ConfigScopeTokens; - public static string[] ZipIncludes => _data.ZipIncludes; + public static string[] ZipIncludes => _data?.ZipIncludes; - public static bool DispatchRunCommand(ICommandValues values) => _data.DispatchRunCommand(values); - public static bool DispatchParseCommand(INamedValueTokens tokens, ICommandValues values) => _data.DispatchParseCommand(tokens, values); - public static bool DispatchParseCommandValues(INamedValueTokens tokens, ICommandValues values) => _data.DispatchParseCommandValues(tokens, values); - public static bool DisplayKnownErrors(ICommandValues values, Exception ex) => _data.DisplayKnownErrors(values, ex); + public static bool DispatchRunCommand(ICommandValues values) => _data != null && _data.DispatchRunCommand(values); + public static bool DispatchParseCommand(INamedValueTokens tokens, ICommandValues values) => _data != null && _data.DispatchParseCommand(tokens, values); + public static bool DispatchParseCommandValues(INamedValueTokens tokens, ICommandValues values) => _data != null && _data.DispatchParseCommandValues(tokens, values); + public static bool DisplayKnownErrors(ICommandValues values, Exception ex) => _data != null && _data.DisplayKnownErrors(values, ex); - public static IEventLoggerHelpers EventLoggerHelpers => _data.EventLoggerHelpers; + public static IEventLoggerHelpers EventLoggerHelpers => _data?.EventLoggerHelpers; public static ITelemetry Telemetry => _data.Telemetry; } diff --git a/src/common/Telemetry/Events/ExitedTelemetryEvent.cs 
b/src/common/Telemetry/Events/ExitedTelemetryEvent.cs index 1a79b10d..a5014a24 100644 --- a/src/common/Telemetry/Events/ExitedTelemetryEvent.cs +++ b/src/common/Telemetry/Events/ExitedTelemetryEvent.cs @@ -11,5 +11,7 @@ namespace Azure.AI.Details.Common.CLI.Telemetry.Events public string Name => "exited"; public int ExitCode { get; init; } + + public TimeSpan? Elapsed { get; init; } } } diff --git a/src/common/common.csproj b/src/common/common.csproj index 4517fc49..31e67cab 100644 --- a/src/common/common.csproj +++ b/src/common/common.csproj @@ -1,7 +1,7 @@ - net7.0 + net8.0 enable Azure.AI.CLI.Common Azure.AI.Details.Common.CLI diff --git a/src/common/details/azcli/AzCliConsoleGui_PickOrCreateAndConfig_CognitiveServicesResource_OpenAiKind.cs b/src/common/details/azcli/AzCliConsoleGui_PickOrCreateAndConfig_CognitiveServicesResource_OpenAiKind.cs index 78b74169..1b69e166 100644 --- a/src/common/details/azcli/AzCliConsoleGui_PickOrCreateAndConfig_CognitiveServicesResource_OpenAiKind.cs +++ b/src/common/details/azcli/AzCliConsoleGui_PickOrCreateAndConfig_CognitiveServicesResource_OpenAiKind.cs @@ -19,7 +19,19 @@ namespace Azure.AI.Details.Common.CLI { public partial class AzCliConsoleGui { - public static async Task PickOrCreateAndConfigCognitiveServicesOpenAiKindResource(bool interactive, bool allowSkipDeployments, string subscriptionId, string regionFilter = null, string groupFilter = null, string resourceFilter = null, string kinds = null, string sku = null, bool yes = false) + public static async Task PickOrCreateAndConfigCognitiveServicesOpenAiKindResource( + bool interactive, + bool allowSkipDeployments, + string subscriptionId, + string regionFilter = null, + string groupFilter = null, + string resourceFilter = null, + string kinds = null, + string sku = null, + bool yes = false, + string chatDeploymentFilter = null, + string embeddingsDeploymentFilter = null, + string evaluationsDeploymentFilter = null) { kinds ??= "OpenAI;AIServices"; var sectionHeader = 
"AZURE OPENAI RESOURCE"; @@ -27,7 +39,15 @@ public partial class AzCliConsoleGui var regionLocation = !string.IsNullOrEmpty(regionFilter) ? await AzCliConsoleGui.PickRegionLocationAsync(interactive, regionFilter) : new AzCli.AccountRegionLocationInfo(); var resource = await AzCliConsoleGui.PickOrCreateCognitiveResource(sectionHeader, interactive, subscriptionId, regionLocation.Name, groupFilter, resourceFilter, kinds, sku, yes); - var (chatDeployment, embeddingsDeployment, evaluationDeployment, keys) = await PickOrCreateAndConfigCognitiveServicesOpenAiKindResourceDeployments(sectionHeader, interactive, allowSkipDeployments, subscriptionId, resource); + var (chatDeployment, embeddingsDeployment, evaluationDeployment, keys) = await PickOrCreateAndConfigCognitiveServicesOpenAiKindResourceDeployments( + sectionHeader, + interactive, + allowSkipDeployments, + subscriptionId, + resource, + chatDeploymentFilter, + embeddingsDeploymentFilter, + evaluationsDeploymentFilter); return new AzCli.CognitiveServicesResourceInfoEx { @@ -44,11 +64,20 @@ public partial class AzCliConsoleGui }; } - public static async Task<(AzCli.CognitiveServicesDeploymentInfo?, AzCli.CognitiveServicesDeploymentInfo?, AzCli.CognitiveServicesDeploymentInfo?, AzCli.CognitiveServicesKeyInfo)> PickOrCreateAndConfigCognitiveServicesOpenAiKindResourceDeployments(string sectionHeader, bool interactive, bool allowSkipDeployments, string subscriptionId, AzCli.CognitiveServicesResourceInfo resource) + public static async Task<(AzCli.CognitiveServicesDeploymentInfo?, AzCli.CognitiveServicesDeploymentInfo?, AzCli.CognitiveServicesDeploymentInfo?, AzCli.CognitiveServicesKeyInfo)> + PickOrCreateAndConfigCognitiveServicesOpenAiKindResourceDeployments( + string sectionHeader, + bool interactive, + bool allowSkipDeployments, + string subscriptionId, + AzCli.CognitiveServicesResourceInfo resource, + string chatDeploymentFilter = null, + string embeddingsDeploymentFilter = null, + string evaluationsDeploymentFilter = 
null) { - var chatDeployment = await AzCliConsoleGui.PickOrCreateCognitiveServicesResourceDeployment(interactive, allowSkipDeployments, "Chat", subscriptionId, resource.Group, resource.RegionLocation, resource.Name, null); - var embeddingsDeployment = await AzCliConsoleGui.PickOrCreateCognitiveServicesResourceDeployment(interactive, allowSkipDeployments, "Embeddings", subscriptionId, resource.Group, resource.RegionLocation, resource.Name, null); - var evaluationDeployment = await AzCliConsoleGui.PickOrCreateCognitiveServicesResourceDeployment(interactive, allowSkipDeployments, "Evaluation", subscriptionId, resource.Group, resource.RegionLocation, resource.Name, null); + var chatDeployment = await AzCliConsoleGui.PickOrCreateCognitiveServicesResourceDeployment(interactive, allowSkipDeployments, "Chat", subscriptionId, resource.Group, resource.RegionLocation, resource.Name, chatDeploymentFilter); + var embeddingsDeployment = await AzCliConsoleGui.PickOrCreateCognitiveServicesResourceDeployment(interactive, allowSkipDeployments, "Embeddings", subscriptionId, resource.Group, resource.RegionLocation, resource.Name, embeddingsDeploymentFilter); + var evaluationDeployment = await AzCliConsoleGui.PickOrCreateCognitiveServicesResourceDeployment(interactive, allowSkipDeployments, "Evaluation", subscriptionId, resource.Group, resource.RegionLocation, resource.Name, evaluationsDeploymentFilter); var keys = await AzCliConsoleGui.LoadCognitiveServicesResourceKeys(sectionHeader, subscriptionId, resource); if (resource.Kind == "AIServices") diff --git a/src/common/details/azcli/AzCliConsoleGui_SubscriptionPicker.cs b/src/common/details/azcli/AzCliConsoleGui_SubscriptionPicker.cs index 6517231f..b5ea398a 100644 --- a/src/common/details/azcli/AzCliConsoleGui_SubscriptionPicker.cs +++ b/src/common/details/azcli/AzCliConsoleGui_SubscriptionPicker.cs @@ -77,7 +77,7 @@ public static async Task PickSubscriptionIdAsync(bool allowInteractiveLo throw new ApplicationException($"*** ERROR: 
Loading subscriptions ***\n{response.Output.StdError}"); } - var needLogin = response.Output.StdError != null && (response.Output.StdError.Contains("az login") || response.Output.StdError.Contains("refresh token")); + var needLogin = response.Output.StdError != null && (response.Output.StdError.Split('\'', '"').Contains("az login") || response.Output.StdError.Contains("refresh token")); if (needLogin) { bool cancelLogin = !allowInteractiveLogin; diff --git a/src/common/details/commands/init_command.cs b/src/common/details/commands/init_command.cs index 350d8969..8969f1a7 100644 --- a/src/common/details/commands/init_command.cs +++ b/src/common/details/commands/init_command.cs @@ -58,12 +58,9 @@ private async Task DoCommand(string command) DisplayInitServiceBanner(); - CheckPath(); - var interactive = _values.GetOrDefault("init.service.interactive", true); var runId = _values.GetOrSet("init.run_id", Guid.NewGuid); - if (!interactive) ThrowInteractiveNotSupportedApplicationException(); // POST-IGNITE: TODO: Add back non-interactive mode support switch (command) { @@ -98,6 +95,7 @@ private async Task DoCommand(string command) private async Task DoInitRootAsync() { var interactive = _values.GetOrDefault("init.service.interactive", true); + if (!interactive) ThrowInteractiveNotSupportedApplicationException(); // POST-IGNITE: TODO: Add back non-interactive mode support ConsoleHelpers.WriteLineWithHighlight("`AI INIT`\n\n Initializes (creates, selects, or attaches to) AI Projects and services.\n"); @@ -388,6 +386,8 @@ private Task DoInitSubscriptionId(bool interactive) private async Task DoInitRootHubResource(bool interactive) { + if (!interactive) ThrowInteractiveNotSupportedApplicationException(); // POST-IGNITE: TODO: Add back non-interactive mode support + await DoInitSubscriptionId(interactive); await DoInitHubResource(interactive); } @@ -413,6 +413,8 @@ private Task DoInitHubResource(bool interactive) private async Task DoInitRootProject(bool interactive, bool 
allowCreate = true, bool allowPick = true) { + if (!interactive) ThrowInteractiveNotSupportedApplicationException(); // POST-IGNITE: TODO: Add back non-interactive mode support + await DoInitSubscriptionId(interactive); await DoInitProject(interactive, allowCreate, allowPick); } @@ -470,7 +472,11 @@ private async Task DoInitOpenAi(bool interactive, bool allowSkipDeployments = tr var sku = _values.GetOrDefault("init.service.cognitiveservices.resource.sku", Program.CognitiveServiceResourceSku); var yes = _values.GetOrDefault("init.service.cognitiveservices.terms.agree", false); - var resource = await AzCliConsoleGui.PickOrCreateAndConfigCognitiveServicesOpenAiKindResource(interactive, allowSkipDeployments, subscriptionId, regionFilter, groupFilter, resourceFilter, kind, sku, yes); + var chatDeploymentFilter = _values.GetOrDefault("init.chat.model.deployment.name", ""); + var embeddingsDeploymentFilter = _values.GetOrDefault("init.embeddings.model.deployment.name", ""); + var evaluationsDeploymentFilter = _values.GetOrDefault("init.evaluation.model.deployment.name", ""); + + var resource = await AzCliConsoleGui.PickOrCreateAndConfigCognitiveServicesOpenAiKindResource(interactive, allowSkipDeployments, subscriptionId, regionFilter, groupFilter, resourceFilter, kind, sku, yes, chatDeploymentFilter, embeddingsDeploymentFilter, evaluationsDeploymentFilter); _values.Reset("service.openai.deployments.picked", "true"); SubscriptionToken.Data().Set(_values, subscriptionId); @@ -485,6 +491,8 @@ private async Task DoInitOpenAi(bool interactive, bool allowSkipDeployments = tr private async Task DoInitRootCognitiveServicesAIServicesKind(bool interactive) { + if (!interactive) ThrowInteractiveNotSupportedApplicationException(); // POST-IGNITE: TODO: Add back non-interactive mode support + await DoInitSubscriptionId(interactive); await DoInitCognitiveServicesAIServicesKind(interactive); } @@ -513,6 +521,8 @@ private async Task DoInitCognitiveServicesAIServicesKind(bool interactive, 
bool private async Task DoInitRootCognitiveServicesCognitiveServicesKind(bool interactive) { + if (!interactive) ThrowInteractiveNotSupportedApplicationException(); // POST-IGNITE: TODO: Add back non-interactive mode support + await DoInitSubscriptionId(interactive); await DoInitCognitiveServicesCognitiveServicesKind(interactive); } @@ -539,6 +549,8 @@ private async Task DoInitCognitiveServicesCognitiveServicesKind(bool interactive private async Task DoInitRootSearch(bool interactive) { + if (!interactive) ThrowInteractiveNotSupportedApplicationException(); // POST-IGNITE: TODO: Add back non-interactive mode support + await DoInitSubscriptionId(interactive); await DoInitSearch(interactive, false); } diff --git a/src/common/details/commands/parsers/init_command_parser.cs b/src/common/details/commands/parsers/init_command_parser.cs index 3c5cab3a..5c5ba615 100644 --- a/src/common/details/commands/parsers/init_command_parser.cs +++ b/src/common/details/commands/parsers/init_command_parser.cs @@ -81,6 +81,10 @@ public CommonInitNamedValueTokenParsers() : base( new NamedValueTokenParser("--sku", "init.service.cognitiveservices.resource.sku", "00001", "1"), new NamedValueTokenParser("--yes", "init.service.cognitiveservices.terms.agree", "00001", "1;0", "true;false", null, "true"), + new NamedValueTokenParser(null, "init.chat.model.deployment.name", "01010", "1"), + new NamedValueTokenParser(null, "init.embeddings.model.deployment.name", "01010", "1"), + new NamedValueTokenParser(null, "init.evaluation.model.deployment.name", "01010", "1"), + new NamedValueTokenParser("--interactive", "init.service.interactive", "001", "1;0", "true;false", null, "true") // new NamedValueTokenParser(null, "init.output.azcli.command.file", "01100", "1", "@@"), diff --git a/src/common/details/commands/runjob_command.cs b/src/common/details/commands/runjob_command.cs index 6190723a..bcffced4 100644 --- a/src/common/details/commands/runjob_command.cs +++ 
b/src/common/details/commands/runjob_command.cs @@ -66,6 +66,8 @@ private bool DoRunJob() var itemArgs = _values.GetOrDefault("run.input.post.item.args", "").Replace(';', ' '); var preItemArgs = _values.GetOrDefault("run.input.pre.item.args", "").Replace(';', ' '); + var inputPath = _values.GetOrDefault("x.input.path", Directory.GetCurrentDirectory()); + var processOk = !string.IsNullOrEmpty(process); var commandOk = !string.IsNullOrEmpty(command); var scriptOk = !string.IsNullOrEmpty(script); @@ -75,7 +77,7 @@ private bool DoRunJob() var app = processOk && process == Program.Name; if (app && jobOk && !job.StartsWith("@")) job = $"@{job}"; - var startPath = UpdateJobStartPath(ref job, _values.GetOrDefault("x.input.path", Directory.GetCurrentDirectory())); + var startPath = UpdateJobStartPath(ref job, inputPath); if (!processOk && scriptOk) processOk = UpdateProcessIfFileNotExist(script, ref process); if (!processOk && commandOk) processOk = UpdateProcessIfFileNotExist(command, ref process); diff --git a/src/common/details/console/gui/Screen.cs b/src/common/details/console/gui/Screen.cs index f1ceac09..7fccb0e5 100644 --- a/src/common/details/console/gui/Screen.cs +++ b/src/common/details/console/gui/Screen.cs @@ -328,11 +328,23 @@ private static Colors GetColorsNow() return new Colors(Console.ForegroundColor, Console.BackgroundColor); } + private static int TryCatchNoThrow(Func function, int defaultResult) + { + try + { + return function(); + } + catch (Exception) + { + return defaultResult; + } + } + private static Screen _current = new Screen(); - private int _initialWidth = Console.WindowWidth; - private int _initialHeight = Console.WindowHeight; - private int _initialTop = Console.CursorTop; // Console.WindowTop; - private int _initialLeft = Console.WindowLeft; + private int _initialWidth = TryCatchNoThrow(() => Console.WindowWidth, 200); + private int _initialHeight = TryCatchNoThrow(() => Console.WindowHeight, 50); + private int _initialTop = 
TryCatchNoThrow(() => Console.CursorTop, 0); + private int _initialLeft = TryCatchNoThrow(() => Console.WindowLeft, 0); private bool _initialCursorVisible = GetCursorVisible(); private Colors _initialColors = GetColorsNow(); private int _biggestYSoFar = 0; diff --git a/src/common/details/helpers/file_helpers.cs b/src/common/details/helpers/file_helpers.cs index 50fd1ea4..ab78c7e6 100644 --- a/src/common/details/helpers/file_helpers.cs +++ b/src/common/details/helpers/file_helpers.cs @@ -55,7 +55,11 @@ public static IEnumerable Combine(string path1, IEnumerable path public static string NormalizePath(string outputDirectory) { - return new DirectoryInfo(outputDirectory).FullName; + var normalized = new DirectoryInfo(outputDirectory).FullName; + var cwd = Directory.GetCurrentDirectory(); + return normalized.StartsWith(cwd) && normalized.Length > cwd.Length + 1 + ? normalized.Substring(cwd.Length + 1) + : normalized; } } @@ -78,26 +82,32 @@ public static string AppendToFileName(string fileName, string appendBeforeExtens return Path.Combine(file.DirectoryName, $"{Path.GetFileNameWithoutExtension(file.FullName)}{appendBeforeExtension}{file.Extension}{appendAfterExtension}"); } - public static IEnumerable FindFiles(string path, string pattern, INamedValues values = null) + public static IEnumerable FindFiles(string path, string pattern, INamedValues values = null, bool checkOverrides = true, bool checkResources = true) { - return FindFiles(PathHelpers.Combine(path, pattern), values); + return FindFiles(PathHelpers.Combine(path, pattern), values, checkOverrides, checkResources); } - public static IEnumerable FindFiles(string fileNames, INamedValues values = null) + public static IEnumerable FindFiles(string fileNames, INamedValues values = null, bool checkOverrides = true, bool checkResources = true) { var currentDir = Directory.GetCurrentDirectory(); foreach (var item in fileNames.Split(new char[] { ';', '\r', '\n' }, StringSplitOptions.RemoveEmptyEntries)) { - var 
overrides = FindOverrides(item); - foreach (var name in overrides) + if (checkOverrides) { - yield return name; + var overrides = FindOverrides(item); + foreach (var name in overrides) + { + yield return name; + } } - var resources = FindResources(item); - foreach (var resource in resources) + if (checkResources) { - yield return resource; + var resources = FindResources(item); + foreach (var resource in resources) + { + yield return resource; + } } if (IsResource(item) || IsOverride(item)) continue; @@ -943,7 +953,7 @@ public static bool ResourceExists(string fileName) public static bool IsResource(string fileName) { - return !string.IsNullOrEmpty(fileName) && fileName.StartsWith(Program.Exe); + return !string.IsNullOrEmpty(fileName) && !string.IsNullOrEmpty(Program.Exe) && fileName.StartsWith(Program.Exe); } private static string ResourceNameFromFileName(string fileName) @@ -1427,7 +1437,7 @@ private static string CheckDotDirectory(string checkPath, bool mustExist = true, } private const string resourcePrefix = "Azure.AI.Details.Common.CLI.resources"; - private static readonly string overridePrefix = $"${Program.Name.ToUpper()}"; + private static readonly string overridePrefix = $"${Program.Name?.ToUpper()}"; private const string defaultDataPath = @";./;../;../../;../../../;../../../../;{config.path};"; diff --git a/src/common/details/helpers/process_helpers.cs b/src/common/details/helpers/process_helpers.cs index 5448718d..4f866452 100644 --- a/src/common/details/helpers/process_helpers.cs +++ b/src/common/details/helpers/process_helpers.cs @@ -47,12 +47,13 @@ public static Process StartBrowser(string url) : null; } - public static Process StartProcess(string fileName, string arguments, Dictionary addToEnvironment = null, bool redirect = true) + public static Process StartProcess(string fileName, string arguments, Dictionary addToEnvironment = null, bool redirectOutput = true, bool redirectInput = false) { var start = new ProcessStartInfo(fileName, arguments); 
start.UseShellExecute = false; - start.RedirectStandardOutput = redirect; - start.RedirectStandardError = redirect; + start.RedirectStandardOutput = redirectOutput; + start.RedirectStandardError = redirectOutput; + start.RedirectStandardInput = redirectInput; if (addToEnvironment != null) { @@ -65,49 +66,80 @@ public static Process StartProcess(string fileName, string arguments, Dictionary return Process.Start(start); } - public static async Task RunShellCommandAsync(string command, string arguments, Dictionary addToEnvironment = null, Action stdOutHandler = null, Action stdErrHandler = null, Action mergedOutputHandler = null) + public static async Task RunShellCommandAsync(string commandLine, Dictionary addToEnvironment = null, Action stdOutHandler = null, Action stdErrHandler = null, Action mergedOutputHandler = null, bool captureOutput = true) + { + var isWindows = RuntimeInformation.IsOSPlatform(OSPlatform.Windows); + var command = isWindows ? "cmd" : "bash"; + var arguments = isWindows ? 
$"/c \"{commandLine}\"" : $"-li \"{commandLine}\""; + return await RunShellCommandAsync(command, arguments, addToEnvironment, stdOutHandler, stdErrHandler, mergedOutputHandler, captureOutput); + } + + public static async Task RunShellCommandAsync(string command, string arguments, Dictionary addToEnvironment = null, Action stdOutHandler = null, Action stdErrHandler = null, Action mergedOutputHandler = null, bool captureOutput = true) { SHELL_DEBUG_TRACE($"COMMAND: {command} {arguments} {DictionaryToString(addToEnvironment)}"); - var stdOut = new StringBuilder(); - var stdErr = new StringBuilder(); - var mergedOutput = new StringBuilder(); + var redirectOutput = captureOutput || stdOutHandler != null || stdErrHandler != null || mergedOutputHandler != null; + + var outDoneSignal = new ManualResetEvent(false); + var errDoneSignal = new ManualResetEvent(false); + var sbOut = new StringBuilder(); + var sbErr = new StringBuilder(); + var sbMerged = new StringBuilder(); + var stdOutReceived = (string data) => { if (data != null) { - stdOut.AppendLine(data); - mergedOutput.AppendLine(data); + sbOut.AppendLine(data); + sbMerged.AppendLine(data); if (stdOutHandler != null) stdOutHandler(data); if (mergedOutputHandler != null) mergedOutputHandler(data); } + else + { + outDoneSignal.Set(); + } }; var stdErrReceived = (string data) => { if (data != null) { - stdErr.AppendLine(data); - mergedOutput.AppendLine(data); + sbErr.AppendLine(data); + sbMerged.AppendLine(data); if (stdErrHandler != null) stdErrHandler(data); if (mergedOutputHandler != null) mergedOutputHandler(data); } + else + { + errDoneSignal.Set(); + } }; - var process = TryCatchHelpers.TryCatchNoThrow(() => StartShellCommandProcess(command, arguments, addToEnvironment), null, out Exception processException); + var process = TryCatchHelpers.TryCatchNoThrow(() => StartShellCommandProcess(command, arguments, addToEnvironment, redirectOutput), null, out Exception processException); if (process == null) { 
SHELL_DEBUG_TRACE($"ERROR: {processException}"); return new ProcessOutput() { StdError = processException.ToString() }; } - process.OutputDataReceived += (sender, e) => stdOutReceived(e.Data); - process.ErrorDataReceived += (sender, e) => stdErrReceived(e.Data); - process.BeginOutputReadLine(); - process.BeginErrorReadLine(); + + if (redirectOutput) + { + process.OutputDataReceived += (sender, e) => stdOutReceived(e.Data); + process.ErrorDataReceived += (sender, e) => stdErrReceived(e.Data); + process.BeginOutputReadLine(); + process.BeginErrorReadLine(); + } await process.WaitForExitAsync(); + if (redirectOutput) + { + outDoneSignal.WaitOne(); + errDoneSignal.WaitOne(); + } + var output = new ProcessOutput(); - output.StdOutput = process != null ? stdOut.ToString().Trim(' ', '\r', '\n') : ""; - output.StdError = process != null ? stdErr.ToString().Trim(' ', '\r', '\n') : processException.ToString(); - output.MergedOutput = process != null ? mergedOutput.ToString().Trim(' ', '\r', '\n') : ""; + output.StdOutput = process != null ? sbOut.ToString().Trim(' ', '\r', '\n') : ""; + output.StdError = process != null ? sbErr.ToString().Trim(' ', '\r', '\n') : processException.ToString(); + output.MergedOutput = process != null ? sbMerged.ToString().Trim(' ', '\r', '\n') : ""; output.ExitCode = process != null ? process.ExitCode : -1; if (!string.IsNullOrEmpty(output.StdOutput)) SHELL_DEBUG_TRACE($"---\nSTDOUT\n---\n{output.StdOutput}"); @@ -129,12 +161,12 @@ public static async Task RunShellCommandAsync(string command, str return x; } - private static Process StartShellCommandProcess(string command, string arguments, Dictionary addToEnvironment = null) + private static Process StartShellCommandProcess(string command, string arguments, Dictionary addToEnvironment = null, bool captureOutput = true) { var isWindows = RuntimeInformation.IsOSPlatform(OSPlatform.Windows); return isWindows - ? 
StartProcess("cmd", $"/c {command} {arguments}", addToEnvironment) - : StartProcess(command, arguments, addToEnvironment); + ? StartProcess("cmd", $"/c {command} {arguments}", addToEnvironment, captureOutput) + : StartProcess(command, arguments, addToEnvironment, captureOutput); } private static void SHELL_DEBUG_TRACE(string message,[CallerLineNumber] int line = 0, [CallerMemberName] string? caller = null, [CallerFilePath] string? file = null) diff --git a/src/common/details/named_values/tokens/programming_language_token.cs b/src/common/details/named_values/tokens/programming_language_token.cs new file mode 100644 index 00000000..0e9ddffb --- /dev/null +++ b/src/common/details/named_values/tokens/programming_language_token.cs @@ -0,0 +1,49 @@ +// +// Copyright (c) Microsoft. All rights reserved. +// Licensed under the MIT license. See LICENSE.md file in the project root for full license information. +// + +namespace Azure.AI.Details.Common.CLI +{ + public class ProgrammingLanguageToken + { + public static string GetExtension(string language) + { + return language?.ToLower() switch + { + "c#" => ".cs", + "go" => ".go", + "java" => ".java", + "javascript" => ".js", + "python" => ".py", + "typescript" => ".ts", + _ => string.Empty + }; + } + + public static string GetSuffix(string language) + { + return GetExtension(language).Replace(".", "-"); + } + + public static NamedValueTokenData Data() => new NamedValueTokenData(_optionName, _fullName, _optionExample, _requiredDisplayName); + public static INamedValueTokenParser Parser() => new NamedValueTokenParserList( + new NamedValueTokenParser(_optionName, _fullName, "01", "1", "C#;c#;cs;Go;go;Java;java;JavaScript;javascript;js;Python;python;py;TypeScript;typescript;ts"), + new NamedValueTokenParser("--C#", "programming.language.csharp", "001", "0", null, null, "C#", _fullName), + new NamedValueTokenParser("--CS", "programming.language.csharp", "001", "0", null, null, "C#", _fullName), + new NamedValueTokenParser("--Go", 
"programming.language.go", "001", "0", null, null, "Go", _fullName), + new NamedValueTokenParser("--Java", "programming.language.java", "001", "0", null, null, "Java", _fullName), + new NamedValueTokenParser("--JavaScript", "programming.language.javascript", "001", "0", null, null, "JavaScript", _fullName), + new NamedValueTokenParser("--JS", "programming.language.javascript", "001", "0", null, null, "JavaScript", _fullName), + new NamedValueTokenParser("--Python", "programming.language.python", "001", "0", null, null, "Python", _fullName), + new NamedValueTokenParser("--PY", "programming.language.python", "001", "0", null, null, "Python", _fullName), + new NamedValueTokenParser("--TypeScript", "programming.language.typescript", "001", "0", null, null, "TypeScript", _fullName), + new NamedValueTokenParser("--TS", "programming.language.typescript", "001", "0", null, null, "TypeScript", _fullName) + ); + + private const string _requiredDisplayName = "programming language"; + private const string _optionName = "--language"; + private const string _optionExample = "LANGUAGE"; + private const string _fullName = "programming.language"; + } +} diff --git a/src/common/details/named_values/tokens/run_command_token.cs b/src/common/details/named_values/tokens/run_command_script_token.cs similarity index 53% rename from src/common/details/named_values/tokens/run_command_token.cs rename to src/common/details/named_values/tokens/run_command_script_token.cs index 77410da2..f3f0a934 100644 --- a/src/common/details/named_values/tokens/run_command_token.cs +++ b/src/common/details/named_values/tokens/run_command_script_token.cs @@ -5,14 +5,14 @@ namespace Azure.AI.Details.Common.CLI { - public class RunCommandToken + public class RunCommandScriptToken { public static NamedValueTokenData Data() => new NamedValueTokenData(_optionName, _fullName, _optionExample, _requiredDisplayName); - public static INamedValueTokenParser Parser() => new NamedValueTokenParser(_optionName, _fullName, 
"10;01", "1"); + public static INamedValueTokenParser Parser() => new NamedValueTokenParser(_optionName, _fullName, "100;010;010", "1"); - private const string _requiredDisplayName = "run command"; - private const string _optionName = "--run-command"; - private const string _optionExample = "COMMAND"; - private const string _fullName = "run.command"; + private const string _requiredDisplayName = "run shell command/script"; + private const string _optionName = "--script"; + private const string _optionExample = "COMMAND/SCRIPT"; + private const string _fullName = "run.command.script"; } } diff --git a/src/extensions/helper_functions_extension/FileHelperFunctions.cs b/src/extensions/helper_functions_extension/FileHelperFunctions.cs index d19f5ec8..da22af35 100644 --- a/src/extensions/helper_functions_extension/FileHelperFunctions.cs +++ b/src/extensions/helper_functions_extension/FileHelperFunctions.cs @@ -42,15 +42,20 @@ public static bool DirectoryCreate(string directoryName) return true; } + [HelperFunctionDescription("List files; lists all files regardless of name; only in current directory")] + public static string FindAllFilesInCurrentDirectory() + { + return FindAllFilesMatchingPattern("*"); + } - [HelperFunctionDescription("List files; lists all files regardless of name")] - public static string FindAllFiles() + [HelperFunctionDescription("List files; lists all files regardless of name; searches current directory and all sub-directories")] + public static string FindAllFilesRecursively() { - return FindFilesMatchingPattern("**/*"); + return FindAllFilesMatchingPattern("**/*"); } - [HelperFunctionDescription("List files; lists files matching pattern")] - public static string FindFilesMatchingPattern([HelperFunctionParameterDescription("The pattern to search for; use '**/*.ext' to search sub-directories")] string pattern) + [HelperFunctionDescription("List files; lists all files matching pattern; searches current directory, and if pattern includes '**', all 
sub-directories")] + public static string FindAllFilesMatchingPattern([HelperFunctionParameterDescription("The pattern to search for; use '**/*.ext' to search sub-directories")] string pattern) { var files = FileHelpers.FindFiles(".", pattern); return string.Join("\n", files); diff --git a/src/extensions/helper_functions_extension/helper_functions_extension.csproj b/src/extensions/helper_functions_extension/helper_functions_extension.csproj index 3761f3cf..f7bb9259 100644 --- a/src/extensions/helper_functions_extension/helper_functions_extension.csproj +++ b/src/extensions/helper_functions_extension/helper_functions_extension.csproj @@ -2,7 +2,7 @@ Azure.AI.CLI.Extensions.HelperFunctions - net7.0 + net8.0 Azure.AI.Details.Common.CLI.Extensions.HelperFunctions enable enable diff --git a/src/extensions/template_extension/TemplateFactory.cs b/src/extensions/template_extension/TemplateFactory.cs index 75008553..60cbee84 100644 --- a/src/extensions/template_extension/TemplateFactory.cs +++ b/src/extensions/template_extension/TemplateFactory.cs @@ -9,77 +9,91 @@ using System.Text; using System.Threading.Tasks; using Azure.AI.Details.Common.CLI; +using Azure.AI.Details.Common.CLI.ConsoleGui; namespace Azure.AI.Details.Common.CLI.Extensions.Templates { public class TemplateFactory { - public static bool ListTemplates() + public class Item { - var root = FileHelpers.FileNameFromResourceName("templates") + "/"; - var files = FileHelpers.FindFilesInTemplatePath("*", null).ToList(); + public string LongName { get; set; } = string.Empty; + public string ShortName { get; set; } = string.Empty; + public string Language { get; set; } = string.Empty; + public string UniqueName { get; set; } = string.Empty; + } - var templateShortNames = files - .Select(x => x.Replace(root, string.Empty)) - .Where(x => x.EndsWith("_.json")) - .Select(x => x.Split(new char[] { '\\', '/' }).FirstOrDefault()) - .Where(x => x != null) - .Select(x => x!) 
- .Distinct() - .ToList(); - templateShortNames.Sort(); + public class Group + { + public string LongName { get; set; } = string.Empty; + public string ShortName { get; set; } = String.Empty; + public string Languages { get { return string.Join(", ", Items.OrderBy(x => x.Language).Select(x => x.Language)); } } + public List Items { get; set; } = new List(); + } - var templateLongNames = new List(); - var languages = new List(); - foreach (var item in templateShortNames) + public static bool ListTemplates(string? templateFilter, string? languageFilter) + { + var groups = GetFilteredTemplateGroups(templateFilter, languageFilter); + if (groups.Count == 0) { - var parameters = GetParameters(item); - var longName = parameters["_Name"]; - var language = parameters["_Language"]; - - templateLongNames.Add(longName); - languages.Add(language); + ConsoleHelpers.WriteLineError($"No matching templates found\n"); + groups = GetTemplateGroups(); } - templateShortNames.Insert(0, ".env"); - templateLongNames.Insert(0, "Environment Variables"); - languages.Insert(0, ""); - var longNameLabel = "Name"; var shortNameLabel = "Short Name"; var languageLabel = "Language"; var widths = new int[3]; - widths[0] = Math.Max(longNameLabel.Length, templateLongNames.Max(x => x.Length)); - widths[1] = Math.Max(shortNameLabel.Length, templateShortNames.Max(x => x.Length)); - widths[2] = Math.Max(languageLabel.Length, languages.Max(x => x.Length)); + widths[0] = Math.Max(longNameLabel.Length, groups.Max(x => x.LongName.Length)); + widths[1] = Math.Max(shortNameLabel.Length, groups.Max(x => x.ShortName.Length)); + widths[2] = Math.Max(languageLabel.Length, groups.Max(x => x.Languages.Length)); + + var hideLongName = !Console.IsOutputRedirected && Screen.GetRightColumn() < widths.Sum() + 4 * 2 + 1; - Console.WriteLine($"{longNameLabel.PadRight(widths[0])} {shortNameLabel.PadRight(widths[1])} {languageLabel.PadRight(widths[2])}"); - Console.WriteLine($"{"-".PadRight(widths[0], '-')} 
{"-".PadRight(widths[1], '-')} {"-".PadRight(widths[2], '-')}"); + if (!hideLongName) Console.Write($"{longNameLabel.PadRight(widths[0])} "); + Console.WriteLine($"{shortNameLabel.PadRight(widths[1])} {languageLabel.PadRight(widths[2])}"); - for (int i = 0; i < templateShortNames.Count; i++) + if (!hideLongName) Console.Write($"{"-".PadRight(widths[0], '-')} "); + Console.WriteLine($"{"-".PadRight(widths[1], '-')} {"-".PadRight(widths[2], '-')}"); + + for (int i = 0; i < groups.Count; i++) { - var longName = templateLongNames[i]; - var shortName = templateShortNames[i].Replace('_', '-'); - var language = languages[i]; - Console.WriteLine($"{longName.PadRight(widths[0])} {shortName.PadRight(widths[1])} {language.PadRight(widths[2])}"); + var longName = groups[i].LongName; + var shortName = groups[i].ShortName.Replace('_', '-'); + var languages = groups[i].Languages; + + if (!hideLongName) Console.Write($"{longName.PadRight(widths[0])} "); + Console.WriteLine($"{shortName.PadRight(widths[1])} {languages.PadRight(widths[2])}"); } return true; } - public static bool GenerateTemplateFiles(string templateName, string instructions, string outputDirectory, bool quiet, bool verbose) + public static object? GenerateTemplateFiles(string templateName, string language, string instructions, string outputDirectory, bool quiet, bool verbose) { - var root = FileHelpers.FileNameFromResourceName("templates") + "/"; + var groups = GetTemplateGroups(); + var groupFound = groups.Where(x => x.ShortName == templateName).FirstOrDefault() + ?? groups.Where(x => x.LongName == templateName).FirstOrDefault(); + if (groupFound == null) return null; + + var templateFound = !string.IsNullOrEmpty(language) + ? groupFound.Items.Where(x => x.Language == language).FirstOrDefault() + : groupFound.Items.Count != 1 + ? 
groupFound.Items.Where(x => x.Language == string.Empty).FirstOrDefault() + : groupFound.Items.FirstOrDefault(); + if (templateFound == null) return groupFound; + + templateName = templateFound.UniqueName; - templateName = templateName.Replace('-', '_'); + var normalizedTemplateName = templateName.Replace('-', '_'); var generator = new TemplateGenerator(); - - var files = GetTemplateFileNames(templateName, generator); + + var files = GetTemplateFileNames(normalizedTemplateName, generator).ToList(); if (files.Count() == 0) { - templateName = templateName.Replace(" ", "_"); - files = GetTemplateFileNames(templateName, generator); + normalizedTemplateName = normalizedTemplateName.Replace(" ", "_"); + files = GetTemplateFileNames(normalizedTemplateName, generator).ToList(); if (files.Count() == 0) { return false; @@ -87,10 +101,13 @@ public static bool GenerateTemplateFiles(string templateName, string instruction } outputDirectory = PathHelpers.NormalizePath(outputDirectory); - var message = $"Generating '{templateName}' in '{outputDirectory}' ({files.Count()} files)..."; + var message = templateName != outputDirectory + ? $"Generating '{templateName}' in '{outputDirectory}' ({files.Count()} files)..." 
+ : $"Generating '{templateName}' ({files.Count()} files)..."; if (!quiet) Console.WriteLine($"{message}\n"); - var generated = ProcessTemplates(templateName, generator, files, outputDirectory); + files.Sort(); + var generated = ProcessTemplates(normalizedTemplateName, generator, files, outputDirectory); foreach (var item in generated) { var file = item.Replace(outputDirectory, string.Empty).Trim('\\', '/'); @@ -132,6 +149,87 @@ public static bool GenerateTemplateFiles(string templateName, string instruction return true; } + private static List GetTemplateGroups() + { + var root = FileHelpers.FileNameFromResourceName("templates") + "/"; + var files = FileHelpers.FindFilesInTemplatePath("*", null).ToList(); + + var uniqueNames = files + .Select(x => x.Replace(root, string.Empty)) + .Where(x => x.EndsWith("_.json")) + .Select(x => x.Split(new char[] { '\\', '/' }).FirstOrDefault()) + .Where(x => x != null) + .Select(x => x!) + .Distinct() + .ToList(); + uniqueNames.Sort(); + + var templates = new List(); + foreach (var uniqueName in uniqueNames) + { + var parameters = GetParameters(uniqueName); + var longName = parameters["_LongName"]; + var shortName = parameters["_ShortName"]; + var language = parameters["_Language"]; + + templates.Add(new Item() + { + LongName = longName, + ShortName = shortName, + Language = language, + UniqueName = uniqueName + }); + } + + templates.Add(new Item() + { + LongName = "Environment Variables", + ShortName = ".env", + Language = string.Empty, + UniqueName = ".env" + }); + + var grouped = templates + .GroupBy(x => x.LongName) + .Select(x => new Group() + { + LongName = x.Key, + ShortName = x.First().ShortName, + Items = x.ToList() + }) + .OrderBy(x => x.ShortName) + .ToList(); + return grouped; + } + + private static List GetFilteredTemplateGroups(string? templateFilter, string? 
languageFilter) + { + var groups = GetTemplateGroups(); + if (string.IsNullOrEmpty(templateFilter) && string.IsNullOrEmpty(languageFilter)) return groups; + + var filtered = groups + .Where(x => string.IsNullOrEmpty(templateFilter) || x.ShortName.Contains(templateFilter) || x.LongName.Contains(templateFilter)) + .Where(x => string.IsNullOrEmpty(languageFilter) || x.Languages.Split(", ").Contains(languageFilter) || x.Languages == string.Empty) + .ToList(); + + if (filtered.Count > 0 && !string.IsNullOrEmpty(languageFilter)) + { + groups.Clear(); + foreach (var item in filtered) + { + groups.Add(new Group() + { + LongName = item.LongName, + ShortName = item.ShortName, + Items = item.Items.Where(x => x.Language == languageFilter).ToList() + }); + } + return groups; + } + + return filtered; + } + private static IEnumerable GetTemplateFileNames(string templateName, TemplateGenerator generator) { var files = FileHelpers.FindFilesInTemplatePath($"{templateName}/*", null).ToList(); diff --git a/src/extensions/template_extension/template_extension.csproj b/src/extensions/template_extension/template_extension.csproj index 6c522271..8d58fd94 100644 --- a/src/extensions/template_extension/template_extension.csproj +++ b/src/extensions/template_extension/template_extension.csproj @@ -2,7 +2,7 @@ Azure.AI.CLI.Extensions.Templates - net7.0 + net8.0 Azure.AI.Details.Common.CLI.Extensions.Templates enable enable diff --git a/src/extensions/test_helper_functions_extension/test_helper_functions_extension.csproj b/src/extensions/test_helper_functions_extension/test_helper_functions_extension.csproj index 80004a38..e2bf0920 100644 --- a/src/extensions/test_helper_functions_extension/test_helper_functions_extension.csproj +++ b/src/extensions/test_helper_functions_extension/test_helper_functions_extension.csproj @@ -2,7 +2,7 @@ Azure.AI.CLI.Extensions.HelperFunctions.Test - net7.0 + net8.0 Azure.AI.Details.Common.CLI.Extensions.HelperFunctions.Test enable enable diff --git 
a/src/spx/spx-cli.csproj b/src/spx/spx-cli.csproj index 42bca76b..4c2752f8 100644 --- a/src/spx/spx-cli.csproj +++ b/src/spx/spx-cli.csproj @@ -90,7 +90,7 @@ - + diff --git a/src/telemetry/telemetry.aria.csproj b/src/telemetry/telemetry.aria.csproj index f470838f..dcb1897a 100644 --- a/src/telemetry/telemetry.aria.csproj +++ b/src/telemetry/telemetry.aria.csproj @@ -1,7 +1,7 @@  - net7.0 + net8.0 enable enable Azure.AI.Details.Common.CLI.Telemetry diff --git a/src/vz/vz-cli.csproj b/src/vz/vz-cli.csproj index 16d4fd72..afbd9e3b 100644 --- a/src/vz/vz-cli.csproj +++ b/src/vz/vz-cli.csproj @@ -85,7 +85,7 @@ - + diff --git a/tests/Azure-AI-CLI-TestFramework-Default-Tags.yaml b/tests/Azure-AI-CLI-TestFramework-Default-Tags.yaml new file mode 100644 index 00000000..a254aab6 --- /dev/null +++ b/tests/Azure-AI-CLI-TestFramework-Default-Tags.yaml @@ -0,0 +1,2 @@ +cli: ai +workingDirectory: ../testresults diff --git a/tests/test.yaml b/tests/test.yaml new file mode 100644 index 00000000..d7b1bac3 --- /dev/null +++ b/tests/test.yaml @@ -0,0 +1,93 @@ +- name: simulate pass + simulate: Passed + +- name: simulate skipped + simulate: Skipped + +- test1: run --script "echo hello" +- test2: run --script "echo oh yeah?" + +- name: try1a command ... ai + command: ai + +- name: try1b command ... ai run --script "echo hello" + command: ai run --script "echo hello" + expect: hello + +- name: try2a script ... ai + script: ai + +- name: try2b script ... ai run --script "echo hello" + script: ai run --script "echo hello" + expect: hello + +- name: simple help test + script: ai + expect: | + (?# ---------- BANNER) + AI - Azure AI CLI, Version [01]\.[0-9].[0-9] + Copyright \(c\) 2024 Microsoft Corporation\. All Rights Reserved\. + + This PUBLIC PREVIEW version may change at any time\. 
+ See: https://aka\.ms/azure-ai-cli-public-preview + + ___ ____ ___ _____ + / _ /_ / / _ |/_ _/ + / __ |/ /_/ __ |_/ /_ + /_/ |_/___/_/ |_/____/ + + USAGE: ai \[\.\.\.\]\r?$\n + ^\r?$\n + ^HELP\r?$\n + ^\r?$\n + ^ ai help\r?$\n + ^ ai help init\r?$\n + ^\r?$\n + ^COMMANDS\r?$\n + ^\r?$\n + ^ ai init \[\.\.\.\] \(see: ai help init\)\r?$\n + ^ ai config \[\.\.\.\] \(see: ai help config\)\r?$\n + ^\r?$\n + ^ ai dev \[\.\.\.\] \(see: ai help dev\)\r?$\n + ^\r?$\n + ^ ai chat \[\.\.\.\] \(see: ai help chat\)\r?$\n + ^ ai flow \[\.\.\.\] \(see: ai help flow\)\r?$\n + ^\r?$\n + ^ ai search \[\.\.\.\] \(see: ai help search\)\r?$\n + ^ ai speech \[\.\.\.\] \(see: ai help speech\)\r?$\n + ^\r?$\n + ^ ai service \[\.\.\.\] \(see: ai help service\)\r?$\n + ^\r?$\n + ^EXAMPLES\r?$\n + ^\r?$\n + ^ ai init\r?$\n + ^ ai chat --interactive --system @prompt\.txt\r?$\n + ^\r?$\n + ^ ai search index update --name MyIndex --files \*\.md\r?$\n + ^ ai chat --interactive --system @prompt\.txt --index-name MyIndex\r?$\n + ^\r?$\n + ^SEE ALSO\r?$\n + ^\r?$\n + ^ ai help examples\r?$\n + ^\r?$\n + ^ ai help find "prompt"\r?$\n + ^ ai help find "prompt" --expand\r?$\n + ^\r?$\n + ^ ai help find topics "examples"\r?$\n + ^ ai help list topics\r?$\n + ^\r?$\n + ^ ai help documentation\r?$\n + ^\r?$\n + +- name: dev new list + command: dev new list + arguments: + expect: | + ^Name +Short +Name +Language +\r?$\n + ^-+ +-+ +-+\r?$\n + ^Environment +Variables +\.env *\r?$\n + ^Helper +Function +Class +Library +helper-functions +C# *\r?$\n + ^OpenAI +Chat +Completions +openai-chat +C#, +Go, +Java, +JavaScript, +Python *\r?$\n + ^OpenAI +Chat +Completions +\(Streaming\) +openai-chat-streaming +C#, +Go, +Java, +JavaScript, +Python *\r?$\n + ^OpenAI +Chat +Completions +\(w/ +Data +\+ +AI +Search\) +openai-chat-streaming-with-data +C#, +Go, +Java, +JavaScript, +Python *\r?$\n + ^OpenAI +Chat +Completions +\(w/ +Functions\) +openai-chat-streaming-with-functions +C#, +Go, +JavaScript, +Python *\r?$\n diff 
--git a/tests/test2.yaml b/tests/test2.yaml new file mode 100644 index 00000000..a3ea4935 --- /dev/null +++ b/tests/test2.yaml @@ -0,0 +1,16 @@ +- name: simple help test + script: | + ai help + ai dev + ai dev new + ai dev new --help + + expect: | + AI + USAGE + COMMANDS + EXAMPLES + ADDITIONAL TOPICS + +- name: simple chat example + command: chat --question "tell me a joke" --save chat.job diff --git a/tests/test3.yaml b/tests/test3.yaml new file mode 100644 index 00000000..842cadc0 --- /dev/null +++ b/tests/test3.yaml @@ -0,0 +1,419 @@ +- area: ai init + tags: [before] + tests: + - name: ai init openai + command: ai init openai + arguments: + subscription: e72e5254-f265-4e95-9bd2-9ee8e7329051 + name: robch-oai-eastus2 + chat-deployment-name: gpt-4-32k-0613 + embeddings-deployment-name: text-embedding-ada-002-2 + evaluation-deployment-name: gpt-4-32k-0613 + interactive: false + - name: ai init speech + command: ai init speech + arguments: + subscription: e72e5254-f265-4e95-9bd2-9ee8e7329051 + name: robch-cranky-red-koala-ais + interactive: false + +- name: test ai chat + command: ai chat --question "Why is the sky blue, what's it called" --index-name @none + expect: Rayleigh + +- name: test ai chat built in functions + command: ai chat --interactive --built-in-functions + input: | + Create a file named "test.txt" with the following content: "Hello, World!" + What files are in the current directory? + Show me what's in the file "test.txt" + expect: | + assistant-function: CreateFileAndSaveText + assistant-function: FindAllFilesInCurrentDirectory + test.txt + Hello, World! 
+ +- name: dev new environment + command: ai dev new .env + +- class: dev new helper-functions + steps: + - name: generate template + command: ai dev new helper-functions --instructions "Create a helper function named GetPersonsAge that returns ages of people; John is 55; Jane is 53; everyone else, return unknown" + - name: build template + bash: | + cd helper-functions + dotnet build + - name: run template + command: ai chat --interactive --helper-functions helper-functions/bin/Debug/net8.0/HelperFunctionsProject.dll + input: | + What is my name? + How old is John? + How old is Jane? + How old is Bob? + expect: | + assistant-function: GetUsersName\({}\) = + assistant-function: GetPersonsAge\({ + John + }\) = + 55 + assistant-function: GetPersonsAge\({ + Jane + }\) = + 53 + [Uu]nknown + +- area: ai dev new openai-chat + tests: + + - class: dev new openai-chat (c#) + steps: + - name: generate template + command: ai dev new openai-chat --cs + - name: build template + bash: | + cd openai-chat-cs + dotnet build + - name: run template + command: ai dev shell --run "openai-chat-cs\bin\Debug\net8.0\OpenAIChatCompletions" + input: |- + Tell me a joke + Tell me another joke + expect-gpt: | + The output should contain exactly two jokes. 
+ tag: skip + + - class: dev new openai-chat (go) + steps: + - name: generate template + command: ai dev new openai-chat --go + - name: build template + bash: | + cd openai-chat-go + go mod tidy + go build + - name: run template + command: ai dev shell --run "openai-chat-go\openai_chat_completions_hello_world" + input: |- + Tell me a joke + Tell me another joke + tag: skip + + - class: dev new openai-chat (java) + steps: + - name: generate template + command: ai dev new openai-chat --java + - name: restore packages + bash: | + cd openai-chat-java + mvn clean package + - name: build template + bash: | + cd openai-chat-java + javac -cp "target/lib/*" src/OpenAIChatCompletionsClass.java src/Main.java -d out + - name: run template + command: ai dev shell --run "cd openai-chat-java && java -cp \"out;target/lib/*\" Main" + input: |- + Tell me a joke + Tell me another joke + tag: skip + + - class: dev new openai-chat (javascript) + steps: + - name: generate template + command: ai dev new openai-chat --javascript + - name: build template + bash: | + cd openai-chat-js + npm install + - name: run template + command: ai dev shell --run "cd openai-chat-js && node main.js" + input: |- + Tell me a joke + Tell me another joke + tag: skip + + - class: dev new openai-chat (python) + steps: + - name: generate template + command: ai dev new openai-chat --python + - name: build template + bash: | + cd openai-chat-py + pip install -r requirements.txt + tag: skip + - name: run template + command: ai dev shell --run "cd openai-chat-py && python openai_chat_completions.py" + input: |- + Tell me a joke + Tell me another joke + tag: skip + +- area: ai dev new openai-chat-streaming + tests: + + - class: dev new openai-chat-streaming (c#) + steps: + - name: generate template + command: ai dev new openai-chat-streaming --cs + - name: build template + bash: | + cd openai-chat-streaming-cs + dotnet build + - name: run template + command: ai dev shell --run 
"openai-chat-streaming-cs\bin\Debug\net8.0\OpenAIChatCompletionsStreaming" + input: |- + Tell me a joke + Tell me another joke + tag: skip + + - class: dev new openai-chat-streaming (go) + steps: + - name: generate template + command: ai dev new openai-chat-streaming --go + - name: build template + bash: | + cd openai-chat-streaming-go + go mod tidy + go build + - name: run template + command: ai dev shell --run "openai-chat-streaming-go\openai_chat_completions_streaming_hello_world" + input: |- + Tell me a joke + Tell me another joke + tag: skip + + - class: dev new openai-chat-streaming (java) + steps: + - name: generate template + command: ai dev new openai-chat-streaming --java + - name: restore packages + bash: | + cd openai-chat-streaming-java + mvn clean package + - name: build template + bash: | + cd openai-chat-streaming-java + javac -cp "target/lib/*" src/OpenAIChatCompletionsStreamingClass.java src/Main.java -d out + - name: run template + command: ai dev shell --run "cd openai-chat-streaming-java && java -cp \"out;target/lib/*\" Main" + input: |- + Tell me a joke + Tell me another joke + tag: skip + + command: ai dev new openai-chat-streaming --java + + - class: dev new openai-chat-streaming (javascript) + steps: + - name: generate template + command: ai dev new openai-chat-streaming --javascript + - name: build template + bash: | + cd openai-chat-streaming-js + npm install + - name: run template + command: ai dev shell --run "cd openai-chat-streaming-js && node main.js" + input: |- + Tell me a joke + Tell me another joke + tag: skip + + - class: dev new openai-chat-streaming (python) + steps: + - name: generate template + command: ai dev new openai-chat-streaming --python + - name: build template + bash: | + cd openai-chat-streaming-py + pip install -r requirements.txt + tag: skip + - name: run template + command: ai dev shell --run "cd openai-chat-streaming-py && python main.py" + input: |- + Tell me a joke + Tell me another joke + tag: skip + +- 
area: ai dev new openai-chat-streaming-with-data + tests: + + - class: dev new openai-chat-streaming-with-data (c#) + steps: + - name: generate template + command: ai dev new openai-chat-streaming-with-data --cs + - name: build template + bash: | + cd openai-chat-streaming-with-data-cs + dotnet build + - name: run template + command: ai dev shell --run "openai-chat-streaming-with-data-cs\bin\Debug\net8.0\OpenAIChatCompletionsWithDataStreaming" + input: |- + What parameter should i use to select my resources? + tag: skip + + - class: dev new openai-chat-streaming-with-data (javascript) + steps: + - name: generate template + command: ai dev new openai-chat-streaming-with-data --javascript + - name: build template + bash: | + cd openai-chat-streaming-with-data-js + npm install + - name: run template + command: ai dev shell --run "cd openai-chat-streaming-with-data-js && node main.js" + input: |- + What parameter should i use to select my resources? + tag: skip + + - class: dev new openai-chat-streaming-with-data (python) + steps: + - name: generate template + command: ai dev new openai-chat-streaming-with-data --python + - name: build template + bash: | + cd openai-chat-streaming-with-data-py + pip install -r requirements.txt + tag: skip + - name: run template + command: ai dev shell --run "cd openai-chat-streaming-with-data-py && python main.py" + input: |- + What parameter should i use to select my resources? + tag: skip + + - class: dev new openai-chat-streaming-with-data (go) + steps: + - name: generate template + command: ai dev new openai-chat-streaming-with-data --go + - name: build template + script: | + cd openai-chat-streaming-with-data-go + go mod tidy + - name: run template + command: ai dev shell --run "cd openai-chat-streaming-with-data-go && go run .\main.go .\openai_chat_completions_streaming_with_data_hello_world.go" + input: |- + What parameter should i use to initialize? 
+ tag: skip + +- area: ai dev new openai-chat-streaming-with-functions + tests: + + - class: dev new openai-chat-streaming-with-functions (c#) + steps: + - name: generate template + command: ai dev new openai-chat-streaming-with-functions --cs + - name: build template + bash: | + cd openai-chat-streaming-with-functions-cs + dotnet build + - name: run template + command: ai dev shell --run "cd openai-chat-streaming-with-functions-cs && bin\Debug\net8.0\OpenAIChatCompletionsFunctionsStreaming" + input: |- + What is the date? + What is the time? + tag: skip + + - class: dev new openai-chat-streaming-with-functions (go) + steps: + - name: generate template + command: ai dev new openai-chat-streaming-with-functions --go + - name: build template + bash: | + cd openai-chat-streaming-with-functions-go + go mod tidy + go build + - name: run template + command: ai dev shell --run "cd openai-chat-streaming-with-functions-go && openai_chat_completions_functions_streaming_hello_world" + input: |- + What is the date? + What is the time? + tag: skip + + - class: dev new openai-chat-streaming-with-functions (javascript) + steps: + - name: generate template + command: ai dev new openai-chat-streaming-with-functions --javascript + - name: build template + bash: | + cd openai-chat-streaming-with-functions-js + npm install + - name: run template + command: ai dev shell --run "cd openai-chat-streaming-with-functions-js && node main.js" + input: |- + What is the date? + What is the time? + tag: skip + + - class: dev new openai-chat-streaming-with-functions (python) + steps: + - name: generate template + command: ai dev new openai-chat-streaming-with-functions --python + - name: build template + bash: | + cd openai-chat-streaming-with-functions-py + pip install -r requirements.txt + tag: skip + - name: run template + command: ai dev shell --run "cd openai-chat-streaming-with-functions-py && python main.py" + input: |- + What is the date? + What is the time? 
+ tag: skip + +- area: ai dev new openai-webpage + tests: + + - class: dev new openai-webpage (javascript) + steps: + - name: generate template + command: ai dev new openai-webpage --javascript + - name: build template + bash: | + cd openai-webpage-js + npm install + - name: pack template + bash: | + cd openai-webpage-js + npx webpack + + - class: dev new openai-webpage (typescript) + steps: + - name: generate template + command: ai dev new openai-webpage --typescript + - name: build template + bash: | + cd openai-webpage-ts + npm install + - name: pack template + bash: | + cd openai-webpage-ts + npx webpack + +- area: ai dev new openai-webpage-with-functions + tests: + + - class: dev new openai-webpage-with-functions (javascript) + steps: + - name: generate template + command: ai dev new openai-webpage-with-functions --javascript + - name: build template + bash: | + cd openai-webpage-with-functions-js + npm install + - name: pack template + bash: | + cd openai-webpage-with-functions-js + npx webpack + + - class: dev new openai-webpage-with-functions (typescript) + steps: + - name: generate template + command: ai dev new openai-webpage-with-functions --typescript + - name: build template + bash: | + cd openai-webpage-with-functions-ts + npm install + - name: pack template + bash: | + cd openai-webpage-with-functions-ts + npx webpack diff --git a/tests/testadapter/Properties/AssemblyInfo.cs b/tests/testadapter/Properties/AssemblyInfo.cs new file mode 100644 index 00000000..95e7b07d --- /dev/null +++ b/tests/testadapter/Properties/AssemblyInfo.cs @@ -0,0 +1,37 @@ +using System; +using System.Reflection; +using System.Runtime.CompilerServices; +using System.Runtime.InteropServices; + +// General Information about an assembly is controlled through the following +// set of attributes. Change these attribute values to modify the information +// associated with an assembly. 
+[assembly: AssemblyTitle("YamlTestAdapter")] +[assembly: AssemblyDescription("")] +[assembly: AssemblyConfiguration("")] +[assembly: AssemblyCompany("")] +[assembly: AssemblyProduct("YamlTestAdapter")] +[assembly: AssemblyCopyright("Copyright © 2024")] +[assembly: AssemblyTrademark("")] +[assembly: AssemblyCulture("")] + +// Setting ComVisible to false makes the types in this assembly not visible +// to COM components. If you need to access a type in this assembly from +// COM, set the ComVisible attribute to true on that type. +[assembly: ComVisible(false)] + +// The following GUID is for the ID of the typelib if this project is exposed to COM +[assembly: Guid("EE266A17-DBFD-4C18-BCEB-C6F6CE76E6CC")] + +// Version information for an assembly consists of the following four values: +// +// Major Version +// Minor Version +// Build Number +// Revision +// +// You can specify all the values or you can default the Build and Revision Numbers +// by using the '*' as shown below: +// [assembly: AssemblyVersion("1.0.*")] +[assembly: AssemblyVersion("1.0.0.0")] +[assembly: AssemblyFileVersion("1.0.0.0")] diff --git a/tests/testadapter/README.md b/tests/testadapter/README.md new file mode 100644 index 00000000..48d22e4e --- /dev/null +++ b/tests/testadapter/README.md @@ -0,0 +1,206 @@ +# `ai` CLI Yaml Test Adapter + +PRE-REQUISITES: +* `ai` must be accessible in `PATH` +* `ai` must be configured as required for tests (run `ai init`, or use `ai config --set KEY=VALUE` for all required information) +- see: https://crbn.us/searchdocs?ai +- OR ... + ```dotnetcli + dotnet tool install --global Azure.AI.CLI + ai init + ``` + +## Run ALL tests + +**dotnet test** +From fresh clone (one step, CLI): +* DEBUG: + ```dotnetcli + dotnet test --logger:trx + ``` +* RELEASE: + ```dotnetcli + dotnet test --configuration release --logger:trx + ``` + +OR ... 
[Build](#BUILD) first, then w/CLI: +* DEBUG: + ```dotnetcli + cd tests\testadapter\bin\Debug\net8.0 + dotnet test Azure.AI.CLI.TestAdapter.dll --logger:trx + ``` +* RELEASE: + ```dotnetcli + cd tests\testadapter\bin\Release\net8.0 + dotnet test Azure.AI.CLI.TestAdapter.dll --logger:trx --logger:console;verbosity=normal + ``` + +**dotnet vstest** +OR ... [Build](#BUILD) first, then w/CLI: +* DEBUG: + ```dotnetcli + cd tests\testadapter\bin\Debug\net8.0 + dotnet vstest Azure.AI.CLI.TestAdapter.dll --logger:trx + ``` +* RELEASE: + ```dotnetcli + cd tests\testadapter\bin\Release\net8.0 + dotnet vstest Azure.AI.CLI.TestAdapter.dll --logger:trx --logger:console;verbosity=normal + ``` + +**VS 2019+** +OR ... [Build](#BUILD) first, then w/Visual Studio 2019+: +* Open Test Explorer (`T`) +* Run all tests (`V`) + +--- +## LIST tests + +**dotnet test** +From fresh clone (one step, CLI): +* DEBUG: + ```dotnetcli + dotnet test -t + ``` +* RELEASE: + ```dotnetcli + dotnet test --configuration release -t + ``` + +OR ... [Build](#BUILD) first, then w/CLI: +* DEBUG: + ```dotnetcli + cd tests\testadapter\bin\Debug\net8.0 + dotnet test Azure.AI.CLI.TestAdapter.dll -t + ``` +* RELEASE: + ```dotnetcli + cd tests\testadapter\bin\Release\net8.0 + dotnet test Azure.AI.CLI.TestAdapter.dll -t + ``` + +**dotnet vstest** +OR ... [Build](#BUILD) first, then w/CLI: +* DEBUG: + ```dotnetcli + cd tests\testadapter\bin\Debug\net8.0 + dotnet vstest Azure.AI.CLI.TestAdapter.dll -lt + ``` +* RELEASE: + ```dotnetcli + cd tests\testadapter\bin\Release\net8.0 + dotnet vstest Azure.AI.CLI.TestAdapter.dll -lt + ``` + +--- +## Run SOME tests + +**dotnet test** +From fresh clone (one step, CLI): +* DEBUG: + ```dotnetcli + dotnet test --filter:name~PARTIAL_NAME + ``` +* RELEASE: + ```dotnetcli + dotnet test --configuration release --filter:name~PARTIAL_NAME + ``` + +OR ... 
[Build](#BUILD) first, then w/CLI: + +* DEBUG: + ```dotnetcli + cd tests\testadapter\bin\Debug\net8.0 + dotnet test --filter:name~PARTIAL_NAME Azure.AI.CLI.TestAdapter.dll + ``` +* RELEASE: + ```dotnetcli + cd tests\testadapter\bin\Release\net8.0 + dotnet test --filter:name~PARTIAL_NAME Azure.AI.CLI.TestAdapter.dll + ``` + +**dotnet vstest** +OR ... [Build](#BUILD) first, then w/CLI: +* DEBUG: + ```dotnetcli + cd tests\testadapter\bin\Debug\net8.0 + dotnet vstest Azure.AI.CLI.TestAdapter.dll --logger:trx --testcasefilter:name~PARTIAL_NAME + ``` +* RELEASE: + ```dotnetcli + cd tests\testadapter\bin\Release\net8.0 + dotnet vstest Azure.AI.CLI.TestAdapter.dll --logger:trx --testcasefilter:name~PARTIAL_NAME + ``` + +**VS 2019+** +OR ... [Build](#BUILD) first, then w/Visual Studio 2019+: +* Open Test Explorer (`T`) +- Select tests (w/ mouse: `Left-click`, extend w/`Shift-left-click` and/or `Ctrl-left-click`) +- OR ... ``, enter search criteria, press `` +* Run selected tests (w/ mouse: `Right-click`, click on `Run`) + +**Additional CLI test case filters** + +`Operator[|&]` + +Where Operator is one of `=`, `!=` or `~` (Operator ~ has 'contains' +semantics and is applicable for string properties like DisplayName). + +Parenthesis () can be used to group sub-expressions. 
+ +| property | aliases | example | +|-|-|-| +| Name | DisplayName | `Name=NAME` +| | | `Name!=NAME` +| | | `Name~PARTIAL` +| fqn | FullyQualifiedName | `fqn=yaml.FILE.AREA.CLASS.NAME` +| | | `fqn!=yaml.FILE.AREA.CLASS.NAME` +| | | `fqn~PARTIAL` +| command | | `command~recognize` +| | | `command~synthesize` +| | | `command~translate` +| | | `command~weather` +| | | `command~mp3` +| script | | `script~echo` +| | | `script~recognize` +| | | `script~weather` +| | | `script~mp3` +| expect | | `expect~RECOGNIZED:` +| not-expect | | `not-expect~ERROR` +| log-expect | | `log-expect~path:` +| log-not-expect | | `log-not-expect~ERROR` + +--- +# BUILD + +**dotnet build** +* DEBUG: `dotnet build` +* RELEASE: `dotnet build --configuration release` + +**VS 2019+** +* Open `ai-cli.sln` +* Select `Debug` or `Release` +* Run (``) + +--- + +## ADDITIONAL OPTIONS + +**dotnet test** +Console logging: `-v` or `--verbosity` followed one of: +* `q[uiet]` +* `m[inimal]` +* `n[ormal]` +* `d[etailed]` +* `diag[nostic]` + +e.g. `dotnet test --configuration release --v n` + +**dotnet vstest** +Console logging: `--logger:console`, optionally followed by one of: +* `;verbosity=quiet` +* `;verbosity=minimal` +* `;verbosity=normal` +* `;verbosity=detailed` + +e.g. `dotnet vstest Azure.AI.CLI.TestAdapter.dll --logger:trx --logger:console;verbosity=normal` diff --git a/tests/testadapter/TestAdapterTest.runsettings b/tests/testadapter/TestAdapterTest.runsettings new file mode 100644 index 00000000..dc0165f9 --- /dev/null +++ b/tests/testadapter/TestAdapterTest.runsettings @@ -0,0 +1,7 @@ + + + . 
+ + + + \ No newline at end of file diff --git a/tests/testadapter/TestDiscoverer.cs b/tests/testadapter/TestDiscoverer.cs new file mode 100644 index 00000000..1f1a5d27 --- /dev/null +++ b/tests/testadapter/TestDiscoverer.cs @@ -0,0 +1,42 @@ +using Microsoft.VisualStudio.TestPlatform.ObjectModel; +using Microsoft.VisualStudio.TestPlatform.ObjectModel.Adapter; +using Microsoft.VisualStudio.TestPlatform.ObjectModel.Logging; +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.IO; +using System.Linq; +using System.Text; +using System.Threading; +using System.Threading.Tasks; +using Azure.AI.Details.Common.CLI.TestFramework; + +namespace Azure.AI.Details.Common.CLI.TestAdapter +{ + [FileExtension(YamlTestFramework.YamlFileExtension)] + [FileExtension(YamlTestAdapter.DllFileExtension)] + [DefaultExecutorUri(YamlTestAdapter.Executor)] + public class TestDiscoverer : ITestDiscoverer + { + public void DiscoverTests(IEnumerable sources, IDiscoveryContext discoveryContext, IMessageLogger logger, ITestCaseDiscoverySink discoverySink) + { + try + { + Logger.Log(logger); + Logger.Log($"TestDiscoverer.DiscoverTests(): ENTER"); + Logger.Log($"TestDiscoverer.DiscoverTests(): count={sources.Count()}"); + foreach (var test in YamlTestAdapter.GetTestsFromFiles(sources)) + { + test.ExecutorUri = new Uri(YamlTestAdapter.Executor); + discoverySink.SendTestCase(test); + } + Logger.Log($"TestDiscoverer.DiscoverTests(): EXIT"); + } + catch (Exception ex) + { + Logger.Log($"EXCEPTION: {ex.Message}\nSTACK: {ex.StackTrace}"); + throw; + } + } + } +} diff --git a/tests/testadapter/TestExecutor.cs b/tests/testadapter/TestExecutor.cs new file mode 100644 index 00000000..0bcf0a65 --- /dev/null +++ b/tests/testadapter/TestExecutor.cs @@ -0,0 +1,46 @@ +using Microsoft.VisualStudio.TestPlatform.ObjectModel; +using Microsoft.VisualStudio.TestPlatform.ObjectModel.Adapter; +using Microsoft.VisualStudio.TestPlatform.ObjectModel.Logging; +using System; +using 
System.Collections.Generic; +using System.Diagnostics; +using System.IO; +using System.Linq; +using System.Text; +using System.Threading; +using System.Threading.Tasks; +using Azure.AI.Details.Common.CLI.TestFramework; + +namespace Azure.AI.Details.Common.CLI.TestAdapter +{ + [ExtensionUri(YamlTestAdapter.Executor)] + public class TextExecutor : ITestExecutor + { + public void RunTests(IEnumerable tests, IRunContext runContext, IFrameworkHandle frameworkHandle) + { + tests = tests.ToList(); // force enumeration + + Logger.Log(frameworkHandle); + Logger.Log($"TextExecutor.RunTests(IEnumerable(): ENTER"); + Logger.Log($"TextExecutor.RunTests(IEnumerable(): count={tests.Count()}"); + YamlTestAdapter.RunTests(tests, runContext, frameworkHandle); + Logger.Log($"TextExecutor.RunTests(IEnumerable(): EXIT"); + } + + public void RunTests(IEnumerable sources, IRunContext runContext, IFrameworkHandle frameworkHandle) + { + sources = sources.ToList(); // force enumeration + + Logger.Log(frameworkHandle); + Logger.Log($"TextExecutor.RunTests(IEnumerable(): ENTER"); + Logger.Log($"TextExecutor.RunTests(IEnumerable(): count={sources.Count()}"); + RunTests(YamlTestAdapter.GetTestsFromFiles(sources), runContext, frameworkHandle); + Logger.Log($"TextExecutor.RunTests(IEnumerable(): EXIT"); + } + + public void Cancel() + { + Logger.Log($"TextExecutor.Cancel(): ENTER/EXIT"); + } + } +} diff --git a/tests/testadapter/YamlTestAdapter.cs b/tests/testadapter/YamlTestAdapter.cs new file mode 100644 index 00000000..956c5f43 --- /dev/null +++ b/tests/testadapter/YamlTestAdapter.cs @@ -0,0 +1,81 @@ +using Microsoft.VisualStudio.TestPlatform.ObjectModel; +using Microsoft.VisualStudio.TestPlatform.ObjectModel.Adapter; +using Microsoft.VisualStudio.TestPlatform.ObjectModel.Logging; +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.IO; +using System.Linq; +using System.Reflection; +using System.Text; +using System.Threading; +using System.Threading.Tasks; 
+using System.Threading.Tasks.Dataflow; +using Azure.AI.Details.Common.CLI.TestFramework; + +namespace Azure.AI.Details.Common.CLI.TestAdapter +{ + public class YamlTestAdapter + { + public static IEnumerable GetTestsFromFiles(IEnumerable sources) + { + Logger.Log($"YamlTestAdapter.GetTestsFromFiles(source.Count={sources.Count()})"); + + var tests = new List(); + foreach (var source in sources) + { + Logger.Log($"YamlTestAdapter.GetTestsFromFiles('{source}')"); + tests.AddRange(GetTestsFromFile(source)); + } + + Logger.Log($"YamlTestAdapter.GetTestsFromFiles() found count={tests.Count()}"); + return tests; + } + + public static IEnumerable GetTestsFromFile(string source) + { + Logger.Log($"YamlTestAdapter.GetTestsFromFile('{source}')"); + + var file = new FileInfo(source); + Logger.Log($"YamlTestAdapter.GetTestsFromFile('{source}'): Extension={file.Extension}"); + + return file.Extension.Trim('.') == YamlTestFramework.YamlFileExtension.Trim('.') + ? YamlTestFramework.GetTestsFromYaml(source, file).ToList() + : GetTestsFromTestAdapterOrReferenceDirectory(source, file).ToList(); + } + + public static void RunTests(IEnumerable tests, IRunContext runContext, IFrameworkHandle frameworkHandle) + { + var filtered = YamlTestCaseFilter.FilterTestCases(tests, runContext); + YamlTestFramework.RunTests(filtered, new YamlTestFrameworkHandleHost(frameworkHandle)); + } + + #region private methods + + private static IEnumerable GetTestsFromTestAdapterOrReferenceDirectory(string source, FileInfo file) + { + var sourceOk = + source.Contains("Azure.AI.CLI.TestAdapter") || + Assembly.LoadFile(source).GetReferencedAssemblies().Count(x => x.Name.Contains("Azure.AI.CLI.TestAdapter")) > 0; + + // foreach (var a in Assembly.LoadFile(source).GetReferencedAssemblies()) + // { + // Logger.Log($"a.Name={a.Name}"); + // Logger.Log($"a.FullName={a.FullName}"); + // } + + Logger.Log($"YamlTestAdapter.GetTestsFromTestAdapterOrReferenceDirectory('{source}'): sourceOk = {sourceOk}"); + + return 
!sourceOk + ? Enumerable.Empty() + : YamlTestFramework.GetTestsFromDirectory(source, file.Directory); + } + + #endregion + + #region test adapter registration data + public const string DllFileExtension = ".dll"; + public const string Executor = "executor://ai/cli/TestAdapter/v1"; + #endregion + } +} diff --git a/tests/testadapter/YamlTestAdapter.csproj b/tests/testadapter/YamlTestAdapter.csproj new file mode 100644 index 00000000..c359a4ce --- /dev/null +++ b/tests/testadapter/YamlTestAdapter.csproj @@ -0,0 +1,13 @@ + + + + net8.0 + + + + + + + + + \ No newline at end of file diff --git a/tests/testadapter/YamlTestAdapterCommon.targets b/tests/testadapter/YamlTestAdapterCommon.targets new file mode 100644 index 00000000..3da5c71a --- /dev/null +++ b/tests/testadapter/YamlTestAdapterCommon.targets @@ -0,0 +1,68 @@ + + + + + net8.0 + Library + Azure.AI.CLI.TestAdapter + false + + + True + 1.1.0 + + + + $(LocalBuildSDKBinPath) + bin + + + + + + x64 + $(LocalBinOutputPath)\Release\ + + + + + x64 + $(LocalBinOutputPath)\Debug\ + + + + + $(DefineConstants);UNIX + $(LocalBinOutputPath)\ReleaseUnixOS + + + + + $(DefineConstants);UNIX + $(LocalBinOutputPath)\DebugUnixOS + + + + + + 1.0.0 + $(TAAssemblyVersion) + + + + $(TAAssemblyVersion) + $(TAAssemblyVersion) + $(TAAssemblyInformationalVersion) + + + + + + + + + + + + + diff --git a/tests/testadapter/YamlTestFrameworkHandleHost.cs b/tests/testadapter/YamlTestFrameworkHandleHost.cs new file mode 100644 index 00000000..6b9f08e1 --- /dev/null +++ b/tests/testadapter/YamlTestFrameworkHandleHost.cs @@ -0,0 +1,30 @@ +using Microsoft.VisualStudio.TestPlatform.ObjectModel; +using Microsoft.VisualStudio.TestPlatform.ObjectModel.Adapter; + +namespace Azure.AI.Details.Common.CLI.TestFramework +{ + public class YamlTestFrameworkHandleHost : IYamlTestFrameworkHost + { + private readonly IFrameworkHandle _frameworkHandle; + + public YamlTestFrameworkHandleHost(IFrameworkHandle frameworkHandle) + { + _frameworkHandle = frameworkHandle; 
+ } + + public void RecordStart(TestCase testCase) + { + _frameworkHandle.RecordStart(testCase); + } + + public void RecordResult(TestResult testResult) + { + _frameworkHandle.RecordResult(testResult); + } + + public void RecordEnd(TestCase testCase, TestOutcome outcome) + { + _frameworkHandle.RecordEnd(testCase, outcome); + } + } +} diff --git a/tests/testadapter/YamlTestRunnerTriggerAttribute.cs b/tests/testadapter/YamlTestRunnerTriggerAttribute.cs new file mode 100644 index 00000000..b2d3157c --- /dev/null +++ b/tests/testadapter/YamlTestRunnerTriggerAttribute.cs @@ -0,0 +1,21 @@ +using Microsoft.VisualStudio.TestPlatform.ObjectModel; +using Microsoft.VisualStudio.TestPlatform.ObjectModel.Adapter; +using Microsoft.VisualStudio.TestPlatform.ObjectModel.Logging; +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.IO; +using System.Linq; +using System.Text; +using System.Threading; +using System.Threading.Tasks; + +namespace Azure.AI.Details.Common.CLI.TestAdapter +{ + public class YamlTestRunnerTriggerAttribute : Attribute + { + public YamlTestRunnerTriggerAttribute() + { + } + } +} diff --git a/tests/testframework/IYamlTestFrameworkHost.cs b/tests/testframework/IYamlTestFrameworkHost.cs new file mode 100644 index 00000000..383b278d --- /dev/null +++ b/tests/testframework/IYamlTestFrameworkHost.cs @@ -0,0 +1,11 @@ +using Microsoft.VisualStudio.TestPlatform.ObjectModel; + +namespace Azure.AI.Details.Common.CLI.TestFramework +{ + public interface IYamlTestFrameworkHost + { + void RecordStart(TestCase testCase); + void RecordResult(TestResult testResult); + void RecordEnd(TestCase testCase, TestOutcome outcome); + } +} diff --git a/tests/testframework/Logger.cs b/tests/testframework/Logger.cs new file mode 100644 index 00000000..6b6822ec --- /dev/null +++ b/tests/testframework/Logger.cs @@ -0,0 +1,126 @@ +using Microsoft.VisualStudio.TestPlatform.ObjectModel; +using Microsoft.VisualStudio.TestPlatform.ObjectModel.Adapter; +using 
Microsoft.VisualStudio.TestPlatform.ObjectModel.Logging; +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.IO; +using System.Linq; +using System.Text; +using System.Threading; +using System.Threading.Tasks; + +namespace Azure.AI.Details.Common.CLI.TestFramework +{ + public class Logger + { + public static void Log(IMessageLogger logger) + { + Logger.logger = logger; + } + + public static void Log(string text) + { + LogInfo(text); + Logger.DbgTraceInfo(text); + } + + public static void LogIf(bool log, string text) + { + if (log) Log(text); + } + + #region log methods + + public static void LogInfo(string text) + { + using (var mutex = new Mutex(false, "Logger Mutex")) + { + mutex.WaitOne(); + File.AppendAllText(_logPath, $"{DateTime.Now}: INFO: {text}\n"); + mutex.ReleaseMutex(); + } + } + + public static void LogWarning(string text) + { + using (var mutex = new Mutex(false, "Logger Mutex")) + { + mutex.WaitOne(); + File.AppendAllText(_logPath, $"{DateTime.Now}: WARNING: {text}\n"); + mutex.ReleaseMutex(); + } + } + + public static void LogError(string text) + { + using (var mutex = new Mutex(false, "Logger Mutex")) + { + mutex.WaitOne(); + File.AppendAllText(_logPath, $"{DateTime.Now}: ERROR: {text}\n"); + mutex.ReleaseMutex(); + } + } + + #endregion + + #region dbg trace methods + + public static void DbgTraceInfo(string text) + { +#if DEBUG + TraceInfo(text); +#endif + } + + public static void DbgTraceWarning(string text) + { +#if DEBUG + TraceWarning(text); +#endif + } + + public static void DbgTraceError(string text) + { +#if DEBUG + TraceError(text); +#endif + } + + #endregion + + #region trace methods + + public static void TraceInfo(string text) + { + logger?.SendMessage(TestMessageLevel.Informational, $"{DateTime.Now}: {text}"); + } + + public static void TraceWarning(string text) + { + logger?.SendMessage(TestMessageLevel.Warning, $"{DateTime.Now}: {text}"); + } + + public static void TraceError(string text) + { + 
logger?.SendMessage(TestMessageLevel.Error, $"{DateTime.Now}: {text}"); + } + + #endregion + + #region private methods and data + + private static string GetLogPath() + { + var pid = Process.GetCurrentProcess().Id.ToString(); + var time = DateTime.Now.ToFileTime().ToString(); + return $"log-ai-cli-test-framework-{time}-{pid}.log"; + } + + private static IMessageLogger logger = null; + + private static string _logPath = GetLogPath(); + + #endregion + } +} diff --git a/tests/testframework/Properties/AssemblyInfo.cs b/tests/testframework/Properties/AssemblyInfo.cs new file mode 100644 index 00000000..c553fe3d --- /dev/null +++ b/tests/testframework/Properties/AssemblyInfo.cs @@ -0,0 +1,36 @@ +using System.Reflection; +using System.Runtime.CompilerServices; +using System.Runtime.InteropServices; + +// General Information about an assembly is controlled through the following +// set of attributes. Change these attribute values to modify the information +// associated with an assembly. +[assembly: AssemblyTitle("YamlTestFramework")] +[assembly: AssemblyDescription("")] +[assembly: AssemblyConfiguration("")] +[assembly: AssemblyCompany("")] +[assembly: AssemblyProduct("YamlTestFramework")] +[assembly: AssemblyCopyright("Copyright © 2024")] +[assembly: AssemblyTrademark("")] +[assembly: AssemblyCulture("")] + +// Setting ComVisible to false makes the types in this assembly not visible +// to COM components. If you need to access a type in this assembly from +// COM, set the ComVisible attribute to true on that type. 
+[assembly: ComVisible(false)] + +// The following GUID is for the ID of the typelib if this project is exposed to COM +[assembly: Guid("9409c89f-ae64-4d4f-820e-e4248512733a")] + +// Version information for an assembly consists of the following four values: +// +// Major Version +// Minor Version +// Build Number +// Revision +// +// You can specify all the values or you can default the Build and Revision Numbers +// by using the '*' as shown below: +// [assembly: AssemblyVersion("1.0.*")] +[assembly: AssemblyVersion("1.0.0.0")] +[assembly: AssemblyFileVersion("1.0.0.0")] diff --git a/tests/testframework/TestResultHelpers.cs b/tests/testframework/TestResultHelpers.cs new file mode 100644 index 00000000..2416d953 --- /dev/null +++ b/tests/testframework/TestResultHelpers.cs @@ -0,0 +1,21 @@ +using System.Collections.Generic; +using System.Linq; +using Microsoft.VisualStudio.TestPlatform.ObjectModel; + +namespace Azure.AI.Details.Common.CLI.TestFramework +{ + public class TestResultHelpers + { + public static TestOutcome TestOutcomeFromResults(IEnumerable results) + { + var failed = results.Count(x => x.Outcome == TestOutcome.Failed) > 0; + var skipped = results.Count(x => x.Outcome == TestOutcome.Skipped) > 0; + var notFound = results.Count(x => x.Outcome == TestOutcome.NotFound) > 0 || results.Count() == 0; + + return failed ? TestOutcome.Failed + : skipped ? TestOutcome.Skipped + : notFound ? 
TestOutcome.NotFound + : TestOutcome.Passed; + } + } +} diff --git a/tests/testframework/YamlHelpers.cs b/tests/testframework/YamlHelpers.cs new file mode 100644 index 00000000..33ce53b7 --- /dev/null +++ b/tests/testframework/YamlHelpers.cs @@ -0,0 +1,58 @@ +using System; +using System.IO; +using YamlDotNet.RepresentationModel; +using YamlDotNet.Serialization; + +namespace Azure.AI.Details.Common.CLI.TestFramework +{ + public class YamlHelpers + { + public static YamlStream ParseYamlStream(string fullName) + { + var stream = new YamlStream(); + var text = File.OpenText(fullName); + var error = string.Empty; + + try + { + stream.Load(text); + } + catch (YamlDotNet.Core.YamlException ex) + { + var where = $"{fullName}({ex.Start.Line},{ex.Start.Column})"; + error = $"Error parsing YAML (YamlException={ex.GetType()}):\n {where}\n {ex.Message}"; + } + catch (Exception ex) + { + var where = fullName; + error = $"Error parsing YAML (YamlException={ex.GetType()}):\n {where}\n {ex.Message}"; + } + + if (!string.IsNullOrEmpty(error)) + { + Logger.LogError(error); + Logger.TraceError(error); + } + + return stream; + } + + public static string ToYamlOrJsonString(YamlNode node, bool yaml) + { + var serializer = yaml + ? new SerializerBuilder().Build() + : new SerializerBuilder().JsonCompatible().Build(); + + using var writer = new StringWriter(); + var stream = new YamlStream { new YamlDocument(node) }; + stream.Save(writer); + + using var reader = new StringReader(writer.ToString()); + var deserializer = new Deserializer(); + var yamlObject = deserializer.Deserialize(reader); + + var trimmed = serializer.Serialize(yamlObject).Trim('\r', '\n'); + return yaml ? 
trimmed : trimmed.Replace("\t", "\\t").Replace("\f", "\\f"); + } + } +} diff --git a/tests/testframework/YamlNodeExtensions.cs b/tests/testframework/YamlNodeExtensions.cs new file mode 100644 index 00000000..e8e8be3f --- /dev/null +++ b/tests/testframework/YamlNodeExtensions.cs @@ -0,0 +1,113 @@ +using Microsoft.VisualStudio.TestPlatform.ObjectModel; +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Text; +using YamlDotNet.RepresentationModel; + +namespace Azure.AI.Details.Common.CLI.TestFramework +{ + public static class YamlNodeExtensions + { + public static string ToYamlString(this YamlNode node) + { + return YamlHelpers.ToYamlOrJsonString(node, true); + } + + public static string ToJsonString(this YamlNode node) + { + return YamlHelpers.ToYamlOrJsonString(node, false); + } + + public static YamlScalarNode ConvertScalarSequenceToMultiLineTsvScalarNode(this YamlNode yaml, TestCase test, string[] keys) + { + var text = yaml.ConvertScalarSequenceToMultilineTsvString(keys); + if (text == null) + { + text = $"Invalid sequence or sequence value at {test.CodeFilePath}({yaml.Start.Line},{yaml.Start.Column})"; + Logger.Log(text); + } + + return new YamlScalarNode(text); + } + + public static string ConvertScalarSequenceToMultilineTsvString(this YamlNode node, string[] keys = null) + { + // ensure it's a sequence + var ok = node is YamlSequenceNode; + if (!ok) return null; + + var lines = new List(); + foreach (var item in (node as YamlSequenceNode).Children) + { + var line = item is YamlScalarNode + ? (item as YamlScalarNode).Value + : item is YamlSequenceNode + ? 
item.ConvertScalarSequenceToTsvString(keys) + : item.ConvertScalarMapToTsvString(keys); + + // ensure each item is either scalar, or sequence of scalar + var invalidItem = (line == null); + Logger.LogIf(invalidItem, $"Invalid item at ({item.Start.Line},{item.Start.Column})"); + if (invalidItem) return null; + + lines.Add(line); + } + return string.Join("\n", lines); + } + + public static string ConvertScalarSequenceToTsvString(this YamlNode node, string[] keys = null) + { + // ensure it's a sequence (list/array) + var sequence = node as YamlSequenceNode; + if (sequence == null) return null; + + // ensure there are no non-scalar children + var count = sequence.Count(x => !(x is YamlScalarNode)); + Logger.LogIf(count > 0, $"Invalid: (non-scalar) count({count}) > 0"); + if (count > 0) return null; + + // join the scalar children separated by tabs + var tsv = string.Join("\t", sequence.Children + .Select(x => (x as YamlScalarNode).Value)); + + // if we don't have enough items, append empty string columns (count of items == count of tabs + 1) + while (tsv.Count(x => x == '\t') + 1 < keys?.Length) + { + tsv += "\t"; + } + + tsv = tsv.Replace('\n', '\f'); + Logger.Log($"YamlNodeExtensions.ConvertScalarSequenceToTsvString: tsv='{tsv}'"); + return tsv; + } + + public static string ConvertScalarMapToTsvString(this YamlNode node, string[] keys) + { + // ensure it's a mapping node and we have keys + var mapping = node as YamlMappingNode; + if (mapping == null || keys == null) return null; + + // ensure there are no non-scalar kvp children + var count = mapping.Count(x => !(x.Key is YamlScalarNode) || !(x.Value is YamlScalarNode)); + Logger.LogIf(count > 0, $"Invalid: (non-scalar key or value) count({count}) > 0"); + if (count > 0) return null; + + // ensure the key specified is in the list of keys + count = mapping.Count(x => !keys.Contains((x.Key as YamlScalarNode).Value)); + Logger.LogIf(count > 0, $"Invalid: key not found count({count}) > 0"); + if (count > 0) return null; 
+ + // join the scalar children ordered by keys, separated by tabs + var tsv = string.Join("\t", keys + .Select(key => mapping.Children.ContainsKey(key) + ? (mapping.Children[key] as YamlScalarNode).Value + : "")); + + tsv = tsv.Replace('\n', '\f'); + Logger.Log($"YamlNodeExtensions.ConvertScalarMapToTsvString: tsv='{tsv}'"); + return tsv; + } + } +} diff --git a/tests/testframework/YamlTagHelpers.cs b/tests/testframework/YamlTagHelpers.cs new file mode 100644 index 00000000..c9356c06 --- /dev/null +++ b/tests/testframework/YamlTagHelpers.cs @@ -0,0 +1,126 @@ +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using YamlDotNet.RepresentationModel; + +namespace Azure.AI.Details.Common.CLI.TestFramework +{ + public class YamlTagHelpers + { + public static FileInfo GetYamlDefaultTagsFullFileName(DirectoryInfo directory) + { + var found = directory.GetFiles(YamlTestFramework.YamlDefaultTagsFileName); + return found.Length == 1 + ? found[0] + : directory.Parent != null + ? GetYamlDefaultTagsFullFileName(directory.Parent) + : null; + } + + public static Dictionary> GetDefaultTags(DirectoryInfo directory) + { + var defaultTags = new Dictionary>(); + + var defaultsFile = GetYamlDefaultTagsFullFileName(directory)?.FullName; + if (defaultsFile != null) + { + Logger.Log($"Loading default tags from {defaultsFile}"); + var parsed = YamlHelpers.ParseYamlStream(defaultsFile); + if (parsed.Documents.Count() > 0) + { + var tagsNode = parsed.Documents[0].RootNode; + if (tagsNode != null) + { + defaultTags = UpdateCopyTags(defaultTags, null, tagsNode); + } + } + } + + return defaultTags; + } + + public static Dictionary> UpdateCopyTags(Dictionary> tags, YamlMappingNode mapping) + { + var tagNode = mapping.Children.ContainsKey("tag") ? mapping.Children["tag"] : null; + var tagsNode = mapping.Children.ContainsKey("tags") ? 
mapping.Children["tags"] : null; + if (tagNode == null && tagsNode == null) return tags; + + return UpdateCopyTags(tags, tagNode, tagsNode); + } + + private static Dictionary> UpdateCopyTags(Dictionary> tags, YamlNode tagNode, YamlNode tagsNode) + { + // make a copy that we'll update and return + tags = new Dictionary>(tags); + + var value = (tagNode as YamlScalarNode)?.Value; + AddOptionalTag(tags, "tag", value); + + var values = (tagsNode as YamlScalarNode)?.Value; + AddOptionalCommaSeparatedTags(tags, values); + + AddOptionalNameValueTags(tags, tagsNode as YamlMappingNode); + AddOptionalTagsForEachChild(tags, tagsNode as YamlSequenceNode); + + return tags; + } + + private static void AddOptionalTag(Dictionary> tags, string name, string value) + { + if (!string.IsNullOrEmpty(name) && !string.IsNullOrEmpty(value)) + { + if (!tags.ContainsKey(name)) + { + tags.Add(name, new List()); + } + tags[name].Add(value); + } + } + + private static void AddOptionalCommaSeparatedTags(Dictionary> tags, string values) + { + if (values != null) + { + foreach (var tag in values.Split(",".ToArray(), StringSplitOptions.RemoveEmptyEntries)) + { + AddOptionalTag(tags, "tag", tag); + } + } + } + + private static void AddOptionalNameValueTags(Dictionary> tags, YamlMappingNode mapping) + { + var children = mapping?.Children; + if (children == null) return; + + foreach (var child in children) + { + var key = (child.Key as YamlScalarNode)?.Value; + var value = (child.Value as YamlScalarNode)?.Value; + AddOptionalTag(tags, key, value); + } + } + + private static void AddOptionalTagsForEachChild(Dictionary> tags, YamlSequenceNode sequence) + { + var children = sequence?.Children; + if (children == null) return; + + foreach (var child in children) + { + if (child is YamlScalarNode) + { + AddOptionalTag(tags, "tag", (child as YamlScalarNode).Value); + continue; + } + + if (child is YamlMappingNode) + { + AddOptionalNameValueTags(tags, child as YamlMappingNode); + continue; + } + } + } + } +} 
diff --git a/tests/testframework/YamlTestCaseFilter.cs b/tests/testframework/YamlTestCaseFilter.cs new file mode 100644 index 00000000..32b2837b --- /dev/null +++ b/tests/testframework/YamlTestCaseFilter.cs @@ -0,0 +1,151 @@ +using Microsoft.VisualStudio.TestPlatform.ObjectModel; +using Microsoft.VisualStudio.TestPlatform.ObjectModel.Adapter; +using Microsoft.VisualStudio.TestPlatform.ObjectModel.Logging; +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.IO; +using System.Linq; +using System.Text; +using System.Threading; +using System.Threading.Tasks; + +namespace Azure.AI.Details.Common.CLI.TestFramework +{ + public class YamlTestCaseFilter + { + public static IEnumerable FilterTestCases(IEnumerable tests, IEnumerable criteria) + { + // example 1: "ai init openai" "ai init speech" -skip -nightly + // > test must contain either: + // > * "ai", "init", and "openai" in EXACTLY that order in any one single field/property, or + // > * "ai", "init", and "speech" in EXACTLY that order in any one single field/property + // > test must not contain "skip" in any field/property + // > test must not contain "nightly" in any field/property + + // example 2: +ai +init +openai -skip -nightly + // > test must contain ALL three of "ai", "init", and "openai" in any field/property + // > * they do NOT need to be in the same field/property + // > test must not contain "skip" in any field/property + // > test must not contain "nightly" in any field/property + + // example 3: "ai dev new" "ai init speech" +java +build -skip + // > test must contain either: + // > * "ai", "dev", and "new" in EXACTLY that order in any one single field/property, or + // > * "ai", "init", and "speech" in EXACTLY that order in any one single field/property + // > test must contain "java" in any field/property + // > test must contain "build" in any field/property + // > test must not contain "skip" in any field/property + + var sourceCriteria = new List(); + 
var mustMatchCriteria = new List(); + var mustNotMatchCriteria = new List(); + + foreach (var criterion in criteria) + { + var isMustMatch = criterion.StartsWith("+"); + var isMustNotMatch = criterion.StartsWith("-"); + var isSource = !isMustMatch && !isMustNotMatch; + + if (isSource) sourceCriteria.Add(criterion); + if (isMustMatch) mustMatchCriteria.Add(criterion.Substring(1)); + if (isMustNotMatch) mustNotMatchCriteria.Add(criterion.Substring(1)); + } + + var unfiltered = sourceCriteria.Count > 0 + ? tests.Where(test => + sourceCriteria.Any(criterion => + TestContainsText(test, criterion))) + : tests; + + if (mustMatchCriteria.Count > 0) + { + unfiltered = unfiltered.Where(test => + mustMatchCriteria.All(criterion => + TestContainsText(test, criterion))); + } + + if (mustNotMatchCriteria.Count > 0) + { + unfiltered = unfiltered.Where(test => + mustNotMatchCriteria.All(criterion => + !TestContainsText(test, criterion))); + } + + return unfiltered; + } + + public static IEnumerable FilterTestCases(IEnumerable tests, IRunContext runContext) + { + tests = tests.ToList(); // force enumeration + + var names = GetSupportedFilterableNames(tests); + var filter = runContext.GetTestCaseFilter(names, null); + return tests.Where(test => filter == null || filter.MatchTestCase(test, name => GetPropertyValue(test, name))).ToList(); + } + + private static HashSet GetSupportedFilterableNames(IEnumerable tests) + { + var filterable = new HashSet(supportedFilterProperties); + foreach (var test in tests) + { + foreach (var trait in test.Traits) + { + filterable.Add(trait.Name); + } + } + + if (filterable.Contains("tag")) filterable.Add("tags"); + + return filterable; + } + + private static object GetPropertyValue(TestCase test, string name) + { + switch (name.ToLower()) + { + case "name": + case "displayname": return test.DisplayName; + + case "fqn": + case "fullyqualifiedname": return test.FullyQualifiedName; + + case "cli": return YamlTestProperties.Get(test, "cli"); + case 
"command": return YamlTestProperties.Get(test, "command"); + case "script": return YamlTestProperties.Get(test, "script"); + case "bash": return YamlTestProperties.Get(test, "bash"); + + case "foreach": return YamlTestProperties.Get(test, "foreach"); + case "arguments": return YamlTestProperties.Get(test, "arguments"); + case "input": return YamlTestProperties.Get(test, "input"); + + case "expect": return YamlTestProperties.Get(test, "expect"); + case "expect-gpt": return YamlTestProperties.Get(test, "expect-gpt"); + case "not-expect": return YamlTestProperties.Get(test, "not-expect"); + + case "parallelize": return YamlTestProperties.Get(test, "parallelize"); + case "simulate": return YamlTestProperties.Get(test, "simulate"); + case "skiponfailure": return YamlTestProperties.Get(test, "skipOnFailure"); + + case "timeout": return YamlTestProperties.Get(test, "timeout"); + case "working-directory": return YamlTestProperties.Get(test, "working-directory"); + } + + var tags = test.Traits.Where(x => x.Name == name || name == "tags"); + if (tags.Count() == 0) return null; + + return tags.Select(x => x.Value).ToArray(); + } + + private static bool TestContainsText(TestCase test, string text) + { + return test.DisplayName.Contains(text) + || test.FullyQualifiedName.Contains(text) + || test.Traits.Any(x => x.Name == text || x.Value.Contains(text)) + || supportedFilterProperties.Any(property => GetPropertyValue(test, property)?.ToString().Contains(text) == true); + } + + + private static readonly string[] supportedFilterProperties = { "DisplayName", "FullyQualifiedName", "Category", "cli", "command", "script", "bash", "foreach", "arguments", "input", "expect", "expect-gpt", "not-expect", "parallelize", "simulate", "skipOnFailure" }; + } +} diff --git a/tests/testframework/YamlTestCaseParser.cs b/tests/testframework/YamlTestCaseParser.cs new file mode 100644 index 00000000..4daeca02 --- /dev/null +++ b/tests/testframework/YamlTestCaseParser.cs @@ -0,0 +1,458 @@ +using 
System; +using System.Collections.Generic; +using System.Diagnostics; +using System.IO; +using System.Linq; +using System.Text; +using Microsoft.VisualStudio.TestPlatform.ObjectModel; +using YamlDotNet.Helpers; +using YamlDotNet.RepresentationModel; + +namespace Azure.AI.Details.Common.CLI.TestFramework +{ + public class YamlTestCaseParser + { + public static IEnumerable TestCasesFromYaml(string source, FileInfo file) + { + var area = GetRootArea(file); + var parsed = YamlHelpers.ParseYamlStream(file.FullName); + return TestCasesFromYamlStream(source, file, area, parsed).ToList(); + } + + #region private methods + + private static IEnumerable TestCasesFromYamlStream(string source, FileInfo file, string area, YamlStream parsed) + { + var tests = new List(); + var defaultTags = YamlTagHelpers.GetDefaultTags(file.Directory); + foreach (var document in parsed?.Documents) + { + var fromDocument = TestCasesFromYamlDocumentRootNode(source, file, document.RootNode, area, defaultClassName, defaultTags); + if (fromDocument != null) + { + tests.AddRange(fromDocument); + } + } + return tests; + } + + private static IEnumerable TestCasesFromYamlDocumentRootNode(string source, FileInfo file, YamlNode node, string area, string @class, Dictionary> tags) + { + return node is YamlMappingNode + ? 
TestCasesFromYamlMapping(source, file, node as YamlMappingNode, area, @class, tags) + : TestCasesFromYamlSequence(source, file, node as YamlSequenceNode, area, @class, tags); + } + + private static IEnumerable TestCasesFromYamlMapping(string source, FileInfo file, YamlMappingNode mapping, string area, string @class, Dictionary> tags) + { + var children = CheckForChildren(source, file, mapping, area, @class, tags); + if (children != null) + { + return children; + } + + var test = GetTestFromNode(source, file, mapping, area, @class, tags); + if (test != null) + { + return new[] { test }; + } + + return null; + } + + private static IEnumerable TestCasesFromYamlSequence(string source, FileInfo file, YamlSequenceNode sequence, string area, string @class, Dictionary> tags) + { + var tests = new List(); + if (sequence == null) return tests; + + foreach (YamlMappingNode mapping in sequence.Children) + { + var fromMapping = TestCasesFromYamlMapping(source, file, mapping, area, @class, tags); + if (fromMapping != null) + { + tests.AddRange(fromMapping); + } + } + + return tests; + } + + private static TestCase GetTestFromNode(string source, FileInfo file, YamlMappingNode mapping, string area, string @class, Dictionary> tags, int stepNumber = 0) + { + string cli = GetScalarString(mapping, tags, "cli"); + string parallelize = GetScalarString(mapping, tags, "parallelize"); + string skipOnFailure = GetScalarString(mapping, tags, "skipOnFailure"); + + string simulate = GetScalarString(mapping, "simulate"); + string command = GetScalarString(mapping, "command"); + string script = GetScalarString(mapping, "script"); + string bash = GetScalarString(mapping, "bash"); + + string fullyQualifiedName = command == null && script == null && bash == null + ? 
GetFullyQualifiedNameAndCommandFromShortForm(mapping, area, @class, ref command, stepNumber) + : GetFullyQualifiedName(mapping, area, @class, stepNumber); + fullyQualifiedName ??= GetFullyQualifiedName(area, @class, $"Expected YAML node ('name') at {file.FullName}({mapping.Start.Line})", 0); + + var simulating = !string.IsNullOrEmpty(simulate); + var neitherOrBoth = (command == null) == (script == null && bash == null); + if (neitherOrBoth && !simulating) + { + var message = $"Error parsing YAML: expected/unexpected key ('name', 'command', 'script', 'bash', 'arguments') at {file.FullName}({mapping.Start.Line})"; + Logger.LogError(message); + Logger.TraceError(message); + return null; + } + + Logger.Log($"YamlTestCaseParser.GetTests(): new TestCase('{fullyQualifiedName}')"); + var test = new TestCase(fullyQualifiedName, new Uri(YamlTestFramework.FakeExecutor), source) + { + CodeFilePath = file.FullName, + LineNumber = mapping.Start.Line + }; + + SetTestCaseProperty(test, "cli", cli); + SetTestCaseProperty(test, "command", command); + SetTestCaseProperty(test, "script", script); + SetTestCaseProperty(test, "bash", bash); + SetTestCaseProperty(test, "simulate", simulate); + SetTestCaseProperty(test, "parallelize", parallelize); + SetTestCaseProperty(test, "skipOnFailure", skipOnFailure); + + var timeout = GetScalarString(mapping, tags, "timeout", YamlTestFramework.DefaultTimeout); + SetTestCaseProperty(test, "timeout", timeout); + + var workingDirectory = GetScalarString(mapping, tags, "workingDirectory", file.Directory.FullName); + SetTestCaseProperty(test, "working-directory", workingDirectory); + + SetTestCasePropertyMap(test, "foreach", mapping, "foreach", workingDirectory); + SetTestCasePropertyMap(test, "arguments", mapping, "arguments", workingDirectory); + SetTestCasePropertyMap(test, "input", mapping, "input", workingDirectory); + + SetTestCaseProperty(test, "expect", mapping, "expect"); + SetTestCaseProperty(test, "expect-gpt", mapping, "expect-gpt"); + 
SetTestCaseProperty(test, "not-expect", mapping, "not-expect"); + + SetTestCaseTagsAsTraits(test, YamlTagHelpers.UpdateCopyTags(tags, mapping)); + + CheckInvalidTestCaseNodes(file, mapping, test); + return test; + } + + private static IEnumerable CheckForChildren(string source, FileInfo file, YamlMappingNode mapping, string area, string @class, Dictionary> tags) + { + if (mapping.Children.ContainsKey("steps") && mapping.Children["steps"] is YamlSequenceNode stepsSequence) + { + @class = GetScalarString(mapping, "class", @class); + area = UpdateArea(mapping, area); + tags = YamlTagHelpers.UpdateCopyTags(tags, mapping); + + return TestCasesFromYamlSequenceOfSteps(source, file, stepsSequence, area, @class, tags); + } + + if (mapping.Children.ContainsKey("tests") && mapping.Children["tests"] is YamlSequenceNode testsSequence) + { + @class = GetScalarString(mapping, "class", @class); + area = UpdateArea(mapping, area); + tags = YamlTagHelpers.UpdateCopyTags(tags, mapping); + + return TestCasesFromYamlSequence(source, file, testsSequence, area, @class, tags).ToList(); + } + + return null; + } + + private static IEnumerable TestCasesFromYamlSequenceOfSteps(string source, FileInfo file, YamlSequenceNode sequence, string area, string @class, Dictionary> tags) + { + var tests = new List(); + for (int i = 0; i < sequence.Children.Count; i++) + { + var mapping = sequence.Children[i] as YamlMappingNode; + var test = GetTestFromNode(source, file, mapping, area, @class, tags, i + 1); + if (test != null) + { + tests.Add(test); + } + } + + if (tests.Count > 0) + { + SetTestCaseProperty(tests[0], "parallelize", "true"); + } + + for (int i = 1; i < tests.Count; i++) + { + SetTestCaseProperty(tests[i - 1], "nextStepId", tests[i].Id.ToString()); + SetTestCaseProperty(tests[i], "parallelize", "false"); + } + + return tests; + } + + private static void CheckInvalidTestCaseNodes(FileInfo file, YamlMappingNode mapping, TestCase test) + { + foreach (YamlScalarNode key in 
mapping.Children.Keys) + { + if (!IsValidTestCaseNode(key.Value) && !test.DisplayName.EndsWith(key.Value)) + { + var error = $"Error parsing YAML: Unexpected YAML key/value ('{key.Value}', '{test.DisplayName}') in {file.FullName}({mapping[key].Start.Line})"; + test.DisplayName = error; + Logger.LogError(error); + Logger.TraceError(error); + } + } + } + + private static bool IsValidTestCaseNode(string value) + { + return ";area;class;name;cli;command;script;bash;timeout;foreach;arguments;input;expect;expect-gpt;not-expect;parallelize;simulate;skipOnFailure;tag;tags;workingDirectory;".IndexOf($";{value};") >= 0; + } + + private static void SetTestCaseProperty(TestCase test, string propertyName, YamlMappingNode mapping, string mappingName) + { + string value = GetScalarString(mapping, mappingName); + SetTestCaseProperty(test, propertyName, value); + } + + private static void SetTestCaseProperty(TestCase test, string propertyName, string value) + { + if (value != null) + { + YamlTestProperties.Set(test, propertyName, value); + } + } + + private static void SetTestCasePropertyMap(TestCase test, string propertyName, YamlMappingNode testNode, string mappingName, string workingDirectory) + { + var ok = testNode.Children.ContainsKey(mappingName); + if (!ok) return; + + var argumentsNode = testNode.Children[mappingName]; + if (argumentsNode == null) return; + + if (argumentsNode is YamlScalarNode) + { + var value = (argumentsNode as YamlScalarNode).Value; + SetTestCaseProperty(test, propertyName, $"\"{value}\""); + } + else if (argumentsNode is YamlMappingNode) + { + var asMapping = argumentsNode as YamlMappingNode; + SetTestCasePropertyMap(test, propertyName, asMapping + .Select(x => NormalizeToScalarKeyValuePair(test, x, workingDirectory))); + } + else if (argumentsNode is YamlSequenceNode) + { + var asSequence = argumentsNode as YamlSequenceNode; + + SetTestCasePropertyMap(test, propertyName, asSequence + .Select(mapping => (mapping as YamlMappingNode)? 
+ .Select(x => NormalizeToScalarKeyValuePair(test, x, workingDirectory)))); + } + } + + private static void SetTestCasePropertyMap(TestCase test, string propertyName, IEnumerable>> kvss) + { + // flatten the kvs + var kvs = kvss.SelectMany(x => x); + + // ensure all keys are unique, if not, transform appropriately + var keys = kvs.GroupBy(kv => (kv.Key as YamlScalarNode)?.Value).Select(g => g.Key).ToArray(); + if (keys.Length < kvs.Count()) + { + Logger.Log($"keys.Length={keys.Length}, kvs.Count={kvs.Count()}"); + Logger.Log($"keys='{string.Join(",", keys)}'"); + + var values = new List(); + foreach (var items in kvss) + { + var map = new YamlMappingNode(items); + values.Add(map.ConvertScalarMapToTsvString(keys)); + } + + var combinedKey = new YamlScalarNode(string.Join("\t", keys)); + var combinedValue = new YamlScalarNode(string.Join("\n", values)); + var combinedKv = new KeyValuePair(combinedKey, combinedValue); + kvs = new List>(new[] { combinedKv }); + } + + SetTestCasePropertyMap(test, propertyName, kvs); + } + + private static void SetTestCasePropertyMap(TestCase test, string propertyName, IEnumerable> kvs) + { + var newMap = new YamlMappingNode(kvs); + SetTestCaseProperty(test, propertyName, newMap.ToJsonString()); + } + + private static KeyValuePair NormalizeToScalarKeyValuePair(TestCase test, KeyValuePair item, string workingDirectory = null) + { + var key = item.Key; + var keyOk = key is YamlScalarNode; + var value = item.Value; + var valueOk = value is YamlScalarNode; + if (keyOk && valueOk) return item; + + string[] keys = null; + if (!keyOk) + { + var text = key.ConvertScalarSequenceToTsvString(); + if (text == null) + { + text = $"Invalid key at {test.CodeFilePath}({key.Start.Line},{key.Start.Column})"; + Logger.Log(text); + } + else if (text.Contains('\t')) + { + keys = text.Split('\t'); + } + key = new YamlScalarNode(text); + } + + if (!valueOk) + { + value = value.ConvertScalarSequenceToMultiLineTsvScalarNode(test, keys); + } + else + { + var 
scalarValue = value.ToJsonString().Trim('\"'); + if (TryGetFileContentFromScalar(scalarValue, workingDirectory, out string fileContent)) + { + value = fileContent; + if (!(value is YamlScalarNode)) + { + value = value.ConvertScalarSequenceToMultiLineTsvScalarNode(test, keys); + } + } + } + + Logger.Log($"YamlTestCaseParser.NormalizeToScalarKeyValuePair: key='{(key as YamlScalarNode).Value}', value='{(value as YamlScalarNode).Value}'"); + return new KeyValuePair(key, value); + } + + private static bool TryGetFileContentFromScalar(string scalar, string workingDirectory, out string fileContent) + { + // Treat this scalar value as file if it starts with '@' and does not have InvalidFileNameChars + if (scalar.StartsWith("@") && Path.GetFileName(scalar).IndexOfAny(Path.GetInvalidFileNameChars()) == -1) + { + var fileName = scalar.Substring(1); + + // check if the file already exists + var filePath = fileName; + if (!File.Exists(filePath)) + { + filePath = Path.Combine(workingDirectory, fileName); + } + + Logger.Log($"YamlTestCaseParser.TryGetFileContentFromScalar: Read file contents from {filePath}"); + if (File.Exists(filePath)) + { + fileContent = File.ReadAllText(filePath); + return true; + } + } + + fileContent = ""; + return false; + } + + private static string GetScalarString(YamlMappingNode mapping, Dictionary> tags, string mappingName, string defaultValue = null) + { + var value = GetScalarString(mapping, mappingName, null); + if (value != null) return value; + + if (tags.ContainsKey(mappingName)) + { + value = tags[mappingName].Last(); + } + + return value ?? defaultValue; + } + + private static string GetScalarString(YamlMappingNode mapping, string mappingName, string defaultValue = null) + { + var ok = mapping.Children.ContainsKey(mappingName); + if (!ok) return defaultValue; + + var node = mapping.Children[mappingName] as YamlScalarNode; + var value = node?.Value; + + return value ?? 
defaultValue; + } + + private static string GetYamlNodeAsString(YamlMappingNode mapping, string nodeName, string defaultValue = null) + { + var ok = mapping.Children.ContainsKey(nodeName); + if (!ok) return defaultValue; + + var node = mapping.Children[nodeName]; + var value = node?.ToYamlString(); + + return value ?? defaultValue; + } + + private static string GetRootArea(FileInfo file) + { + return $"{file.Extension.TrimStart('.')}.{file.Name.Remove(file.Name.LastIndexOf(file.Extension))}"; + } + + private static string UpdateArea(YamlMappingNode mapping, string area) + { + var subArea = GetScalarString(mapping, "area"); + return string.IsNullOrEmpty(subArea) + ? area + : $"{area}.{subArea}"; + } + + private static string GetFullyQualifiedName(YamlMappingNode mapping, string area, string @class, int stepNumber) + { + var name = GetScalarString(mapping, "name"); + if (name == null) return null; + + area = UpdateArea(mapping, area); + @class = GetScalarString(mapping, "class", @class); + + return GetFullyQualifiedName(area, @class, name, stepNumber); + } + + private static string GetFullyQualifiedNameAndCommandFromShortForm(YamlMappingNode mapping, string area, string @class, ref string command, int stepNumber) + { + // if there's only one invalid mapping node, we'll treat it's key as "name" and value as "command" + var invalid = mapping.Children.Keys.Where(key => !IsValidTestCaseNode((key as YamlScalarNode).Value)); + if (invalid.Count() == 1 && command == null) + { + var name = (invalid.FirstOrDefault() as YamlScalarNode).Value; + if (name == null) return null; + + command = GetScalarString(mapping, name); + area = UpdateArea(mapping, area); + @class = GetScalarString(mapping, "class", @class); + + return GetFullyQualifiedName(area, @class, name, stepNumber); + } + + return null; + } + + private static string GetFullyQualifiedName(string area, string @class, string name, int stepNumber) + { + return stepNumber > 0 + ? 
$"{area}.{@class}.{stepNumber:D2}.{name}" + : $"{area}.{@class}.{name}"; + } + + private static void SetTestCaseTagsAsTraits(TestCase test, Dictionary> tags) + { + foreach (var tag in tags) + { + foreach (var value in tag.Value) + { + test.Traits.Add(tag.Key, value); + } + } + } + + private const string defaultClassName = "TestCases"; + + #endregion + } +} diff --git a/tests/testframework/YamlTestCaseRunner.cs b/tests/testframework/YamlTestCaseRunner.cs new file mode 100644 index 00000000..86bb566f --- /dev/null +++ b/tests/testframework/YamlTestCaseRunner.cs @@ -0,0 +1,977 @@ +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.IO; +using System.Linq; +using System.Linq.Expressions; +using System.Runtime.InteropServices; +using System.Text; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.VisualStudio.TestPlatform.ObjectModel; +using Microsoft.VisualStudio.TestPlatform.ObjectModel.Adapter; +using Newtonsoft.Json; +using Newtonsoft.Json.Linq; +using YamlDotNet.RepresentationModel; + +namespace Azure.AI.Details.Common.CLI.TestFramework +{ + + public class YamlTestCaseRunner + { + public static IList RunAndRecordTestCase(TestCase test, IYamlTestFrameworkHost host) + { + TestCaseStart(test, host); + var results = TestCaseRun(test, host); + + var outcome = TestResultHelpers.TestOutcomeFromResults(results); + TestCaseStop(test, host, outcome); + + return results; + } + + #region private methods + + private static void TestCaseStart(TestCase test, IYamlTestFrameworkHost host) + { + Logger.Log($"YamlTestCaseRunner.TestCaseStart({test.DisplayName})"); + host.RecordStart(test); + } + + private static IList TestCaseRun(TestCase test, IYamlTestFrameworkHost host) + { + Logger.Log($"YamlTestCaseRunner.TestCaseRun({test.DisplayName})"); + + // run the test case, getting all the results, prior to recording any of those results + // (not doing this in this order seems to, for some reason, cause "foreach" test cases to 
run 5 times!?) + var results = TestCaseGetResults(test).ToList(); + foreach (var result in results) + { + host.RecordResult(result); + } + + return results; + } + + private static IEnumerable TestCaseGetResults(TestCase test) + { + Logger.Log($"YamlTestCaseRunner.TestCaseGetResults: ENTER"); + + var cli = YamlTestProperties.Get(test, "cli") ?? ""; + var command = YamlTestProperties.Get(test, "command"); + var script = YamlTestProperties.Get(test, "script"); + var bash = YamlTestProperties.Get(test, "bash"); + + var scriptIsBash = !string.IsNullOrEmpty(bash); + if (scriptIsBash) script = bash; + + var isWindows = RuntimeInformation.IsOSPlatform(OSPlatform.Windows); + if (!isWindows) scriptIsBash = true; + + var @foreach = YamlTestProperties.Get(test, "foreach"); + var arguments = YamlTestProperties.Get(test, "arguments"); + var input = YamlTestProperties.Get(test, "input"); + var expect = YamlTestProperties.Get(test, "expect"); + var expectGpt = YamlTestProperties.Get(test, "expect-gpt"); + var notExpect = YamlTestProperties.Get(test, "not-expect"); + var workingDirectory = YamlTestProperties.Get(test, "working-directory"); + var timeout = int.Parse(YamlTestProperties.Get(test, "timeout")); + var simulate = YamlTestProperties.Get(test, "simulate"); + var skipOnFailure = YamlTestProperties.Get(test, "skipOnFailure") switch { "true" => true, _ => false }; + + var basePath = new FileInfo(test.CodeFilePath).DirectoryName; + workingDirectory = Path.Combine(basePath, workingDirectory ?? ""); + var tryCreateWorkingDirectory = !string.IsNullOrEmpty(workingDirectory) && !Directory.Exists(workingDirectory); + if (tryCreateWorkingDirectory) Directory.CreateDirectory(workingDirectory); + + var expanded = ExpandForEachGroups(@foreach); + Logger.Log($"YamlTestCaseRunner.TestCaseGetResults: expanded count = {expanded.Count()}"); + + foreach (var foreachItem in expanded) + { + var start = DateTime.Now; + + var outcome = string.IsNullOrEmpty(simulate) + ? 
RunTestCase(test, skipOnFailure, cli, command, script, scriptIsBash, foreachItem, arguments, input, expect, expectGpt, notExpect, workingDirectory, timeout, out string stdOut, out string stdErr, out string errorMessage, out string stackTrace, out string additional, out string debugTrace) + : SimulateTestCase(test, simulate, cli, command, script, scriptIsBash, foreachItem, arguments, input, expect, expectGpt, notExpect, workingDirectory, out stdOut, out stdErr, out errorMessage, out stackTrace, out additional, out debugTrace); + + // #if DEBUG + // additional += outcome == TestOutcome.Failed ? $"\nEXTRA: {ExtraDebugInfo()}" : ""; + // #endif + + var stop = DateTime.Now; + var result = CreateTestResult(test, start, stop, stdOut, stdErr, errorMessage, stackTrace, additional, debugTrace, outcome); + if (!string.IsNullOrEmpty(foreachItem) && foreachItem != "{}") + { + result.DisplayName = GetTestResultDisplayName(test.DisplayName, foreachItem); + } + yield return result; + } + + Logger.Log($"YamlTestCaseRunner.TestCaseGetResults: EXIT"); + } + + private static string GetTestResultDisplayName(string testDisplayName, string foreachItem) + { + var testResultDisplayName = testDisplayName; + + if(JToken.Parse(foreachItem).Type == JTokenType.Object) + { + // get JObject properties + JObject foreachItemObject = JObject.Parse(foreachItem); + foreach(var property in foreachItemObject.Properties()) + { + var keys = property.Name.Split(new char[] { '\t' }); + var values = property.Value.Value().Split(new char[] { '\t' }); + + for (int i = 0; i < keys.Length; i++) + { + if (testResultDisplayName.Contains("{" + keys[i] + "}")) + { + testResultDisplayName = testResultDisplayName.Replace("{" +keys[i] + "}", values[i]); + } + } + } + } + + // if the testDisplayName was not templatized, ie, it had no {} + if (testResultDisplayName == testDisplayName) + { + return $"{testDisplayName}: {RedactSensitiveDataFromForeachItem(foreachItem)}"; + } + + return testResultDisplayName; + } + + // 
Finds "token" in foreach key and redacts its value + private static string RedactSensitiveDataFromForeachItem(string foreachItem) + { + var foreachObject = JObject.Parse(foreachItem); + + var sb = new StringBuilder(); + var sw = new StringWriter(sb); + + using (JsonWriter writer = new JsonTextWriter(sw){Formatting = Formatting.None}) + { + writer.WriteStartObject(); + foreach (var item in foreachObject) + { + if (string.IsNullOrWhiteSpace(item.Value.ToString())) + { + continue; + } + var keys = item.Key.ToLower().Split(new char[] {'\t'}); + + // find index of "token" in foreach key and redact its value to avoid getting it displayed + var tokenIndex = Array.IndexOf(keys, "token"); + var valueString = item.Value; + + if (tokenIndex >= 0) + { + var values = item.Value.ToString().Split(new char[] {'\t'}); + if (values.Count() == keys.Count()) + { + values[tokenIndex] = "***"; + valueString = string.Join("\t", values); + } + } + writer.WritePropertyName(item.Key); + writer.WriteValue(valueString); + } + + writer.WriteEndObject(); + } + + return sb.ToString(); + } + + private static IEnumerable ExpandForEachGroups(string @foreach) + { + var kvs = KeyValuePairsFromJson(@foreach, false) + .Select(kv => new KeyValuePair>( + kv.Key, + kv.Value.Split("\n".ToCharArray(), StringSplitOptions.RemoveEmptyEntries))); + + var dicts = new[] { new Dictionary() }.ToList(); + foreach (var item in kvs) + { + var lines = item.Value; + dicts = lines.SelectMany( + line => dicts.Select( + d => DupAndAdd(d, item.Key, line))) + .ToList(); + } + + return dicts.Select(d => JsonConvert.SerializeObject(d)); + } + + private static Dictionary DupAndAdd(Dictionary d, string key, string value) + { + var dup = new Dictionary(d); + dup[key] = value; + return dup; + } + + private static TestOutcome RunTestCase(TestCase test, bool skipOnFailure, string cli, string command, string script, bool scriptIsBash, string @foreach, string arguments, string input, string expect, string expectGpt, string notExpect, 
string workingDirectory, int timeout, out string stdOut, out string stdErr, out string errorMessage, out string stackTrace, out string additional, out string debugTrace) + { + var outcome = TestOutcome.None; + + additional = $"START TIME: {DateTime.Now}"; + debugTrace = ""; + stackTrace = script; + + List filesToDelete = null; + + var sbOut = new StringBuilder(); + var sbErr = new StringBuilder(); + var sbMerged = new StringBuilder(); + + try + { + var useCmd = !scriptIsBash; + script = WriteTextToTempFile(script, useCmd ? "cmd" : null); + + expect = WriteTextToTempFile(expect); + notExpect = WriteTextToTempFile(notExpect); + + var kvs = KeyValuePairsFromJson(arguments, true); + kvs.AddRange(KeyValuePairsFromJson(@foreach, false)); + kvs = ConvertValuesToAtArgs(kvs, ref filesToDelete); + + var startArgs = GetStartInfo(out string startProcess, cli, command, script, scriptIsBash, kvs, expect, notExpect, ref filesToDelete); + stackTrace = $"{startProcess} {startArgs}\n{stackTrace ?? string.Empty}"; + + Logger.Log($"Process.Start('{startProcess} {startArgs}')"); + var startInfo = new ProcessStartInfo(startProcess, startArgs) + { + UseShellExecute = false, + RedirectStandardInput = true, + RedirectStandardError = true, + RedirectStandardOutput = true, + WorkingDirectory = workingDirectory + }; + UpdatePathEnvironment(startInfo); + + var process = Process.Start(startInfo); + process.StandardInput.WriteLine(input ?? string.Empty); + process.StandardInput.Close(); + + var outDoneSignal = new ManualResetEvent(false); + var errDoneSignal = new ManualResetEvent(false); + process.OutputDataReceived += (sender, e) => AppendLineOrSignal(e.Data, sbOut, sbMerged, outDoneSignal); + process.ErrorDataReceived += (sender, e) => AppendLineOrSignal(e.Data, sbErr, sbMerged, errDoneSignal); + process.BeginOutputReadLine(); + process.BeginErrorReadLine(); + + var exitedNotKilled = WaitForExit(process, timeout); + outcome = exitedNotKilled && process.ExitCode == 0 + ? 
TestOutcome.Passed + : skipOnFailure + ? TestOutcome.Skipped + : TestOutcome.Failed; + + if (exitedNotKilled) + { + outDoneSignal.WaitOne(); + errDoneSignal.WaitOne(); + } + + var exitCode = exitedNotKilled + ? process.ExitCode.ToString() + : $"(did not exit; timedout; killed)"; + var exitTime = exitedNotKilled + ? process.ExitTime.ToString() + : DateTime.UtcNow.ToString(); + + errorMessage = $"EXIT CODE: {exitCode}"; + additional = additional + + $" STOP TIME: {exitTime}" + + $" EXIT CODE: {exitCode}"; + } + catch (Exception ex) + { + outcome = TestOutcome.Failed; + errorMessage = ex.Message; + debugTrace = ex.ToString(); + stackTrace = $"{stackTrace}\n{ex.StackTrace}"; + } + finally + { + if (script != null) File.Delete(script); + if (expect != null) File.Delete(expect); + if (notExpect != null) File.Delete(notExpect); + filesToDelete?.ForEach(x => File.Delete(x)); + } + + stdOut = sbOut.ToString(); + stdErr = sbErr.ToString(); + + return outcome == TestOutcome.Passed && !string.IsNullOrEmpty(expectGpt) + ? CheckExpectGptOutcome(sbMerged.ToString(), expectGpt, ref stdOut, ref stdErr) + : outcome; + } + + private static List> ConvertValuesToAtArgs(List> kvs, ref List files) + { + var newList = new List>(); + foreach (var item in kvs) + { + if (item.Value.Count(x => x == '\t' || x == '\r' || x == '\n' || x == '\f' || x == '\"') > 0) + { + string file = WriteMultilineTsvToTempFile(item.Value, ref files); + newList.Add(new KeyValuePair(item.Key, $"@{file}")); + } + else + { + newList.Add(item); + } + } + + return newList; + } + + private static List> KeyValuePairsFromJson(string json, bool allowSimpleString) + { + var kvs = new List>(); + if (!string.IsNullOrEmpty(json)) + { + Logger.Log($"KeyValuePairsFromJson: 'json'='{json}'"); + var parsed = JToken.Parse(json); + if (parsed.Type == JTokenType.String && allowSimpleString) + { + // if it's a simple string, there is no "key" for the argument... 
pass it as value with an empty string as key + // this will ensure that an additional '--' isn't emitted preceding the string-only arguments + kvs.Add(new KeyValuePair("", parsed.Value())); + } + else if (parsed.Type != JTokenType.Object) + { + // if it's not a simple string, it must be an object... if it's not, we'll just log and continue + Logger.Log("KeyValuePairsFromJson: Invalid json (only supports `\"string\"`, or `{\"mapItem1\": \"value1\", \"...\": \"...\"}`!"); + } + else + { + foreach (var item in parsed as JObject) + { + kvs.Add(new KeyValuePair(item.Key, item.Value.Value())); + } + } + } + return kvs; + } + + private static string WriteMultilineTsvToTempFile(string text, ref List files) + { + files ??= new List(); + + var lines = text.Split('\r', '\n'); + var newLines = new List(); + foreach (var line in lines) + { + if (!line.Contains('\f')) + { + newLines.Add(line); + continue; + } + + var values = line.Split('\t'); + var newValues = new List(); + foreach (var value in values) + { + if (!value.Contains('\f')) + { + newValues.Add(value); + continue; + } + + var newValue = WriteTextToTempFile(value.Replace('\f', '\n')); + files.Add(newValue); + + newValues.Add($"@{newValue}"); + } + + newLines.Add(string.Join("\t", newValues)); + } + + var newText = string.Join("\n", newLines); + var file = WriteTextToTempFile(newText); + files.Add(file); + return file; + } + + private static string WriteTextToTempFile(string text, string extension = null) + { + if (!string.IsNullOrEmpty(text)) + { + var tempFile = Path.GetTempFileName(); + if (!string.IsNullOrEmpty(extension)) + { + tempFile = $"{tempFile}.{extension}"; + } + + File.WriteAllText(tempFile, text); + + var content = File.ReadAllText(tempFile).Replace("\n", "\\n"); + Logger.Log($"FILE: {tempFile}: '{content}'"); + + return tempFile; + } + return null; + } + + private static string FindCacheCli(string cli) + { + if (_cliCache.ContainsKey(cli)) + { + return _cliCache[cli]; + } + + var found = FindCli(cli); + 
_cliCache[cli] = found; + + return found; + } + + private static string FindCli(string cli) + { + var specified = !string.IsNullOrEmpty(cli); + if (specified) + { + var found = FindCliOrNull(cli); + return found != null + ? CliFound(cli, found) // use what we found + : CliNotFound(cli); // use what was specified + } + else + { + var clis = new[] { "ai", "spx", "vz" }; + var found = PickCliOrNull(clis); + return found != null + ? PickCliFound(clis, found) // use what we found + : PickCliNotFound(clis, clis[0]); // use ai + } + } + + private static string FindCliOrNull(string cli) + { + var dll = $"{cli}.dll"; + var exe = RuntimeInformation.IsOSPlatform(OSPlatform.Windows) ? $"{cli}.exe" : cli; + + var path1 = Environment.GetEnvironmentVariable("PATH"); + var path2 = Directory.GetCurrentDirectory(); + var path3 = (new FileInfo(typeof(YamlTestCaseRunner).Assembly.Location)).DirectoryName; + var path = $"{path3}{Path.PathSeparator}{path2}{Path.PathSeparator}{path1}"; + + var paths = path.Split(Path.PathSeparator); + foreach (var part2 in new string[]{ "", "net6.0"}) + { + foreach (var part1 in paths) + { + var checkExe = Path.Combine(part1, part2, exe); + if (File.Exists(checkExe)) + { + // Logger.TraceInfo($"FindCliOrNull: Found CLI: {checkExe}"); + var checkDll = FindCliDllOrNull(checkExe, dll); + if (checkDll != null) + { + // Logger.TraceInfo($"FindCliOrNull: Found DLL: {checkDll}"); + return checkExe; + } + } + } + } + + return null; + } + + private static string FindCliDllOrNull(string cli, string dll) + { + var fi = new FileInfo(cli); + if (!fi.Exists) return null; + + var check = Path.Combine(fi.DirectoryName, dll); + if (File.Exists(check)) return check; + + var matches = fi.Directory.GetFiles(dll, SearchOption.AllDirectories); + if (matches.Length == 1) return matches.First().FullName; + + return null; + } + + private static string CliFound(string cli, string found) + { + Logger.Log($"CliFound: CLI specified ({cli}); found; using {found}"); + return found; + 
} + + private static string CliNotFound(string cli) + { + var message = $"CliNotFound: CLI specified ({cli}); tried searching PATH and working directory; not found; using {cli}"; + Logger.LogWarning(message); + // Logger.TraceWarning(message); + return cli; + } + + private static string PickCliOrNull(IEnumerable clis) + { + var cliOrNulls = new List(); + foreach (var cli in clis) + { + cliOrNulls.Add(FindCliOrNull(cli)); + } + + var clisFound = cliOrNulls.Where(cli => !string.IsNullOrEmpty(cli)); + return clisFound.Count() == 1 + ? clisFound.First() + : null; + } + + private static void PickCliUpdateYamlDefaultsFileWarning(IEnumerable clis) + { + var message = string.Join(" or ", clis.Select(cli => $"`cli: {cli}`")); + message = $"PickCli: CLI not specified; please create/update {YamlTestFramework.YamlDefaultTagsFileName} with one of: {message}"; + Logger.LogWarning(message); + Logger.TraceWarning(message); + } + + private static string PickCliFound(IEnumerable clis, string cli) + { + PickCliUpdateYamlDefaultsFileWarning(clis); + + var message = $"PickCliFound: CLI not specified; found 1 CLI; using {cli}"; + Logger.LogInfo(message); + Logger.TraceInfo(message); + return cli; + } + + private static string PickCliNotFound(IEnumerable clis, string cli) + { + PickCliUpdateYamlDefaultsFileWarning(clis); + + var message = $"PickCliNotFound: CLI not specified; tried searching PATH and working directory; found 0 or >1 CLIs; using {cli}"; + Logger.LogInfo(message); + Logger.TraceInfo(message); + return cli; + } + + private static IEnumerable GetPossibleRunTimeLocations() + { + if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows)) + { + return new string[]{ "", "runtimes/win-x64/native/", "../runtimes/win-x64/native/" }; + } + else if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux)) + { + return new string[]{ "", "runtimes/linux-x64/native/", "../../runtimes/linux-x64/native/" }; + } + else if (RuntimeInformation.IsOSPlatform(OSPlatform.OSX)) + { + return new string[]{ 
"", "runtimes/osx-x64/native/", "../../runtimes/osx-x64/native/" }; + } + return new string[]{ "" }; + } + + static void UpdatePathEnvironment(ProcessStartInfo startInfo) + { + var cli = new FileInfo(startInfo.FileName); + if (cli.Exists) + { + var dll = FindCliDllOrNull(cli.FullName, cli.Name.Replace(".exe", "") + ".dll"); + if (dll != null) + { + var cliPath = cli.Directory.FullName; + var dllPath = new FileInfo(dll).Directory.FullName; + + var isWindows = RuntimeInformation.IsOSPlatform(OSPlatform.Windows); + var pathVar = isWindows ? "PATH" : "LD_LIBRARY_PATH"; + var path = Environment.GetEnvironmentVariable(pathVar) ?? ""; + + var locations = GetPossibleRunTimeLocations(); + path = AddToPath(path, cliPath, locations); + path = AddToPath(path, dllPath, locations); + + startInfo.Environment[pathVar] = path; // indexer, not Add(): Environment is pre-populated from the current process, so Add("PATH", ...) throws ArgumentException on Windows + Logger.LogInfo($"UpdatePathEnvironment: {pathVar}={path}"); + } + } + } + + private static string AddToPath(string path, string value, IEnumerable locations) + { + foreach (var location in locations) + { + var check = Path.Combine(value, location); + if (Directory.Exists(check)) + { + path = AddToPath(path, check); + } + } + return path; + } + + private static string AddToPath(string path, string value) + { + var paths = path.Split(Path.PathSeparator); + return !paths.Contains(value) + ? $"{value}{Path.PathSeparator}{path}".Trim(Path.PathSeparator) + : path; + } + + private static bool WaitForExit(Process process, int timeout) + { + var completed = process.WaitForExit(timeout); + if (!completed) + { + var name = process.ProcessName; + var message = $"Timedout! Stopping process ({name})..."; + Logger.LogWarning(message); + Logger.TraceWarning(message); + + process.StandardInput.WriteLine("\x3"); // try ctrl-c first + process.StandardInput.Close(); + completed = process.WaitForExit(200); + + message = "Timedout! Sent " + (completed ?
"; stopped" : "; trying Kill()"); + Logger.LogWarning(message); + Logger.TraceWarning(message); + + if (!completed) + { + process.Kill(); + var killed = process.HasExited ? "Done." : "Failed!"; + + message = $"Timedout! Killing process ({name})... {killed}"; + Logger.LogWarning(message); + Logger.TraceWarning(message); + } + } + + return completed; + } + + private static string GetStartInfo(out string startProcess, string cli, string command, string script, bool scriptIsBash, List> kvs, string expect, string notExpect, ref List files) + { + startProcess = FindCacheCli(cli); + + var isCommand = !string.IsNullOrEmpty(command) || string.IsNullOrEmpty(script); + if (isCommand) + { + command = $"{command} {GetKeyValueArgs(kvs)}"; + + var expectLess = string.IsNullOrEmpty(expect) && string.IsNullOrEmpty(notExpect); + if (expectLess) return command; + + command = WriteTextToTempFile(command); + files ??= new List(); + files.Add(command); + + return $"run --command @{command} {GetAtArgs(expect, notExpect)}"; + } + + if (scriptIsBash) + { + var bash = RuntimeInformation.IsOSPlatform(OSPlatform.Windows) + ? 
EnsureFindCacheGetBashExe() + : "/bin/bash"; + return $"run --process \"{bash}\" --pre.script -l --script \"{script}\" {GetKeyValueArgs(kvs)} {GetAtArgs(expect, notExpect)}"; + } + + return $"run --cmd --script \"{script}\" {GetKeyValueArgs(kvs)} {GetAtArgs(expect, notExpect)}"; + } + + private static string EnsureFindCacheGetBashExe() + { + var gitBash = FindCacheGitBashExe(); + if (gitBash == null || gitBash == "bash.exe") + { + throw new Exception("Could not find Git for Windows bash.exe in PATH!"); + } + return gitBash; + } + + private static string FindCacheGitBashExe() + { + var bashExe = "bash.exe"; + if (_cliCache.ContainsKey(bashExe)) + { + return _cliCache[bashExe]; + } + + var found = FindGitBashExe(); + _cliCache[bashExe] = found; + + return found; + } + + private static string FindGitBashExe() + { + var found = FileHelpers.FindFilesInOsPath("bash.exe"); + return found.Where(x => x.ToLower().Contains("git")).FirstOrDefault() ?? "bash.exe"; + } + + private static string GetAtArgs(string expect, string notExpect) + { + var atArgs = $""; + if (!string.IsNullOrEmpty(expect)) atArgs += $" --expect @{expect}"; + if (!string.IsNullOrEmpty(notExpect)) atArgs += $" --not expect @{notExpect}"; + return atArgs.TrimStart(' '); + } + + private static string GetKeyValueArgs(List> kvs) + { + var args = new StringBuilder(); + foreach (var item in kvs) + { + if (!string.IsNullOrEmpty(item.Key)) + { + if (item.Key.Contains('\t')) + { + var key = item.Key.Replace('\t', ';'); + args.Append($"--foreach {key} in "); + } + else + { + args.Append($"--{item.Key} "); + } + + if (!string.IsNullOrEmpty(item.Value)) + { + args.Append($"\"{item.Value}\" "); + } + } + else if (!string.IsNullOrEmpty(item.Value)) + { + args.Append(item.Value); + } + } + return args.ToString().TrimEnd(); + } + + private static TestOutcome SimulateTestCase(TestCase test, string simulate, string cli, string command, string script, bool scriptIsBash, string @foreach, string arguments, string input, string
expect, string expectGpt, string notExpect, string workingDirectory, out string stdOut, out string stdErr, out string errorMessage, out string stackTrace, out string additional, out string debugTrace) + { + var sb = new StringBuilder(); + sb.AppendLine($"cli='{cli?.Replace("\n", "\\n")}'"); + sb.AppendLine($"command='{command?.Replace("\n", "\\n")}'"); + sb.AppendLine($"script='{script?.Replace("\n", "\\n")}'"); + sb.AppendLine($"scriptIsBash='{scriptIsBash}'"); + sb.AppendLine($"foreach='{@foreach?.Replace("\n", "\\n")}'"); + sb.AppendLine($"arguments='{arguments?.Replace("\n", "\\n")}'"); + sb.AppendLine($"input='{input?.Replace("\n", "\\n")}'"); + sb.AppendLine($"expect='{expect?.Replace("\n", "\\n")}'"); + sb.AppendLine($"not-expect='{notExpect?.Replace("\n", "\\n")}'"); + sb.AppendLine($"working-directory='{workingDirectory}'"); + + stdOut = sb.ToString(); + stdErr = "STDERR"; + additional = "ADDITIONAL-INFO"; + debugTrace = "DEBUG-TRACE"; + errorMessage = "ERRORMESSAGE"; + stackTrace = "STACKTRACE"; + + var outcome = OutcomeFromString(simulate); + if (outcome == TestOutcome.Passed) + { + stdErr = null; + debugTrace = null; + errorMessage = null; + } + + return outcome; + } + + private static TestOutcome OutcomeFromString(string simulate) + { + TestOutcome outcome = TestOutcome.None; + switch (simulate?.ToLower()) + { + case "failed": + outcome = TestOutcome.Failed; + break; + + case "skipped": + outcome = TestOutcome.Skipped; + break; + + case "passed": + outcome = TestOutcome.Passed; + break; + } + + return outcome; + } + + private static void TestCaseStop(TestCase test, IYamlTestFrameworkHost host, TestOutcome outcome) + { + Logger.Log($"YamlTestCaseRunner.TestCaseStop({test.DisplayName})"); + host.RecordEnd(test, outcome); + } + + private static TestResult CreateTestResult(TestCase test, DateTime start, DateTime stop, string stdOut, string stdErr, string errorMessage, string stackTrace, string additional, string debugTrace, TestOutcome outcome) + { + 
Logger.Log($"YamlTestCaseRunner.TestRecordResult({test.DisplayName})"); + + var result = new TestResult(test) { Outcome = outcome }; + result.Messages.Add(new TestResultMessage(TestResultMessage.StandardOutCategory, stdOut)); + result.Messages.Add(new TestResultMessage(TestResultMessage.StandardErrorCategory, stdErr)); + result.Messages.Add(new TestResultMessage(TestResultMessage.AdditionalInfoCategory, additional)); + result.Messages.Add(new TestResultMessage(TestResultMessage.DebugTraceCategory, debugTrace)); + result.ErrorMessage = errorMessage; + result.ErrorStackTrace = stackTrace; + result.StartTime = start; + result.EndTime = stop; + result.Duration = stop - start; + + Logger.Log("----------------------------\n\n"); + Logger.Log($" STDOUT: {stdOut}"); + Logger.Log($" STDERR: {stdErr}"); + Logger.Log($" STACK: {stackTrace}"); + Logger.Log($" ERROR: {errorMessage}"); + Logger.Log($" OUTCOME: {outcome}"); + Logger.Log($"ADDITIONAL: {additional}"); + Logger.Log($"DEBUGTRACE: {debugTrace}"); + Logger.Log("----------------------------\n\n"); + + return result; + } + + private static string ExtraDebugInfo() + { + var sb = new StringBuilder(); + + var cwd = new DirectoryInfo(Directory.GetCurrentDirectory()); + sb.AppendLine($"CURRENT DIRECTORY: {cwd.FullName}"); + + var files = cwd.GetFiles("*", SearchOption.AllDirectories); + foreach (var file in files) + { + sb.AppendLine($"{file.Length,10} {file.CreationTime.Date:MM/dd/yyyy} {file.CreationTime:hh:mm:ss tt} {file.FullName}"); + } + + var variables = Environment.GetEnvironmentVariables(); + var keys = new List(variables.Count); + foreach (var key in variables.Keys) keys.Add(key as string); + + keys.Sort(); + foreach (var key in keys) + { + var value = variables[key] as string; + sb.AppendLine($"{key,-20} {value}"); + } + + return sb.ToString(); + } + + private static TestOutcome CheckExpectGptOutcome(string output, string expectGpt, ref string stdOut, ref string stdErr) + { + var outcome = ExpectGptOutcome(output, 
expectGpt, out var gptStdOut, out var gptStdErr, out var gptMerged); + if (outcome == TestOutcome.Failed) + { + if (!string.IsNullOrEmpty(gptStdOut)) stdOut = $"{stdOut}\n--expect-gpt--\n{gptStdOut}\n".Trim('\n'); + if (!string.IsNullOrEmpty(gptStdErr)) stdErr = $"{stdErr}\n--expect-gpt--\n{gptStdErr}\n".Trim('\n'); + } + return outcome; + } + + private static TestOutcome ExpectGptOutcome(string output, string expect, out string gptStdOut, out string gptStdErr, out string gptMerged) + { + var outcome = TestOutcome.None; + + var sbOut = new StringBuilder(); + var sbErr = new StringBuilder(); + var sbMerged = new StringBuilder(); + + var question = new StringBuilder(); + question.AppendLine($"Here's the console output:\n\n{output}\n"); + question.AppendLine($"Here's the expectation:\n\n{expect}\n"); + question.AppendLine("You **must always** answer \"PASS\" if the expectation is met."); + question.AppendLine("You **must always** answer \"FAIL\" if the expectation is not met."); + question.AppendLine("You **must only** answer \"PASS\" or \"FAIL\"."); + var questionTempFile = WriteTextToTempFile(question.ToString()); + + try + { + var startProcess = FindCacheCli("ai"); + var startArgs = $"chat --quiet true --index-name @none --question @{questionTempFile}"; + var startInfo = new ProcessStartInfo(startProcess, startArgs) + { + UseShellExecute = false, + RedirectStandardInput = true, + RedirectStandardError = true, + RedirectStandardOutput = true + }; + + Logger.Log($"ExpectGptOutcome: Process.Start('{startProcess} {startArgs}')"); + var process = Process.Start(startInfo); + process.StandardInput.Close(); + + var outDoneSignal = new ManualResetEvent(false); + var errDoneSignal = new ManualResetEvent(false); + process.OutputDataReceived += (sender, e) => AppendLineOrSignal(e.Data, sbOut, sbMerged, outDoneSignal); + process.ErrorDataReceived += (sender, e) => AppendLineOrSignal(e.Data, sbErr, sbMerged, errDoneSignal); + process.BeginOutputReadLine(); + 
process.BeginErrorReadLine(); + + var exitedNotKilled = WaitForExit(process, 60000); + if (exitedNotKilled) + { + outDoneSignal.WaitOne(); + errDoneSignal.WaitOne(); + } + + var passed = exitedNotKilled && process.ExitCode == 0; + outcome = passed ? TestOutcome.Passed : TestOutcome.Failed; + + var timedoutOrKilled = !exitedNotKilled; + if (timedoutOrKilled) + { + var message = "ExpectGptOutcome: WARNING: Timedout or killed!"; + sbErr.AppendLine(message); + sbMerged.AppendLine(message); + Logger.LogWarning(message); + } + } + catch (Exception ex) + { + outcome = TestOutcome.Failed; + + var exception = $"ExpectGptOutcome: EXCEPTION: {ex.Message}"; + sbErr.AppendLine(exception); + sbMerged.AppendLine(exception); + Logger.Log(exception); + } + + File.Delete(questionTempFile); + gptStdOut = sbOut.ToString(); + gptStdErr = sbErr.ToString(); + gptMerged = sbMerged.ToString(); + + if (outcome == TestOutcome.Passed) + { + Logger.Log($"ExpectGptOutcome: Checking for 'PASS' in '{gptMerged}'"); + + var passed = gptMerged.Contains("PASS") || gptMerged.Contains("TRUE") || gptMerged.Contains("YES"); + outcome = passed ? TestOutcome.Passed : TestOutcome.Failed; + + Logger.Log($"ExpectGptOutcome: {outcome}"); + } + + return outcome; + } + + private static void AppendLineOrSignal(string? 
text, StringBuilder sb1, StringBuilder sb2, ManualResetEvent signal) + { + if (text != null) + { + sb1.AppendLine(text); + sb2.AppendLine(text); + } + else + { + signal.Set(); + } + } + + #endregion + + private static Dictionary _cliCache = new Dictionary(); + } +} diff --git a/tests/testframework/YamlTestFramework.cs b/tests/testframework/YamlTestFramework.cs new file mode 100644 index 00000000..ee60b57a --- /dev/null +++ b/tests/testframework/YamlTestFramework.cs @@ -0,0 +1,226 @@ +using Microsoft.VisualStudio.TestPlatform.ObjectModel; +using Microsoft.VisualStudio.TestPlatform.ObjectModel.Logging; +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.IO; +using System.Linq; +using System.Reflection; +using System.Text; +using System.Threading; +using System.Threading.Tasks; +using System.Threading.Tasks.Dataflow; + +namespace Azure.AI.Details.Common.CLI.TestFramework +{ + public class YamlTestFramework + { + public static IEnumerable GetTestsFromDirectory(string source, DirectoryInfo directory) + { + Logger.Log($"YamlTestFramework.GetTestsFromDirectory('{source}', '{directory.FullName}'): ENTER"); + + directory = YamlTagHelpers.GetYamlDefaultTagsFullFileName(directory)?.Directory ?? 
directory; + var files = FindFiles(directory); + var tests = files.SelectMany(file => GetTestsFromYaml(source, file)); + + Logger.Log($"YamlTestFramework.GetTestsFromDirectory('{source}', '{directory.FullName}'): EXIT"); + return tests.ToList(); + } + + public static IEnumerable GetTestsFromYaml(string source, FileInfo file) + { + Logger.Log($"YamlTestFramework.GetTestsFromYaml('{source}', '{file.FullName}'): ENTER"); + var tests = YamlTestCaseParser.TestCasesFromYaml(source, file); + + Logger.Log($"YamlTestFramework.GetTestsFromYaml('{source}', '{file.FullName}'): EXIT"); + return tests; + } + + public static IDictionary> RunTests(IEnumerable tests, IYamlTestFrameworkHost host) + { + var resultsByTestCaseId = new Dictionary>(); + + tests = tests.ToList(); // force enumeration + var groupedByPriority = GroupTestCasesByPriority(tests); + + foreach (var priorityGroup in groupedByPriority) + { + if (priorityGroup.Count == 0) continue; + + var resultsByTestCaseIdForGroup = RunAndRecordTests(host, priorityGroup); + foreach (var resultsForTestCase in resultsByTestCaseIdForGroup) + { + var testCaseId = resultsForTestCase.Key; + var testResults = resultsForTestCase.Value; + resultsByTestCaseId[testCaseId] = testResults; + } + } + + return resultsByTestCaseId; + } + + #region private methods + + private static IDictionary> RunAndRecordTests(IYamlTestFrameworkHost host, IEnumerable tests) + { + InitRunAndRecordTestCaseMaps(tests, out var testFromIdMap, out var completionFromIdMap); + + RunAndRecordParallelizedTestCases(host, testFromIdMap, completionFromIdMap); + RunAndRecordRemainingTestCases(host, testFromIdMap, completionFromIdMap); + + return GetRunAndRecordTestResultsMap(completionFromIdMap); + } + + private static void InitRunAndRecordTestCaseMaps(IEnumerable tests, out Dictionary testFromIdMap, out Dictionary>> completionFromIdMap) + { + testFromIdMap = new Dictionary(); + completionFromIdMap = new Dictionary>>(); + foreach (var test in tests) + { + var id = 
test.Id.ToString(); + testFromIdMap[id] = test; + completionFromIdMap[id] = new TaskCompletionSource>(); + } + } + + private static IDictionary> GetRunAndRecordTestResultsMap(Dictionary>> completionFromIdMap) + { + var resultsPerTestCase = completionFromIdMap.Select(x => x.Value.Task.Result); + + var resultsMap = new Dictionary>(); + foreach (var resultsForCase in resultsPerTestCase) + { + var test = resultsForCase.FirstOrDefault()?.TestCase; + if (test == null) continue; + + var id = test.Id.ToString(); + resultsMap[id] = resultsForCase; + } + + return resultsMap; + } + + private static void RunAndRecordParallelizedTestCases(IYamlTestFrameworkHost host, Dictionary testFromIdMap, Dictionary>> completionFromIdMap) + { + var parallelTests = testFromIdMap + .Select(x => x.Value) + .Where(test => YamlTestProperties.Get(test, "parallelize") == "true") + .ToList(); + + foreach (var test in parallelTests) + { + ThreadPool.QueueUserWorkItem(state => + { + var parallelTestId = test.Id.ToString(); + RunAndRecordTestCaseSteps(host, testFromIdMap, completionFromIdMap, parallelTestId); + }); + } + + Logger.Log($"YamlTestFramework.RunAndRecordParallelizedTestCases() ==> Waiting for parallel tests to complete"); + var parallelCompletions = completionFromIdMap + .Where(x => parallelTests.Any(y => y.Id.ToString() == x.Key)) + .Select(x => x.Value.Task); + Task.WaitAll(parallelCompletions.ToArray()); + Logger.Log($"YamlTestFramework.RunAndRecordParallelizedTestCases() ==> All parallel tests complete"); + } + + private static void RunAndRecordTestCaseSteps(IYamlTestFrameworkHost host, Dictionary testFromIdMap, Dictionary>> completionFromIdMap, string firstTestId) + { + var firstTest = testFromIdMap[firstTestId]; + var firstTestResults = RunAndRecordTestCase(firstTest, host); + var firstTestOutcome = TestResultHelpers.TestOutcomeFromResults(firstTestResults); + // defer setting completion until all steps are complete + + var checkTest = firstTest; + while (true) + { + var nextStepId = 
YamlTestProperties.Get(checkTest, "nextStepId"); + if (string.IsNullOrEmpty(nextStepId)) + { + Logger.LogInfo($"YamlTestFramework.RunAndRecordTestCaseSteps() ==> No nextStepId for test '{checkTest.DisplayName}'"); + break; + } + + var stepTest = testFromIdMap.ContainsKey(nextStepId) ? testFromIdMap[nextStepId] : null; + if (stepTest == null) + { + Logger.LogError($"YamlTestFramework.RunAndRecordTestCaseSteps() ==> ERROR: nextStepId '{nextStepId}' not found for test '{checkTest.DisplayName}'"); + break; + } + + var stepCompletion = completionFromIdMap.ContainsKey(nextStepId) ? completionFromIdMap[nextStepId] : null; + if (stepCompletion == null) + { + Logger.LogError($"YamlTestFramework.RunAndRecordTestCaseSteps() ==> ERROR: nextStepId '{nextStepId}' completion not found for test '{checkTest.DisplayName}'"); + break; + } + + var stepResults = RunAndRecordTestCase(stepTest, host); + var stepOutcome = TestResultHelpers.TestOutcomeFromResults(stepResults); + Logger.Log($"YamlTestFramework.RunAndRecordTestCaseSteps() ==> Setting completion outcome for {stepTest.DisplayName} to {stepOutcome}"); + completionFromIdMap[nextStepId].SetResult(stepResults); + + checkTest = stepTest; + } + + // now that all steps are complete, set the completion outcome + completionFromIdMap[firstTestId].SetResult(firstTestResults); + Logger.Log($"YamlTestFramework.RunAndRecordTestCaseSteps() ==> Setting completion; outcome for {firstTest.DisplayName}: {firstTestOutcome}"); + } + + private static void RunAndRecordRemainingTestCases(IYamlTestFrameworkHost host, Dictionary testFromIdMap, Dictionary>> completionFromIdMap) + { + var remainingTests = completionFromIdMap + .Where(x => x.Value.Task.Status != TaskStatus.RanToCompletion) + .Select(x => testFromIdMap[x.Key]); + foreach (var test in remainingTests) + { + var outcome = RunAndRecordTestCase(test, host); + completionFromIdMap[test.Id.ToString()].SetResult(outcome); + } + } + + private static IEnumerable FindFiles(DirectoryInfo directory) + { 
+ return directory.GetFiles($"*{YamlFileExtension}", SearchOption.AllDirectories) + .Where(file => file.Name != YamlDefaultTagsFileName); + } + + private static bool IsTrait(Trait trait, string check) + { + return trait.Name == check || trait.Value == check; + } + + private static List> GroupTestCasesByPriority(IEnumerable tests) + { + Logger.Log($"YamlTestFramework.GroupTestCasesByPriority()"); + + var before = tests.Where(test => test.Traits.Count(x => IsTrait(x, "before")) > 0); + var after = tests.Where(test => test.Traits.Count(x => IsTrait(x, "after")) > 0); + var middle = tests.Where(test => !before.Contains(test) && !after.Contains(test)); + + var testsList = new List>(); + testsList.Add(before.ToList()); + testsList.Add(middle.ToList()); + testsList.Add(after.ToList()); + Logger.Log("YamlTestFramework.GroupTestCasesByPriority() ==> {string.Join('\n', tests.Select(x => x.Name))}"); + + return testsList; + } + + private static IList RunAndRecordTestCase(TestCase test, IYamlTestFrameworkHost host) + { + Logger.Log($"YamlTestFramework.TestRunAndRecord({test.DisplayName})"); + return YamlTestCaseRunner.RunAndRecordTestCase(test, host); + } + + #endregion + + #region constants + public const string YamlFileExtension = ".yaml"; + public const string FakeExecutor = "executor://ai/cli/TestFramework/v1"; + public const string YamlDefaultTagsFileName = "Azure-AI-CLI-TestFramework-Default-Tags.yaml"; + public const string DefaultTimeout = "600000"; + #endregion + } +} diff --git a/tests/testframework/YamlTestFramework.csproj b/tests/testframework/YamlTestFramework.csproj new file mode 100644 index 00000000..3c4af653 --- /dev/null +++ b/tests/testframework/YamlTestFramework.csproj @@ -0,0 +1,13 @@ + + + + net8.0 + + + + + + + + + \ No newline at end of file diff --git a/tests/testframework/YamlTestFrameworkCommon.targets b/tests/testframework/YamlTestFrameworkCommon.targets new file mode 100644 index 00000000..4646d02c --- /dev/null +++ 
b/tests/testframework/YamlTestFrameworkCommon.targets @@ -0,0 +1,68 @@ + + + + + net8.0 + Library + Azure.AI.CLI.TestFramework + false + + + True + 1.1.0 + + + + $(LocalBuildSDKBinPath) + bin + + + + + + x64 + $(LocalBinOutputPath)\Release\ + + + + + x64 + $(LocalBinOutputPath)\Debug\ + + + + + $(DefineConstants);UNIX + $(LocalBinOutputPath)\ReleaseUnixOS + + + + + $(DefineConstants);UNIX + $(LocalBinOutputPath)\DebugUnixOS + + + + + + 1.0.0 + $(TFAssemblyVersion) + + + + $(TFAssemblyVersion) + $(TFAssemblyVersion) + $(TFAssemblyInformationalVersion) + + + + + + + + + + + + + diff --git a/tests/testframework/YamlTestProperties.cs b/tests/testframework/YamlTestProperties.cs new file mode 100644 index 00000000..133bb02b --- /dev/null +++ b/tests/testframework/YamlTestProperties.cs @@ -0,0 +1,58 @@ +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.IO; +using System.Linq; +using Microsoft.VisualStudio.TestPlatform.ObjectModel; +using Microsoft.VisualStudio.TestPlatform.ObjectModel.Adapter; +using YamlDotNet.RepresentationModel; + +namespace Azure.AI.Details.Common.CLI.TestFramework +{ + public class YamlTestProperties + { + public static void Set(TestCase test, string name, string value) + { + Logger.Log($"YamlTestProperties.Set('{name}'='{value.Replace("\n", "\\n")}')"); + if (!string.IsNullOrEmpty(value)) + { + var property = properties[name]; + test.SetPropertyValue(property, value); + } + } + + public static string Get(TestCase test, string name, string defaultValue = null) + { + var value = test.GetPropertyValue(properties[name], defaultValue); + Logger.LogIf(!string.IsNullOrEmpty(value), $"TestCaseProperties.Get('{name}') = '{value?.Replace("\n", "\\n")}'"); + return value; + } + + #region private methods and data + private static TestProperty RegisterTestCaseProperty(string name) + { + return TestProperty.Register($"YamlTestCase.{name}", name, typeof(string), TestPropertyAttributes.Hidden, typeof(TestCase)); + } + + 
private static readonly Dictionary properties = new Dictionary() { + { "cli", RegisterTestCaseProperty("CLI") }, + { "command", RegisterTestCaseProperty("Command") }, + { "script", RegisterTestCaseProperty("Script") }, + { "bash", RegisterTestCaseProperty("Bash") }, + { "parallelize", RegisterTestCaseProperty("Parallelize") }, + { "nextStepId", RegisterTestCaseProperty("nextStepId") }, + { "foreach", RegisterTestCaseProperty("ForEach") }, + { "arguments", RegisterTestCaseProperty("Arguments") }, + { "input", RegisterTestCaseProperty("Input")}, + { "expect", RegisterTestCaseProperty("Expect") }, + { "expect-gpt", RegisterTestCaseProperty("ExpectGpt") }, + { "not-expect", RegisterTestCaseProperty("NotExpect") }, + { "simulate", RegisterTestCaseProperty("Simulate") }, + { "skipOnFailure", RegisterTestCaseProperty("SkipOnFailure") }, + { "timeout", RegisterTestCaseProperty("Timeout") }, + { "working-directory", RegisterTestCaseProperty("WorkingDirectory") } + }; + + #endregion + } +} diff --git a/tests/testrunner/Program.cs b/tests/testrunner/Program.cs new file mode 100644 index 00000000..291755be --- /dev/null +++ b/tests/testrunner/Program.cs @@ -0,0 +1,224 @@ +// +// Copyright (c) Microsoft. All rights reserved. +// Licensed under the MIT license. See LICENSE.md file in the project root for full license information. 
+// + +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.IO; +using System.Linq; +using System.Reflection; +using System.Runtime.InteropServices; +using System.Text; +using System.Threading.Tasks; +using Azure.AI.Details.Common.CLI.TestFramework; +using Microsoft.VisualStudio.TestPlatform.ObjectModel; + +namespace Azure.AI.Details.Common.CLI.TestRunner +{ + public class Program + { + public static int Main(string[] args) + { + if (args.Length == 0) + { + return DisplayUsage(); + } + + var command = args[0]; + return command switch + { + "list" => DoCommand(args.Skip(1).ToArray(), true, false), + "run" => DoCommand(args.Skip(1).ToArray(), false, true), + _ => DisplayUsage() + }; + } + + private static int DisplayUsage() + { + Console.WriteLine("AIT - Azure AI CLI Test runner, Version 1.0.0"); + Console.WriteLine("Copyright (c) 2024 Microsoft Corporation. All Rights Reserved."); + Console.WriteLine(); + Console.WriteLine("USAGE: ait list [...]"); + Console.WriteLine(" OR: ait run [...]"); + Console.WriteLine(); + Console.WriteLine(" FILES"); + Console.WriteLine(" --file FILE"); + Console.WriteLine(" --files FILE1 [FILE2 [...]]"); + Console.WriteLine(" --files PATTERN1 [PATTERN2 [...]]"); + Console.WriteLine(); + Console.WriteLine(" TESTS"); + Console.WriteLine(" --test TEXT"); + Console.WriteLine(" --tests TEXT1 [TEXT2 [...]]"); + Console.WriteLine(); + Console.WriteLine(" FILTERING"); + Console.WriteLine(" --contains TEXT1 [TEXT2 [...]]"); + Console.WriteLine(" --remove TEXT1 [TEXT2 [...]]"); + Console.WriteLine(); + Console.WriteLine("EXAMPLES"); + Console.WriteLine(); + Console.WriteLine(" EXAMPLE 1: List tests from two files, that contain both 'nightly' and 'java', but not 'skip'"); + Console.WriteLine(); + Console.WriteLine(" ait list --files test1.yaml test2.yaml --contains nightly java --remove skip"); + Console.WriteLine(); + Console.WriteLine(" EXAMPLE 2: Run tests from files under current directory, that contain 
'setup' or 'nightly', and 'java', but not 'skip'"); + Console.WriteLine(); + Console.WriteLine(" ait run --tests setup nightly --contains java --remove skip"); + Console.WriteLine(); + Console.WriteLine(" EXAMPLE 3: Run tests from files under 'tests' directory, that contain 'test3', but not 'skip'"); + Console.WriteLine(); + Console.WriteLine(" ait run --files ../tests/**/*.yaml --contains test3 --remove skip"); + + return 1; + } + + private static int DoCommand(string[] args, bool list, bool run) + { + var tests = FindAndFilterTests(args); + if (tests == null) return 1; + + if (list) return DoListTests(tests) ? 0 : 1; + if (run) return DoRunTests(tests) ? 0 : 1; + + return 1; + } + + private static IEnumerable FindAndFilterTests(string[] args) + { + var parsedOk = ParseFilesAndFilterArgs(args, out var files, out var filters); + if (!parsedOk) return null; + + var atLeastOneFileSpecified = files.Any(); + var tests = atLeastOneFileSpecified + ? files.SelectMany(file => YamlTestFramework.GetTestsFromYaml(file.FullName, file)).ToList() + : YamlTestFramework.GetTestsFromDirectory("ait", new DirectoryInfo(".")).ToList(); + + return YamlTestCaseFilter.FilterTestCases(tests, filters); + } + + private static bool ParseFilesAndFilterArgs(string[] args, out IList files, out IList filters) + { + var filesAsList = new List(); + files = filesAsList; + filters = new List(); + + for (int i = 0; i < args.Length; i++) + { + if (args[i] == "--file" || args[i] == "--files") + { + if (i + 1 >= args.Length || args[i + 1].StartsWith("--")) + { + Console.WriteLine($"Expected a file or pattern after '{args[i]}'."); + return false; + } + + do + { + i++; + var pattern = args[i]; + var found = FindFiles(pattern); + if (found.Count() == 0) + { + Console.WriteLine($"No files found for pattern '{pattern}'."); + return false; + } + + filesAsList.AddRange(found); + } + while (i + 1 < args.Length && !args[i + 1].StartsWith("--")); + } + else if (args[i] == "--search") + { + if (i + 1 >= 
args.Length || args[i + 1].StartsWith("--")) + { + Console.WriteLine($"Expected text after '{args[i]}'."); + return false; + } + + do + { + i++; + filters.Add(args[i]); + } + while (i + 1 < args.Length && !args[i + 1].StartsWith("--")); + } + else if (args[i] == "--test" || args[i] == "--tests") + { + if (i + 1 >= args.Length || args[i + 1].StartsWith("--")) + { + Console.WriteLine($"Expected text after '{args[i]}'."); + return false; + } + + do + { + i++; + filters.Add(args[i]); + } + while (i + 1 < args.Length && !args[i + 1].StartsWith("--")); + } + else if (args[i] == "--contains") + { + if (i + 1 >= args.Length || args[i + 1].StartsWith("--")) + { + Console.WriteLine($"Expected text after '{args[i]}'."); + return false; + } + + do + { + i++; + filters.Add($"+{args[i]}"); // `+` means MUST contain text + } + while (i + 1 < args.Length && !args[i + 1].StartsWith("--")); + } + else if (args[i] == "--remove") + { + if (i + 1 >= args.Length || args[i + 1].StartsWith("--")) + { + Console.WriteLine($"Expected text after '{args[i]}'."); + return false; + } + + do + { + i++; + filters.Add($"-{args[i]}"); // `-` means MUST NOT contain text + } + while (i + 1 < args.Length && !args[i + 1].StartsWith("--")); + } + else + { + Console.WriteLine($"Invalid command line argument at '{args[i]}'."); + return false; + } + } + + return true; + } + + private static IList FindFiles(string pattern) + { + var files = FileHelpers.FindFiles(Directory.GetCurrentDirectory(), pattern, null, false, false); + return files.Select(x => new FileInfo(x)).ToList(); + } + + private static bool DoListTests(IEnumerable tests) + { + foreach (var test in tests) + { + Console.WriteLine(test.FullyQualifiedName); + } + + return true; + } + + private static bool DoRunTests(IEnumerable tests) + { + var consoleHost = new YamlTestFrameworkConsoleHost(); + var resultsByTestCaseId = YamlTestFramework.RunTests(tests, consoleHost); + return consoleHost.Finish(resultsByTestCaseId); + } + } +} diff --git 
a/tests/testrunner/Properties/AssemblyInfo.cs b/tests/testrunner/Properties/AssemblyInfo.cs new file mode 100644 index 00000000..d280765b --- /dev/null +++ b/tests/testrunner/Properties/AssemblyInfo.cs @@ -0,0 +1,36 @@ +using System.Reflection; +using System.Runtime.CompilerServices; +using System.Runtime.InteropServices; + +// General Information about an assembly is controlled through the following +// set of attributes. Change these attribute values to modify the information +// associated with an assembly. +[assembly: AssemblyTitle("YamlTestRunner")] +[assembly: AssemblyDescription("")] +[assembly: AssemblyConfiguration("")] +[assembly: AssemblyCompany("")] +[assembly: AssemblyProduct("YamlTestRunner")] +[assembly: AssemblyCopyright("Copyright © 2024")] +[assembly: AssemblyTrademark("")] +[assembly: AssemblyCulture("")] + +// Setting ComVisible to false makes the types in this assembly not visible +// to COM components. If you need to access a type in this assembly from +// COM, set the ComVisible attribute to true on that type. 
+[assembly: ComVisible(false)] + +// The following GUID is for the ID of the typelib if this project is exposed to COM +[assembly: Guid("AF47877C-87D2-449B-B52B-AD90D6FDF609")] + +// Version information for an assembly consists of the following four values: +// +// Major Version +// Minor Version +// Build Number +// Revision +// +// You can specify all the values or you can default the Build and Revision Numbers +// by using the '*' as shown below: +// [assembly: AssemblyVersion("1.0.*")] +[assembly: AssemblyVersion("1.0.0.0")] +[assembly: AssemblyFileVersion("1.0.0.0")] diff --git a/tests/testrunner/YamlTestFrameworkConsoleHost.cs b/tests/testrunner/YamlTestFrameworkConsoleHost.cs new file mode 100644 index 00000000..2494002e --- /dev/null +++ b/tests/testrunner/YamlTestFrameworkConsoleHost.cs @@ -0,0 +1,438 @@ +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Text.RegularExpressions; +using System.Xml; +using Microsoft.VisualStudio.TestPlatform.ObjectModel; +using Microsoft.VisualStudio.TestPlatform.ObjectModel.Adapter; + +namespace Azure.AI.Details.Common.CLI.TestFramework +{ + public class YamlTestFrameworkConsoleHost : IYamlTestFrameworkHost + { + public YamlTestFrameworkConsoleHost() + { + } + + public void RecordStart(TestCase testCase) + { + _startTime ??= DateTime.Now; + _testCases.Add(testCase); + SetExecutionId(testCase, Guid.NewGuid()); + + lock (this) + { + Console.ForegroundColor = ConsoleColor.DarkGray; + Console.WriteLine("Starting test: " + testCase.FullyQualifiedName); + Console.ResetColor(); + } + } + + public void RecordResult(TestResult testResult) + { + _testResults.Add(testResult); + PrintResult(testResult); + } + + public void RecordEnd(TestCase testCase, TestOutcome outcome) + { + _endTime = DateTime.Now; + } + + public bool Finish(IDictionary> resultsByTestCaseId) + { + var allResults = resultsByTestCaseId.Values.SelectMany(x => x); + var failedResults = allResults.Where(x => x.Outcome 
== TestOutcome.Failed).ToList(); + var passedResults = allResults.Where(x => x.Outcome == TestOutcome.Passed).ToList(); + var skippedResults = allResults.Where(x => x.Outcome == TestOutcome.Skipped).ToList(); + var passed = failedResults.Count == 0; + + if (failedResults.Count > 0) + { + Console.ResetColor(); + Console.WriteLine(); + Console.BackgroundColor = ConsoleColor.Red; + Console.ForegroundColor = ConsoleColor.White; + Console.Write("FAILURE SUMMARY:"); + Console.ResetColor(); + Console.WriteLine(); + failedResults.ForEach(r => PrintResult(r)); + } + else + { + Console.WriteLine(); + } + + var count = allResults.Count(); + var duration = _endTime != null && _startTime != null ? FormattedDuration((_endTime.Value - _startTime.Value).TotalMilliseconds) : "0 ms"; + Console.BackgroundColor = ConsoleColor.Blue; + Console.ForegroundColor = ConsoleColor.White; + Console.Write("TEST RESULT SUMMARY:"); + Console.ResetColor(); + Console.Write("\nTests: "); + Console.ForegroundColor = ConsoleColor.Blue; + Console.Write($"{count}"); + Console.ForegroundColor = ConsoleColor.DarkGray; + Console.WriteLine($" ({duration})"); + + var resultsFile = WriteResultFile(); + + var fi = new FileInfo(resultsFile); + Console.ResetColor(); + Console.Write("Results: "); + Console.ForegroundColor = ConsoleColor.Blue; + Console.Write(fi.FullName); + Console.ResetColor(); + Console.WriteLine("\n"); + + Console.ForegroundColor = ConsoleColor.Green; + Console.Write($"Passed: {passedResults.Count}"); + + if (failedResults.Count > 0) + { + Console.ForegroundColor = ConsoleColor.DarkGray; + Console.Write(", "); + Console.ForegroundColor = ConsoleColor.Red; + Console.Write($"Failed: {failedResults.Count}"); + } + + if (skippedResults.Count > 0) + { + Console.ForegroundColor = ConsoleColor.DarkGray; + Console.Write(", "); + Console.ForegroundColor = ConsoleColor.Yellow; + Console.Write($"Skipped: {skippedResults.Count}"); + } + + Console.ResetColor(); + Console.WriteLine("\n"); + + return passed; 
+ } + + public string WriteResultFile() + { + var assembly = typeof(YamlTestFrameworkConsoleHost).Assembly; + var assemblyPath = assembly.Location; + + _startTime ??= DateTime.Now; + _endTime ??= DateTime.Now; + + var resultFile = "test-results.trx"; + var testRunId = Guid.NewGuid().ToString(); + var testListId = "8c84fa94-04c1-424b-9868-57a2d4851a1d"; + var testType = "13cdc9d9-ddb5-4fa4-a97d-d965ccfc6d4b"; + var userName = Environment.UserName; + var machineName = Environment.MachineName; + var userAtMachine = userName.Split('\\', '/').Last() + "@" + machineName; + var testRunName = userAtMachine + " " + _endTime.Value.ToString("yyyy-MM-dd HH:mm:ss"); + + XmlWriterSettings settings = new XmlWriterSettings(); + settings.Indent = true; + settings.IndentChars = " "; + settings.NewLineChars = "\n"; + settings.NewLineHandling = NewLineHandling.Replace; + settings.OmitXmlDeclaration = false; + + var writer = XmlWriter.Create(resultFile, settings); + writer.WriteStartDocument(); + writer.WriteStartElement("", "TestRun", "http://microsoft.com/schemas/VisualStudio/TeamTest/2010"); + writer.WriteAttributeString("id", testRunId); + writer.WriteAttributeString("name", testRunName); + writer.WriteAttributeString("runUser", userName); + + writer.WriteStartElement("Times"); + writer.WriteAttributeString("creation", _endTime.Value.ToString("o")); + writer.WriteAttributeString("queuing", _endTime.Value.ToString("o")); + writer.WriteAttributeString("start", _startTime.Value.ToString("o")); + writer.WriteAttributeString("finish", _endTime.Value.ToString("o")); + writer.WriteEndElement(); + + writer.WriteStartElement("Results"); + foreach (var testResult in _testResults) + { + var executionId = GetExecutionId(testResult.TestCase).ToString(); + var stdout = testResult.Messages.FirstOrDefault(x => x.Category == TestResultMessage.StandardOutCategory)?.Text; + var stderr = testResult.Messages.FirstOrDefault(x => x.Category == TestResultMessage.StandardErrorCategory)?.Text; + var 
debugTrace = testResult.Messages.FirstOrDefault(x => x.Category == TestResultMessage.DebugTraceCategory)?.Text; + var message = testResult.Messages.FirstOrDefault(x => x.Category == TestResultMessage.AdditionalInfoCategory)?.Text; + + writer.WriteStartElement("UnitTestResult"); + writer.WriteAttributeString("executionId", executionId); + writer.WriteAttributeString("testId", testResult.TestCase.Id.ToString()); + writer.WriteAttributeString("testName", testResult.TestCase.FullyQualifiedName); + writer.WriteAttributeString("computerName", machineName); + writer.WriteAttributeString("duration", testResult.Duration.ToString()); + writer.WriteAttributeString("startTime", testResult.StartTime.DateTime.ToString("o")); + writer.WriteAttributeString("endTime", testResult.EndTime.DateTime.ToString("o")); + writer.WriteAttributeString("testType", testType); + writer.WriteAttributeString("outcome", OutcomeToString(testResult.Outcome)); + writer.WriteAttributeString("testListId", testListId); + writer.WriteAttributeString("relativeResultsDirectory", Guid.NewGuid().ToString()); + writer.WriteStartElement("Output"); + + if (!string.IsNullOrEmpty(stdout)) + { + writer.WriteStartElement("StdOut"); + writer.WriteRaw(System.Security.SecurityElement + .Escape(stdout.Replace("\u001b", string.Empty)) + .Replace("\r\n", " \n")); + writer.WriteEndElement(); + } + + if (!string.IsNullOrEmpty(stderr)) + { + writer.WriteStartElement("StdErr"); + writer.WriteRaw(System.Security.SecurityElement + .Escape(stderr.Replace("\u001b", string.Empty)) + .Replace("\r\n", " \n")); + writer.WriteEndElement(); + } + + if (!string.IsNullOrEmpty(debugTrace)) + { + writer.WriteElementString("DebugTrace", debugTrace); + } + + writer.WriteStartElement("ErrorInfo"); + writer.WriteElementString("Message", testResult.ErrorMessage); + writer.WriteElementString("StackTrace", testResult.ErrorStackTrace); + writer.WriteEndElement(); + writer.WriteStartElement("TextMessages"); + + if (!string.IsNullOrEmpty(message)) + 
{ + writer.WriteElementString("Message", message); + } + writer.WriteEndElement(); + writer.WriteEndElement(); + writer.WriteEndElement(); + } + writer.WriteEndElement(); + + writer.WriteStartElement("TestDefinitions"); + foreach (var testCase in _testCases) + { + var executionId = GetExecutionId(testCase).ToString(); + var qualifiedParts = testCase.FullyQualifiedName.Split('.'); + var className = string.Join(".", qualifiedParts.Take(qualifiedParts.Length - 1)); + var name = qualifiedParts.Last(); + writer.WriteStartElement("UnitTest"); + writer.WriteAttributeString("name", testCase.DisplayName); + writer.WriteAttributeString("storage", assemblyPath); + writer.WriteAttributeString("id", testCase.Id.ToString()); + writer.WriteStartElement("Execution"); + writer.WriteAttributeString("id", executionId); + writer.WriteEndElement(); + writer.WriteStartElement("TestMethod"); + writer.WriteAttributeString("codeBase", assemblyPath); + writer.WriteAttributeString("adapterTypeName", testCase.ExecutorUri.ToString()); + writer.WriteAttributeString("className", className); + writer.WriteAttributeString("name", name); + writer.WriteEndElement(); + writer.WriteEndElement(); + } + writer.WriteEndElement(); + + writer.WriteStartElement("TestEntries"); + foreach (var testCase in _testCases) + { + var executionId = GetExecutionId(testCase).ToString(); + writer.WriteStartElement("TestEntry"); + writer.WriteAttributeString("testId", testCase.Id.ToString()); + writer.WriteAttributeString("executionId", executionId); + writer.WriteAttributeString("testListId", testListId); + writer.WriteEndElement(); + } + writer.WriteEndElement(); + + writer.WriteStartElement("TestLists"); + writer.WriteStartElement("TestList"); + writer.WriteAttributeString("name", "Results Not in a List"); + writer.WriteAttributeString("id", testListId); + writer.WriteEndElement(); + writer.WriteStartElement("TestList"); + writer.WriteAttributeString("name", "All Loaded Results"); + writer.WriteAttributeString("id", 
"19431567-8539-422a-85d7-44ee4e166bda"); + writer.WriteEndElement(); + writer.WriteEndElement(); + + writer.WriteStartElement("ResultSummary"); + writer.WriteAttributeString("outcome", "Completed"); + + writer.WriteStartElement("Counters"); + writer.WriteAttributeString("total", _testResults.Count.ToString()); + writer.WriteAttributeString("executed", _testResults.Count(r => IsExecuted(r)).ToString()); + writer.WriteAttributeString("passed", _testResults.Count(r => IsPassed(r)).ToString()); + writer.WriteAttributeString("failed", _testResults.Count(r => IsFailed(r)).ToString()); + writer.WriteAttributeString("error", _testResults.Count(r => IsError(r)).ToString()); + writer.WriteAttributeString("timeout", _testResults.Count(r => IsTimeout(r)).ToString()); + writer.WriteAttributeString("aborted", _testResults.Count(r => IsAborted(r)).ToString()); + writer.WriteAttributeString("inconclusive", _testResults.Count(r => IsInConclusive(r)).ToString()); + writer.WriteAttributeString("passedButRunAborted", _testResults.Count(r => IsPassedButRunaborted(r)).ToString()); + writer.WriteAttributeString("notRunnable", _testResults.Count(r => IsNotRunnable(r)).ToString()); + writer.WriteAttributeString("notExecuted", _testResults.Count(r => IsNotExecuted(r)).ToString()); + writer.WriteAttributeString("disconnected", _testResults.Count(r => IsDisconnected(r)).ToString()); + writer.WriteAttributeString("warning", _testResults.Count(r => IsWarning(r)).ToString()); + writer.WriteAttributeString("completed", "0"); + writer.WriteAttributeString("inProgress", "0"); + writer.WriteAttributeString("pending", "0"); + writer.WriteEndElement(); + + writer.WriteStartElement("Output"); + writer.WriteElementString("StdOut", ""); + writer.WriteEndElement(); + + writer.WriteEndElement(); + + writer.WriteEndElement(); + writer.WriteEndDocument(); + + writer.Close(); + writer.Dispose(); + + return resultFile; + } + + private void PrintResult(TestResult testResult) + { + lock (this) + { + 
Console.ForegroundColor = ConsoleColor.DarkGray; + if (testResult.Outcome == TestOutcome.Passed) Console.ForegroundColor = ConsoleColor.Green; + if (testResult.Outcome == TestOutcome.Skipped) Console.ForegroundColor = ConsoleColor.Yellow; + if (testResult.Outcome == TestOutcome.Failed) Console.ForegroundColor = ConsoleColor.Red; + + var duration = FormattedDuration(testResult.Duration.TotalMilliseconds); + Console.WriteLine($"{testResult.Outcome} ({duration}): {testResult.TestCase.FullyQualifiedName}"); + Console.ResetColor(); + + if (testResult.Outcome == TestOutcome.Failed) + { + var hasStack = !string.IsNullOrEmpty(testResult.ErrorStackTrace); + if (hasStack) Console.WriteLine(testResult.ErrorStackTrace.Trim('\r', '\n')); + + var hasErr = !string.IsNullOrEmpty(testResult.ErrorMessage); + if (hasErr) Console.WriteLine(testResult.ErrorMessage.Trim('\r', '\n')); + + if (hasErr || hasStack) Console.WriteLine(); + } + } + } + + private static string FormattedDuration(double ms) + { + var secs = ms / 1000; + var duration = ms >= 1000 + ? 
secs.ToString("0.00") + " seconds" + : ms.ToString("0") + " ms"; + return duration; + } + + private static string OutcomeToString(TestOutcome outcome) + { + return outcome switch { + TestOutcome.None => "None", + TestOutcome.Passed => "Passed", + TestOutcome.Failed => "Failed", + TestOutcome.Skipped => "NotExecuted", + TestOutcome.NotFound => "NotFound", + _ => "None", + }; + } + + private bool IsExecuted(TestResult r) + { + return IsPassed(r) || IsFailed(r); + } + + private static bool IsPassed(TestResult r) + { + return r.Outcome == TestOutcome.Passed; + } + + private static bool IsFailed(TestResult r) + { + return r.Outcome == TestOutcome.Failed; + } + + private static bool IsError(TestResult r) + { + return false; + // return r.Outcome == TestOutcome.Error; + } + + private static bool IsTimeout(TestResult r) + { + return false; + // return r.Outcome == TestOutcome.Timeout; + } + + private static bool IsAborted(TestResult r) + { + return false; + // return r.Outcome == TestOutcome.Aborted; + } + + private static bool IsInConclusive(TestResult r) + { + return false; + // return r.Outcome == TestOutcome.Inconclusive; + } + + private static bool IsPassedButRunaborted(TestResult r) + { + return false; + // return r.Outcome == TestOutcome.PassedButRunAborted; + } + + private static bool IsNotRunnable(TestResult r) + { + return false; + // return r.Outcome == TestOutcome.NotRunnable; + } + + private static bool IsNotExecuted(TestResult r) + { + return false; + // return r.Outcome == TestOutcome.NotExecuted; + } + + private static bool IsDisconnected(TestResult r) + { + return false; + // return r.Outcome == TestOutcome.Disconnected; + } + + private static bool IsWarning(TestResult r) + { + return false; + // return r.Outcome == TestOutcome.Warning; + } + + private void SetExecutionId(TestCase testCase, Guid guid) + { + lock (_testToExecutionMap) + { + _testToExecutionMap[testCase.Id] = guid; + } + } + + private Guid GetExecutionId(TestCase testCase) + { + lock 
(_testToExecutionMap) + { + return _testToExecutionMap[testCase.Id]; + } + } + + private DateTime? _startTime; + private DateTime? _endTime; + + private List _testCases = new List(); + private Dictionary _testToExecutionMap = new Dictionary(); + private List _testResults = new List(); + } +} + diff --git a/tests/testrunner/YamlTestRunner.csproj b/tests/testrunner/YamlTestRunner.csproj new file mode 100644 index 00000000..e04b5a5f --- /dev/null +++ b/tests/testrunner/YamlTestRunner.csproj @@ -0,0 +1,14 @@ + + + + net8.0 + Exe + + + + + + + + + \ No newline at end of file diff --git a/tests/testrunner/YamlTestRunnerCommon.targets b/tests/testrunner/YamlTestRunnerCommon.targets new file mode 100644 index 00000000..5ae2055c --- /dev/null +++ b/tests/testrunner/YamlTestRunnerCommon.targets @@ -0,0 +1,56 @@ + + + + + net8.0 + ait + false + + + 1.1.0 + + + + $(LocalBuildSDKBinPath) + bin + + + + + + x64 + $(LocalBinOutputPath)\Release\ + + + + + x64 + $(LocalBinOutputPath)\Debug\ + + + + + $(DefineConstants);UNIX + $(LocalBinOutputPath)\ReleaseUnixOS + + + + + $(DefineConstants);UNIX + $(LocalBinOutputPath)\DebugUnixOS + + + + + + 1.0.0 + $(TFAssemblyVersion) + + + + $(TFAssemblyVersion) + $(TFAssemblyVersion) + $(TFAssemblyInformationalVersion) + + +