diff --git a/.gitignore b/.gitignore
index 34ada0e2..6bc5c4bd 100644
--- a/.gitignore
+++ b/.gitignore
@@ -4,3 +4,4 @@
**/bin/*/net6.0/*
**/bin/*/net7.0/*
**/obj/*
+ideas/website/node_modules/**
diff --git a/ideas/cool-helper-function-scenarios/HelperFunctionsProject.csproj b/ideas/cool-helper-function-scenarios/HelperFunctionsProject.csproj
index 336911a0..36bc2604 100644
--- a/ideas/cool-helper-function-scenarios/HelperFunctionsProject.csproj
+++ b/ideas/cool-helper-function-scenarios/HelperFunctionsProject.csproj
@@ -12,7 +12,7 @@
-
+
diff --git a/src/ai/.x/templates/helper_functions/HelperFunctionsProject.csproj._ b/src/ai/.x/templates/helper_functions/HelperFunctionsProject.csproj._
index 69938ae4..6c9cb517 100644
--- a/src/ai/.x/templates/helper_functions/HelperFunctionsProject.csproj._
+++ b/src/ai/.x/templates/helper_functions/HelperFunctionsProject.csproj._
@@ -15,7 +15,7 @@
-
+
diff --git a/src/ai/.x/templates/openai-chat-js/package.json b/src/ai/.x/templates/openai-chat-js/package.json
index 2996e379..63b70f82 100644
--- a/src/ai/.x/templates/openai-chat-js/package.json
+++ b/src/ai/.x/templates/openai-chat-js/package.json
@@ -7,9 +7,9 @@
"test": "echo \"Error: no test specified\" && exit 1"
},
"author": "",
- "license": "ISC",
+ "license": "MIT",
"dependencies": {
- "@azure/openai": "^1.0.0-beta.7"
+ "@azure/openai": "1.0.0-beta.8"
}
}
\ No newline at end of file
diff --git a/src/ai/.x/templates/openai-chat-streaming-js/ChatCompletionsStreaming.js b/src/ai/.x/templates/openai-chat-streaming-js/ChatCompletionsStreaming.js
new file mode 100644
index 00000000..be985d45
--- /dev/null
+++ b/src/ai/.x/templates/openai-chat-streaming-js/ChatCompletionsStreaming.js
@@ -0,0 +1,85 @@
+<#@ template hostspecific="true" #>
+<#@ output extension=".js" encoding="utf-8" #>
+<#@ parameter type="System.String" name="OPENAI_ENDPOINT" #>
+<#@ parameter type="System.String" name="OPENAI_API_KEY" #>
+<#@ parameter type="System.String" name="AZURE_OPENAI_CHAT_DEPLOYMENT" #>
+<#@ parameter type="System.String" name="AZURE_OPENAI_SYSTEM_PROMPT" #>
+const { OpenAIClient, AzureKeyCredential } = require("@azure/openai");
+
+class OpenAIStreamingChatCompletions {
+ constructor(systemPrompt, endpoint, azureApiKey, deploymentName) {
+ this.systemPrompt = systemPrompt;
+ this.endpoint = endpoint;
+ this.azureApiKey = azureApiKey;
+ this.deploymentName = deploymentName;
+ this.client = new OpenAIClient(this.endpoint, new AzureKeyCredential(this.azureApiKey));
+ this.clearConversation();
+ }
+
+ clearConversation() {
+ this.messages = [
+ { role: 'system', content: this.systemPrompt }
+ ];
+ }
+
+ async getChatCompletions(userInput, callback) {
+ this.messages.push({ role: 'user', content: userInput });
+
+ const events = this.client.listChatCompletions(this.deploymentName, this.messages);
+
+ let contentComplete = '';
+ for await (const event of events) {
+ for (const choice of event.choices) {
+
+ let content = choice.delta?.content;
+ if (choice.finishReason === 'length') {
+ content = `${content}\nERROR: Exceeded token limit!`;
+ }
+
+ if (content != null) {
+ callback(content);
+ await new Promise(r => setTimeout(r, 50)); // delay to simulate real-time output, word by word
+ contentComplete += content;
+ }
+ }
+ }
+
+ this.messages.push({ role: 'assistant', content: contentComplete });
+ return contentComplete;
+ }
+}
+
+const readline = require('readline');
+const rl = readline.createInterface({
+ input: process.stdin,
+ output: process.stdout
+});
+
+async function main() {
+ const endpoint = process.env["OPENAI_ENDPOINT"] || "<#= OPENAI_ENDPOINT #>";
+ const azureApiKey = process.env["OPENAI_API_KEY"] || "<#= OPENAI_API_KEY #>";
+ const deploymentName = process.env["AZURE_OPENAI_CHAT_DEPLOYMENT"] || "<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>" ;
+ const systemPrompt = process.env["AZURE_OPENAI_SYSTEM_PROMPT"] || "<#= AZURE_OPENAI_SYSTEM_PROMPT #>" ;
+
+ const streamingChatCompletions = new OpenAIStreamingChatCompletions(systemPrompt, endpoint, azureApiKey, deploymentName);
+
+ while (true) {
+
+ const input = await new Promise(resolve => rl.question('User: ', resolve));
+ if (input === 'exit' || input === '') break;
+
+ let response = await streamingChatCompletions.getChatCompletions(input, (content) => {
+ console.log(`assistant-streaming: ${content}`);
+ });
+
+ console.log(`\nAssistant: ${response}\n`);
+ }
+
+ console.log('Bye!');
+}
+
+main().catch((err) => {
+ console.error("The sample encountered an error:", err);
+});
+
+module.exports = { main };
diff --git a/src/ai/.x/templates/openai-chat-streaming-js/_.json b/src/ai/.x/templates/openai-chat-streaming-js/_.json
new file mode 100644
index 00000000..0fa5ea1b
--- /dev/null
+++ b/src/ai/.x/templates/openai-chat-streaming-js/_.json
@@ -0,0 +1,8 @@
+{
+ "_Name": "OpenAI Chat Completions (Streaming) in JavaScript",
+ "_Language": "JavaScript",
+ "OPENAI_ENDPOINT": "",
+ "OPENAI_API_KEY": "",
+ "AZURE_OPENAI_CHAT_DEPLOYMENT": "",
+ "AZURE_OPENAI_SYSTEM_PROMPT": "You are a helpful AI assistant."
+}
\ No newline at end of file
diff --git a/src/ai/.x/templates/openai-chat-streaming-js/package.json b/src/ai/.x/templates/openai-chat-streaming-js/package.json
new file mode 100644
index 00000000..1d208557
--- /dev/null
+++ b/src/ai/.x/templates/openai-chat-streaming-js/package.json
@@ -0,0 +1,15 @@
+{
+ "name": "openai-chat-streaming",
+ "version": "1.0.0",
+ "description": "",
+ "main": "ChatCompletionsStreaming.js",
+ "scripts": {
+ "test": "echo \"Error: no test specified\" && exit 1"
+ },
+ "author": "",
+ "license": "MIT",
+ "dependencies": {
+ "@azure/openai": "1.0.0-beta.8"
+ }
+ }
+
\ No newline at end of file
diff --git a/src/ai/.x/templates/openai-chat-streaming/OpenAIChatCompletionsStreaming.csproj._ b/src/ai/.x/templates/openai-chat-streaming/OpenAIChatCompletionsStreaming.csproj._
index c46383e8..4677a2e0 100644
--- a/src/ai/.x/templates/openai-chat-streaming/OpenAIChatCompletionsStreaming.csproj._
+++ b/src/ai/.x/templates/openai-chat-streaming/OpenAIChatCompletionsStreaming.csproj._
@@ -14,7 +14,7 @@
-
+
\ No newline at end of file
diff --git a/src/ai/.x/templates/openai-chat-streaming/OpenAIChatCompletionsStreamingClass.cs b/src/ai/.x/templates/openai-chat-streaming/OpenAIChatCompletionsStreamingClass.cs
index a37007f4..3eff8a12 100644
--- a/src/ai/.x/templates/openai-chat-streaming/OpenAIChatCompletionsStreamingClass.cs
+++ b/src/ai/.x/templates/openai-chat-streaming/OpenAIChatCompletionsStreamingClass.cs
@@ -28,12 +28,12 @@ public class <#= ClassName #>
options = new ChatCompletionsOptions();
options.DeploymentName = deploymentName;
- options.Messages.Add(new ChatMessage(ChatRole.System, systemPrompt));
+ options.Messages.Add(new ChatRequestSystemMessage(systemPrompt));
}
public async Task GetChatCompletionsStreamingAsync(string userPrompt, Action callback = null)
{
- options.Messages.Add(new ChatMessage(ChatRole.User, userPrompt));
+ options.Messages.Add(new ChatRequestUserMessage(userPrompt));
var responseContent = string.Empty;
var response = await client.GetChatCompletionsStreamingAsync(options);
@@ -56,7 +56,7 @@ public async Task GetChatCompletionsStreamingAsync(string userPrompt, Ac
responseContent += content;
}
- options.Messages.Add(new ChatMessage(ChatRole.Assistant, responseContent));
+ options.Messages.Add(new ChatRequestAssistantMessage(responseContent));
return responseContent;
}
diff --git a/src/ai/.x/templates/openai-chat-with-azure-ai-search/OpenAIChatWithAzureAISearch.csproj._ b/src/ai/.x/templates/openai-chat-with-azure-ai-search/OpenAIChatWithAzureAISearch.csproj._
index a2709913..51ad3f8f 100644
--- a/src/ai/.x/templates/openai-chat-with-azure-ai-search/OpenAIChatWithAzureAISearch.csproj._
+++ b/src/ai/.x/templates/openai-chat-with-azure-ai-search/OpenAIChatWithAzureAISearch.csproj._
@@ -13,7 +13,7 @@
-
+
\ No newline at end of file
diff --git a/src/ai/.x/templates/openai-chat-with-azure-ai-search/OpenAIChatWithAzureAISearchClass.cs b/src/ai/.x/templates/openai-chat-with-azure-ai-search/OpenAIChatWithAzureAISearchClass.cs
index f7ad61e0..e6847165 100644
--- a/src/ai/.x/templates/openai-chat-with-azure-ai-search/OpenAIChatWithAzureAISearchClass.cs
+++ b/src/ai/.x/templates/openai-chat-with-azure-ai-search/OpenAIChatWithAzureAISearchClass.cs
@@ -26,37 +26,36 @@ public async Task ChatUsingYourOwnData()
{
var client = new OpenAIClient(new Uri(_openAIEndpoint), new DefaultAzureCredential());
- AzureCognitiveSearchChatExtensionConfiguration contosoExtensionConfig = new()
+ var contosoExtensionConfig = new AzureCognitiveSearchChatExtensionConfiguration()
{
SearchEndpoint = new Uri(_searchEndpoint),
+ Key = _searchApiKey,
IndexName = _searchIndexName,
};
- contosoExtensionConfig.SetSearchKey(_searchApiKey);
-
ChatCompletionsOptions chatCompletionsOptions = new()
{
DeploymentName = _openAIDeploymentName,
Messages =
{
- new ChatMessage(ChatRole.System, "You are a helpful assistant that answers questions about the Contoso product database."),
- new ChatMessage(ChatRole.User, "What are the best-selling Contoso products this month?")
+ new ChatRequestSystemMessage("You are a helpful assistant that answers questions about the Contoso product database."),
+ new ChatRequestUserMessage("What are the best-selling Contoso products this month?")
},
- AzureExtensionsOptions = new AzureChatExtensionsOptions()
+ AzureExtensionsOptions = new()
{
Extensions = { contosoExtensionConfig }
}
};
Response response = await client.GetChatCompletionsAsync(chatCompletionsOptions);
- ChatMessage message = response.Value.Choices[0].Message;
+ var message = response.Value.Choices[0].Message;
Console.WriteLine($"{message.Role}: {message.Content}");
Console.WriteLine("Citations and other information:");
- foreach (ChatMessage contextMessage in message.AzureExtensionsContext.Messages)
+ foreach (var contextMessage in message.AzureExtensionsContext.Messages)
{
Console.WriteLine($"{contextMessage.Role}: {contextMessage.Content}");
}
diff --git a/src/ai/.x/templates/openai-chat/OpenAIChatCompletions.csproj._ b/src/ai/.x/templates/openai-chat/OpenAIChatCompletions.csproj._
index c46383e8..4677a2e0 100644
--- a/src/ai/.x/templates/openai-chat/OpenAIChatCompletions.csproj._
+++ b/src/ai/.x/templates/openai-chat/OpenAIChatCompletions.csproj._
@@ -14,7 +14,7 @@
-
+
\ No newline at end of file
diff --git a/src/ai/.x/templates/openai-chat/OpenAIChatCompletionsClass.cs b/src/ai/.x/templates/openai-chat/OpenAIChatCompletionsClass.cs
index 57a84fce..060ae8ce 100644
--- a/src/ai/.x/templates/openai-chat/OpenAIChatCompletionsClass.cs
+++ b/src/ai/.x/templates/openai-chat/OpenAIChatCompletionsClass.cs
@@ -28,23 +28,23 @@ public class <#= ClassName #>
options = new ChatCompletionsOptions();
options.DeploymentName = deploymentName;
- options.Messages.Add(new ChatMessage(ChatRole.System, systemPrompt));
+ options.Messages.Add(new ChatRequestSystemMessage(systemPrompt));
}
public string GetChatCompletion(string userPrompt)
{
- options.Messages.Add(new ChatMessage(ChatRole.User, userPrompt));
+ options.Messages.Add(new ChatRequestUserMessage(userPrompt));
var response = client.GetChatCompletions(options);
var responseContent = response.Value.Choices[0].Message.Content;
- options.Messages.Add(new ChatMessage(ChatRole.Assistant, responseContent));
+ options.Messages.Add(new ChatRequestAssistantMessage(responseContent));
return responseContent;
}
public static void Main(string[] args)
{
- var chat = new <#= ClassName #>();
+        var chat = new <#= ClassName #>();
while (true)
{
diff --git a/src/ai/.x/templates/openai-functions-streaming-js/ChatCompletionsCustomFunctions.js b/src/ai/.x/templates/openai-functions-streaming-js/ChatCompletionsCustomFunctions.js
new file mode 100644
index 00000000..ad3c3e8d
--- /dev/null
+++ b/src/ai/.x/templates/openai-functions-streaming-js/ChatCompletionsCustomFunctions.js
@@ -0,0 +1,42 @@
+function getCurrentWeather(function_arguments) {
+ const location = JSON.parse(function_arguments).location;
+ return `The weather in ${location} is 72 degrees and sunny.`;
+ };
+
+const getCurrentWeatherSchema = {
+ name: "get_current_weather",
+ description: "Get the current weather in a given location",
+ parameters: {
+ type: "object",
+ properties: {
+ location: {
+ type: "string",
+ description: "The city and state, e.g. San Francisco, CA",
+ },
+ unit: {
+ type: "string",
+ enum: ["celsius", "fahrenheit"],
+ },
+ },
+ required: ["location"],
+ },
+};
+
+function getCurrentDate() {
+ const date = new Date();
+ return `${date.getFullYear()}-${date.getMonth() + 1}-${date.getDate()}`;
+}
+
+const getCurrentDateSchema = {
+ name: "get_current_date",
+ description: "Get the current date",
+ parameters: {
+ type: "object",
+ properties: {},
+ },
+};
+
+exports.getCurrentWeather = getCurrentWeather;
+exports.getCurrentWeatherSchema = getCurrentWeatherSchema;
+exports.getCurrentDate = getCurrentDate;
+exports.getCurrentDateSchema = getCurrentDateSchema;
diff --git a/src/ai/.x/templates/openai-functions-streaming-js/ChatCompletionsFunctionsStreaming.js b/src/ai/.x/templates/openai-functions-streaming-js/ChatCompletionsFunctionsStreaming.js
new file mode 100644
index 00000000..b379f063
--- /dev/null
+++ b/src/ai/.x/templates/openai-functions-streaming-js/ChatCompletionsFunctionsStreaming.js
@@ -0,0 +1,61 @@
+const { OpenAIClient, AzureKeyCredential } = require("@azure/openai");
+const { FunctionFactory } = require("./FunctionFactory");
+const { FunctionCallContext } = require("./FunctionCallContext");
+
+class ChatCompletionsFunctionsStreaming {
+ constructor(systemPrompt, endpoint, azureApiKey, deploymentName, functionFactory) {
+ this.systemPrompt = systemPrompt;
+ this.endpoint = endpoint;
+ this.azureApiKey = azureApiKey;
+ this.deploymentName = deploymentName;
+ this.client = new OpenAIClient(this.endpoint, new AzureKeyCredential(this.azureApiKey));
+ this.functionFactory = functionFactory || new FunctionFactory();
+ this.clearConversation();
+ }
+
+ clearConversation() {
+ this.messages = [
+ { role: 'system', content: this.systemPrompt }
+ ];
+ this.functionCallContext = new FunctionCallContext(this.functionFactory, this.messages);
+ }
+
+ async getChatCompletions(userInput, callback) {
+ this.messages.push({ role: 'user', content: userInput });
+
+ let contentComplete = "";
+ while (true) {
+ const events = this.client.listChatCompletions(this.deploymentName, this.messages, {
+ functions: this.functionFactory.getFunctionSchemas(),
+ });
+
+ for await (const event of events) {
+ for (const choice of event.choices) {
+
+ this.functionCallContext.checkForUpdate(choice);
+
+ let content = choice.delta?.content;
+ if (choice.finishReason === 'length') {
+ content = `${content}\nERROR: Exceeded token limit!`;
+ }
+
+ if (content != null) {
+ callback(content);
+ await new Promise(r => setTimeout(r, 50)); // delay to simulate real-time output, word by word
+ contentComplete += content;
+ }
+ }
+ }
+
+ if (this.functionCallContext.tryCallFunction() !== undefined) {
+ this.functionCallContext.clear();
+ continue;
+ }
+
+ this.messages.push({ role: 'assistant', content: contentComplete });
+ return contentComplete;
+ }
+ }
+}
+
+exports.ChatCompletionsFunctionsStreaming = ChatCompletionsFunctionsStreaming;
\ No newline at end of file
diff --git a/src/ai/.x/templates/openai-functions-streaming-js/FunctionCallContext.js b/src/ai/.x/templates/openai-functions-streaming-js/FunctionCallContext.js
new file mode 100644
index 00000000..79cd69eb
--- /dev/null
+++ b/src/ai/.x/templates/openai-functions-streaming-js/FunctionCallContext.js
@@ -0,0 +1,47 @@
+class FunctionCallContext {
+ constructor(function_factory, messages) {
+ this.function_factory = function_factory;
+ this.messages = messages;
+ this.function_name = "";
+ this.function_arguments = "";
+ }
+
+ checkForUpdate(choice) {
+ let updated = false;
+
+ const name = choice.delta?.functionCall?.name;
+ if (name !== undefined) {
+ this.function_name = name;
+ updated = true;
+ }
+
+ const args = choice.delta?.functionCall?.arguments;
+ if (args !== undefined) {
+ this.function_arguments = `${this.function_arguments}${args}`;
+ updated = true;
+ }
+
+ return updated;
+ }
+
+ tryCallFunction() {
+ let result = this.function_factory.tryCallFunction(this.function_name, this.function_arguments);
+ if (result === undefined) {
+ return undefined;
+ }
+
+ console.log(`assistant-function: ${this.function_name}(${this.function_arguments}) => ${result}`);
+
+ this.messages.push({ role: 'assistant', function_call: { name: this.function_name, arguments: this.function_arguments } });
+ this.messages.push({ role: 'function', content: result, name: this.function_name });
+
+ return result;
+ }
+
+ clear() {
+ this.function_name = "";
+ this.function_arguments = "";
+ }
+}
+
+exports.FunctionCallContext = FunctionCallContext;
diff --git a/src/ai/.x/templates/openai-functions-streaming-js/FunctionFactory.js b/src/ai/.x/templates/openai-functions-streaming-js/FunctionFactory.js
new file mode 100644
index 00000000..63282684
--- /dev/null
+++ b/src/ai/.x/templates/openai-functions-streaming-js/FunctionFactory.js
@@ -0,0 +1,24 @@
+class FunctionFactory {
+ constructor() {
+ this.functions = {};
+ }
+
+ addFunction(schema, fun) {
+ this.functions[schema.name] = { schema: schema, function: fun };
+ }
+
+ getFunctionSchemas() {
+ return Object.values(this.functions).map(value => value.schema);
+ }
+
+ tryCallFunction(function_name, function_arguments) {
+ const function_info = this.functions[function_name];
+ if (function_info === undefined) {
+ return undefined;
+ }
+
+ return function_info.function(function_arguments);
+ }
+}
+
+exports.FunctionFactory = FunctionFactory;
diff --git a/src/ai/.x/templates/openai-functions-streaming-js/Main.js b/src/ai/.x/templates/openai-functions-streaming-js/Main.js
new file mode 100644
index 00000000..786a26af
--- /dev/null
+++ b/src/ai/.x/templates/openai-functions-streaming-js/Main.js
@@ -0,0 +1,51 @@
+<#@ template hostspecific="true" #>
+<#@ output extension=".js" encoding="utf-8" #>
+<#@ parameter type="System.String" name="OPENAI_ENDPOINT" #>
+<#@ parameter type="System.String" name="OPENAI_API_KEY" #>
+<#@ parameter type="System.String" name="AZURE_OPENAI_CHAT_DEPLOYMENT" #>
+<#@ parameter type="System.String" name="AZURE_OPENAI_SYSTEM_PROMPT" #>
+const customFunctions = require("./ChatCompletionsCustomFunctions");
+const { getCurrentWeatherSchema, getCurrentWeather } = customFunctions;
+const { getCurrentDateSchema, getCurrentDate } = customFunctions;
+const { FunctionFactory } = require("./FunctionFactory");
+const { ChatCompletionsFunctionsStreaming } = require("./ChatCompletionsFunctionsStreaming");
+
+const readline = require('readline');
+const rl = readline.createInterface({
+ input: process.stdin,
+ output: process.stdout
+});
+
+async function main() {
+
+ let factory = new FunctionFactory();
+ factory.addFunction(getCurrentWeatherSchema, getCurrentWeather);
+ factory.addFunction(getCurrentDateSchema, getCurrentDate);
+
+ const endpoint = process.env["OPENAI_ENDPOINT"] || "<#= OPENAI_ENDPOINT #>";
+ const azureApiKey = process.env["OPENAI_API_KEY"] || "<#= OPENAI_API_KEY #>";
+ const deploymentName = process.env["AZURE_OPENAI_CHAT_DEPLOYMENT"] || "<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>" ;
+ const systemPrompt = process.env["AZURE_OPENAI_SYSTEM_PROMPT"] || "<#= AZURE_OPENAI_SYSTEM_PROMPT #>" ;
+
+ const streamingChatCompletions = new ChatCompletionsFunctionsStreaming(systemPrompt, endpoint, azureApiKey, deploymentName, factory);
+
+ while (true) {
+
+ const input = await new Promise(resolve => rl.question('User: ', resolve));
+ if (input === 'exit' || input === '') break;
+
+ let response = await streamingChatCompletions.getChatCompletions(input, (content) => {
+ console.log(`assistant-streaming: ${content}`);
+ });
+
+ console.log(`\nAssistant: ${response}\n`);
+ }
+
+ console.log('Bye!');
+}
+
+main().catch((err) => {
+ console.error("The sample encountered an error:", err);
+});
+
+module.exports = { main };
diff --git a/src/ai/.x/templates/openai-functions-streaming-js/_.json b/src/ai/.x/templates/openai-functions-streaming-js/_.json
new file mode 100644
index 00000000..7223f71b
--- /dev/null
+++ b/src/ai/.x/templates/openai-functions-streaming-js/_.json
@@ -0,0 +1,8 @@
+{
+ "_Name": "OpenAI Chat Completions (Functions) in JavaScript",
+ "_Language": "JavaScript",
+ "OPENAI_ENDPOINT": "",
+ "OPENAI_API_KEY": "",
+ "AZURE_OPENAI_CHAT_DEPLOYMENT": "",
+ "AZURE_OPENAI_SYSTEM_PROMPT": "You are a helpful AI assistant."
+}
\ No newline at end of file
diff --git a/src/ai/.x/templates/openai-functions-streaming-js/package.json b/src/ai/.x/templates/openai-functions-streaming-js/package.json
new file mode 100644
index 00000000..9532927e
--- /dev/null
+++ b/src/ai/.x/templates/openai-functions-streaming-js/package.json
@@ -0,0 +1,15 @@
+{
+ "name": "openai-functions-streaming",
+ "version": "1.0.0",
+ "description": "",
+ "main": "Main.js",
+ "scripts": {
+ "test": "echo \"Error: no test specified\" && exit 1"
+ },
+ "author": "",
+ "license": "MIT",
+ "dependencies": {
+ "@azure/openai": "1.0.0-beta.8"
+ }
+ }
+
\ No newline at end of file
diff --git a/src/ai/.x/templates/openai-webpage/.env b/src/ai/.x/templates/openai-webpage/.env
new file mode 100644
index 00000000..191f56b3
--- /dev/null
+++ b/src/ai/.x/templates/openai-webpage/.env
@@ -0,0 +1,10 @@
+<#@ template hostspecific="true" #>
+<#@ output extension=".env" encoding="utf-8" #>
+<#@ parameter type="System.String" name="OPENAI_ENDPOINT" #>
+<#@ parameter type="System.String" name="OPENAI_API_KEY" #>
+<#@ parameter type="System.String" name="AZURE_OPENAI_CHAT_DEPLOYMENT" #>
+<#@ parameter type="System.String" name="AZURE_OPENAI_SYSTEM_PROMPT" #>
+AZURE_OPENAI_CHAT_DEPLOYMENT=<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>
+OPENAI_API_KEY=<#= OPENAI_API_KEY #>
+OPENAI_ENDPOINT=<#= OPENAI_ENDPOINT #>
+AZURE_OPENAI_SYSTEM_PROMPT=<#= AZURE_OPENAI_SYSTEM_PROMPT #>
diff --git a/src/ai/.x/templates/openai-webpage/.vscode/launch.json b/src/ai/.x/templates/openai-webpage/.vscode/launch.json
new file mode 100644
index 00000000..30fc6258
--- /dev/null
+++ b/src/ai/.x/templates/openai-webpage/.vscode/launch.json
@@ -0,0 +1,11 @@
+{
+ "version": "0.2.0",
+ "configurations": [
+ {
+ "type": "chrome",
+ "request": "launch",
+ "name": "Launch Chrome",
+ "file": "${workspaceFolder}/index.html"
+ }
+ ]
+}
\ No newline at end of file
diff --git a/src/ai/.x/templates/openai-webpage/.vscode/tasks.json b/src/ai/.x/templates/openai-webpage/.vscode/tasks.json
new file mode 100644
index 00000000..d5460be9
--- /dev/null
+++ b/src/ai/.x/templates/openai-webpage/.vscode/tasks.json
@@ -0,0 +1,17 @@
+{
+ "version": "2.0.0",
+ "tasks": [
+ {
+ "label": "install dependencies",
+ "type": "shell",
+ "command": "npm install",
+ "problemMatcher": []
+ },
+ {
+ "label": "build",
+ "type": "shell",
+ "command": "npx webpack",
+ "problemMatcher": []
+ }
+ ]
+}
\ No newline at end of file
diff --git a/src/ai/.x/templates/openai-webpage/README.md b/src/ai/.x/templates/openai-webpage/README.md
new file mode 100644
index 00000000..8fee923d
--- /dev/null
+++ b/src/ai/.x/templates/openai-webpage/README.md
@@ -0,0 +1,35 @@
+# `ai` chat website
+
+This is a simple website chat interface that uses OpenAI's API to generate text responses to user input.
+
+User input is typed into a text box and added to the conversation as a message inside a chat panel. The panel scrolls up and the computer responds with streaming text output into another message in the chat panel. There is a left nav that has a "new chat" button and has a spot for future expansion with a list of historical chats.
+
+## Setup
+
+To build the website, run the following commands:
+
+```bash
+npm install
+npx webpack
+```
+
+To run the website, launch `index.html` in your browser.
+
+These setup steps are also represented in tasks.json and launch.json, so that you can build and run the website from within VS Code.
+
+## Project structure
+
+| Category | File | Description
+| --- | --- | ---
+| **SOURCE CODE** | ai.png | Logo/icon for the website.
+| | index.html | HTML file with controls and layout.
+| | style.css | CSS file with layout and styling.
+| | src/script.js | Main JS file with HTML to JS interactions.
+| | src/ChatCompletionsStreaming.js | Main JS file with JS to OpenAI interactions.
+| | |
+| **VS CODE** | .vscode/tasks.json | VS Code tasks to build and run the website.
+| | .vscode/launch.json | VS Code launch configuration to run the website.
+| | |
+| **BUILD + PACKAGING** | .env | Contains the API keys, endpoints, etc.
+| | package.json | Contains the dependencies.
+| | webpack.config.js | The webpack config file.
\ No newline at end of file
diff --git a/src/ai/.x/templates/openai-webpage/_.json b/src/ai/.x/templates/openai-webpage/_.json
new file mode 100644
index 00000000..b996236c
--- /dev/null
+++ b/src/ai/.x/templates/openai-webpage/_.json
@@ -0,0 +1,8 @@
+{
+ "_Name": "OpenAI Webpage (Streaming + Functions)",
+  "_Language": "JavaScript",
+ "OPENAI_ENDPOINT": "",
+ "OPENAI_API_KEY": "",
+ "AZURE_OPENAI_CHAT_DEPLOYMENT": "",
+ "AZURE_OPENAI_SYSTEM_PROMPT": "You are a helpful AI assistant."
+}
\ No newline at end of file
diff --git a/src/ai/.x/templates/openai-webpage/ai.png b/src/ai/.x/templates/openai-webpage/ai.png
new file mode 100644
index 00000000..4ba344c9
Binary files /dev/null and b/src/ai/.x/templates/openai-webpage/ai.png differ
diff --git a/src/ai/.x/templates/openai-webpage/index.html b/src/ai/.x/templates/openai-webpage/index.html
new file mode 100644
index 00000000..e27151ba
--- /dev/null
+++ b/src/ai/.x/templates/openai-webpage/index.html
@@ -0,0 +1,62 @@
+
+
+
+
+
+
+
+
+ Chat Interface
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+

+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/src/ai/.x/templates/openai-webpage/package.json b/src/ai/.x/templates/openai-webpage/package.json
new file mode 100644
index 00000000..7d113850
--- /dev/null
+++ b/src/ai/.x/templates/openai-webpage/package.json
@@ -0,0 +1,22 @@
+{
+ "name": "chat-interface",
+ "version": "1.0.0",
+ "description": "Chat Interface with OpenAI",
+ "main": "script.js",
+ "scripts": {
+ "test": "echo \"Error: no test specified\" && exit 1"
+ },
+ "author": "",
+ "license": "MIT",
+ "dependencies": {
+ "@azure/openai": "1.0.0-beta.8",
+ "highlight.js": "^11.7.2",
+ "marked": "^4.0.10"
+ },
+ "keywords": [],
+ "devDependencies": {
+ "dotenv-webpack": "^7.0.3",
+ "webpack": "^5.89.0",
+ "webpack-cli": "^5.1.4"
+ }
+}
diff --git a/src/ai/.x/templates/openai-webpage/src/ChatCompletionsCustomFunctions.js b/src/ai/.x/templates/openai-webpage/src/ChatCompletionsCustomFunctions.js
new file mode 100644
index 00000000..1776c03e
--- /dev/null
+++ b/src/ai/.x/templates/openai-webpage/src/ChatCompletionsCustomFunctions.js
@@ -0,0 +1,16 @@
+function getCurrentDate() {
+ const date = new Date();
+ return `${date.getFullYear()}-${date.getMonth() + 1}-${date.getDate()}`;
+}
+
+const getCurrentDateSchema = {
+ name: "get_current_date",
+ description: "Get the current date",
+ parameters: {
+ type: "object",
+ properties: {},
+ },
+};
+
+exports.getCurrentDate = getCurrentDate;
+exports.getCurrentDateSchema = getCurrentDateSchema;
diff --git a/src/ai/.x/templates/openai-webpage/src/ChatCompletionsFunctionsStreaming.js b/src/ai/.x/templates/openai-webpage/src/ChatCompletionsFunctionsStreaming.js
new file mode 100644
index 00000000..b379f063
--- /dev/null
+++ b/src/ai/.x/templates/openai-webpage/src/ChatCompletionsFunctionsStreaming.js
@@ -0,0 +1,61 @@
+const { OpenAIClient, AzureKeyCredential } = require("@azure/openai");
+const { FunctionFactory } = require("./FunctionFactory");
+const { FunctionCallContext } = require("./FunctionCallContext");
+
+class ChatCompletionsFunctionsStreaming {
+ constructor(systemPrompt, endpoint, azureApiKey, deploymentName, functionFactory) {
+ this.systemPrompt = systemPrompt;
+ this.endpoint = endpoint;
+ this.azureApiKey = azureApiKey;
+ this.deploymentName = deploymentName;
+ this.client = new OpenAIClient(this.endpoint, new AzureKeyCredential(this.azureApiKey));
+ this.functionFactory = functionFactory || new FunctionFactory();
+ this.clearConversation();
+ }
+
+ clearConversation() {
+ this.messages = [
+ { role: 'system', content: this.systemPrompt }
+ ];
+ this.functionCallContext = new FunctionCallContext(this.functionFactory, this.messages);
+ }
+
+ async getChatCompletions(userInput, callback) {
+ this.messages.push({ role: 'user', content: userInput });
+
+ let contentComplete = "";
+ while (true) {
+ const events = this.client.listChatCompletions(this.deploymentName, this.messages, {
+ functions: this.functionFactory.getFunctionSchemas(),
+ });
+
+ for await (const event of events) {
+ for (const choice of event.choices) {
+
+ this.functionCallContext.checkForUpdate(choice);
+
+ let content = choice.delta?.content;
+ if (choice.finishReason === 'length') {
+ content = `${content}\nERROR: Exceeded token limit!`;
+ }
+
+ if (content != null) {
+ callback(content);
+ await new Promise(r => setTimeout(r, 50)); // delay to simulate real-time output, word by word
+ contentComplete += content;
+ }
+ }
+ }
+
+ if (this.functionCallContext.tryCallFunction() !== undefined) {
+ this.functionCallContext.clear();
+ continue;
+ }
+
+ this.messages.push({ role: 'assistant', content: contentComplete });
+ return contentComplete;
+ }
+ }
+}
+
+exports.ChatCompletionsFunctionsStreaming = ChatCompletionsFunctionsStreaming;
\ No newline at end of file
diff --git a/src/ai/.x/templates/openai-webpage/src/FunctionCallContext.js b/src/ai/.x/templates/openai-webpage/src/FunctionCallContext.js
new file mode 100644
index 00000000..79cd69eb
--- /dev/null
+++ b/src/ai/.x/templates/openai-webpage/src/FunctionCallContext.js
@@ -0,0 +1,47 @@
+class FunctionCallContext {
+ constructor(function_factory, messages) {
+ this.function_factory = function_factory;
+ this.messages = messages;
+ this.function_name = "";
+ this.function_arguments = "";
+ }
+
+ checkForUpdate(choice) {
+ let updated = false;
+
+ const name = choice.delta?.functionCall?.name;
+ if (name !== undefined) {
+ this.function_name = name;
+ updated = true;
+ }
+
+ const args = choice.delta?.functionCall?.arguments;
+ if (args !== undefined) {
+ this.function_arguments = `${this.function_arguments}${args}`;
+ updated = true;
+ }
+
+ return updated;
+ }
+
+ tryCallFunction() {
+ let result = this.function_factory.tryCallFunction(this.function_name, this.function_arguments);
+ if (result === undefined) {
+ return undefined;
+ }
+
+ console.log(`assistant-function: ${this.function_name}(${this.function_arguments}) => ${result}`);
+
+ this.messages.push({ role: 'assistant', function_call: { name: this.function_name, arguments: this.function_arguments } });
+ this.messages.push({ role: 'function', content: result, name: this.function_name });
+
+ return result;
+ }
+
+ clear() {
+ this.function_name = "";
+ this.function_arguments = "";
+ }
+}
+
+exports.FunctionCallContext = FunctionCallContext;
diff --git a/src/ai/.x/templates/openai-webpage/src/FunctionFactory.js b/src/ai/.x/templates/openai-webpage/src/FunctionFactory.js
new file mode 100644
index 00000000..63282684
--- /dev/null
+++ b/src/ai/.x/templates/openai-webpage/src/FunctionFactory.js
@@ -0,0 +1,24 @@
+class FunctionFactory {
+ constructor() {
+ this.functions = {};
+ }
+
+ addFunction(schema, fun) {
+ this.functions[schema.name] = { schema: schema, function: fun };
+ }
+
+ getFunctionSchemas() {
+ return Object.values(this.functions).map(value => value.schema);
+ }
+
+ tryCallFunction(function_name, function_arguments) {
+ const function_info = this.functions[function_name];
+ if (function_info === undefined) {
+ return undefined;
+ }
+
+ return function_info.function(function_arguments);
+ }
+}
+
+exports.FunctionFactory = FunctionFactory;
diff --git a/src/ai/.x/templates/openai-webpage/src/script.js b/src/ai/.x/templates/openai-webpage/src/script.js
new file mode 100644
index 00000000..477ce271
--- /dev/null
+++ b/src/ai/.x/templates/openai-webpage/src/script.js
@@ -0,0 +1,287 @@
+const marked = require("marked");
+const hljs = require("highlight.js");
+
+const customFunctions = require("./ChatCompletionsCustomFunctions");
+const { getCurrentDateSchema, getCurrentDate } = customFunctions;
+const { FunctionFactory } = require("./FunctionFactory");
+
+const { ChatCompletionsFunctionsStreaming } = require('./ChatCompletionsFunctionsStreaming');
+let streamingChatCompletions;
+
+function streamingChatCompletionsInit() {
+
+ let factory = new FunctionFactory();
+ factory.addFunction(getCurrentDateSchema, getCurrentDate);
+
+ const endpoint = process.env.OPENAI_ENDPOINT;
+ const azureApiKey = process.env.OPENAI_API_KEY;
+ const deploymentName = process.env.AZURE_OPENAI_CHAT_DEPLOYMENT;
+ const systemPrompt = "You are a helpful AI assistant.";
+
+ if (!endpoint || endpoint.startsWith('');
+ chatPanel.scrollTop = chatPanel.scrollHeight;
+}
+
+function chatPanelGetElement() {
+ return document.getElementById("chatPanel");
+}
+
+function chatPanelAppendMessage(sender, message) {
+ logoHide();
+
+ let messageContent = document.createElement("p");
+ messageContent.className = "message-content";
+ messageContent.innerHTML = message;
+
+ let messageAuthor = document.createElement("p");
+ messageAuthor.className = "message-author";
+ messageAuthor.innerHTML = sender == "user" ? "You" : "Assistant";
+
+ let divContainingBoth = document.createElement("div");
+ divContainingBoth.className = sender === "user" ? "user" : "computer";
+ divContainingBoth.appendChild(messageAuthor);
+ divContainingBoth.appendChild(messageContent);
+
+ let chatPanel = chatPanelGetElement();
+ chatPanel.appendChild(divContainingBoth);
+ chatPanelScrollToBottom();
+
+ return messageContent;
+}
+
+function chatPanelIsScrollAtBottom() {
+ let chatPanel = chatPanelGetElement();
+ let atBottom = Math.abs(chatPanel.scrollHeight - chatPanel.clientHeight - chatPanel.scrollTop) < 1;
+ return atBottom;
+}
+
+function chatPanelScrollToBottom() {
+ let chatPanel = chatPanelGetElement();
+ chatPanel.scrollTop = chatPanel.scrollHeight;
+}
+
+function chatPanelClear() {
+ let chatPanel = chatPanelGetElement();
+ chatPanel.innerHTML = '';
+}
+
+function logoGetElement() {
+ return document.getElementById("logo");
+}
+
+function logoShow() {
+ let logo = logoGetElement();
+ logo.style.display = "block";
+}
+
+function logoHide() {
+ let logo = logoGetElement();
+ logo.style.display = "none";
+}
+
+function markdownInit() {
+ marked.setOptions({
+ highlight: function (code, lang) {
+ let hl = lang === undefined || lang === ''
+ ? hljs.highlightAuto(code).value
+ : hljs.highlight(lang, code).value;
+            return `<pre><code class="hljs">${hl}</code></pre>`;
+ }
+ });
+}
+
+function markdownToHtml(markdownText) {
+ try {
+ return marked.parse(markdownText);
+ }
+ catch (error) {
+ return undefined;
+ }
+}
+
+function themeInit() {
+ let currentTheme = localStorage.getItem('theme');
+ if (currentTheme === 'dark') {
+ themeSetDark();
+ }
+ else if (currentTheme === 'light') {
+ themeSetLight();
+ }
+ toggleThemeButtonInit();
+}
+
+function themeIsLight() {
+ return document.body.classList.contains("light-theme");
+}
+
+function themeIsDark() {
+ return !themeIsLight();
+}
+
+function toggleTheme() {
+ if (themeIsLight()) {
+ themeSetDark();
+ } else {
+ themeSetLight();
+ }
+}
+
+function themeSetLight() {
+ if (!themeIsLight()) {
+ document.body.classList.add("light-theme");
+ localStorage.setItem('theme', 'light');
+
+ let iconElement = toggleThemeButtonGetElement().children[0];
+ iconElement.classList.remove("fa-toggle-on");
+ iconElement.classList.add("fa-toggle-off");
+ }
+}
+
+function themeSetDark() {
+ if (!themeIsDark()) {
+ document.body.classList.remove("light-theme");
+ localStorage.setItem('theme', 'dark');
+
+ let iconElement = toggleThemeButtonGetElement().children[0];
+ iconElement.classList.remove("fa-toggle-off");
+ iconElement.classList.add("fa-toggle-on");
+ }
+}
+
+function toggleThemeButtonGetElement() {
+ return document.getElementById("toggleThemeButton");
+}
+
+function toggleThemeButtonInit() {
+ let buttonElement = toggleThemeButtonGetElement();
+ buttonElement.addEventListener("click", toggleTheme);
+ buttonElement.addEventListener('keydown', toggleThemeButtonHandleKeyDown());
+}
+
+function toggleThemeButtonHandleKeyDown() {
+ return function (event) {
+ if (event.code === 'Enter' || event.code === 'Space') {
+ toggleTheme();
+ }
+ };
+}
+
+function userInputTextAreaGetElement() {
+ return document.getElementById("userInput");
+}
+
+function userInputTextAreaInit() {
+ let inputElement = userInputTextAreaGetElement();
+ inputElement.addEventListener("keydown", userInputTextAreaHandleKeyDown());
+ inputElement.addEventListener("input", userInputTextAreaUpdateHeight);
+}
+
+function userInputTextAreaFocus() {
+ let inputElement = userInputTextAreaGetElement();
+ inputElement.focus();
+}
+
+function userInputTextAreaClear() {
+ userInputTextAreaGetElement().value = '';
+ userInputTextAreaUpdateHeight();
+}
+
+function userInputTextAreaUpdateHeight() {
+ let inputElement = userInputTextAreaGetElement();
+ inputElement.style.height = 'auto';
+    inputElement.style.height = (inputElement.scrollHeight) + 'px';
+}
+
+function userInputTextAreaHandleKeyDown() {
+ return function (event) {
+ if (event.key === "Enter") {
+ if (!event.shiftKey) {
+ event.preventDefault();
+ sendMessage();
+ }
+ }
+ };
+}
+
+function varsInit() {
+ document.addEventListener('DOMContentLoaded', varsUpdateHeightsAndWidths);
+ window.addEventListener('resize', varsUpdateHeightsAndWidths);
+}
+
+function varsUpdateHeightsAndWidths() {
+ let headerHeight = document.querySelector('#header').offsetHeight;
+ let userInputHeight = document.querySelector('#userInputPanel').offsetHeight;
+ document.documentElement.style.setProperty('--header-height', headerHeight + 'px');
+ document.documentElement.style.setProperty('--input-height', userInputHeight + 'px');
+}
+
+function newChat() {
+ chatPanelClear();
+ logoShow();
+ userInputTextAreaFocus();
+ streamingChatCompletionsClear();
+}
+
+function sendMessage() {
+ let inputElement = userInputTextAreaGetElement();
+ let inputValue = inputElement.value;
+
+ let notEmpty = inputValue.trim() !== '';
+ if (notEmpty) {
+        let html = markdownToHtml(inputValue) || inputValue.replace(/\n/g, '<br/>');
+ chatPanelAppendMessage('user', html);
+ userInputTextAreaClear();
+ varsUpdateHeightsAndWidths();
+ streamingChatCompletionsProcessInput(inputValue);
+ }
+}
+
+themeInit();
+markdownInit();
+userInputTextAreaInit();
+varsInit();
+streamingChatCompletionsInit();
+userInputTextAreaFocus();
+
+window.sendMessage = sendMessage;
+window.toggleTheme = toggleTheme;
+window.newChat = newChat;
diff --git a/src/ai/.x/templates/openai-webpage/style.css b/src/ai/.x/templates/openai-webpage/style.css
new file mode 100644
index 00000000..2b1dd145
--- /dev/null
+++ b/src/ai/.x/templates/openai-webpage/style.css
@@ -0,0 +1,367 @@
+:root {
+ --header-height: 0px;
+ --input-height: 0px;
+ --send-button-width: 36px;
+ --left-side-width: 250px;
+ --right-side-width: 0px;
+ --right-side-max-width: 768px;
+ --max-textarea-height: 200px;
+ --logo-size: 0.75in;
+ --logo-icon-size: 1.5em;
+ --border-radius: 10px;
+}
+
+body {
+ background-color: #111;
+ color: #f2f2f2;
+ font-size: medium;
+ font-family: system-ui;
+ height: 100vh;
+ margin: 0px;
+ overflow: hidden;
+ max-height: 100vh;
+}
+
+#header {
+ color: #222;
+}
+
+body.light-theme #header {
+ color: #f2f2f2;
+}
+
+#logo {
+ display: block;
+ margin-left: auto;
+ margin-right: auto;
+ margin-top: calc((100vh - var(--header-height) - var(--input-height) - 80px - var(--logo-size)) / 100 * 33);
+ filter: grayscale(50%);
+ width: var(--logo-size);
+ height: var(--logo-size);
+}
+
+#logoIcon {
+ margin-bottom: calc(var(--logo-icon-size) / 4);
+ margin-right: calc(var(--logo-icon-size) / 4);
+ filter: grayscale(50%);
+ width: var(--logo-icon-size);
+ height: var(--logo-icon-size);
+}
+
+#leftSide {
+ background-color: #000;
+ color: #f2f2f2;
+ width: var(--left-side-width);
+ max-width: var(--left-side-width);
+ height: 100vh;
+ max-height: 100vh;
+ overflow-y: auto;
+}
+
+#newChatButton {
+ border: none;
+ cursor: pointer;
+ border-radius: var(--border-radius);
+    /* background-color: #557CB4; */
+ width: calc(var(--left-side-width) - 16px);
+ margin-top: 16px;
+ margin-left: auto;
+ margin-right: auto;
+}
+
+#rightSide {
+ width: 100%;
+ margin-left: auto;
+ margin-right: auto;
+ max-width: var(--right-side-max-width);
+}
+
+#rightSideInside {
+ margin-left: auto;
+ margin-right: auto;
+ max-width: var(--right-side-max-width);
+}
+
+#toggleThemeButton {
+ position: fixed;
+ top: 10px;
+ right: 0px;
+ cursor: pointer;
+ color: #fff;
+}
+
+#chatPanel {
+ height: 100%;
+ max-height: calc(100vh - var(--header-height) - var(--input-height) - 32px);
+ overflow-y: auto;
+}
+
+#sendButton {
+ border: none;
+ cursor: pointer;
+ font-size: 1em;
+ border-radius: var(--border-radius);
+ background-color: #557CB4;
+ width: var(--send-button-width);
+ padding: 0px;
+}
+
+#userInputPanel {
+ display: flex;
+ max-width: 768px;
+}
+
+#userInput {
+ margin-right: 15px;
+ width: 100%;
+ max-height: var(--max-textarea-height);
+ border-radius: var(--border-radius);
+ border-width: 2px;
+}
+
+textarea {
+ resize: none;
+ background-color: #111;
+ color: #f2f2f2;
+}
+
+body.light-theme textarea {
+ background-color: #fff;
+ color: #111;
+}
+
+textarea.w3-border {
+ border-color: #333 !important;
+}
+
+body.light-theme textarea.w3-border {
+ border-color: #ddd !important;
+}
+
+textarea.w3-border:focus-visible {
+ border-color: #555 !important;
+ outline: none;
+}
+
+body.light-theme textarea.w3-border:focus-visible {
+ border-color: #bbb !important;
+ outline: none;
+}
+
+.user {
+ color: #d8d8d8;
+ background-color: #111;
+ border-radius: var(--border-radius);
+}
+
+.computer {
+ color: #d8d8d8;
+ background-color: #111;
+ border-radius: var(--border-radius);
+}
+
+div.user {
+ margin-bottom: 8px;
+ margin-right: 0px;
+ text-align: left;
+}
+
+div.computer {
+ margin-bottom: 8px;
+ margin-right: 0px;
+ text-align: left;
+}
+
+.message-author {
+ font-weight: bold;
+ padding-top: calc(var(--border-radius) / 2);
+ padding-left: var(--border-radius);
+ padding-right: var(--border-radius);
+}
+
+p.message-author, p.message-author p {
+ margin: 0px;
+}
+
+.message-content {
+ padding-left: var(--border-radius);
+ padding-bottom: calc(var(--border-radius) / 2);
+ padding-right: var(--border-radius);
+}
+
+p.message-content, p.message-content p {
+ margin-top: 0px;
+ margin-left: 0px;
+ margin-right: 0px;
+}
+
+.light-theme {
+ background-color: #fff;
+}
+
+body.light-theme #toggleThemeButton {
+ color: #888;
+}
+
+body.light-theme .user {
+ background-color: #fdfdfd;
+ color: #111;
+}
+
+body.light-theme .computer {
+ background-color: #fdfdfd;
+ color: #111;
+}
+
+#userInput::-webkit-scrollbar {
+ display: none;
+}
+#userInput {
+ -ms-overflow-style: none;
+ scrollbar-width: none;
+}
+
+::-webkit-scrollbar {
+ height: 1rem;
+ width: .5rem;
+ background-color: #111;
+}
+
+body.light-theme ::-webkit-scrollbar {
+ background-color: #fdfdfd;
+}
+
+::-webkit-scrollbar:horizontal {
+ height: .5rem;
+ width: 1rem
+}
+
+::-webkit-scrollbar:vertical {
+ height: .5rem;
+ width: 1rem
+}
+
+::-webkit-scrollbar-track {
+ background-color: transparent;
+ border-radius: 9999px;
+}
+
+::-webkit-scrollbar-thumb {
+ background-color: #0a0a0a;
+ border-color: rgba(255,255,255,var(--tw-border-opacity));
+ border-radius: 9999px;
+ border-width: 1px;
+}
+
+body.light-theme ::-webkit-scrollbar-thumb {
+ background-color: #fafafa;
+}
+
+::-webkit-scrollbar-thumb:hover {
+ background-color: rgba(217,217,227,var(--tw-bg-opacity))
+}
+
+
+.hljs {
+ margin: 0px;
+ padding: 16px;
+ padding-right: 0px;
+ border-radius: var(--border-radius);
+ overflow-x: auto;
+ max-width: 90vw;
+}
+
+/*
+
+Atom One Dark by Daniel Gamage
+Original One Dark Syntax theme from https://github.com/atom/one-dark-syntax
+
+base: #282c34
+mono-1: #abb2bf
+mono-2: #818896
+mono-3: #5c6370
+hue-1: #56b6c2
+hue-2: #61aeee
+hue-3: #c678dd
+hue-4: #98c379
+hue-5: #e06c75
+hue-5-2: #be5046
+hue-6: #d19a66
+hue-6-2: #e6c07b
+
+*/
+
+.hljs {
+ color: #abb2bf;
+ background: #282c34;
+ }
+
+ .hljs-comment,
+ .hljs-quote {
+ color: #5c6370;
+ font-style: italic;
+ }
+
+ .hljs-doctag,
+ .hljs-keyword,
+ .hljs-formula {
+ color: #c678dd;
+ }
+
+ .hljs-section,
+ .hljs-name,
+ .hljs-selector-tag,
+ .hljs-deletion,
+ .hljs-subst {
+ color: #e06c75;
+ }
+
+ .hljs-literal {
+ color: #56b6c2;
+ }
+
+ .hljs-string,
+ .hljs-regexp,
+ .hljs-addition,
+ .hljs-attribute,
+ .hljs-meta .hljs-string {
+ color: #98c379;
+ }
+
+ .hljs-attr,
+ .hljs-variable,
+ .hljs-template-variable,
+ .hljs-type,
+ .hljs-selector-class,
+ .hljs-selector-attr,
+ .hljs-selector-pseudo,
+ .hljs-number {
+ color: #d19a66;
+ }
+
+ .hljs-symbol,
+ .hljs-bullet,
+ .hljs-link,
+ .hljs-meta,
+ .hljs-selector-id,
+ .hljs-title {
+ color: #61aeee;
+ }
+
+ .hljs-built_in,
+ .hljs-title.class_,
+ .hljs-class .hljs-title {
+ color: #e6c07b;
+ }
+
+ .hljs-emphasis {
+ font-style: italic;
+ }
+
+ .hljs-strong {
+ font-weight: bold;
+ }
+
+ .hljs-link {
+ text-decoration: underline;
+ }
diff --git a/src/ai/.x/templates/openai-webpage/webpack.config.js b/src/ai/.x/templates/openai-webpage/webpack.config.js
new file mode 100644
index 00000000..b3b87bf1
--- /dev/null
+++ b/src/ai/.x/templates/openai-webpage/webpack.config.js
@@ -0,0 +1,20 @@
+const path = require('path');
+const webpack = require('webpack');
+const Dotenv = require('dotenv-webpack');
+
+module.exports = {
+ entry: './src/script.js',
+ output: {
+ filename: 'main.js',
+ path: path.resolve(__dirname, 'dist'),
+ },
+ plugins: [
+ new Dotenv(),
+ new webpack.DefinePlugin({
+ 'process.env.ENDPOINT': JSON.stringify(process.env.ENDPOINT),
+ 'process.env.AZURE_API_KEY': JSON.stringify(process.env.AZURE_API_KEY),
+ 'process.env.DEPLOYMENT_NAME': JSON.stringify(process.env.DEPLOYMENT_NAME),
+ 'process.env.SYSTEM_PROMPT': JSON.stringify(process.env.SYSTEM_PROMPT),
+ }),
+ ],
+};
\ No newline at end of file
diff --git a/src/ai/ai-cli.csproj b/src/ai/ai-cli.csproj
index ff7bd608..030c0714 100644
--- a/src/ai/ai-cli.csproj
+++ b/src/ai/ai-cli.csproj
@@ -115,7 +115,7 @@
-
+
diff --git a/src/ai/commands/chat_command.cs b/src/ai/commands/chat_command.cs
index 3ff48576..3338d274 100644
--- a/src/ai/commands/chat_command.cs
+++ b/src/ai/commands/chat_command.cs
@@ -125,7 +125,7 @@ private string ChatRunNonFunction()
options.Messages.Clear();
options.Messages.Add(systemMessage);
- options.Messages.Add(new ChatMessage(ChatRole.User, text));
+ options.Messages.Add(new ChatRequestUserMessage(text));
var message = GetChatCompletion(client, options, funcContext);
var answer = message.Content;
@@ -159,7 +159,7 @@ private string ChatRunNonFunction()
return results.ToString(Formatting.None);
}
- private ChatMessage GetChatCompletion(OpenAIClient client, ChatCompletionsOptions options, HelperFunctionCallContext funcContext)
+ private ChatResponseMessage GetChatCompletion(OpenAIClient client, ChatCompletionsOptions options, HelperFunctionCallContext funcContext)
{
while (true)
{
@@ -370,15 +370,15 @@ private async Task>> GetChatFunctionTextHandler(string
var setEnv = _values.GetOrDefault("chat.set.environment", true);
var env = setEnv ? ConfigEnvironmentHelpers.GetEnvironment(_values) : null;
- var messages = new List();
+ var messages = new List();
var systemPrompt = _values.GetOrDefault("chat.message.system.prompt", DefaultSystemPrompt);
- messages.Add(new ChatMessage(ChatRole.System, systemPrompt));
+ messages.Add(new ChatRequestSystemMessage(systemPrompt));
return await Task.Run(() => {
Func> handler = (string text) => {
- messages.Add(new ChatMessage(ChatRole.User, text));
+ messages.Add(new ChatRequestUserMessage(text));
DisplayAssistantPromptLabel();
Console.ForegroundColor = ConsoleColor.Gray;
@@ -420,7 +420,7 @@ private async Task>> GetChatFunctionTextHandler(string
DisplayAssistantPromptTextStreamingDone();
CheckWriteChatAnswerOutputFile(output);
- messages.Add(new ChatMessage(ChatRole.Assistant, output));
+ messages.Add(new ChatRequestAssistantMessage(output));
return Task.FromResult(output);
};
@@ -600,7 +600,7 @@ private void DisplayAssistantFunctionCall(HelperFunctionCallContext context, str
private async Task> GetChatCompletionsAsync(OpenAIClient client, ChatCompletionsOptions options, HelperFunctionCallContext funcContext, string text)
{
- options.Messages.Add(new ChatMessage(ChatRole.User, text));
+ options.Messages.Add(new ChatRequestUserMessage(text));
DisplayAssistantPromptLabel();
Console.ForegroundColor = ConsoleColor.Gray;
@@ -633,7 +633,7 @@ private async Task> GetChatCom
DisplayAssistantPromptTextStreamingDone();
CheckWriteChatAnswerOutputFile(contentComplete);
- options.Messages.Add(new ChatMessage(ChatRole.Assistant, contentComplete));
+ options.Messages.Add(new ChatRequestAssistantMessage(contentComplete));
return response;
}
@@ -655,7 +655,7 @@ private ChatCompletionsOptions CreateChatCompletionOptions(string deployment)
options.DeploymentName = deployment;
var systemPrompt = _values.GetOrDefault("chat.message.system.prompt", DefaultSystemPrompt);
- options.Messages.Add(new ChatMessage(ChatRole.System, systemPrompt));
+ options.Messages.Add(new ChatRequestSystemMessage(systemPrompt));
var textFile = _values["chat.message.history.text.file"];
if (!string.IsNullOrEmpty(textFile)) AddChatMessagesFromTextFile(options.Messages, textFile);
@@ -665,7 +665,7 @@ private ChatCompletionsOptions CreateChatCompletionOptions(string deployment)
var frequencyPenalty = _values["chat.options.frequency.penalty"];
var presencePenalty = _values["chat.options.presence.penalty"];
- options.MaxTokens = TryParse(maxTokens, _defaultMaxTokens);
+ options.MaxTokens = TryParse(maxTokens, null);
options.Temperature = TryParse(temperature, _defaultTemperature);
options.FrequencyPenalty = TryParse(frequencyPenalty, _defaultFrequencyPenalty);
options.PresencePenalty = TryParse(presencePenalty, _defaultPresencePenalty);
@@ -707,17 +707,16 @@ private void AddAzureExtensionOptions(ChatCompletionsOptions options)
var queryType = QueryTypeFrom(_values["service.config.search.query.type"]) ?? AzureCognitiveSearchQueryType.VectorSimpleHybrid;
- var search = new AzureCognitiveSearchChatExtensionConfiguration(
- AzureChatExtensionType.AzureCognitiveSearch,
- new Uri(searchEndpoint),
- indexName)
+ var search = new AzureCognitiveSearchChatExtensionConfiguration()
{
+ SearchEndpoint = new Uri(searchEndpoint),
+ Key = searchKey,
+ IndexName = indexName,
QueryType = queryType,
DocumentCount = 16,
EmbeddingEndpoint = embeddingEndpoint,
+ EmbeddingKey = embeddingsKey,
};
- search.SetEmbeddingKey(embeddingsKey);
- search.SetSearchKey(searchKey);
options.AzureExtensionsOptions = new() { Extensions = { search } };
}
@@ -920,7 +919,7 @@ private ChatRole UpdateRole(ref string line, ChatRole? currentRole = null)
return currentRole ?? ChatRole.System;
}
- private void AddChatMessagesFromTextFile(IList messages, string textFile)
+ private void AddChatMessagesFromTextFile(IList messages, string textFile)
{
var existing = FileHelpers.DemandFindFileInDataPath(textFile, _values, "chat history");
var text = FileHelpers.ReadAllText(existing, Encoding.Default);
@@ -945,29 +944,41 @@ private void AddChatMessagesFromTextFile(IList messages, string tex
if (i == 0 && role == ChatRole.System && FirstMessageIsDefaultSystemPrompt(messages, role))
{
- messages.First().Content = line;
+ messages[0] = new ChatRequestSystemMessage(line);
continue;
}
- messages.Add(new ChatMessage(role, line));
+ messages.Add(role == ChatRole.System
+ ? new ChatRequestSystemMessage(line)
+ : role == ChatRole.User
+ ? new ChatRequestUserMessage(line)
+ : new ChatRequestAssistantMessage(line));
}
}
- private static bool FirstMessageIsDefaultSystemPrompt(IList messages, ChatRole role)
+ private static bool FirstMessageIsDefaultSystemPrompt(IList messages, ChatRole role)
{
- return messages.Count() == 1
- && messages.First().Role == ChatRole.System
- && messages.First().Content == DefaultSystemPrompt;
+ var message = messages.FirstOrDefault() as ChatRequestSystemMessage;
+ return message != null && message.Content == DefaultSystemPrompt;
}
- private static string ConvertMessagesToJson(IList messages)
+ private static string ConvertMessagesToJson(IList messages)
{
var sb = new StringBuilder();
sb.Append("[");
foreach (var message in messages)
{
+ var user = message as ChatRequestUserMessage;
+ var system = message as ChatRequestSystemMessage;
+ var assistant = message as ChatRequestAssistantMessage;
+ var content = system?.Content ?? user?.Content ?? assistant?.Content;
+
+ var ok = !string.IsNullOrEmpty(content);
+ if (!ok) continue;
+
if (sb.Length > 1) sb.Append(",");
- sb.Append($"{{\"role\": \"{message.Role}\", \"content\": \"{message.Content}\"}}");
+
+ sb.Append($"{{\"role\": \"{message.Role}\", \"content\": \"{content}\"}}");
}
sb.Append("]");
var theDict = $"{{ \"messages\": {sb.ToString()} }}";
@@ -1049,7 +1060,7 @@ private void StopCommand()
// OutputHelper _output = null;
// DisplayHelper _display = null;
- private int TryParse(string? s, int defaultValue)
+ private int? TryParse(string? s, int? defaultValue)
{
return !string.IsNullOrEmpty(s) && int.TryParse(s, out var parsed) ? parsed : defaultValue;
}
@@ -1164,7 +1175,7 @@ private static Dictionary SampleData()
public const string DefaultSystemPrompt = "You are an AI assistant that helps people find information regarding Azure AI.";
- private const int _defaultMaxTokens = 800;
+ private const int _defaultMaxTokens = 0;
private const float _defaultTemperature = 0.7f;
private const float _defaultFrequencyPenalty = 0.0f;
private const float _defaultPresencePenalty = 0.0f;
diff --git a/src/common/details/helpers/file_helpers.cs b/src/common/details/helpers/file_helpers.cs
index 984d04f1..b3340b5b 100644
--- a/src/common/details/helpers/file_helpers.cs
+++ b/src/common/details/helpers/file_helpers.cs
@@ -533,7 +533,9 @@ public static byte[] ReadAllBytes(string fileName)
{
byte[] bytes = IsStandardInputReference(fileName)
? ConsoleHelpers.ReadAllStandardInputBytes()
- : File.ReadAllBytes(fileName);
+ : IsResource(fileName)
+ ? ReadAllResourceBytes(fileName)
+ : File.ReadAllBytes(fileName);
return bytes;
}
@@ -1073,6 +1075,18 @@ private static string ReadAllResourceText(string fileName, Encoding encoding)
return text;
}
+ private static byte[] ReadAllResourceBytes(string fileName)
+ {
+ var stream = GetResourceStream(fileName);
+ var length = stream.Length;
+
+ byte[] buffer = new byte[length];
+ stream.Read(buffer, 0, (int)length);
+
+ stream.Dispose();
+ return buffer;
+ }
+
public static bool IsStandardInputReference(string fileName)
{
return fileName == "-" || fileName == "stdin";
diff --git a/src/extensions/helper_functions_extension/HelperFunctionCallContext.cs b/src/extensions/helper_functions_extension/HelperFunctionCallContext.cs
index abf6b1a5..424fa9e1 100644
--- a/src/extensions/helper_functions_extension/HelperFunctionCallContext.cs
+++ b/src/extensions/helper_functions_extension/HelperFunctionCallContext.cs
@@ -9,7 +9,7 @@ public HelperFunctionCallContext(HelperFunctionFactory factory)
_factory = factory;
}
- public bool CheckForFunction(ChatMessage message)
+ public bool CheckForFunction(ChatResponseMessage message)
{
var updated = false;
diff --git a/src/extensions/helper_functions_extension/HelperFunctionFactory.cs b/src/extensions/helper_functions_extension/HelperFunctionFactory.cs
index 53cb1755..ee2aeb09 100644
--- a/src/extensions/helper_functions_extension/HelperFunctionFactory.cs
+++ b/src/extensions/helper_functions_extension/HelperFunctionFactory.cs
@@ -97,8 +97,8 @@ public bool TryCallFunction(ChatCompletionsOptions options, HelperFunctionCallCo
if (function.Key != null)
{
result = CallFunction(function.Key, function.Value, context.Arguments);
- options.Messages.Add(new ChatMessage() { Role = ChatRole.Assistant, FunctionCall = new FunctionCall(context.FunctionName, context.Arguments) });
- options.Messages.Add(new ChatMessage(ChatRole.Function, result) { Name = context.FunctionName });
+ options.Messages.Add(new ChatRequestAssistantMessage("") { FunctionCall = new FunctionCall(context.FunctionName, context.Arguments) });
+ options.Messages.Add(new ChatRequestFunctionMessage(context.FunctionName, result));
return true;
}
}
diff --git a/src/extensions/helper_functions_extension/HelperFunctionFactoryExtensions.cs b/src/extensions/helper_functions_extension/HelperFunctionFactoryExtensions.cs
index 00eeb2fd..9dffea19 100644
--- a/src/extensions/helper_functions_extension/HelperFunctionFactoryExtensions.cs
+++ b/src/extensions/helper_functions_extension/HelperFunctionFactoryExtensions.cs
@@ -14,6 +14,7 @@ public static HelperFunctionCallContext AddFunctions(this ChatCompletionsOptions
{
foreach (var function in functionFactory.GetFunctionDefinitions())
{
+ // options.Tools.Add(new ChatCompletionsFunctionToolDefinition(function));
options.Functions.Add(function);
}
diff --git a/src/extensions/helper_functions_extension/helper_functions_extension.csproj b/src/extensions/helper_functions_extension/helper_functions_extension.csproj
index 9235931c..3761f3cf 100644
--- a/src/extensions/helper_functions_extension/helper_functions_extension.csproj
+++ b/src/extensions/helper_functions_extension/helper_functions_extension.csproj
@@ -9,7 +9,7 @@
-
+
diff --git a/src/extensions/template_extension/TemplateFactory.cs b/src/extensions/template_extension/TemplateFactory.cs
index 0865c3e7..75008553 100644
--- a/src/extensions/template_extension/TemplateFactory.cs
+++ b/src/extensions/template_extension/TemplateFactory.cs
@@ -88,26 +88,17 @@ public static bool GenerateTemplateFiles(string templateName, string instruction
outputDirectory = PathHelpers.NormalizePath(outputDirectory);
var message = $"Generating '{templateName}' in '{outputDirectory}' ({files.Count()} files)...";
- if (!quiet) Console.WriteLine(message);
+ if (!quiet) Console.WriteLine($"{message}\n");
- var processed = ProcessTemplates(templateName, generator, files);
- foreach (var item in processed)
+ var generated = ProcessTemplates(templateName, generator, files, outputDirectory);
+ foreach (var item in generated)
{
- var file = item.Key;
- var text = item.Value;
- if (verbose) Console.WriteLine($"\nFILE: {file}:\n```\n{text}\n```");
-
- FileHelpers.WriteAllText(PathHelpers.Combine(outputDirectory, file), text, new UTF8Encoding(false));
+ var file = item.Replace(outputDirectory, string.Empty).Trim('\\', '/');
+ if (!quiet) Console.WriteLine($" {file}");
}
if (!quiet)
{
- Console.WriteLine();
- foreach (var item in processed)
- {
- Console.WriteLine($" {item.Key}");
- }
-
Console.WriteLine();
Console.WriteLine($"\r{message} DONE!\n");
}
@@ -202,30 +193,43 @@ private static Dictionary GetParameters(string templateName)
return parameters;
}
- private static Dictionary ProcessTemplates(string templateName, TemplateGenerator generator, IEnumerable files)
+ private static IEnumerable ProcessTemplates(string templateName, TemplateGenerator generator, IEnumerable files, string outputDirectory)
{
var root = FileHelpers.FileNameFromResourceName("templates") + "/";
- var processed = new Dictionary();
foreach (var file in files)
{
- var text = FileHelpers.ReadAllText(file, new UTF8Encoding(false));
- if (Program.Debug) Console.WriteLine($"```{file}\n{text}\n```");
-
if (!file.StartsWith(root)) throw new Exception("Invalid file name");
var outputFile = file.Substring(root.Length + templateName.Length + 1);
+ var outputFileWithPath = PathHelpers.Combine(outputDirectory, outputFile);
- var parsed = generator.ParseTemplate(file, text);
- var settings = TemplatingEngine.GetSettings(generator, parsed);
- settings.CompilerOptions = "-nullable:enable";
+ var isBinary = file.EndsWith(".png") || file.EndsWith(".ico");
+ if (!isBinary)
+ {
+ ProcessTemplate(generator, file, outputFileWithPath, out var generatedFileName, out var generatedContent);
+ FileHelpers.WriteAllText(generatedFileName, generatedContent, new UTF8Encoding(false));
+ yield return generatedFileName;
+ }
+ else
+ {
+ var bytes = FileHelpers.ReadAllBytes(file);
+ FileHelpers.WriteAllBytes(outputFileWithPath, bytes);
+ yield return outputFileWithPath;
+ }
+ }
+ }
- (string generatedFileName, string generatedContent) = generator.ProcessTemplateAsync(parsed, file, text, outputFile, settings).Result;
- if (Program.Debug) Console.WriteLine($"```{generatedFileName}\n{generatedContent}\n```");
+ private static void ProcessTemplate(TemplateGenerator generator, string file, string outputFile, out string generatedFileName, out string generatedContent)
+ {
+ var text = FileHelpers.ReadAllText(file, new UTF8Encoding(false));
+ if (Program.Debug) Console.WriteLine($"```{file}\n{text}\n```");
- processed.Add(generatedFileName, generatedContent);
- }
+ var parsed = generator.ParseTemplate(file, text);
+ var settings = TemplatingEngine.GetSettings(generator, parsed);
+ settings.CompilerOptions = "-nullable:enable";
- return processed;
+ (generatedFileName, generatedContent) = generator.ProcessTemplateAsync(parsed, file, text, outputFile, settings).Result;
+ if (Program.Debug) Console.WriteLine($"```{generatedFileName}\n{generatedContent}\n```");
}
}
}
\ No newline at end of file