All changes required for ai dev new openai-webpage to work (#141)
- updated C# OpenAI client library to beta.12
- added openai-chat-streaming-js template (aka `ai dev new openai-chat-streaming-js`)
- added openai-functions-streaming-js template (aka `ai dev new openai-functions-streaming-js`)
- added openai-webpage template (aka `ai dev new openai-webpage`)
- fixed bug in `FileHelpers` where `ReadAllBytes` didn't work for resources
- fixed bug in `TemplateFactory` where `ProcessTemplates` didn't work for binary files
robch authored Jan 3, 2024
1 parent 606bc37 commit f03e0a4
Showing 43 changed files with 1,455 additions and 82 deletions.
1 change: 1 addition & 0 deletions .gitignore
@@ -4,3 +4,4 @@
**/bin/*/net6.0/*
**/bin/*/net7.0/*
**/obj/*
ideas/website/node_modules/**
@@ -12,7 +12,7 @@
<PackageReference Include="Azure.Core" Version="1.36.0" />
<PackageReference Include="Azure.Identity" Version="1.10.4" />
<PackageReference Include="Newtonsoft.Json" Version="13.0.3" />
<PackageReference Include="Azure.AI.OpenAI" Version="1.0.0-beta.9" />
<PackageReference Include="Azure.AI.OpenAI" Version="1.0.0-beta.12" />
</ItemGroup>

<ItemGroup>
@@ -15,7 +15,7 @@
<PackageReference Include="Azure.Core" Version="1.36.0" />
<PackageReference Include="Azure.Identity" Version="1.10.4" />
<PackageReference Include="Newtonsoft.Json" Version="13.0.3" />
<PackageReference Include="Azure.AI.OpenAI" Version="1.0.0-beta.9" />
<PackageReference Include="Azure.AI.OpenAI" Version="1.0.0-beta.12" />
</ItemGroup>

<ItemGroup>
4 changes: 2 additions & 2 deletions src/ai/.x/templates/openai-chat-js/package.json
@@ -7,9 +7,9 @@
"test": "echo \"Error: no test specified\" && exit 1"
},
"author": "",
"license": "ISC",
"license": "MIT",
"dependencies": {
"@azure/openai": "^1.0.0-beta.7"
"@azure/openai": "1.0.0-beta.8"
}
}

@@ -0,0 +1,85 @@
<#@ template hostspecific="true" #>
<#@ output extension=".js" encoding="utf-8" #>
<#@ parameter type="System.String" name="OPENAI_ENDPOINT" #>
<#@ parameter type="System.String" name="OPENAI_API_KEY" #>
<#@ parameter type="System.String" name="AZURE_OPENAI_CHAT_DEPLOYMENT" #>
<#@ parameter type="System.String" name="AZURE_OPENAI_SYSTEM_PROMPT" #>
const { OpenAIClient, AzureKeyCredential } = require("@azure/openai");

class OpenAIStreamingChatCompletions {
constructor(systemPrompt, endpoint, azureApiKey, deploymentName) {
this.systemPrompt = systemPrompt;
this.endpoint = endpoint;
this.azureApiKey = azureApiKey;
this.deploymentName = deploymentName;
this.client = new OpenAIClient(this.endpoint, new AzureKeyCredential(this.azureApiKey));
this.clearConversation();
}

clearConversation() {
this.messages = [
{ role: 'system', content: this.systemPrompt }
];
}

async getChatCompletions(userInput, callback) {
this.messages.push({ role: 'user', content: userInput });

const events = this.client.listChatCompletions(this.deploymentName, this.messages);

let contentComplete = '';
for await (const event of events) {
for (const choice of event.choices) {

let content = choice.delta?.content;
if (choice.finishReason === 'length') {
content = `${content}\nERROR: Exceeded token limit!`;
}

if (content != null) {
callback(content);
await new Promise(r => setTimeout(r, 50)); // delay to simulate real-time output, word by word
contentComplete += content;
}
}
}

this.messages.push({ role: 'assistant', content: contentComplete });
return contentComplete;
}
}

const readline = require('readline');
const rl = readline.createInterface({
input: process.stdin,
output: process.stdout
});

async function main() {
const endpoint = process.env["OPENAI_ENDPOINT"] || "<#= OPENAI_ENDPOINT #>";
const azureApiKey = process.env["OPENAI_API_KEY"] || "<#= OPENAI_API_KEY #>";
const deploymentName = process.env["AZURE_OPENAI_CHAT_DEPLOYMENT"] || "<#= AZURE_OPENAI_CHAT_DEPLOYMENT #>" ;
const systemPrompt = process.env["AZURE_OPENAI_SYSTEM_PROMPT"] || "<#= AZURE_OPENAI_SYSTEM_PROMPT #>" ;

const streamingChatCompletions = new OpenAIStreamingChatCompletions(systemPrompt, endpoint, azureApiKey, deploymentName);

while (true) {

const input = await new Promise(resolve => rl.question('User: ', resolve));
if (input === 'exit' || input === '') break;

let response = await streamingChatCompletions.getChatCompletions(input, (content) => {
console.log(`assistant-streaming: ${content}`);
});

console.log(`\nAssistant: ${response}\n`);
}

console.log('Bye!');
}

main().catch((err) => {
console.error("The sample encountered an error:", err);
});

module.exports = { main };
8 changes: 8 additions & 0 deletions src/ai/.x/templates/openai-chat-streaming-js/_.json
@@ -0,0 +1,8 @@
{
"_Name": "OpenAI Chat Completions (Streaming) in JavaScript",
"_Language": "JavaScript",
"OPENAI_ENDPOINT": "<insert your OpenAI endpoint here>",
"OPENAI_API_KEY": "<insert your OpenAI API key here>",
"AZURE_OPENAI_CHAT_DEPLOYMENT": "<insert your OpenAI deployment name here>",
"AZURE_OPENAI_SYSTEM_PROMPT": "You are a helpful AI assistant."
}
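For context: each key in this _.json corresponds to a <#@ parameter #> declaration in the T4 template shown earlier, and the template's <#= ... #> expressions are replaced with these values (or with values the user supplies) when the template is instantiated, presumably producing the ChatCompletionsStreaming.js named in the package.json below. A minimal sketch of the generated lines, assuming the defaults above are left unchanged:

// Illustrative only, not part of this commit: roughly what template expansion
// yields with the _.json defaults shown above.
const endpoint = process.env["OPENAI_ENDPOINT"] || "<insert your OpenAI endpoint here>";
const azureApiKey = process.env["OPENAI_API_KEY"] || "<insert your OpenAI API key here>";
const deploymentName = process.env["AZURE_OPENAI_CHAT_DEPLOYMENT"] || "<insert your OpenAI deployment name here>";
const systemPrompt = process.env["AZURE_OPENAI_SYSTEM_PROMPT"] || "You are a helpful AI assistant.";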
15 changes: 15 additions & 0 deletions src/ai/.x/templates/openai-chat-streaming-js/package.json
@@ -0,0 +1,15 @@
{
"name": "openai-chat-streaming",
"version": "1.0.0",
"description": "",
"main": "ChatCompletionsStreaming.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1"
},
"author": "",
"license": "MIT",
"dependencies": {
"@azure/openai": "1.0.0-beta.8"
}
}

@@ -14,7 +14,7 @@
<ItemGroup>
<PackageReference Include="Azure.Core" Version="1.36.0" />
<PackageReference Include="Azure.Identity" Version="1.10.4" />
<PackageReference Include="Azure.AI.OpenAI" Version="1.0.0-beta.9" />
<PackageReference Include="Azure.AI.OpenAI" Version="1.0.0-beta.12" />
</ItemGroup>

</Project>
@@ -28,12 +28,12 @@ public class <#= ClassName #>

options = new ChatCompletionsOptions();
options.DeploymentName = deploymentName;
options.Messages.Add(new ChatMessage(ChatRole.System, systemPrompt));
options.Messages.Add(new ChatRequestSystemMessage(systemPrompt));
}

public async Task<string> GetChatCompletionsStreamingAsync(string userPrompt, Action<StreamingChatCompletionsUpdate> callback = null)
{
options.Messages.Add(new ChatMessage(ChatRole.User, userPrompt));
options.Messages.Add(new ChatRequestUserMessage(userPrompt));

var responseContent = string.Empty;
var response = await client.GetChatCompletionsStreamingAsync(options);
@@ -56,7 +56,7 @@ public async Task<string> GetChatCompletionsStreamingAsync(string userPrompt, Ac
responseContent += content;
}

options.Messages.Add(new ChatMessage(ChatRole.Assistant, responseContent));
options.Messages.Add(new ChatRequestAssistantMessage(responseContent));
return responseContent;
}

@@ -13,7 +13,7 @@
<ItemGroup>
<PackageReference Include="Azure.Core" Version="1.36.0" />
<PackageReference Include="Azure.Identity" Version="1.10.4" />
<PackageReference Include="Azure.AI.OpenAI" Version="1.0.0-beta.9" />
<PackageReference Include="Azure.AI.OpenAI" Version="1.0.0-beta.12" />
</ItemGroup>

</Project>
@@ -26,37 +26,36 @@ public async Task ChatUsingYourOwnData()
{
var client = new OpenAIClient(new Uri(_openAIEndpoint), new DefaultAzureCredential());

AzureCognitiveSearchChatExtensionConfiguration contosoExtensionConfig = new()
var contosoExtensionConfig = new AzureCognitiveSearchChatExtensionConfiguration()
{
SearchEndpoint = new Uri(_searchEndpoint),
Key = _searchApiKey,
IndexName = _searchIndexName,
};

contosoExtensionConfig.SetSearchKey(_searchApiKey);

ChatCompletionsOptions chatCompletionsOptions = new()
{
DeploymentName = _openAIDeploymentName,
Messages =
{
new ChatMessage(ChatRole.System, "You are a helpful assistant that answers questions about the Contoso product database."),
new ChatMessage(ChatRole.User, "What are the best-selling Contoso products this month?")
new ChatRequestSystemMessage("You are a helpful assistant that answers questions about the Contoso product database."),
new ChatRequestUserMessage("What are the best-selling Contoso products this month?")
},

AzureExtensionsOptions = new AzureChatExtensionsOptions()
AzureExtensionsOptions = new()
{
Extensions = { contosoExtensionConfig }
}
};

Response<ChatCompletions> response = await client.GetChatCompletionsAsync(chatCompletionsOptions);
ChatMessage message = response.Value.Choices[0].Message;
var message = response.Value.Choices[0].Message;

Console.WriteLine($"{message.Role}: {message.Content}");

Console.WriteLine("Citations and other information:");

foreach (ChatMessage contextMessage in message.AzureExtensionsContext.Messages)
foreach (var contextMessage in message.AzureExtensionsContext.Messages)
{
Console.WriteLine($"{contextMessage.Role}: {contextMessage.Content}");
}
@@ -14,7 +14,7 @@
<ItemGroup>
<PackageReference Include="Azure.Core" Version="1.36.0" />
<PackageReference Include="Azure.Identity" Version="1.10.4" />
<PackageReference Include="Azure.AI.OpenAI" Version="1.0.0-beta.9" />
<PackageReference Include="Azure.AI.OpenAI" Version="1.0.0-beta.12" />
</ItemGroup>

</Project>
8 changes: 4 additions & 4 deletions src/ai/.x/templates/openai-chat/OpenAIChatCompletionsClass.cs
@@ -28,23 +28,23 @@ public class <#= ClassName #>

options = new ChatCompletionsOptions();
options.DeploymentName = deploymentName;
options.Messages.Add(new ChatMessage(ChatRole.System, systemPrompt));
options.Messages.Add(new ChatRequestSystemMessage(systemPrompt));
}

public string GetChatCompletion(string userPrompt)
{
options.Messages.Add(new ChatMessage(ChatRole.User, userPrompt));
options.Messages.Add(new ChatRequestUserMessage(userPrompt));

var response = client.GetChatCompletions(options);
var responseContent = response.Value.Choices[0].Message.Content;
options.Messages.Add(new ChatMessage(ChatRole.Assistant, responseContent));
options.Messages.Add(new ChatRequestAssistantMessage(responseContent));

return responseContent;
}

public static void Main(string[] args)
{
var chat = new <#= ClassName #>();
var chat = new OpenAIHelloWorldClass();

while (true)
{
@@ -0,0 +1,42 @@
function getCurrentWeather(function_arguments) {
const location = JSON.parse(function_arguments).location;
return `The weather in ${location} is 72 degrees and sunny.`;
};

const getCurrentWeatherSchema = {
name: "get_current_weather",
description: "Get the current weather in a given location",
parameters: {
type: "object",
properties: {
location: {
type: "string",
description: "The city and state, e.g. San Francisco, CA",
},
unit: {
type: "string",
enum: ["celsius", "fahrenheit"],
},
},
required: ["location"],
},
};

function getCurrentDate() {
const date = new Date();
return `${date.getFullYear()}-${date.getMonth() + 1}-${date.getDate()}`;
}

const getCurrentDateSchema = {
name: "get_current_date",
description: "Get the current date",
parameters: {
type: "object",
properties: {},
},
};

exports.getCurrentWeather = getCurrentWeather;
exports.getCurrentWeatherSchema = getCurrentWeatherSchema;
exports.getCurrentDate = getCurrentDate;
exports.getCurrentDateSchema = getCurrentDateSchema;
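The two functions above take their arguments as a JSON-encoded string, the same form in which the model streams back function-call arguments. A quick usage sketch; the require path is an assumption, since the new file's name isn't visible in this excerpt:

// Illustrative only, not part of this commit.
// The path "./functions" is hypothetical; substitute the actual filename of the file above.
const { getCurrentWeather, getCurrentDate } = require("./functions");

// Arguments arrive as a JSON string, exactly as they do in a function call from the model.
console.log(getCurrentWeather(JSON.stringify({ location: "San Francisco, CA" })));
// "The weather in San Francisco, CA is 72 degrees and sunny."

console.log(getCurrentDate()); // e.g. "2024-1-3"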
@@ -0,0 +1,61 @@
const { OpenAIClient, AzureKeyCredential } = require("@azure/openai");
const { FunctionFactory } = require("./FunctionFactory");
const { FunctionCallContext } = require("./FunctionCallContext");

class ChatCompletionsFunctionsStreaming {
constructor(systemPrompt, endpoint, azureApiKey, deploymentName, functionFactory) {
this.systemPrompt = systemPrompt;
this.endpoint = endpoint;
this.azureApiKey = azureApiKey;
this.deploymentName = deploymentName;
this.client = new OpenAIClient(this.endpoint, new AzureKeyCredential(this.azureApiKey));
this.functionFactory = functionFactory || new FunctionFactory();
this.clearConversation();
}

clearConversation() {
this.messages = [
{ role: 'system', content: this.systemPrompt }
];
this.functionCallContext = new FunctionCallContext(this.functionFactory, this.messages);
}

async getChatCompletions(userInput, callback) {
this.messages.push({ role: 'user', content: userInput });

let contentComplete = "";
while (true) {
const events = this.client.listChatCompletions(this.deploymentName, this.messages, {
functions: this.functionFactory.getFunctionSchemas(),
});

for await (const event of events) {
for (const choice of event.choices) {

this.functionCallContext.checkForUpdate(choice);

let content = choice.delta?.content;
if (choice.finishReason === 'length') {
content = `${content}\nERROR: Exceeded token limit!`;
}

if (content != null) {
callback(content);
await new Promise(r => setTimeout(r, 50)); // delay to simulate real-time output, word by word
contentComplete += content;
}
}
}

if (this.functionCallContext.tryCallFunction() !== undefined) {
this.functionCallContext.clear();
continue;
}

this.messages.push({ role: 'assistant', content: contentComplete });
return contentComplete;
}
}
}

exports.ChatCompletionsFunctionsStreaming = ChatCompletionsFunctionsStreaming;
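ChatCompletionsFunctionsStreaming requires FunctionFactory and FunctionCallContext, which belong to the same template but aren't shown in this excerpt. Below is a minimal sketch of what such helpers might look like, inferred only from the call sites above (getFunctionSchemas, checkForUpdate, tryCallFunction, clear); the actual implementations in this commit may differ.

// Hypothetical sketch, not part of this commit: inferred from how the two helpers are used above.
class FunctionFactory {
  constructor() {
    this.functions = {};
  }

  // Assumed registration method: pair a function schema with its implementation.
  addFunction(schema, fn) {
    this.functions[schema.name] = { schema, fn };
  }

  getFunctionSchemas() {
    return Object.values(this.functions).map(f => f.schema);
  }

  tryCallFunction(name, functionArguments) {
    const entry = this.functions[name];
    return entry ? entry.fn(functionArguments) : undefined;
  }
}

class FunctionCallContext {
  constructor(functionFactory, messages) {
    this.functionFactory = functionFactory;
    this.messages = messages;
    this.clear();
  }

  // Accumulate the function name and argument fragments streamed back in each delta
  // (assuming the choice delta exposes functionCall.name / functionCall.arguments).
  checkForUpdate(choice) {
    const functionCall = choice.delta?.functionCall;
    if (functionCall?.name) this.functionName += functionCall.name;
    if (functionCall?.arguments) this.functionArguments += functionCall.arguments;
  }

  // Invoke the accumulated call, if any, and append the exchange to the conversation
  // so the next round trip can use the function's result.
  tryCallFunction() {
    if (!this.functionName) return undefined;

    const result = this.functionFactory.tryCallFunction(this.functionName, this.functionArguments);
    if (result === undefined) return undefined;

    this.messages.push({ role: 'assistant', functionCall: { name: this.functionName, arguments: this.functionArguments } });
    this.messages.push({ role: 'function', name: this.functionName, content: result });
    return result;
  }

  clear() {
    this.functionName = '';
    this.functionArguments = '';
  }
}

exports.FunctionFactory = FunctionFactory;
exports.FunctionCallContext = FunctionCallContext;

With helpers along these lines, the while (true) loop above keeps re-querying the model after each function result and only returns once the model produces a plain assistant message.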