Skip to content

Commit

Permalink
Chschrae/java streaming chat completions (#138)
Browse files Browse the repository at this point in the history
* added first attempt at template

* updated with new openaisdk and fixed code

---------

Co-authored-by: Chris Schraer <[email protected]>
  • Loading branch information
chschrae and Chris Schraer authored Dec 15, 2023
1 parent eeb54a8 commit 8a3efa9
Show file tree
Hide file tree
Showing 6 changed files with 176 additions and 0 deletions.
8 changes: 8 additions & 0 deletions src/ai/.x/templates/openai-chat-java-streaming/_.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
{
"_Name": "OpenAI Chat Completions in Java",
"_Language": "Java",
"OPENAI_ENDPOINT": "<insert your OpenAI endpoint here>",
"OPENAI_API_KEY": "<insert your OpenAI API key here>",
"AZURE_OPENAI_CHAT_DEPLOYMENT": "<insert your OpenAI deployment name here>",
"AZURE_OPENAI_SYSTEM_PROMPT": "You are a helpful AI assistant."
}
39 changes: 39 additions & 0 deletions src/ai/.x/templates/openai-chat-java-streaming/pom.xml
Original file line number Diff line number Diff line change
@@ -0,0 +1,39 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <groupId>com.azure.ai.openai.samples</groupId>
    <artifactId>openai-chat-java-streaming</artifactId>
    <version>1.0-SNAPSHOT</version>

    <!-- Pin the compiler level and source encoding so the build does not
         depend on whatever JDK defaults happen to be installed. -->
    <properties>
        <maven.compiler.source>11</maven.compiler.source>
        <maven.compiler.target>11</maven.compiler.target>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
    </properties>

    <dependencies>
        <!-- https://mvnrepository.com/artifact/com.azure/azure-ai-openai -->
        <dependency>
            <groupId>com.azure</groupId>
            <artifactId>azure-ai-openai</artifactId>
            <version>1.0.0-beta.6</version>
        </dependency>
    </dependencies>

    <build>
        <plugins>
            <!-- Copies all runtime jars into target/lib so the run scripts can
                 reference them with a simple classpath wildcard. -->
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-dependency-plugin</artifactId>
                <version>3.1.2</version>
                <executions>
                    <execution>
                        <id>copy-dependencies</id>
                        <phase>prepare-package</phase>
                        <goals>
                            <goal>copy-dependencies</goal>
                        </goals>
                        <configuration>
                            <outputDirectory>${project.build.directory}/lib</outputDirectory>
                        </configuration>
                    </execution>
                </executions>
            </plugin>
        </plugins>
    </build>

</project>
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
REM Builds the sample and copies runtime jars into target/lib (see pom.xml).
REM NOTE(review): assumed to be a Windows .cmd script (the run script uses ';') - confirm.
mvn clean package
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
REM Compiles the generated source against the copied dependency jars into out/.
REM javac expands the -cp wildcard itself, so no shell globbing is needed.
javac -cp target/lib/* src/OpenAIQuickstartStreaming.java -d out
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
REM Runs the sample; ';' is the Windows classpath separator (use ':' on Linux/macOS).
java -cp out;target/lib/* OpenAIQuickstartStreaming
Original file line number Diff line number Diff line change
@@ -0,0 +1,126 @@
<#@ template hostspecific="true" #>
<#@ output extension=".java" encoding="utf-8" #>
<#@ parameter type="System.String" name="ClassName" #>
<#@ parameter type="System.String" name="OPENAI_ENDPOINT" #>
<#@ parameter type="System.String" name="OPENAI_API_KEY" #>
<#@ parameter type="System.String" name="AZURE_OPENAI_CHAT_DEPLOYMENT" #>
<#@ parameter type="System.String" name="AZURE_OPENAI_SYSTEM_PROMPT" #>
import com.azure.ai.openai.OpenAIAsyncClient;
import com.azure.ai.openai.OpenAIClient;
import com.azure.ai.openai.OpenAIClientBuilder;
import com.azure.ai.openai.models.ChatChoice;
import com.azure.ai.openai.models.ChatCompletions;
import com.azure.ai.openai.models.ChatCompletionsOptions;
import com.azure.ai.openai.models.ChatRequestAssistantMessage;
import com.azure.ai.openai.models.ChatRequestMessage;
import com.azure.ai.openai.models.ChatRequestSystemMessage;
import com.azure.ai.openai.models.ChatRole;
import com.azure.ai.openai.models.ChatRequestUserMessage;
import com.azure.ai.openai.models.ChatResponseMessage;
import com.azure.ai.openai.models.CompletionsUsage;
import com.azure.ai.openai.models.CompletionsFinishReason;
import com.azure.core.credential.AzureKeyCredential;

import reactor.core.publisher.Flux;

import java.time.Duration;
import java.util.ArrayList;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Consumer;
import java.util.List;
import java.util.Scanner;

public class OpenAIQuickstartStreaming {

    // Async client used to stream chat completions from Azure OpenAI.
    private OpenAIAsyncClient client;
    // Conversation state; messages accumulate across turns.
    private ChatCompletionsOptions options;

    private String key = (System.getenv("OPENAI_API_KEY") != null) ? System.getenv("OPENAI_API_KEY")
            : "<insert your OpenAI API key here>";
    private String endpoint = (System.getenv("OPENAI_ENDPOINT") != null) ? System.getenv("OPENAI_ENDPOINT")
            : "<insert your OpenAI endpoint here>";
    private String deploymentName = (System.getenv("AZURE_OPENAI_CHAT_DEPLOYMENT") != null)
            ? System.getenv("AZURE_OPENAI_CHAT_DEPLOYMENT")
            : "<insert your OpenAI deployment name here>";
    private String systemPrompt = (System.getenv("AZURE_OPENAI_SYSTEM_PROMPT") != null)
            ? System.getenv("AZURE_OPENAI_SYSTEM_PROMPT")
            : "You are a helpful AI assistant.";

    /**
     * Builds the async client from environment configuration and seeds the
     * conversation with the system prompt.
     */
    public OpenAIQuickstartStreaming() {

        client = new OpenAIClientBuilder()
                .endpoint(endpoint)
                .credential(new AzureKeyCredential(key))
                .buildAsyncClient();

        List<ChatRequestMessage> chatMessages = new ArrayList<>();
        chatMessages.add(new ChatRequestSystemMessage(systemPrompt));

        options = new ChatCompletionsOptions(chatMessages);
        options.setStream(true);
    }

    /**
     * Sends the user's prompt and streams the assistant's reply.
     *
     * @param userPrompt text of the user's message
     * @param callback   invoked for each streamed delta that carries content
     * @return the underlying {@link Flux}; callers may block on it to await completion
     */
    public Flux<ChatCompletions> getChatCompletionsStreamingAsync(String userPrompt,
            Consumer<ChatResponseMessage> callback) {
        options.getMessages().add(new ChatRequestUserMessage(userPrompt));

        StringBuilder responseContent = new StringBuilder();
        Flux<ChatCompletions> response = client.getChatCompletionsStream(deploymentName, options);

        response.subscribe(chatResponse -> {
            if (chatResponse.getChoices() == null) {
                return;
            }
            for (ChatChoice update : chatResponse.getChoices()) {
                // FIX: the original only appended content on the chunk carrying a
                // non-null finish reason ("if (getFinishReason() == null) continue;"
                // before the append), so responseContent dropped nearly every delta.
                ChatResponseMessage delta = update.getDelta();
                String content = (delta != null) ? delta.getContent() : null;
                if (content != null) {
                    callback.accept(delta);
                    if (!content.isEmpty()) {
                        responseContent.append(content);
                    }
                }

                // FIX: the finish reason typically arrives on a final chunk whose
                // delta has no content; the original's delta-null guard made these
                // warnings unreachable. Check it independently of the content.
                if (update.getFinishReason() == CompletionsFinishReason.CONTENT_FILTERED) {
                    responseContent.append("\nWARNING: Content filtered!");
                } else if (update.getFinishReason() == CompletionsFinishReason.TOKEN_LIMIT_REACHED) {
                    responseContent.append("\nERROR: Exceeded token limit!");
                }
            }
        }, error -> {
            // FIX: the original silently swallowed stream errors.
            System.err.println("Streaming error: " + error.getMessage());
        }, () -> {
            // FIX: the original added a (partial) assistant message after every
            // received chunk; record the full reply exactly once, on completion.
            options.getMessages().add(new ChatRequestAssistantMessage(responseContent.toString()));
        });

        return response;
    }

    /**
     * Simple console REPL: reads prompts from stdin until an empty line or
     * "exit" is entered, streaming each reply as it arrives.
     */
    public static void main(String[] args) {
        OpenAIQuickstartStreaming chat = new OpenAIQuickstartStreaming();

        Scanner scanner = new Scanner(System.in);
        while (true) {
            System.out.print("User: ");
            String userPrompt = scanner.nextLine();
            if (userPrompt.isEmpty() || "exit".equals(userPrompt))
                break;

            System.out.print("Assistant: ");
            Flux<ChatCompletions> responseFlux = chat.getChatCompletionsStreamingAsync(userPrompt, update -> {
                System.out.print(update.getContent());
            });
            // Block until the stream finishes so turns do not interleave.
            responseFlux.blockLast(Duration.ofSeconds(20));
            System.out.println("");
        }
        scanner.close();
    }
}

0 comments on commit 8a3efa9

Please sign in to comment.