
Commit 217fb8b

Added support for stream_options in chat completions
Fixes: TheoKanning#502
1 parent e7de81c commit 217fb8b

File tree

api/src/main/java/com/theokanning/openai/completion/chat/ChatCompletionChunk.java
api/src/main/java/com/theokanning/openai/completion/chat/ChatCompletionRequest.java
api/src/main/java/com/theokanning/openai/completion/chat/StreamOptions.java
service/src/test/java/com/theokanning/openai/service/ChatCompletionTest.java

4 files changed: +64 -1 lines changed

api/src/main/java/com/theokanning/openai/completion/chat/ChatCompletionChunk.java

Lines changed: 7 additions & 1 deletion

@@ -1,4 +1,5 @@
 package com.theokanning.openai.completion.chat;
+import com.theokanning.openai.Usage;
 import lombok.Data;
 
 import java.util.List;
@@ -32,4 +33,9 @@ public class ChatCompletionChunk {
      * A list of all generated completions.
      */
     List<ChatCompletionChoice> choices;
-}
+
+    /**
+     * The API usage for this request
+     */
+    Usage usage;
+}
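
The new usage field on ChatCompletionChunk is what stream_options ultimately feeds: when include_usage is requested (see StreamOptions below), only the final chunk before the data: [DONE] message carries a non-null Usage. A minimal consumer sketch, not part of the commit, assuming the library's existing OpenAiService and the RxJava Flowable it returns for streaming; the class and method names here are illustrative only:

import com.theokanning.openai.Usage;
import com.theokanning.openai.completion.chat.ChatCompletionChunk;
import com.theokanning.openai.completion.chat.ChatCompletionRequest;
import com.theokanning.openai.service.OpenAiService;

class UsageFromStreamSketch {
    // Returns the usage reported on the final chunk, or null if no chunk carried usage.
    static Usage lastUsage(OpenAiService service, ChatCompletionRequest request) {
        return service.streamChatCompletion(request)
                .filter(chunk -> chunk.getUsage() != null)   // usage is null on every chunk except the last
                .map(ChatCompletionChunk::getUsage)
                .blockingLast(null);
    }
}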

api/src/main/java/com/theokanning/openai/completion/chat/ChatCompletionRequest.java

Lines changed: 7 additions & 0 deletions

@@ -54,6 +54,13 @@ public class ChatCompletionRequest {
      */
     Boolean stream;
 
+    /**
+     * Options for streaming response. Only set this when you set stream: true
+     * <a href="https://platform.openai.com/docs/api-reference/chat/create#chat-create-stream_options">OpenAI Docs</a>
+     */
+    @JsonProperty("stream_options")
+    StreamOptions streamOptions;
+
     /**
      * Up to 4 sequences where the API will stop generating further tokens.
      */
api/src/main/java/com/theokanning/openai/completion/chat/StreamOptions.java

Lines changed: 25 additions & 0 deletions

@@ -0,0 +1,25 @@
+package com.theokanning.openai.completion.chat;
+
+import com.fasterxml.jackson.annotation.JsonProperty;
+import lombok.AllArgsConstructor;
+import lombok.Data;
+import lombok.NoArgsConstructor;
+
+/**
+ * <p>Options for streaming response. Only set this when you set stream: true</p>
+ * see <a href="https://platform.openai.com/docs/api-reference/chat/create#chat-create-stream_options">OpenAi documentation</a>
+ */
+@Data
+@NoArgsConstructor(force = true)
+@AllArgsConstructor
+public class StreamOptions {
+
+    /**
+     * If set, an additional chunk will be streamed before the data: [DONE] message.
+     * The usage field on this chunk shows the token usage statistics for the entire request, and the choices field will always be an empty array.
+     * All other chunks will also include a usage field, but with a null value.
+     */
+    @JsonProperty("include_usage")
+    Boolean includeUsage;
+
+}
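
For reference, a short sketch (not part of the commit) of how the new class serializes, assuming Jackson's ObjectMapper from jackson-databind is available on the classpath: the @JsonProperty annotation maps the camelCase field to the include_usage key the API expects.

import com.fasterxml.jackson.databind.ObjectMapper;
import com.theokanning.openai.completion.chat.StreamOptions;

class StreamOptionsJsonSketch {
    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        // new StreamOptions(true) uses the Lombok @AllArgsConstructor added above
        String json = mapper.writeValueAsString(new StreamOptions(true));
        System.out.println(json); // prints {"include_usage":true}
    }
}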

service/src/test/java/com/theokanning/openai/service/ChatCompletionTest.java

Lines changed: 25 additions & 0 deletions

@@ -84,6 +84,31 @@ void streamChatCompletion() {
         assertNotNull(chunks.get(0).getChoices().get(0));
     }
 
+    @Test
+    void streamChatCompletionWithStreamOptions() {
+        final List<ChatMessage> messages = new ArrayList<>();
+        final ChatMessage systemMessage = new ChatMessage(ChatMessageRole.SYSTEM.value(), "You are a dog and will speak as such.");
+        messages.add(systemMessage);
+
+        ChatCompletionRequest chatCompletionRequest = ChatCompletionRequest
+                .builder()
+                .model("gpt-3.5-turbo")
+                .messages(messages)
+                .n(1)
+                .maxTokens(50)
+                .logitBias(new HashMap<>())
+                .stream(true)
+                .streamOptions(new StreamOptions(true))
+                .build();
+
+        List<ChatCompletionChunk> chunks = new ArrayList<>();
+        service.streamChatCompletion(chatCompletionRequest).blockingForEach(chunks::add);
+        assertTrue(chunks.size() > 0);
+        assertNotNull(chunks.get(0).getChoices().get(0));
+        chunks.stream().limit(chunks.size() - 1).forEach(chunk -> assertNull(chunk.getUsage()));
+        assertNotNull(chunks.get(chunks.size()-1).getUsage());
+    }
+
     @Test
     void createChatCompletionWithFunctions() {
         final List<ChatFunction> functions = Collections.singletonList(ChatFunction.builder()
