File tree (4 files changed: +64 −1 lines)
Changed paths:
- api/src/main/java/com/theokanning/openai/completion/chat
- service/src/test/java/com/theokanning/openai/service
Original file line number | Diff line number | Diff line change
1
package com .theokanning .openai .completion .chat ;
2
+ import com .theokanning .openai .Usage ;
2
3
import lombok .Data ;
3
4
4
5
import java .util .List ;
@@ -32,4 +33,9 @@ public class ChatCompletionChunk {
32
33
* A list of all generated completions.
33
34
*/
34
35
List <ChatCompletionChoice > choices ;
35
- }
36
+
37
+ /**
38
+ * The API usage for this request
39
+ */
40
+ Usage usage ;
41
+ }
Original file line number Diff line number Diff line change @@ -54,6 +54,13 @@ public class ChatCompletionRequest {
54
54
*/
55
55
Boolean stream ;
56
56
57
+ /**
58
+ * Options for streaming response. Only set this when you set stream: true
59
+ * <a href="https://platform.openai.com/docs/api-reference/chat/create#chat-create-stream_options">OpenAI Docs</a>
60
+ */
61
+ @ JsonProperty ("stream_options" )
62
+ StreamOptions streamOptions ;
63
+
57
64
/**
58
65
* Up to 4 sequences where the API will stop generating further tokens.
59
66
*/
Original file line number Diff line number Diff line change
1
+ package com .theokanning .openai .completion .chat ;
2
+
3
+ import com .fasterxml .jackson .annotation .JsonProperty ;
4
+ import lombok .AllArgsConstructor ;
5
+ import lombok .Data ;
6
+ import lombok .NoArgsConstructor ;
7
+
8
+ /**
9
+ * <p>Options for streaming response. Only set this when you set stream: true</p>
10
+ * see <a href="https://platform.openai.com/docs/api-reference/chat/create#chat-create-stream_options">OpenAi documentation</a>
11
+ */
12
+ @ Data
13
+ @ NoArgsConstructor (force = true )
14
+ @ AllArgsConstructor
15
+ public class StreamOptions {
16
+
17
+ /**
18
+ * If set, an additional chunk will be streamed before the data: [DONE] message.
19
+ * The usage field on this chunk shows the token usage statistics for the entire request, and the choices field will always be an empty array.
20
+ * All other chunks will also include a usage field, but with a null value.
21
+ */
22
+ @ JsonProperty ("include_usage" )
23
+ Boolean includeUsage ;
24
+
25
+ }
Original file line number Diff line number Diff line change @@ -84,6 +84,31 @@ void streamChatCompletion() {
84
84
assertNotNull (chunks .get (0 ).getChoices ().get (0 ));
85
85
}
86
86
87
+ @ Test
88
+ void streamChatCompletionWithStreamOptions () {
89
+ final List <ChatMessage > messages = new ArrayList <>();
90
+ final ChatMessage systemMessage = new ChatMessage (ChatMessageRole .SYSTEM .value (), "You are a dog and will speak as such." );
91
+ messages .add (systemMessage );
92
+
93
+ ChatCompletionRequest chatCompletionRequest = ChatCompletionRequest
94
+ .builder ()
95
+ .model ("gpt-3.5-turbo" )
96
+ .messages (messages )
97
+ .n (1 )
98
+ .maxTokens (50 )
99
+ .logitBias (new HashMap <>())
100
+ .stream (true )
101
+ .streamOptions (new StreamOptions (true ))
102
+ .build ();
103
+
104
+ List <ChatCompletionChunk > chunks = new ArrayList <>();
105
+ service .streamChatCompletion (chatCompletionRequest ).blockingForEach (chunks ::add );
106
+ assertTrue (chunks .size () > 0 );
107
+ assertNotNull (chunks .get (0 ).getChoices ().get (0 ));
108
+ chunks .stream ().limit (chunks .size () - 1 ).forEach (chunk -> assertNull (chunk .getUsage ()));
109
+ assertNotNull (chunks .get (chunks .size ()-1 ).getUsage ());
110
+ }
111
+
87
112
@ Test
88
113
void createChatCompletionWithFunctions () {
89
114
final List <ChatFunction > functions = Collections .singletonList (ChatFunction .builder ()
You can’t perform that action at this time.
0 commit comments