Conversation.cs
using System;
using System.Collections.Generic;
using System.Data;
using System.Linq;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
namespace OpenAI_API.Chat
{
/// <summary>
/// Represents an ongoing chat with back-and-forth interactions between the user and the chatbot. This is the simplest way to interact with the ChatGPT API, rather than manually using the ChatEndpoint methods. You do lose some flexibility though.
/// </summary>
public class Conversation
{
/// <summary>
/// An internal reference to the API endpoint, needed for API requests
/// </summary>
private ChatEndpoint _endpoint;
/// <summary>
/// Allows setting the parameters to use when calling the ChatGPT API. Can be useful for setting temperature, presence_penalty, and more. <see href="https://platform.openai.com/docs/api-reference/chat/create">See OpenAI documentation for a list of possible parameters to tweak.</see>
/// </summary>
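/// <example>
/// A minimal sketch of adjusting request parameters before asking for a response, assuming <c>chat</c> is an existing <see cref="Conversation"/>; the exact property names (such as <c>Temperature</c>) are assumed here to mirror the API parameters listed above:
/// <code>
/// chat.RequestParameters.Temperature = 0.2;     // assumed property; lower values make replies more deterministic
/// chat.RequestParameters.PresencePenalty = 0.5; // assumed property; positive values encourage new topics
/// </code>
/// </example>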
public ChatRequest RequestParameters { get; private set; }
/// <summary>
/// Specifies the model to use for ChatGPT requests. This is just a shorthand to access <see cref="RequestParameters"/>.Model
/// </summary>
public OpenAI_API.Models.Model Model
{
get
{
return RequestParameters.Model;
}
set
{
RequestParameters.Model = value;
}
}
/// <summary>
/// After calling <see cref="GetResponseFromChatbotAsync"/>, this contains the full response object which can contain useful metadata like token usages, <see cref="ChatChoice.FinishReason"/>, etc. This is overwritten with every call to <see cref="GetResponseFromChatbotAsync"/> and only contains the most recent result.
/// </summary>
public ChatResult MostRecentApiResult { get; private set; }
/// <summary>
/// Creates a new conversation with ChatGPT chat
/// </summary>
/// <param name="endpoint">A reference to the API endpoint, needed for API requests. Generally should be <see cref="OpenAIAPI.Chat"/>.</param>
/// <param name="model">Optionally specify the model to use for ChatGPT requests. If not specified, used <paramref name="defaultChatRequestArgs"/>.Model or falls back to <see cref="OpenAI_API.Models.Model.ChatGPTTurbo"/></param>
/// <param name="defaultChatRequestArgs">Allows setting the parameters to use when calling the ChatGPT API. Can be useful for setting temperature, presence_penalty, and more. See <see href="https://platform.openai.com/docs/api-reference/chat/create">OpenAI documentation for a list of possible parameters to tweak.</see></param>
public Conversation(ChatEndpoint endpoint, OpenAI_API.Models.Model model = null, ChatRequest defaultChatRequestArgs = null)
{
RequestParameters = new ChatRequest(defaultChatRequestArgs);
if (model != null)
RequestParameters.Model = model;
if (RequestParameters.Model == null)
RequestParameters.Model = Models.Model.ChatGPTTurbo;
_Messages = new List<ChatMessage>();
_endpoint = endpoint;
RequestParameters.NumChoicesPerMessage = 1;
RequestParameters.Stream = false;
}
/// <summary>
/// A list of messages exchanged so far. Do not modify this list directly. Instead, use <see cref="AppendMessage(ChatMessage)"/>, <see cref="AppendUserInput(string)"/>, <see cref="AppendSystemMessage(string)"/>, or <see cref="AppendExampleChatbotOutput(string)"/>.
/// </summary>
public IReadOnlyList<ChatMessage> Messages { get => _Messages; }
private List<ChatMessage> _Messages;
/// <summary>
/// Appends a <see cref="ChatMessage"/> to the chat history
/// </summary>
/// <param name="message">The <see cref="ChatMessage"/> to append to the chat history</param>
public void AppendMessage(ChatMessage message)
{
_Messages.Add(message);
}
/// <summary>
/// Creates and appends a <see cref="ChatMessage"/> to the chat history
/// </summary>
/// <param name="role">The <see cref="ChatMessageRole"/> for the message. Typically, a conversation is formatted with a system message first, followed by alternating user and assistant messages. See <see href="https://platform.openai.com/docs/guides/chat/introduction">the OpenAI docs</see> for more details about usage.</param>
/// <param name="content">The content of the message)</param>
public void AppendMessage(ChatMessageRole role, string content) => this.AppendMessage(new ChatMessage(role, content));
/// <summary>
/// Creates and appends a <see cref="ChatMessage"/> to the chat history with the Role of <see cref="ChatMessageRole.User"/>. The user messages help instruct the assistant. They can be generated by the end users of an application, or set by a developer as an instruction.
/// </summary>
/// <param name="content">Text content generated by the end users of an application, or set by a developer as an instruction</param>
public void AppendUserInput(string content) => this.AppendMessage(new ChatMessage(ChatMessageRole.User, content));
/// <summary>
/// Creates and appends a <see cref="ChatMessage"/> to the chat history with the Role of <see cref="ChatMessageRole.User"/>. The user messages help instruct the assistant. They can be generated by the end users of an application, or set by a developer as an instruction.
/// </summary>
/// <param name="userName">The name of the user in a multi-user chat</param>
/// <param name="content">Text content generated by the end users of an application, or set by a developer as an instruction</param>
public void AppendUserInputWithName(string userName, string content) => this.AppendMessage(new ChatMessage(ChatMessageRole.User, content) { Name = userName });
/// <summary>
/// Creates and appends a <see cref="ChatMessage"/> to the chat history with the Role of <see cref="ChatMessageRole.System"/>. The system message helps set the behavior of the assistant.
/// </summary>
/// <param name="content">text content that helps set the behavior of the assistant</param>
public void AppendSystemMessage(string content) => this.AppendMessage(new ChatMessage(ChatMessageRole.System, content));
/// <summary>
/// Creates and appends a <see cref="ChatMessage"/> to the chat history with the Role of <see cref="ChatMessageRole.Assistant"/>. Assistant messages can be written by a developer to help give examples of desired behavior.
/// </summary>
/// <param name="content">Text content written by a developer to help give examples of desired behavior</param>
public void AppendExampleChatbotOutput(string content) => this.AppendMessage(new ChatMessage(ChatMessageRole.Assistant, content));
#region Non-streaming
/// <summary>
/// Calls the API to get a response, which is appended to the current chat's <see cref="Messages"/> as an <see cref="ChatMessageRole.Assistant"/> <see cref="ChatMessage"/>.
/// </summary>
/// <returns>The string of the response from the chatbot API</returns>
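/// <example>
/// A minimal sketch, assuming <c>chat</c> is an existing <see cref="Conversation"/> (the prompt text is illustrative):
/// <code>
/// chat.AppendUserInput("What is 2 + 2?");
/// string reply = await chat.GetResponseFromChatbotAsync();
/// Console.WriteLine(reply);
/// </code>
/// </example>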
public async Task<string> GetResponseFromChatbotAsync()
{
ChatRequest req = new ChatRequest(RequestParameters);
req.Messages = _Messages.ToList();
var res = await _endpoint.CreateChatCompletionAsync(req);
MostRecentApiResult = res;
if (res.Choices.Count > 0)
{
var newMsg = res.Choices[0].Message;
AppendMessage(newMsg);
return newMsg.Content;
}
return null;
}
/// <summary>
/// OBSOLETE: GetResponseFromChatbot() has been renamed to <see cref="GetResponseFromChatbotAsync"/> to follow .NET naming guidelines. This alias will be removed in a future version.
/// </summary>
/// <returns>The string of the response from the chatbot API</returns>
[Obsolete("Conversation.GetResponseFromChatbot() has been renamed to GetResponseFromChatbotAsync to follow .NET naming guidelines. Please update any references to GetResponseFromChatbotAsync(). This alias will be removed in a future version.", false)]
public Task<string> GetResponseFromChatbot() => GetResponseFromChatbotAsync();
#endregion
#region Streaming
/// <summary>
/// Calls the API to get a response, which is appended to the current chat's <see cref="Messages"/> as an <see cref="ChatMessageRole.Assistant"/> <see cref="ChatMessage"/>, and streams the results to the <paramref name="resultHandler"/> as they come in. <br/>
/// If you are using C# 8 or later with support for async enumerables, you may prefer the cleaner syntax of <see cref="StreamResponseEnumerableFromChatbotAsync"/> instead.
/// </summary>
/// <param name="resultHandler">An action to be called as each new result arrives.</param>
public async Task StreamResponseFromChatbotAsync(Action<string> resultHandler)
{
await foreach (string res in StreamResponseEnumerableFromChatbotAsync())
{
resultHandler(res);
}
}
/// <summary>
/// Calls the API to get a response, which is appended to the current chat's <see cref="Messages"/> as an <see cref="ChatMessageRole.Assistant"/> <see cref="ChatMessage"/>, and streams the results to the <paramref name="resultHandler"/> as they come in. <br/>
/// If you are using C# 8 or later with support for async enumerables, you may prefer the cleaner syntax of <see cref="StreamResponseEnumerableFromChatbotAsync"/> instead.
/// </summary>
/// <param name="resultHandler">An action to be called as each new result arrives, which includes the index of the result in the overall result set.</param>
public async Task StreamResponseFromChatbotAsync(Action<int, string> resultHandler)
{
int index = 0;
await foreach (string res in StreamResponseEnumerableFromChatbotAsync())
{
resultHandler(index++, res);
}
}
/// <summary>
/// Calls the API to get a response, which is appended to the current chat's <see cref="Messages"/> as an <see cref="ChatMessageRole.Assistant"/> <see cref="ChatMessage"/>, and streams the results as they come in. <br/>
/// If you are not using C# 8 or later with support for async enumerables, or if you are targeting the .NET Framework, you may need to use <see cref="StreamResponseFromChatbotAsync(Action{string})"/> instead.
/// </summary>
/// <returns>An async enumerable with each of the results as they come in. See <see href="https://docs.microsoft.com/en-us/dotnet/csharp/whats-new/csharp-8#asynchronous-streams"/> for more details on how to consume an async enumerable.</returns>
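/// <example>
/// A sketch of consuming the async stream (requires C# 8 or later), assuming <c>chat</c> is an existing <see cref="Conversation"/> with user input already appended:
/// <code>
/// await foreach (string token in chat.StreamResponseEnumerableFromChatbotAsync())
/// {
///     Console.Write(token);
/// }
/// </code>
/// </example>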
public async IAsyncEnumerable<string> StreamResponseEnumerableFromChatbotAsync()
{
ChatRequest req = new ChatRequest(RequestParameters);
req.Messages = _Messages.ToList();
StringBuilder responseStringBuilder = new StringBuilder();
ChatMessageRole responseRole = null;
await foreach (var res in _endpoint.StreamChatEnumerableAsync(req))
{
if (res.Choices.FirstOrDefault()?.Delta is ChatMessage delta)
{
if (responseRole == null && delta.Role != null)
responseRole = delta.Role;
string deltaContent = delta.Content;
if (!string.IsNullOrEmpty(deltaContent))
{
responseStringBuilder.Append(deltaContent);
yield return deltaContent;
}
}
MostRecentApiResult = res;
}
if (responseRole != null)
{
AppendMessage(responseRole, responseStringBuilder.ToString());
}
}
#endregion
}
}