Skip to content

Commit eb23c26

Browse files
committed
Improve chat conversations and testing
1 parent 3057a2f commit eb23c26

File tree

4 files changed

+65
-12
lines changed

4 files changed

+65
-12
lines changed

OpenAI_API/Chat/ChatEndpoint.cs

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -32,10 +32,11 @@ internal ChatEndpoint(OpenAIAPI api) : base(api) { }
3232
/// <summary>
/// Creates an ongoing chat which can easily encapsulate the conversation. This is the simplest way to use the Chat endpoint.
/// </summary>
/// <param name="defaultChatRequestArgs">Allows setting the parameters to use when calling the ChatGPT API. Can be useful for setting temperature, presence_penalty, and more. See <see href="https://platform.openai.com/docs/api-reference/chat/create">OpenAI documentation for a list of possible parameters to tweak.</see></param>
/// <returns>A new <see cref="Conversation"/> bound to this endpoint.</returns>
public Conversation CreateConversation(ChatRequest defaultChatRequestArgs = null)
{
	// When the caller supplies no request arguments, fall back to the endpoint-wide defaults.
	ChatRequest requestArgs = defaultChatRequestArgs ?? DefaultChatRequestArgs;
	return new Conversation(this, defaultChatRequestArgs: requestArgs);
}
4041

4142
#region Non-streaming

OpenAI_API/Chat/Conversation.cs

Lines changed: 29 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -49,7 +49,7 @@ public OpenAI_API.Models.Model Model
4949
/// </summary>
5050
/// <param name="endpoint">A reference to the API endpoint, needed for API requests. Generally should be <see cref="OpenAIAPI.Chat"/>.</param>
5151
/// <param name="model">Optionally specify the model to use for ChatGPT requests. If not specified, used <paramref name="defaultChatRequestArgs"/>.Model or falls back to <see cref="OpenAI_API.Models.Model.ChatGPTTurbo"/></param>
52-
/// <param name="defaultChatRequestArgs">Allows setting the parameters to use when calling the ChatGPT API. Can be useful for setting temperature, presence_penalty, and more. <see href="https://platform.openai.com/docs/api-reference/chat/create">Se OpenAI documentation for a list of possible parameters to tweak.</see></param>
52+
/// <param name="defaultChatRequestArgs">Allows setting the parameters to use when calling the ChatGPT API. Can be useful for setting temperature, presence_penalty, and more. See <see href="https://platform.openai.com/docs/api-reference/chat/create">OpenAI documentation for a list of possible parameters to tweak.</see></param>
5353
public Conversation(ChatEndpoint endpoint, OpenAI_API.Models.Model model = null, ChatRequest defaultChatRequestArgs = null)
5454
{
5555
RequestParameters = new ChatRequest(defaultChatRequestArgs);
@@ -102,6 +102,8 @@ public void AppendMessage(ChatMessage message)
102102
/// <param name="content">Text content written by a developer to help give examples of desired behavior</param>
103103
public void AppendExampleChatbotOutput(string content) => this.AppendMessage(new ChatMessage(ChatMessageRole.Assistant, content));
104104

105+
#region Non-streaming
106+
105107
/// <summary>
106108
/// Calls the API to get a response, which is appended to the current chat's <see cref="Messages"/> as an <see cref="ChatMessageRole.Assistant"/> <see cref="ChatMessage"/>.
107109
/// </summary>
@@ -117,12 +119,16 @@ public async Task<string> GetResponseFromChatbot()
117119
if (res.Choices.Count > 0)
118120
{
119121
var newMsg = res.Choices[0].Message;
120-
AppendMessage(res.Choices[0].Message);
121-
return res.Choices[0].Message.Content;
122+
AppendMessage(newMsg);
123+
return newMsg.Content;
122124
}
123125
return null;
124126
}
125127

128+
#endregion
129+
130+
#region Streaming
131+
126132
/// <summary>
127133
/// Calls the API to get a response, which is appended to the current chat's <see cref="Messages"/> as an <see cref="ChatMessageRole.Assistant"/> <see cref="ChatMessage"/>, and streams the results to the <paramref name="resultHandler"/> as they come in. <br/>
128134
/// If you are on the latest C# supporting async enumerables, you may prefer the cleaner syntax of <see cref="StreamResponseEnumerableFromChatbotAsync"/> instead.
@@ -136,6 +142,20 @@ public async Task StreamResponseFromChatbotAsync(Action<string> resultHandler)
136142
}
137143
}
138144

145+
/// <summary>
/// Calls the API to get a response, which is appended to the current chat's <see cref="Messages"/> as an <see cref="ChatMessageRole.Assistant"/> <see cref="ChatMessage"/>, and streams the results to the <paramref name="resultHandler"/> as they come in. <br/>
/// If you are on the latest C# supporting async enumerables, you may prefer the cleaner syntax of <see cref="StreamResponseEnumerableFromChatbotAsync"/> instead.
/// </summary>
/// <param name="resultHandler">An action to be called as each new result arrives, which includes the index of the result in the overall result set.</param>
public async Task StreamResponseFromChatbotAsync(Action<int, string> resultHandler)
{
	// Zero-based position of each streamed chunk, handed to the caller alongside the text.
	int partIndex = 0;
	await foreach (string chunk in StreamResponseEnumerableFromChatbotAsync())
	{
		resultHandler(partIndex, chunk);
		partIndex++;
	}
}
158+
139159
/// <summary>
140160
/// Calls the API to get a response, which is appended to the current chat's <see cref="Messages"/> as an <see cref="ChatMessageRole.Assistant"/> <see cref="ChatMessage"/>, and streams the results as they come in. <br/>
141161
/// If you are not using C# 8 supporting async enumerables or if you are using the .NET Framework, you may need to use <see cref="StreamResponseFromChatbotAsync"/> instead.
@@ -153,7 +173,9 @@ public async IAsyncEnumerable<string> StreamResponseEnumerableFromChatbotAsync()
153173
{
154174
if (res.Choices.FirstOrDefault()?.Delta is ChatMessage delta)
155175
{
156-
responseRole = delta.Role;
176+
if (delta.Role != null)
177+
responseRole = delta.Role;
178+
157179
string deltaContent = delta.Content;
158180

159181
if (!string.IsNullOrEmpty(deltaContent))
@@ -162,12 +184,15 @@ public async IAsyncEnumerable<string> StreamResponseEnumerableFromChatbotAsync()
162184
yield return deltaContent;
163185
}
164186
}
187+
MostResentAPIResult = res;
165188
}
166189

167190
if (responseRole != null)
168191
{
169192
AppendMessage(responseRole, responseStringBuilder.ToString());
170193
}
171194
}
195+
196+
#endregion
172197
}
173198
}

OpenAI_API/Chat/IChatEndpoint.cs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,7 @@ public interface IChatEndpoint
1717
Task<ChatResult> CreateChatCompletionAsync(IList<ChatMessage> messages, Model model = null, double? temperature = null, double? top_p = null, int? numOutputs = null, int? max_tokens = null, double? frequencyPenalty = null, double? presencePenalty = null, IReadOnlyDictionary<string, float> logitBias = null, params string[] stopSequences);
1818
Task<ChatResult> CreateChatCompletionAsync(params ChatMessage[] messages);
1919
Task<ChatResult> CreateChatCompletionAsync(params string[] userMessages);
20-
Conversation CreateConversation();
20+
Conversation CreateConversation(ChatRequest defaultChatRequestArgs = null);
2121
Task StreamChatAsync(ChatRequest request, Action<ChatResult> resultHandler);
2222
IAsyncEnumerable<ChatResult> StreamChatEnumerableAsync(ChatRequest request);
2323
IAsyncEnumerable<ChatResult> StreamChatEnumerableAsync(IList<ChatMessage> messages, Model model = null, double? temperature = null, double? top_p = null, int? numOutputs = null, int? max_tokens = null, double? frequencyPenalty = null, double? presencePenalty = null, IReadOnlyDictionary<string, float> logitBias = null, params string[] stopSequences);

OpenAI_Tests/ChatEndpointTests.cs

Lines changed: 32 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -106,11 +106,6 @@ public void ChatBackAndForth()
106106
Assert.NotNull(res);
107107
Assert.IsNotEmpty(res);
108108
Assert.AreEqual("No", res.Trim());
109-
110-
foreach (ChatMessage msg in chat.Messages)
111-
{
112-
Console.WriteLine($"{msg.Role}: {msg.Content}");
113-
}
114109
}
115110

116111
[Test]
@@ -140,5 +135,37 @@ public async Task StreamCompletionEnumerableAsync_ShouldStreamData()
140135
Assert.That(chatResults.Select(cr => cr.Choices[0].Delta.Content).Count(c => !string.IsNullOrEmpty(c)) > 50);
141136
}
142137

138+
[Test]
public async Task StreamingConversation()
{
	var api = new OpenAI_API.OpenAIAPI();

	var chat = api.Chat.CreateConversation();
	chat.RequestParameters.MaxTokens = 500;
	chat.RequestParameters.Temperature = 0.2;
	chat.Model = Model.ChatGPTTurbo;

	chat.AppendSystemMessage("You are a helpful assistant who is really good at explaining things to students.");
	chat.AppendUserInput("Please explain to me how mountains are formed in great detail.");

	string result = "";
	int streamParts = 0;

	await foreach (var streamResultPart in chat.StreamResponseEnumerableFromChatbotAsync())
	{
		result += streamResultPart;
		streamParts++;
	}

	Assert.NotNull(result);
	Assert.IsNotEmpty(result);
	Assert.That(result.ToLower().Contains("mountains"));
	Assert.Greater(result.Length, 200);
	Assert.Greater(streamParts, 5);

	// The streamed reply is appended to the conversation after enumeration completes
	// (Conversation.StreamResponseEnumerableFromChatbotAsync calls AppendMessage with the
	// role reported by the API deltas), so the last message is the Assistant's, and its
	// content must match the concatenated stream. Asserting User here would contradict
	// the content assertion on the next line.
	Assert.AreEqual(ChatMessageRole.Assistant, chat.Messages.Last().Role);
	Assert.AreEqual(result, chat.Messages.Last().Content);
}
169+
143170
}
144171
}

0 commit comments

Comments (0)