Skip to content

Commit e0de00a

Browse files
authored
Merge pull request #74 from JasonWei512/feature/stream-chat-response
Add streaming methods for Conversation class
2 parents d30ead8 + 44df440 commit e0de00a

File tree

2 files changed

+73
-0
lines changed

2 files changed

+73
-0
lines changed

OpenAI_API/Chat/Conversation.cs

Lines changed: 47 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -122,5 +122,52 @@ public async Task<string> GetResponseFromChatbot()
122122
}
123123
return null;
124124
}
125+
126+
/// <summary>
/// Calls the API to get a response, which is appended to the current chat's <see cref="Messages"/> as an <see cref="ChatMessageRole.Assistant"/> <see cref="ChatMessage"/>, and streams the results to the <paramref name="resultHandler"/> as they come in. <br/>
/// If you are on the latest C# supporting async enumerables, you may prefer the cleaner syntax of <see cref="StreamResponseEnumerableFromChatbotAsync"/> instead.
/// </summary>
/// <param name="resultHandler">An action to be called as each new result arrives.</param>
/// <exception cref="ArgumentNullException">Thrown if <paramref name="resultHandler"/> is null.</exception>
public async Task StreamResponseFromChatbotAsync(Action<string> resultHandler)
{
	// Fail fast with a clear exception rather than a NullReferenceException
	// part-way through the streamed API response.
	if (resultHandler == null)
		throw new ArgumentNullException(nameof(resultHandler));

	await foreach (string res in StreamResponseEnumerableFromChatbotAsync())
	{
		resultHandler(res);
	}
}
138+
139+
/// <summary>
/// Calls the API to get a response, which is appended to the current chat's <see cref="Messages"/> as an <see cref="ChatMessageRole.Assistant"/> <see cref="ChatMessage"/>, and streams the results as they come in. <br/>
/// If you are not using C# 8 supporting async enumerables or if you are using the .NET Framework, you may need to use <see cref="StreamResponseFromChatbotAsync"/> instead.
/// </summary>
/// <returns>An async enumerable with each of the results as they come in. See <see href="https://docs.microsoft.com/en-us/dotnet/csharp/whats-new/csharp-8#asynchronous-streams"/> for more details on how to consume an async enumerable.</returns>
public async IAsyncEnumerable<string> StreamResponseEnumerableFromChatbotAsync()
{
	// Snapshot the conversation so far into a request built from the shared parameters.
	ChatRequest req = new ChatRequest(RequestParameters);
	req.Messages = _Messages.ToList();

	// Accumulate the streamed fragments so the complete reply can be appended
	// to the conversation history once the stream ends.
	StringBuilder responseStringBuilder = new StringBuilder();
	ChatMessageRole responseRole = null;

	await foreach (var res in _endpoint.StreamChatEnumerableAsync(req))
	{
		if (res.Choices.FirstOrDefault()?.Delta is ChatMessage delta)
		{
			// The API only includes the role in the first streamed delta; later
			// deltas have a null Role, so don't overwrite the value we captured
			// (otherwise the reply would never be appended below).
			if (delta.Role != null)
			{
				responseRole = delta.Role;
			}

			string deltaContent = delta.Content;

			if (!string.IsNullOrEmpty(deltaContent))
			{
				responseStringBuilder.Append(deltaContent);
				yield return deltaContent;
			}
		}
	}

	// Record the fully assembled assistant reply in the conversation history.
	if (responseRole != null)
	{
		AppendMessage(responseRole, responseStringBuilder.ToString());
	}
}
125172
}
126173
}

README.md

Lines changed: 26 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -115,6 +115,32 @@ foreach (ChatMessage msg in chat.Messages)
115115
}
116116
```
117117

118+
#### Streaming
119+
120+
Streaming allows you to get results as they are generated, which can help your application feel more responsive.
121+
122+
Using the new C# 8.0 async iterators:
123+
```csharp
124+
var chat = api.Chat.CreateConversation();
125+
chat.AppendUserInput("How to make a hamburger?");
126+
127+
await foreach (var res in chat.StreamResponseEnumerableFromChatbotAsync())
128+
{
129+
Console.Write(res);
130+
}
131+
```
132+
133+
Or, if you are using the classic .NET Framework or C# earlier than 8.0:
134+
```csharp
135+
var chat = api.Chat.CreateConversation();
136+
chat.AppendUserInput("How to make a hamburger?");
137+
138+
await chat.StreamResponseFromChatbotAsync(res =>
139+
{
140+
Console.Write(res);
141+
});
142+
```
143+
118144
#### Chat Endpoint Requests
119145
You can access full control of the Chat API by using the `OpenAIAPI.Chat.CreateChatCompletionAsync()` and related methods.
120146

0 commit comments

Comments
 (0)