
Commit 058a5a7

Add support for json response format
1 parent 816da99 commit 058a5a7


2 files changed: +85 −2 lines changed


OpenAI_API/Chat/ChatRequest.cs

Lines changed: 41 additions & 1 deletion
@@ -1,6 +1,10 @@
 using Newtonsoft.Json;
+using Newtonsoft.Json.Linq;
+using OpenAI_API.Models;
 using System;
 using System.Collections.Generic;
+using System.ComponentModel;
+using System.IO;
 using System.Linq;
 using System.Text;

@@ -119,6 +123,28 @@ public string StopSequence
         [JsonProperty("user")]
         public string user { get; set; }

+        /// <summary>
+        /// An object specifying the format that the model must output. Setting to <see cref="ResponseFormats.JsonObject"/> enables JSON mode, which guarantees the message the model generates is valid JSON, assuming that the <see cref="ChatChoice.FinishReason"/> is not "length".
+        /// Important: when using JSON mode, you must also instruct the model to produce JSON yourself via a system or user message. Without this, the model may generate an unending stream of whitespace until the generation reaches the token limit, resulting in a long-running and seemingly "stuck" request. Also note that the message content may be partially cut off if `finish_reason="length"`, which indicates the generation exceeded `max_tokens` or the conversation exceeded the max context length.
+        /// </summary>
+        [JsonIgnore]
+        public string ResponseFormat { get; set; } = "text";
+
+        /// <summary>
+        /// This is only used for serializing the request into JSON, do not use it directly.
+        /// </summary>
+        [JsonProperty("response_format", DefaultValueHandling = DefaultValueHandling.Ignore)]
+        public Dictionary<string, string> ResponseFormatRaw
+        {
+            get
+            {
+                if (ResponseFormat == null || ResponseFormat == ResponseFormats.Text)
+                    return null;
+                else
+                    return new Dictionary<string, string>() { { "type", ResponseFormat } };
+            }
+        }
+
         /// <summary>
         /// Creates a new, empty <see cref="ChatRequest"/>
         /// </summary>
@@ -133,7 +159,6 @@ public ChatRequest(ChatRequest basedOn)
         {
             if (basedOn == null)
                 return;
-
             this.Model = basedOn.Model;
             this.Messages = basedOn.Messages;
             this.Temperature = basedOn.Temperature;
@@ -145,5 +170,20 @@ public ChatRequest(ChatRequest basedOn)
             this.PresencePenalty = basedOn.PresencePenalty;
             this.LogitBias = basedOn.LogitBias;
         }
+
+        /// <summary>
+        /// Options for the <see cref="ChatRequest.ResponseFormat"/> property
+        /// </summary>
+        public static class ResponseFormats
+        {
+            /// <summary>
+            /// The default response format, which may be any type of text
+            /// </summary>
+            public const string Text = "text";
+            /// <summary>
+            /// The response format is guaranteed to be valid JSON, assuming that the <see cref="ChatChoice.FinishReason"/> is not "length"
+            /// </summary>
+            public const string JsonObject = "json_object";
+        }
     }
 }
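
Read together, the two new members work like this: callers set the [JsonIgnore]'d ResponseFormat string, and ResponseFormatRaw translates it into the `"response_format": { "type": "json_object" }` object sent to the API, while the default "text" value serializes to nothing because ResponseFormatRaw returns null and DefaultValueHandling.Ignore drops it. A minimal sketch of that behavior, serializing a request directly with Newtonsoft.Json's JsonConvert (the endpoint's own serialization settings may differ, and the prompt text here is purely illustrative):

using System;
using Newtonsoft.Json;
using OpenAI_API.Chat;

class ResponseFormatSketch
{
    static void Main()
    {
        // Enable JSON mode; per the XML docs above, the prompt must also ask for JSON explicitly.
        var request = new ChatRequest()
        {
            Model = "gpt-3.5-turbo-1106",
            ResponseFormat = ChatRequest.ResponseFormats.JsonObject,
            Messages = new ChatMessage[] {
                new ChatMessage(ChatMessageRole.System, "You are a helpful assistant designed to output JSON."),
                new ChatMessage(ChatMessageRole.User, "Return a JSON object with a single 'answer' key.")  // illustrative prompt
            }
        };

        // The payload contains "response_format":{"type":"json_object"} (via ResponseFormatRaw),
        // but no "ResponseFormat" field, since that property is marked [JsonIgnore].
        Console.WriteLine(JsonConvert.SerializeObject(request));

        // With the default ResponseFormat ("text"), ResponseFormatRaw returns null and
        // DefaultValueHandling.Ignore omits "response_format" from the payload entirely.
        request.ResponseFormat = ChatRequest.ResponseFormats.Text;
        Console.WriteLine(JsonConvert.SerializeObject(request));
    }
}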

OpenAI_Tests/ChatEndpointTests.cs

Lines changed: 44 additions & 1 deletion
@@ -1,10 +1,12 @@
-using NUnit.Framework;
+using Newtonsoft.Json;
+using NUnit.Framework;
 using OpenAI_API.Chat;
 using OpenAI_API.Completions;
 using OpenAI_API.Models;
 using OpenAI_API.Moderation;
 using System;
 using System.Collections.Generic;
+using System.IO;
 using System.Linq;
 using System.Threading;
 using System.Threading.Tasks;
@@ -453,5 +455,46 @@ public async Task StreamingConversation(string model)
             Assert.AreEqual(result, chat.Messages.Last().Content);
         }

+        [TestCase("gpt-4-1106-preview")]
+        [TestCase("gpt-3.5-turbo-1106")]
+        public void ChatJsonFormat(string model)
+        {
+            var api = new OpenAI_API.OpenAIAPI();
+            ChatRequest chatRequest = new ChatRequest()
+            {
+                Model = model,
+                Temperature = 0.0,
+                MaxTokens = 500,
+                ResponseFormat = ChatRequest.ResponseFormats.JsonObject,
+                Messages = new ChatMessage[] {
+                    new ChatMessage(ChatMessageRole.System, "You are a helpful assistant designed to output JSON."),
+                    new ChatMessage(ChatMessageRole.User, "Who won the world series in 2020? Return JSON of a 'wins' dictionary with the year as the numeric key and the winning team as the string value.")
+                }
+            };
+
+            var results = api.Chat.CreateChatCompletionAsync(chatRequest).Result;
+            Assert.IsNotNull(results);
+
+            Assert.NotNull(results.Object);
+            Assert.NotNull(results.Choices);
+            Assert.NotZero(results.Choices.Count);
+            Assert.AreEqual(ChatMessageRole.Assistant, results.Choices[0].Message.Role);
+            Assert.That(results.Choices.All(c => c.Message.Content.Length > 1));
+            Assert.AreEqual("stop", results.Choices[0].FinishReason);
+
+            using (StringReader stringReader = new StringReader(results.Choices[0].Message.Content))
+            {
+                using (JsonTextReader jsonReader = new JsonTextReader(stringReader))
+                {
+                    var serializer = new JsonSerializer();
+                    var json = serializer.Deserialize<Dictionary<string, Dictionary<int, string>>>(jsonReader);
+                    Assert.NotNull(json);
+                    Assert.IsTrue(json.ContainsKey("wins"));
+                    Assert.IsTrue(json["wins"].ContainsKey(2020));
+                    Assert.AreEqual("Los Angeles Dodgers", json["wins"][2020]);
+                }
+            }
+        }
     }
 }
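
The Deserialize<Dictionary<string, Dictionary<int, string>>> call above relies on the model answering with JSON shaped roughly like the payload below (Json.NET converts the string property name "2020" into the int dictionary key), which is what the final three assertions check:

{ "wins": { "2020": "Los Angeles Dodgers" } }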
