
Commit b0ddc2d

fix enum
1 parent c4f7873 commit b0ddc2d

3 files changed: +10 -10 lines changed


src/__tests__/Client.test.ts

Lines changed: 5 additions & 5 deletions
@@ -68,7 +68,7 @@ describe('client', () => {
 frequencyPenalty: 0,
 modelName: 'gpt-3.5-turbo',
 presencePenalty: 0,
-responseFormat: 'text',
+responseFormat: 'TEXT',
 temperature: 0.7,
 topP: 1,
 seed: null,
@@ -111,7 +111,7 @@ describe('client', () => {
 frequencyPenalty: 0,
 modelName: 'gpt-3.5-turbo',
 presencePenalty: 0,
-responseFormat: 'text',
+responseFormat: 'TEXT',
 temperature: 0.7,
 topP: 1,
 seed: null,
@@ -154,7 +154,7 @@ describe('client', () => {
 frequencyPenalty: 0,
 modelName: 'gpt-3.5-turbo',
 presencePenalty: 0,
-responseFormat: 'text',
+responseFormat: 'TEXT',
 temperature: 0.7,
 topP: 1,
 seed: null,
@@ -197,7 +197,7 @@ describe('client', () => {
 frequencyPenalty: 0,
 modelName: 'gpt-3.5-turbo',
 presencePenalty: 0,
-responseFormat: 'text',
+responseFormat: 'TEXT',
 temperature: 0.7,
 topP: 1,
 seed: null,
@@ -240,7 +240,7 @@ describe('client', () => {
 frequencyPenalty: 0,
 modelName: 'gpt-3.5-turbo',
 presencePenalty: 0,
-responseFormat: 'text',
+responseFormat: 'TEXT',
 temperature: 0.7,
 topP: 1,
 seed: null,

src/helpers/__tests__/template.test.ts

Lines changed: 4 additions & 4 deletions
@@ -158,7 +158,7 @@ describe('template helpers', () => {
 topP: 1.0,
 presencePenalty: 0.0,
 frequencyPenalty: 0.0,
-responseFormat: 'json',
+responseFormat: 'JSON',
 seed: 0
 },
 promptTools: [],
@@ -182,7 +182,7 @@ describe('template helpers', () => {
 topP: 1.0,
 presencePenalty: 0.0,
 frequencyPenalty: 0.0,
-responseFormat: 'json',
+responseFormat: 'JSON',
 seed: 0
 },
 promptTools: [],
@@ -207,7 +207,7 @@ describe('template helpers', () => {
 topP: 1.0,
 presencePenalty: 0.0,
 frequencyPenalty: 0.0,
-responseFormat: 'json',
+responseFormat: 'JSON',
 seed: 0
 },
 promptTools: [],
@@ -228,7 +228,7 @@ describe('template helpers', () => {
 topP: 1.0,
 presencePenalty: 0.0,
 frequencyPenalty: 0.0,
-responseFormat: 'json',
+responseFormat: 'JSON',
 seed: 0
 },
 promptTools: [],

src/helpers/openAi.ts

Lines changed: 1 addition & 1 deletion
@@ -65,7 +65,7 @@ export const mapPromptToOpenAIConfig = (promptConfig: PromptConfiguration): Chat
 frequency_penalty: promptParameters.frequencyPenalty,
 tool_choice: mapToolChoiceToOpenAI(promptTools, promptParameters.toolChoice),
 response_format: {
-  type: promptParameters.responseFormat === 'json' ? 'json_object' : 'text'
+  type: promptParameters.responseFormat === 'JSON' ? 'json_object' : 'text'
 },
 tools: promptTools.map((tool) => mapToolToOpenAi(tool))
 }
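For context, a minimal sketch of the relationship this commit keeps consistent: the tests now expect the uppercase enum values 'TEXT' and 'JSON', and the OpenAI mapper only treats the uppercase 'JSON' as a JSON response. The ResponseFormat type, PromptParametersSketch interface, and toOpenAIResponseFormat helper below are illustrative assumptions, not names taken from the repository.

// Sketch only: the ResponseFormat union and helper names are assumptions.
type ResponseFormat = 'TEXT' | 'JSON'

interface PromptParametersSketch {
  responseFormat: ResponseFormat
}

// Mirrors the updated comparison in mapPromptToOpenAIConfig: only the
// uppercase 'JSON' value selects OpenAI's json_object response format;
// anything else falls back to plain text.
const toOpenAIResponseFormat = (params: PromptParametersSketch) => ({
  type: params.responseFormat === 'JSON' ? 'json_object' : 'text'
})

// toOpenAIResponseFormat({ responseFormat: 'JSON' }) // -> { type: 'json_object' }
// toOpenAIResponseFormat({ responseFormat: 'TEXT' }) // -> { type: 'text' }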
