Skip to content

Commit ff15456

Browse files
committed
fix tool choice
1 parent 2acaf53 commit ff15456

File tree

4 files changed

+75
-24
lines changed

4 files changed

+75
-24
lines changed

src/__tests__/__snapshots__/Client.test.ts.snap

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -24,7 +24,7 @@ exports[`client getOpenAiPrompt returns mapped prompt 1`] = `
2424
},
2525
"seed": null,
2626
"temperature": 0.7,
27-
"tool_choice": "none",
27+
"tool_choice": undefined,
2828
"tools": undefined,
2929
"top_p": 1,
3030
}
Lines changed: 58 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,58 @@
1+
// Jest Snapshot v1, https://goo.gl/fbAQLP
2+
3+
exports[`openAi helpers mapPromptToOpenAIConfig should map the configuration to OpenAI parameters correctly - tools 1`] = `
4+
{
5+
"frequency_penalty": 0.1,
6+
"max_tokens": 150,
7+
"messages": [
8+
{
9+
"content": "Hello world",
10+
"name": undefined,
11+
"role": "user",
12+
},
13+
],
14+
"model": "text-davinci-002",
15+
"presence_penalty": 0.1,
16+
"response_format": {
17+
"type": "json_object",
18+
},
19+
"seed": 42,
20+
"temperature": 0.7,
21+
"tool_choice": "auto",
22+
"tools": [
23+
{
24+
"function": {
25+
"description": "description",
26+
"name": "exampleTool",
27+
"parameters": {},
28+
},
29+
"type": "function",
30+
},
31+
],
32+
"top_p": 0.5,
33+
}
34+
`;
35+
36+
exports[`openAi helpers mapPromptToOpenAIConfig should map the configuration to OpenAI parameters correctly 1`] = `
37+
{
38+
"frequency_penalty": 0.1,
39+
"max_tokens": 150,
40+
"messages": [
41+
{
42+
"content": "Hello world",
43+
"name": undefined,
44+
"role": "user",
45+
},
46+
],
47+
"model": "text-davinci-002",
48+
"presence_penalty": 0.1,
49+
"response_format": {
50+
"type": "json_object",
51+
},
52+
"seed": 42,
53+
"temperature": 0.7,
54+
"tool_choice": undefined,
55+
"tools": undefined,
56+
"top_p": 0.5,
57+
}
58+
`;

src/helpers/__tests__/openAi.test.ts

Lines changed: 14 additions & 21 deletions
Original file line numberDiff line numberDiff line change
@@ -59,25 +59,18 @@ describe('openAi helpers', () => {
5959
})
6060

6161
describe('mapPromptToOpenAIConfig', () => {
62-
const mockPromptConfig = createPromptConfigurationFixture()
63-
6462
it('should map the configuration to OpenAI parameters correctly', () => {
65-
const result = mapPromptToOpenAIConfig(mockPromptConfig)
66-
expect(result).toEqual({
67-
messages: [{ role: 'user', name: undefined, content: 'Hello world' }],
68-
model: 'text-davinci-002',
69-
top_p: 0.5,
70-
max_tokens: 150,
71-
temperature: 0.7,
72-
seed: 42,
73-
presence_penalty: 0.1,
74-
frequency_penalty: 0.1,
75-
tool_choice: 'none',
76-
response_format: {
77-
type: 'json_object'
78-
},
79-
tools: undefined
80-
})
63+
const result = mapPromptToOpenAIConfig(createPromptConfigurationFixture())
64+
expect(result).toMatchSnapshot()
65+
})
66+
67+
it('should map the configuration to OpenAI parameters correctly - tools', () => {
68+
const result = mapPromptToOpenAIConfig(
69+
createPromptConfigurationFixture({
70+
promptTools: [createPromptToolFixture({ name: 'exampleTool' })]
71+
})
72+
)
73+
expect(result).toMatchSnapshot()
8174
})
8275

8376
// Additional test cases to test other configurations and scenarios can be added here.
@@ -92,9 +85,9 @@ describe('openAi helpers', () => {
9285

9386
it('should return "none" if there are no tools or toolChoice is "none"', () => {
9487
const tools: PromptTool[] = []
95-
expect(mapToolChoiceToOpenAI(tools, 'none')).toBe('none')
96-
expect(mapToolChoiceToOpenAI(tools)).toBe('none')
97-
expect(mapToolChoiceToOpenAI(tools, 'auto')).toBe('none')
88+
expect(mapToolChoiceToOpenAI(tools, 'none')).toBe(undefined)
89+
expect(mapToolChoiceToOpenAI(tools)).toBe(undefined)
90+
expect(mapToolChoiceToOpenAI(tools, 'auto')).toBe(undefined)
9891
})
9992

10093
it('should return tool function object if a valid toolChoice matches a tool', () => {

src/helpers/openAi.ts

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -69,9 +69,9 @@ export const mapMessagesToOpenAI = (promptMessages: PromptMessage[]): ChatComple
6969
})
7070
}
7171

72-
export const mapToolChoiceToOpenAI = (tools: PromptTool[], toolChoice?: string | null): ChatCompletionToolChoiceOption => {
72+
export const mapToolChoiceToOpenAI = (tools: PromptTool[], toolChoice?: string | null): ChatCompletionToolChoiceOption | undefined => {
7373
if (tools.length === 0) {
74-
return 'none'
74+
return undefined
7575
}
7676
if (toolChoice === 'auto' || (!toolChoice && tools.length !== 0)) {
7777
return 'auto'

0 commit comments

Comments
 (0)