Skip to content

Commit 3b1a6d5

Browse files
committed
update schema
1 parent ff15456 commit 3b1a6d5

File tree

9 files changed

+887
-138
lines changed

9 files changed

+887
-138
lines changed

src/Client.ts

Lines changed: 7 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -20,12 +20,12 @@ export default class PromptFoundry {
2020
})
2121
}
2222

23-
public async getRawPrompt({ promptId }: { promptId: string }): Promise<PromptConfiguration> {
24-
return this.client.get<PromptConfiguration>(`/prompts/${promptId}`)
23+
public async getRawPrompt({ id }: { id: string }): Promise<PromptConfiguration> {
24+
return this.client.get<PromptConfiguration>(`/prompts/${id}`)
2525
}
2626

27-
public async getPrompt({ promptId, variables }: { promptId: string; variables: Record<string, string> }): Promise<PromptConfiguration> {
28-
const result = await this.getRawPrompt({ promptId })
27+
public async getPrompt({ id, variables }: { id: string; variables: Record<string, string> }): Promise<PromptConfiguration> {
28+
const result = await this.getRawPrompt({ id })
2929

3030
if (!validatePromptVariables(result, variables)) {
3131
const missingVariables = getMissingPromptVariables(result, variables)
@@ -35,13 +35,13 @@ export default class PromptFoundry {
3535
}
3636

3737
public async getOpenAiPrompt({
38-
promptId,
38+
id,
3939
variables
4040
}: {
41-
promptId: string
41+
id: string
4242
variables: Record<string, string>
4343
}): Promise<ChatCompletionCreateParamsNonStreaming> {
44-
const updatedWithVariables = await this.getPrompt({ promptId, variables })
44+
const updatedWithVariables = await this.getPrompt({ id, variables })
4545

4646
return mapPromptToOpenAIConfig(updatedWithVariables)
4747
}

src/__tests__/Client.test.ts

Lines changed: 36 additions & 30 deletions
Original file line numberDiff line numberDiff line change
@@ -22,8 +22,9 @@ describe('client', () => {
2222
const client = new Client({ apiKey: '123' })
2323

2424
const value = {
25-
promptId: 'HELLO',
26-
promptParameters: {
25+
id: 'HELLO',
26+
name: 'winning',
27+
parameters: {
2728
frequencyPenalty: 0,
2829
modelName: 'gpt-3.5-turbo',
2930
presencePenalty: 0,
@@ -33,7 +34,7 @@ describe('client', () => {
3334
temperature: 0.7,
3435
topP: 1
3536
},
36-
promptMessages: [
37+
messages: [
3738
{
3839
content: 'Hello, world!',
3940
role: 'user'
@@ -54,7 +55,7 @@ describe('client', () => {
5455
// @ts-expect-error - mocking the client's client property
5556
client.client = apiClient
5657

57-
const prompt = await client.getPrompt({ promptId: 'HELLO', variables: {} })
58+
const prompt = await client.getPrompt({ id: 'HELLO', variables: {} })
5859

5960
expect(prompt).toEqual(value)
6061
})
@@ -63,8 +64,9 @@ describe('client', () => {
6364
const client = new Client({ apiKey: '123' })
6465

6566
const value: PromptConfiguration = {
66-
promptId: 'HELLO',
67-
promptParameters: {
67+
id: 'HELLO',
68+
name: 'winning',
69+
parameters: {
6870
frequencyPenalty: 0,
6971
modelName: 'gpt-3.5-turbo',
7072
presencePenalty: 0,
@@ -75,7 +77,7 @@ describe('client', () => {
7577
maxTokens: null,
7678
toolChoice: 'auto'
7779
},
78-
promptMessages: [
80+
messages: [
7981
{
8082
content: 'Hello, world!',
8183
role: PromptMessageRoleEnum.USER,
@@ -89,7 +91,7 @@ describe('client', () => {
8991
toolCallId: null
9092
}
9193
],
92-
promptTools: []
94+
tools: []
9395
}
9496

9597
const apiClient: APIClient = {
@@ -101,17 +103,18 @@ describe('client', () => {
101103
// @ts-expect-error - mocking the client's client property
102104
client.client = apiClient
103105

104-
const prompt = await client.getPrompt({ promptId: 'HELLO', variables: { name: 'bob' } })
106+
const prompt = await client.getPrompt({ id: 'HELLO', variables: { name: 'bob' } })
105107

106-
expect(prompt.promptMessages[1].content).toEqual('Hi there bob!')
108+
expect(prompt.messages[1].content).toEqual('Hi there bob!')
107109
})
108110

109111
it('should throw error if missing variables', async () => {
110112
const client = new Client({ apiKey: '123' })
111113

112114
const value: PromptConfiguration = {
113-
promptId: 'HELLO',
114-
promptParameters: {
115+
id: 'HELLO',
116+
name: 'winning',
117+
parameters: {
115118
frequencyPenalty: 0,
116119
modelName: 'gpt-3.5-turbo',
117120
presencePenalty: 0,
@@ -122,7 +125,7 @@ describe('client', () => {
122125
maxTokens: null,
123126
toolChoice: 'auto'
124127
},
125-
promptMessages: [
128+
messages: [
126129
{
127130
content: 'Hello, world!',
128131
role: PromptMessageRoleEnum.USER,
@@ -136,7 +139,7 @@ describe('client', () => {
136139
toolCallId: null
137140
}
138141
],
139-
promptTools: []
142+
tools: []
140143
}
141144

142145
const apiClient: APIClient = {
@@ -148,7 +151,7 @@ describe('client', () => {
148151
// @ts-expect-error - mocking the client's client property
149152
client.client = apiClient
150153

151-
await expect(() => client.getPrompt({ promptId: 'HELLO', variables: {} })).rejects.toThrowErrorMatchingSnapshot()
154+
await expect(() => client.getPrompt({ id: 'HELLO', variables: {} })).rejects.toThrowErrorMatchingSnapshot()
152155
})
153156
})
154157

@@ -157,8 +160,9 @@ describe('client', () => {
157160
const client = new Client({ apiKey: '123' })
158161

159162
const value: PromptConfiguration = {
160-
promptId: 'HELLO',
161-
promptParameters: {
163+
id: 'HELLO',
164+
name: 'winning',
165+
parameters: {
162166
frequencyPenalty: 0,
163167
modelName: 'gpt-3.5-turbo',
164168
presencePenalty: 0,
@@ -169,7 +173,7 @@ describe('client', () => {
169173
maxTokens: null,
170174
toolChoice: 'auto'
171175
},
172-
promptMessages: [
176+
messages: [
173177
{
174178
content: 'Hello, world!',
175179
role: PromptMessageRoleEnum.USER,
@@ -183,7 +187,7 @@ describe('client', () => {
183187
toolCallId: null
184188
}
185189
],
186-
promptTools: []
190+
tools: []
187191
}
188192

189193
const apiClient: APIClient = {
@@ -195,7 +199,7 @@ describe('client', () => {
195199
// @ts-expect-error - mocking the client's client property
196200
client.client = apiClient
197201

198-
const prompt = await client.getOpenAiPrompt({ promptId: 'HELLO', variables: {} })
202+
const prompt = await client.getOpenAiPrompt({ id: 'HELLO', variables: {} })
199203

200204
expect(prompt).toMatchSnapshot()
201205
})
@@ -204,8 +208,9 @@ describe('client', () => {
204208
const client = new Client({ apiKey: '123' })
205209

206210
const value: PromptConfiguration = {
207-
promptId: 'HELLO',
208-
promptParameters: {
211+
id: 'HELLO',
212+
name: 'winning',
213+
parameters: {
209214
frequencyPenalty: 0,
210215
modelName: 'gpt-3.5-turbo',
211216
presencePenalty: 0,
@@ -216,7 +221,7 @@ describe('client', () => {
216221
maxTokens: null,
217222
toolChoice: 'auto'
218223
},
219-
promptMessages: [
224+
messages: [
220225
{
221226
content: 'Hello, world!',
222227
role: PromptMessageRoleEnum.USER,
@@ -230,7 +235,7 @@ describe('client', () => {
230235
toolCallId: null
231236
}
232237
],
233-
promptTools: []
238+
tools: []
234239
}
235240

236241
const apiClient: APIClient = {
@@ -242,7 +247,7 @@ describe('client', () => {
242247
// @ts-expect-error - mocking the client's client property
243248
client.client = apiClient
244249

245-
const prompt = await client.getOpenAiPrompt({ promptId: 'HELLO', variables: { name: 'bob' } })
250+
const prompt = await client.getOpenAiPrompt({ id: 'HELLO', variables: { name: 'bob' } })
246251

247252
expect(prompt.messages[1].content).toEqual('Hi there bob!')
248253
})
@@ -251,8 +256,9 @@ describe('client', () => {
251256
const client = new Client({ apiKey: '123' })
252257

253258
const value: PromptConfiguration = {
254-
promptId: 'HELLO',
255-
promptParameters: {
259+
id: 'HELLO',
260+
name: 'winning',
261+
parameters: {
256262
frequencyPenalty: 0,
257263
modelName: 'gpt-3.5-turbo',
258264
presencePenalty: 0,
@@ -263,7 +269,7 @@ describe('client', () => {
263269
maxTokens: null,
264270
toolChoice: 'auto'
265271
},
266-
promptMessages: [
272+
messages: [
267273
{
268274
content: 'Hello, world!',
269275
role: PromptMessageRoleEnum.USER,
@@ -277,7 +283,7 @@ describe('client', () => {
277283
toolCallId: null
278284
}
279285
],
280-
promptTools: []
286+
tools: []
281287
}
282288

283289
const apiClient: APIClient = {
@@ -289,7 +295,7 @@ describe('client', () => {
289295
// @ts-expect-error - mocking the client's client property
290296
client.client = apiClient
291297

292-
await expect(() => client.getOpenAiPrompt({ promptId: 'HELLO', variables: {} })).rejects.toThrowErrorMatchingSnapshot()
298+
await expect(() => client.getOpenAiPrompt({ id: 'HELLO', variables: {} })).rejects.toThrowErrorMatchingSnapshot()
293299
})
294300
})
295301
})

src/helpers/__tests__/openAi.test.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -67,7 +67,7 @@ describe('openAi helpers', () => {
6767
it('should map the configuration to OpenAI parameters correctly - tools', () => {
6868
const result = mapPromptToOpenAIConfig(
6969
createPromptConfigurationFixture({
70-
promptTools: [createPromptToolFixture({ name: 'exampleTool' })]
70+
tools: [createPromptToolFixture({ name: 'exampleTool' })]
7171
})
7272
)
7373
expect(result).toMatchSnapshot()

src/helpers/__tests__/template.test.ts

Lines changed: 32 additions & 22 deletions
Original file line numberDiff line numberDiff line change
@@ -154,68 +154,76 @@ describe('template helpers', () => {
154154
describe('renderPromptWithVariables', () => {
155155
it('renders a prompt with one variable', () => {
156156
const prompt: PromptConfiguration = {
157-
promptId: 'hi',
158-
promptParameters: {
157+
id: 'hi',
158+
name: 'Hi',
159+
parameters: {
159160
modelName: 'davinci',
160161
maxTokens: 100,
161162
temperature: 0.5,
162163
topP: 1.0,
163164
presencePenalty: 0.0,
164165
frequencyPenalty: 0.0,
165166
responseFormat: 'JSON',
166-
seed: 0
167+
seed: 0,
168+
toolChoice: 'none'
167169
},
168-
promptTools: [],
169-
promptMessages: [{ role: PromptMessageRoleEnum.USER, content: 'Hello, {{name}}!', toolCalls: null, toolCallId: null }]
170+
tools: [],
171+
messages: [{ role: PromptMessageRoleEnum.USER, content: 'Hello, {{name}}!', toolCalls: null, toolCallId: null }]
170172
}
171173
const variables = { name: 'Alice' }
172174

173175
expect(renderPromptWithVariables(prompt, variables)).toEqual({
174176
...prompt,
175-
promptMessages: [{ role: PromptMessageRoleEnum.USER, content: 'Hello, Alice!', toolCalls: null, toolCallId: null }]
177+
messages: [{ role: PromptMessageRoleEnum.USER, content: 'Hello, Alice!', toolCalls: null, toolCallId: null }]
176178
})
177179
})
178180

179181
it('renders a prompt with no variables', () => {
180182
const prompt: PromptConfiguration = {
181-
promptId: 'hi',
182-
promptParameters: {
183+
id: 'hi',
184+
name: 'Hi',
185+
186+
parameters: {
183187
modelName: 'davinci',
184188
maxTokens: 100,
185189
temperature: 0.5,
186190
topP: 1.0,
187191
presencePenalty: 0.0,
188192
frequencyPenalty: 0.0,
189193
responseFormat: 'JSON',
190-
seed: 0
194+
seed: 0,
195+
toolChoice: 'none'
191196
},
192-
promptTools: [],
193-
promptMessages: [{ role: PromptMessageRoleEnum.USER, content: 'Hello!', toolCalls: null, toolCallId: null }]
197+
tools: [],
198+
messages: [{ role: PromptMessageRoleEnum.USER, content: 'Hello!', toolCalls: null, toolCallId: null }]
194199
}
195200
const variables = {}
196201

197202
expect(renderPromptWithVariables(prompt, variables)).toEqual({
198203
...prompt,
199-
promptMessages: [{ role: PromptMessageRoleEnum.USER, content: 'Hello!', toolCalls: null, toolCallId: null }]
204+
messages: [{ role: PromptMessageRoleEnum.USER, content: 'Hello!', toolCalls: null, toolCallId: null }]
200205
})
201206
})
202207
})
203208
describe('validatePromptVariables', () => {
204209
it('returns true if all configured variables are in passed in variables', () => {
205210
const prompt: PromptConfiguration = {
206-
promptId: 'hi',
207-
promptParameters: {
211+
id: 'hi',
212+
name: 'Hi',
213+
214+
parameters: {
208215
modelName: 'davinci',
209216
maxTokens: 100,
210217
temperature: 0.5,
211218
topP: 1.0,
212219
presencePenalty: 0.0,
213220
frequencyPenalty: 0.0,
214221
responseFormat: 'JSON',
215-
seed: 0
222+
seed: 0,
223+
toolChoice: 'none'
216224
},
217-
promptTools: [],
218-
promptMessages: [{ role: PromptMessageRoleEnum.USER, content: 'Hello, {{name}}!', toolCalls: null, toolCallId: null }]
225+
tools: [],
226+
messages: [{ role: PromptMessageRoleEnum.USER, content: 'Hello, {{name}}!', toolCalls: null, toolCallId: null }]
219227
}
220228
const variables = { name: 'Alice' }
221229

@@ -224,19 +232,21 @@ describe('template helpers', () => {
224232

225233
it('returns false if configured variables are missing in passed in variables', () => {
226234
const prompt: PromptConfiguration = {
227-
promptId: 'hi',
228-
promptParameters: {
235+
id: 'hi',
236+
name: 'Hi',
237+
parameters: {
229238
modelName: 'davinci',
230239
maxTokens: 100,
231240
temperature: 0.5,
232241
topP: 1.0,
233242
presencePenalty: 0.0,
234243
frequencyPenalty: 0.0,
235244
responseFormat: 'JSON',
236-
seed: 0
245+
seed: 0,
246+
toolChoice: 'none'
237247
},
238-
promptTools: [],
239-
promptMessages: [{ role: PromptMessageRoleEnum.USER, content: 'Hello, {{name}}!', toolCalls: null, toolCallId: null }]
248+
tools: [],
249+
messages: [{ role: PromptMessageRoleEnum.USER, content: 'Hello, {{name}}!', toolCalls: null, toolCallId: null }]
240250
}
241251
const variables = {}
242252

0 commit comments

Comments
 (0)