Skip to content

Commit d2fccc5

Browse files
committed
add failing tests for embeddings api support
1 parent e403d89 commit d2fccc5

File tree

5 files changed

+174
-2
lines changed

5 files changed: +174 −2 lines changed

dev-packages/node-integration-tests/suites/tracing/openai/scenario.mjs

Lines changed: 40 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -74,6 +74,28 @@ class MockOpenAI {
7474
};
7575
},
7676
};
77+
78+
this.embeddings = {
79+
create: async params => {
80+
await new Promise(resolve => setTimeout(resolve, 10));
81+
82+
return {
83+
object: 'list',
84+
data: [
85+
{
86+
object: 'embedding',
87+
embedding: [0.1, 0.2, 0.3],
88+
index: 0,
89+
},
90+
],
91+
model: params.model,
92+
usage: {
93+
prompt_tokens: 10,
94+
total_tokens: 10,
95+
},
96+
};
97+
},
98+
};
7799
}
78100

79101
// Create a mock streaming response for chat completions
@@ -312,6 +334,24 @@ async function run() {
312334
} catch {
313335
// Error is expected and handled
314336
}
337+
338+
// Seventh test: embeddings API
339+
await client.embeddings.create({
340+
input: 'Embedding test!',
341+
model: 'text-embedding-3-small',
342+
dimensions: 1536,
343+
encoding_format: 'float',
344+
});
345+
346+
// Eighth test: embeddings API error model
347+
try {
348+
await client.embeddings.create({
349+
input: 'Error embedding test!',
350+
model: 'error-model',
351+
});
352+
} catch {
353+
// Error is expected and handled
354+
}
315355
});
316356
}
317357

dev-packages/node-integration-tests/suites/tracing/openai/test.ts

Lines changed: 68 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -144,6 +144,26 @@ describe('OpenAI integration', () => {
144144
origin: 'auto.ai.openai',
145145
status: 'internal_error',
146146
}),
147+
// Seventh span - embeddings API
148+
expect.objectContaining({
149+
data: {
150+
'gen_ai.operation.name': 'embeddings',
151+
'sentry.op': 'gen_ai.embeddings',
152+
'sentry.origin': 'auto.ai.openai',
153+
'gen_ai.system': 'openai',
154+
'gen_ai.request.model': 'text-embedding-3-small',
155+
},
156+
}),
157+
// Eighth span - embeddings API error model
158+
expect.objectContaining({
159+
data: {
160+
'gen_ai.operation.name': 'embeddings',
161+
'sentry.op': 'gen_ai.embeddings',
162+
'sentry.origin': 'auto.ai.openai',
163+
'gen_ai.system': 'openai',
164+
'gen_ai.request.model': 'error-model',
165+
},
166+
}),
147167
]),
148168
};
149169

@@ -297,6 +317,26 @@ describe('OpenAI integration', () => {
297317
origin: 'auto.ai.openai',
298318
status: 'internal_error',
299319
}),
320+
// Seventh span - embeddings API
321+
expect.objectContaining({
322+
data: {
323+
'gen_ai.operation.name': 'embeddings',
324+
'sentry.op': 'gen_ai.embeddings',
325+
'sentry.origin': 'auto.ai.openai',
326+
'gen_ai.system': 'openai',
327+
'gen_ai.request.model': 'text-embedding-3-small',
328+
},
329+
}),
330+
// Eighth span - embeddings API error model
331+
expect.objectContaining({
332+
data: {
333+
'gen_ai.operation.name': 'embeddings',
334+
'sentry.op': 'gen_ai.embeddings',
335+
'sentry.origin': 'auto.ai.openai',
336+
'gen_ai.system': 'openai',
337+
'gen_ai.request.model': 'error-model',
338+
},
339+
}),
300340
]),
301341
};
302342

@@ -400,7 +440,7 @@ describe('OpenAI integration', () => {
400440

401441
createEsmAndCjsTests(
402442
__dirname,
403-
'scenario-message-truncation-completions.mjs',
443+
'truncation/scenario-message-truncation-completions.mjs',
404444
'instrument-with-pii.mjs',
405445
(createRunner, test) => {
406446
test('truncates messages when they exceed byte limit - keeps only last message and crops it', async () => {
@@ -436,7 +476,7 @@ describe('OpenAI integration', () => {
436476

437477
createEsmAndCjsTests(
438478
__dirname,
439-
'scenario-message-truncation-responses.mjs',
479+
'truncation/scenario-message-truncation-responses.mjs',
440480
'instrument-with-pii.mjs',
441481
(createRunner, test) => {
442482
test('truncates string inputs when they exceed byte limit', async () => {
@@ -469,4 +509,30 @@ describe('OpenAI integration', () => {
469509
});
470510
},
471511
);
512+
513+
createEsmAndCjsTests(
514+
__dirname,
515+
'truncation/scenario-message-truncation-embeddings.mjs',
516+
'instrument-with-pii.mjs',
517+
(createRunner, test) => {
518+
test('truncates messages when they exceed byte limit - keeps only last message and crops it', async () => {
519+
await createRunner()
520+
.ignore('event')
521+
.expect({
522+
transaction: {
523+
transaction: 'main',
524+
spans: expect.arrayContaining([
525+
expect.objectContaining({
526+
data: expect.objectContaining({
527+
'gen_ai.operation.name': 'embeddings',
528+
}),
529+
}),
530+
]),
531+
},
532+
})
533+
.start()
534+
.completed();
535+
});
536+
},
537+
);
472538
});
dev-packages/node-integration-tests/suites/tracing/openai/truncation/scenario-message-truncation-embeddings.mjs

Lines changed: 66 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,66 @@
1+
import { instrumentOpenAiClient } from '@sentry/core';
2+
import * as Sentry from '@sentry/node';
3+
4+
class MockOpenAI {
5+
constructor(config) {
6+
this.apiKey = config.apiKey;
7+
8+
this.embeddings = {
9+
create: async params => {
10+
await new Promise(resolve => setTimeout(resolve, 10));
11+
12+
return {
13+
object: 'list',
14+
data: [
15+
{
16+
object: 'embedding',
17+
embedding: [0.1, 0.2, 0.3],
18+
index: 0,
19+
},
20+
],
21+
model: params.model,
22+
usage: {
23+
prompt_tokens: 10,
24+
total_tokens: 10,
25+
},
26+
};
27+
},
28+
};
29+
}
30+
}
31+
32+
async function run() {
33+
await Sentry.startSpan({ op: 'function', name: 'main' }, async () => {
34+
const mockClient = new MockOpenAI({
35+
apiKey: 'mock-api-key',
36+
});
37+
38+
const client = instrumentOpenAiClient(mockClient);
39+
40+
// Create 1 large message that gets truncated to fit within the 20KB limit
41+
const largeContent = 'A'.repeat(25000) + 'B'.repeat(25000); // ~50KB gets truncated to include only As
42+
43+
await client.embeddings.create({
44+
input: largeContent,
45+
model: 'text-embedding-3-small',
46+
dimensions: 1536,
47+
encoding_format: 'float',
48+
});
49+
50+
// Create 3 large messages where:
51+
// - First 2 messages are very large (will be dropped)
52+
// - Last message is large but will be truncated to fit within the 20KB limit
53+
const largeContent1 = 'A'.repeat(15000); // ~15KB
54+
const largeContent2 = 'B'.repeat(15000); // ~15KB
55+
const largeContent3 = 'C'.repeat(25000); // ~25KB (will be truncated)
56+
57+
await client.embeddings.create({
58+
input: [largeContent1, largeContent2, largeContent3],
59+
model: 'text-embedding-3-small',
60+
dimensions: 1536,
61+
encoding_format: 'float',
62+
});
63+
});
64+
}
65+
66+
run();

0 commit comments

Comments
 (0)