
Commit 2d42187

tests: pass newline on the VCR proxy server (#111)
1 parent ffe75b0 commit 2d42187

File tree

4 files changed: +259 −6 lines changed


gateway/test/providers/anthropic.spec.ts

Lines changed: 1 addition & 2 deletions
@@ -59,8 +59,7 @@ describe('anthropic', () => {
       chunks.push(chunk)
     }
 
-    // TODO(Marcelo): This is wrong. We need to fix this!
-    expect(chunks).toMatchInlineSnapshot(`[]`)
+    expect(chunks).toMatchSnapshot('chunks')
     expect(otelBatch, 'otelBatch length not 1').toHaveLength(1)
     expect(JSON.parse(otelBatch[0]!).resourceSpans?.[0].scopeSpans?.[0].spans?.[0]?.attributes).toMatchSnapshot('span')
   })

gateway/test/providers/anthropic.spec.ts.snap

Lines changed: 109 additions & 1 deletion
@@ -1047,6 +1047,78 @@ The population standard deviation is used when your data represents the entire p
 ]
 `;
 
+exports[`anthropic > should call anthropic via gateway with stream > chunks 1`] = `
+[
+  {
+    "message": {
+      "content": [],
+      "id": "msg_01Tce1onxgEPRDM3CRhb6A4i",
+      "model": "claude-opus-4-1-20250805",
+      "role": "assistant",
+      "stop_reason": null,
+      "stop_sequence": null,
+      "type": "message",
+      "usage": {
+        "cache_creation": {
+          "ephemeral_1h_input_tokens": 0,
+          "ephemeral_5m_input_tokens": 0,
+        },
+        "cache_creation_input_tokens": 0,
+        "cache_read_input_tokens": 0,
+        "input_tokens": 14,
+        "output_tokens": 1,
+        "service_tier": "standard",
+      },
+    },
+    "type": "message_start",
+  },
+  {
+    "content_block": {
+      "text": "",
+      "type": "text",
+    },
+    "index": 0,
+    "type": "content_block_start",
+  },
+  {
+    "delta": {
+      "text": "The",
+      "type": "text_delta",
+    },
+    "index": 0,
+    "type": "content_block_delta",
+  },
+  {
+    "delta": {
+      "text": " capital of France is Paris.",
+      "type": "text_delta",
+    },
+    "index": 0,
+    "type": "content_block_delta",
+  },
+  {
+    "index": 0,
+    "type": "content_block_stop",
+  },
+  {
+    "delta": {
+      "stop_reason": "end_turn",
+      "stop_sequence": null,
+    },
+    "type": "message_delta",
+    "usage": {
+      "cache_creation_input_tokens": 0,
+      "cache_read_input_tokens": 0,
+      "input_tokens": 14,
+      "output_tokens": 10,
+    },
+  },
+  {
+    "type": "message_stop",
+  },
+]
+`;
+
 exports[`anthropic > should call anthropic via gateway with stream > span 1`] = `
 [
   {
@@ -1058,7 +1130,7 @@ exports[`anthropic > should call anthropic via gateway with stream > span 1`] =
   {
     "key": "logfire.json_schema",
     "value": {
-      "stringValue": "{"type":"object","properties":{"gen_ai.system":{"type":"string"},"gen_ai.operation.name":{"type":"string"},"gen_ai.request.model":{"type":"string"},"gen_ai.request.max_tokens":{"type":"number"}}}",
+      "stringValue": "{"type":"object","properties":{"gen_ai.system":{"type":"string"},"gen_ai.operation.name":{"type":"string"},"gen_ai.request.model":{"type":"string"},"gen_ai.request.max_tokens":{"type":"number"},"gen_ai.response.model":{"type":"string"},"gen_ai.response.id":{"type":"string"},"gen_ai.usage.input_tokens":{"type":"number"},"gen_ai.usage.cache_read_tokens":{"type":"number"},"gen_ai.usage.cache_write_tokens":{"type":"number"},"gen_ai.usage.output_tokens":{"type":"number"}}}",
     },
   },
   {
@@ -1091,5 +1163,41 @@ exports[`anthropic > should call anthropic via gateway with stream > span 1`] =
       "intValue": 1024,
     },
   },
+  {
+    "key": "gen_ai.response.model",
+    "value": {
+      "stringValue": "claude-opus-4-1-20250805",
+    },
+  },
+  {
+    "key": "gen_ai.response.id",
+    "value": {
+      "stringValue": "msg_01Tce1onxgEPRDM3CRhb6A4i",
+    },
+  },
+  {
+    "key": "gen_ai.usage.input_tokens",
+    "value": {
+      "intValue": 14,
+    },
+  },
+  {
+    "key": "gen_ai.usage.cache_read_tokens",
+    "value": {
+      "intValue": 0,
+    },
+  },
+  {
+    "key": "gen_ai.usage.cache_write_tokens",
+    "value": {
+      "intValue": 0,
+    },
+  },
+  {
+    "key": "gen_ai.usage.output_tokens",
+    "value": {
+      "intValue": 10,
+    },
+  },
 ]
 `;

gateway/test/providers/openai.spec.ts.snap

Lines changed: 148 additions & 2 deletions
@@ -346,7 +346,111 @@ exports[`openai > openai chat > span 1`] = `
 ]
 `;
 
-exports[`openai > openai chat stream > chunks 1`] = `[]`;
+exports[`openai > openai chat stream > chunks 1`] = `
+[
+  {
+    "choices": [
+      {
+        "delta": {
+          "content": "",
+          "refusal": null,
+          "role": "assistant",
+        },
+        "finish_reason": null,
+        "index": 0,
+      },
+    ],
+    "created": 1761823216,
+    "id": "chatcmpl-CWKzQzo2QLXPzzjG5ZhftNDkOKT2q",
+    "model": "gpt-5-2025-08-07",
+    "obfuscation": "znk17e1VnU",
+    "object": "chat.completion.chunk",
+    "service_tier": "default",
+    "system_fingerprint": null,
+    "usage": null,
+  },
+  {
+    "choices": [
+      {
+        "delta": {
+          "content": "Paris",
+        },
+        "finish_reason": null,
+        "index": 0,
+      },
+    ],
+    "created": 1761823216,
+    "id": "chatcmpl-CWKzQzo2QLXPzzjG5ZhftNDkOKT2q",
+    "model": "gpt-5-2025-08-07",
+    "obfuscation": "9uyCALM",
+    "object": "chat.completion.chunk",
+    "service_tier": "default",
+    "system_fingerprint": null,
+    "usage": null,
+  },
+  {
+    "choices": [
+      {
+        "delta": {
+          "content": ".",
+        },
+        "finish_reason": null,
+        "index": 0,
+      },
+    ],
+    "created": 1761823216,
+    "id": "chatcmpl-CWKzQzo2QLXPzzjG5ZhftNDkOKT2q",
+    "model": "gpt-5-2025-08-07",
+    "obfuscation": "DxMvC33A2iV",
+    "object": "chat.completion.chunk",
+    "service_tier": "default",
+    "system_fingerprint": null,
+    "usage": null,
+  },
+  {
+    "choices": [
+      {
+        "delta": {},
+        "finish_reason": "stop",
+        "index": 0,
+      },
+    ],
+    "created": 1761823216,
+    "id": "chatcmpl-CWKzQzo2QLXPzzjG5ZhftNDkOKT2q",
+    "model": "gpt-5-2025-08-07",
+    "obfuscation": "6BmmA2",
+    "object": "chat.completion.chunk",
+    "service_tier": "default",
+    "system_fingerprint": null,
+    "usage": null,
+  },
+  {
+    "choices": [],
+    "created": 1761823216,
+    "id": "chatcmpl-CWKzQzo2QLXPzzjG5ZhftNDkOKT2q",
+    "model": "gpt-5-2025-08-07",
+    "obfuscation": "8u6q1bA9GJu",
+    "object": "chat.completion.chunk",
+    "service_tier": "default",
+    "system_fingerprint": null,
+    "usage": {
+      "completion_tokens": 11,
+      "completion_tokens_details": {
+        "accepted_prediction_tokens": 0,
+        "audio_tokens": 0,
+        "reasoning_tokens": 0,
+        "rejected_prediction_tokens": 0,
+      },
+      "prompt_tokens": 23,
+      "prompt_tokens_details": {
+        "audio_tokens": 0,
+        "cached_tokens": 0,
+      },
+      "total_tokens": 34,
+    },
+  },
+]
+`;
 
 exports[`openai > openai chat stream > span 1`] = `
 [
@@ -359,7 +463,7 @@ exports[`openai > openai chat stream > span 1`] = `
   {
     "key": "logfire.json_schema",
     "value": {
-      "stringValue": "{"type":"object","properties":{"gen_ai.system":{"type":"string"},"gen_ai.operation.name":{"type":"string"},"gen_ai.request.model":{"type":"string"},"gen_ai.request.max_tokens":{"type":"number"}}}",
+      "stringValue": "{"type":"object","properties":{"gen_ai.system":{"type":"string"},"gen_ai.operation.name":{"type":"string"},"gen_ai.request.model":{"type":"string"},"gen_ai.request.max_tokens":{"type":"number"},"gen_ai.response.model":{"type":"string"},"gen_ai.response.id":{"type":"string"},"gen_ai.usage.input_tokens":{"type":"number"},"gen_ai.usage.cache_read_tokens":{"type":"number"},"gen_ai.usage.output_tokens":{"type":"number"},"gen_ai.usage.input_audio_tokens":{"type":"number"},"gen_ai.usage.output_audio_tokens":{"type":"number"}}}",
     },
   },
   {
@@ -392,6 +496,48 @@ exports[`openai > openai chat stream > span 1`] = `
       "intValue": 1024,
     },
   },
+  {
+    "key": "gen_ai.response.model",
+    "value": {
+      "stringValue": "gpt-5-2025-08-07",
+    },
+  },
+  {
+    "key": "gen_ai.response.id",
+    "value": {
+      "stringValue": "chatcmpl-CWKzQzo2QLXPzzjG5ZhftNDkOKT2q",
+    },
+  },
+  {
+    "key": "gen_ai.usage.input_tokens",
+    "value": {
+      "intValue": 23,
+    },
+  },
+  {
+    "key": "gen_ai.usage.cache_read_tokens",
+    "value": {
+      "intValue": 0,
+    },
+  },
+  {
+    "key": "gen_ai.usage.output_tokens",
+    "value": {
+      "intValue": 11,
+    },
+  },
+  {
+    "key": "gen_ai.usage.input_audio_tokens",
+    "value": {
+      "intValue": 0,
+    },
+  },
+  {
+    "key": "gen_ai.usage.output_audio_tokens",
+    "value": {
+      "intValue": 0,
+    },
+  },
 ]
 `;

proxy-vcr/proxy_vcr/main.py

Lines changed: 1 addition & 1 deletion
@@ -89,7 +89,7 @@ async def proxy(request: Request) -> Response:
     if response.headers.get('content-type').startswith('text/event-stream'):
 
         async def generator():
-            async for chunk in response.aiter_lines():
+            async for chunk in response.aiter_bytes():
                 yield chunk
 
         return StreamingResponse(generator(), status_code=response.status_code)
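This one-line change is what the commit title refers to: httpx's aiter_lines() yields decoded lines with their trailing newlines stripped, so the blank line that terminates each server-sent event never reaches the client and the replayed stream cannot be parsed back into events (hence the previously empty chunk snapshots), whereas aiter_bytes() forwards the body verbatim. A minimal sketch of the fixed relay path, assuming an httpx streaming response and Starlette's StreamingResponse as used in proxy_vcr/main.py; the relay_event_stream name is illustrative, not part of the codebase:

from starlette.responses import StreamingResponse


def relay_event_stream(response):
    """Relay an upstream text/event-stream body without altering it.

    `response` is assumed to be an httpx.Response opened in streaming mode.
    """

    async def generator():
        # aiter_bytes() yields the raw body chunks, newlines included, so the
        # "data: ...\n\n" event boundaries survive the round trip.
        # aiter_lines() would strip the line endings and collapse the SSE framing.
        async for chunk in response.aiter_bytes():
            yield chunk

    return StreamingResponse(generator(), status_code=response.status_code)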
