Commit 977ab53

Refactor OpenAI FileSearchTool tests to match built-in tool pattern
- Add non-streaming test with message history roundtrip
- Add streaming test with event collection
- Snapshot all_messages(), new_messages(), and event_parts
- Follow exact pattern from web search tool tests
1 parent: bc3ac7a

1 file changed

tests/models/test_openai_responses.py

Lines changed: 31 additions & 28 deletions
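
(Commentary, not part of the commit.) The tests keep the @pytest.mark.vcr() marker, so the OpenAI calls are expected to be replayed from recorded cassettes, and the empty snapshot() calls in the diff are placeholders that snapshot tooling such as inline-snapshot fills in on a recording run. A minimal sketch of that assertion style, assuming the inline_snapshot package:

# Hedged sketch, not part of this commit: how an empty snapshot() behaves,
# assuming the suite uses the inline_snapshot package.
from inline_snapshot import snapshot


def test_snapshot_example():
    value = {'capital': 'Paris'}
    # On a run with `pytest --inline-snapshot=create` the empty snapshot()
    # is rewritten in place to snapshot({'capital': 'Paris'}); afterwards
    # it acts as a plain equality assertion.
    assert value == snapshot()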
@@ -7438,16 +7438,17 @@ def get_meaning_of_life() -> int:
 
 @pytest.mark.vcr()
 async def test_openai_responses_model_file_search_tool(allow_model_requests: None, openai_api_key: str):
-    """Integration test for FileSearchTool with OpenAI."""
+    import asyncio
+    import os
+    import tempfile
+
     from openai import AsyncOpenAI
 
     from pydantic_ai.builtin_tools import FileSearchTool
     from pydantic_ai.providers.openai import OpenAIProvider
 
     async_client = AsyncOpenAI(api_key=openai_api_key)
 
-    import tempfile
-
     with tempfile.NamedTemporaryFile(mode='w', suffix='.txt', delete=False) as f:
         f.write('Paris is the capital of France. It is known for the Eiffel Tower.')
         test_file_path = f.name
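
(Commentary, not part of the commit.) The unchanged lines between this hunk and the next, where the temporary file is uploaded, are elided by the diff. For orientation only, a hedged sketch of what that setup typically looks like with the OpenAI Python SDK; the real test's exact calls and purpose value are not shown in this diff and may differ:

# Hypothetical sketch of the elided, unchanged setup (not taken from the diff).
from openai import AsyncOpenAI


async def upload_temp_file(async_client: AsyncOpenAI, test_file_path: str):
    # Upload the temporary file so it can be attached to the vector store
    # created in the next hunk.
    with open(test_file_path, 'rb') as fh:
        return await async_client.files.create(file=fh, purpose='assistants')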
@@ -7461,20 +7462,19 @@ async def test_openai_responses_model_file_search_tool(allow_model_requests: None, openai_api_key: str):
         vector_store = await async_client.vector_stores.create(name='test-file-search')
         await async_client.vector_stores.files.create(vector_store_id=vector_store.id, file_id=file.id)
 
-        import asyncio
-
         await asyncio.sleep(2)
 
-        model = OpenAIResponsesModel('gpt-4o', provider=OpenAIProvider(openai_client=async_client))
-        agent = Agent(model=model, builtin_tools=[FileSearchTool(vector_store_ids=[vector_store.id])])
+        m = OpenAIResponsesModel('gpt-4o', provider=OpenAIProvider(openai_client=async_client))
+        agent = Agent(m, instructions='You are a helpful assistant.', builtin_tools=[FileSearchTool(vector_store_ids=[vector_store.id])])
 
-        result = await agent.run('What is the capital of France according to my files?')
+        result = await agent.run('What is the capital of France?')
+        assert result.all_messages() == snapshot()
 
-        assert 'Paris' in result.output or 'paris' in result.output.lower()
+        messages = result.all_messages()
+        result = await agent.run(user_prompt='Tell me about the Eiffel Tower.', message_history=messages)
+        assert result.new_messages() == snapshot()
 
     finally:
-        import os
-
         os.unlink(test_file_path)
         if file is not None:
             await async_client.files.delete(file.id)
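
(Commentary, not part of the commit.) To make the "message history roundtrip" called out in the commit message easier to see in isolation, here is a minimal sketch of the pattern; TestModel and the function name are illustrative stand-ins for the real OpenAIResponsesModel + FileSearchTool setup, and the empty snapshot() calls would be filled in by the snapshot tooling:

# Illustrative sketch of the non-streaming roundtrip pattern (not from the commit).
# TestModel stands in for the real model and built-in tool configuration.
from inline_snapshot import snapshot

from pydantic_ai import Agent
from pydantic_ai.models.test import TestModel


async def roundtrip_sketch() -> None:
    agent = Agent(TestModel(), instructions='You are a helpful assistant.')

    # First run: snapshot the whole conversation so far.
    first = await agent.run('What is the capital of France?')
    assert first.all_messages() == snapshot()

    # Second run: feed the transcript back in and snapshot only the new turn.
    second = await agent.run(
        user_prompt='Tell me about the Eiffel Tower.',
        message_history=first.all_messages(),
    )
    assert second.new_messages() == snapshot()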
@@ -7485,18 +7485,20 @@ async def test_openai_responses_model_file_search_tool(allow_model_requests: None, openai_api_key: str):
 
 @pytest.mark.vcr()
 async def test_openai_responses_model_file_search_tool_stream(allow_model_requests: None, openai_api_key: str):
-    """Integration test for FileSearchTool streaming with OpenAI."""
+    import asyncio
+    import os
+    import tempfile
+    from typing import Any
+
     from openai import AsyncOpenAI
 
     from pydantic_ai.builtin_tools import FileSearchTool
     from pydantic_ai.providers.openai import OpenAIProvider
 
     async_client = AsyncOpenAI(api_key=openai_api_key)
 
-    import tempfile
-
     with tempfile.NamedTemporaryFile(mode='w', suffix='.txt', delete=False) as f:
-        f.write('The Eiffel Tower is located in Paris, France.')
+        f.write('Paris is the capital of France. It is known for the Eiffel Tower.')
         test_file_path = f.name
 
     file = None
@@ -7508,25 +7510,26 @@ async def test_openai_responses_model_file_search_tool_stream(allow_model_requests: None, openai_api_key: str):
         vector_store = await async_client.vector_stores.create(name='test-file-search-stream')
         await async_client.vector_stores.files.create(vector_store_id=vector_store.id, file_id=file.id)
 
-        import asyncio
-
         await asyncio.sleep(2)
 
-        model = OpenAIResponsesModel('gpt-4o', provider=OpenAIProvider(openai_client=async_client))
-        agent = Agent(model=model, builtin_tools=[FileSearchTool(vector_store_ids=[vector_store.id])])
+        m = OpenAIResponsesModel('gpt-4o', provider=OpenAIProvider(openai_client=async_client))
+        agent = Agent(m, instructions='You are a helpful assistant.', builtin_tools=[FileSearchTool(vector_store_ids=[vector_store.id])])
 
-        result_text: list[str] = []
-        async with agent.run_stream('Where is the Eiffel Tower according to my files?') as result:
-            async for text in result.stream_text(delta=False):
-                result_text.append(text)
-            output = await result.get_output()
+        event_parts: list[Any] = []
+        async with agent.iter(user_prompt='What is the capital of France?') as agent_run:
+            async for node in agent_run:
+                if Agent.is_model_request_node(node) or Agent.is_call_tools_node(node):
+                    async with node.stream(agent_run.ctx) as request_stream:
+                        async for event in request_stream:
+                            event_parts.append(event)
 
-        assert len(result_text) > 0
-        assert 'Paris' in output or 'France' in output or 'paris' in output.lower()
+        assert agent_run.result is not None
+        messages = agent_run.result.all_messages()
+        assert messages == snapshot()
 
-    finally:
-        import os
+        assert event_parts == snapshot()
 
+    finally:
         os.unlink(test_file_path)
         if file is not None:
             await async_client.files.delete(file.id)
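
(Commentary, not part of the commit.) Likewise, a minimal sketch of the streaming event-collection pattern used above; TestModel is again an illustrative stand-in for the real model and built-in tool, and the empty snapshot() calls would be filled in by the snapshot tooling:

# Illustrative sketch of the streaming event-collection pattern (not from the commit).
from typing import Any

from inline_snapshot import snapshot

from pydantic_ai import Agent
from pydantic_ai.models.test import TestModel


async def stream_sketch() -> None:
    agent = Agent(TestModel(), instructions='You are a helpful assistant.')

    event_parts: list[Any] = []
    async with agent.iter(user_prompt='What is the capital of France?') as agent_run:
        async for node in agent_run:
            # Only model-request and tool-call nodes expose a per-request event stream.
            if Agent.is_model_request_node(node) or Agent.is_call_tools_node(node):
                async with node.stream(agent_run.ctx) as request_stream:
                    async for event in request_stream:
                        event_parts.append(event)

    # The final result is available on the run once iteration completes.
    assert agent_run.result is not None
    assert agent_run.result.all_messages() == snapshot()
    assert event_parts == snapshot()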
