@@ -3205,13 +3205,14 @@ def _generate_response_with_texts(response_id: str, texts: list[str]) -> Generat
 
 @pytest.mark.vcr()
 async def test_google_model_file_search_tool(allow_model_requests: None, google_provider: GoogleProvider):
-    """Integration test for FileSearchTool with Google."""
+    import asyncio
+    import os
+    import tempfile
+
     from pydantic_ai.builtin_tools import FileSearchTool
 
     client = google_provider.client
 
-    import tempfile
-
     with tempfile.NamedTemporaryFile(mode='w', suffix='.txt', delete=False) as f:
         f.write('Paris is the capital of France. The Eiffel Tower is a famous landmark in Paris.')
         test_file_path = f.name
@@ -3226,36 +3227,37 @@ async def test_google_model_file_search_tool(allow_model_requests: None, google_
                 file_search_store_name=store.name, file=f, config={'mime_type': 'text/plain'}
             )
 
-        import asyncio
-
         await asyncio.sleep(3)
 
-        model = GoogleModel('gemini-2.5-pro', provider=google_provider)
-        agent = Agent(model=model, builtin_tools=[FileSearchTool(vector_store_ids=[store.name])])
+        m = GoogleModel('gemini-2.5-pro', provider=google_provider)
+        agent = Agent(m, system_prompt='You are a helpful assistant.', builtin_tools=[FileSearchTool(vector_store_ids=[store.name])])
 
-        result = await agent.run('What is the capital of France according to my files?')
+        result = await agent.run('What is the capital of France?')
+        assert result.all_messages() == snapshot()
 
-        assert 'Paris' in result.output or 'paris' in result.output.lower()
+        messages = result.all_messages()
+        result = await agent.run(user_prompt='Tell me about the Eiffel Tower.', message_history=messages)
+        assert result.new_messages() == snapshot()
 
     finally:
-        import os
-
         os.unlink(test_file_path)
         if store is not None and store.name is not None:
             await client.aio.file_search_stores.delete(name=store.name, config={'force': True})
 
 
 @pytest.mark.vcr()
 async def test_google_model_file_search_tool_stream(allow_model_requests: None, google_provider: GoogleProvider):
-    """Integration test for FileSearchTool streaming with Google."""
+    import asyncio
+    import os
+    import tempfile
+    from typing import Any
+
     from pydantic_ai.builtin_tools import FileSearchTool
 
     client = google_provider.client
 
-    import tempfile
-
     with tempfile.NamedTemporaryFile(mode='w', suffix='.txt', delete=False) as f:
-        f.write('The Louvre Museum is located in Paris, France. It houses the Mona Lisa.')
+        f.write('Paris is the capital of France. The Eiffel Tower is a famous landmark in Paris.')
         test_file_path = f.name
 
     store = None
@@ -3268,25 +3270,26 @@ async def test_google_model_file_search_tool_stream(allow_model_requests: None,
                 file_search_store_name=store.name, file=f, config={'mime_type': 'text/plain'}
             )
 
-        import asyncio
-
         await asyncio.sleep(3)
 
-        model = GoogleModel('gemini-2.5-pro', provider=google_provider)
-        agent = Agent(model=model, builtin_tools=[FileSearchTool(vector_store_ids=[store.name])])
+        m = GoogleModel('gemini-2.5-pro', provider=google_provider)
+        agent = Agent(m, system_prompt='You are a helpful assistant.', builtin_tools=[FileSearchTool(vector_store_ids=[store.name])])
 
-        result_text: list[str] = []
-        async with agent.run_stream('Where is the Louvre Museum according to my files?') as result:
-            async for text in result.stream_text(delta=False):
-                result_text.append(text)
-            output = await result.get_output()
+        event_parts: list[Any] = []
+        async with agent.iter(user_prompt='What is the capital of France?') as agent_run:
+            async for node in agent_run:
+                if Agent.is_model_request_node(node) or Agent.is_call_tools_node(node):
+                    async with node.stream(agent_run.ctx) as request_stream:
+                        async for event in request_stream:
+                            event_parts.append(event)
 
-        assert len(result_text) > 0
-        assert 'Paris' in output or 'France' in output or 'Louvre' in output or 'paris' in output.lower()
+        assert agent_run.result is not None
+        messages = agent_run.result.all_messages()
+        assert messages == snapshot()
 
-    finally:
-        import os
+        assert event_parts == snapshot()
 
+    finally:
         os.unlink(test_file_path)
         if store is not None and store.name is not None:
             await client.aio.file_search_stores.delete(name=store.name, config={'force': True})
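
For readers reassembling the first hunk pair in their head, here is the new two-turn flow from the non-streaming test pulled together as one snippet. This is a minimal sketch, not part of the diff: it assumes the `google_provider` fixture and an already-created, populated file search `store` exactly as set up in the unchanged context between the hunks, and the helper name `file_search_two_turn` is purely illustrative.

from pydantic_ai import Agent
from pydantic_ai.builtin_tools import FileSearchTool
from pydantic_ai.models.google import GoogleModel


async def file_search_two_turn(google_provider, store) -> str:
    # Gemini model with the File Search built-in tool pointed at the populated store.
    m = GoogleModel('gemini-2.5-pro', provider=google_provider)
    agent = Agent(
        m,
        system_prompt='You are a helpful assistant.',
        builtin_tools=[FileSearchTool(vector_store_ids=[store.name])],
    )

    # First turn: the answer should be grounded in the uploaded document.
    result = await agent.run('What is the capital of France?')

    # Second turn: pass the prior messages back in so the follow-up keeps the context.
    messages = result.all_messages()
    result = await agent.run(user_prompt='Tell me about the Eiffel Tower.', message_history=messages)
    return result.output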