 import json
 from collections.abc import AsyncIterator
 from dataclasses import dataclass
-from typing import TYPE_CHECKING, Any, Literal, overload
+from typing import TYPE_CHECKING, Any, Literal, cast, overload
 
 from openai import NOT_GIVEN, APIStatusError, AsyncOpenAI, AsyncStream, NotGiven
 from openai.types import ChatModel
@@ -247,9 +247,12 @@ async def _fetch_response(
         converted_tools = Converter.convert_tools(tools, handoffs)
         response_format = Converter.get_response_format(output_schema)
 
-        include: list[ResponseIncludable] = converted_tools.includes
+        include_set: set[str] = set(converted_tools.includes)
         if model_settings.response_include is not None:
-            include = list({*include, *model_settings.response_include})
+            include_set.update(model_settings.response_include)
+        if model_settings.top_logprobs is not None:
+            include_set.add("message.output_text.logprobs")
+        include = cast(list[ResponseIncludable], list(include_set))
 
         if _debug.DONT_LOG_MODEL_DATA:
             logger.debug("Calling LLM")
@@ -264,6 +267,10 @@ async def _fetch_response(
                 f"Previous response id: {previous_response_id}\n"
             )
 
+        extra_args = dict(model_settings.extra_args or {})
+        if model_settings.top_logprobs is not None:
+            extra_args["top_logprobs"] = model_settings.top_logprobs
+
         return await self._client.responses.create(
             previous_response_id=self._non_null_or_not_given(previous_response_id),
             instructions=self._non_null_or_not_given(system_instructions),
@@ -286,7 +293,7 @@ async def _fetch_response(
             store=self._non_null_or_not_given(model_settings.store),
             reasoning=self._non_null_or_not_given(model_settings.reasoning),
             metadata=self._non_null_or_not_given(model_settings.metadata),
-            **(model_settings.extra_args or {}),
+            **extra_args,
         )
 
     def _get_client(self) -> AsyncOpenAI:
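
For reference, a minimal standalone sketch of the merge logic added above. The stand-in includable values and settings are hypothetical; only the set-based deduplication, the `"message.output_text.logprobs"` entry, and the `extra_args` pass-through are taken from this diff.

```python
# Stand-in inputs (hypothetical values; in the real method these come from
# Converter.convert_tools(...).includes and model_settings):
converted_includes: list[str] = ["file_search_call.results"]
response_include: list[str] | None = ["message.input_image.image_url"]
top_logprobs: int | None = 2

# Same merge behavior as the diff: a set deduplicates overlapping entries, and
# setting top_logprobs implicitly asks the Responses API to include output-text logprobs.
include_set: set[str] = set(converted_includes)
if response_include is not None:
    include_set.update(response_include)
if top_logprobs is not None:
    include_set.add("message.output_text.logprobs")
include = sorted(include_set)  # the real code casts to list[ResponseIncludable]

# top_logprobs itself is forwarded through extra_args so it reaches
# responses.create() as a keyword argument rather than a named parameter.
extra_args: dict[str, object] = {}
if top_logprobs is not None:
    extra_args["top_logprobs"] = top_logprobs

print(include, extra_args)
```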