@@ -101,7 +101,9 @@ def parse(
         parallel_tool_calls: bool | NotGiven = NOT_GIVEN,
         prediction: Optional[ChatCompletionPredictionContentParam] | NotGiven = NOT_GIVEN,
         presence_penalty: Optional[float] | NotGiven = NOT_GIVEN,
+        prompt_cache_key: str | NotGiven = NOT_GIVEN,
         reasoning_effort: Optional[ReasoningEffort] | NotGiven = NOT_GIVEN,
+        safety_identifier: str | NotGiven = NOT_GIVEN,
         seed: Optional[int] | NotGiven = NOT_GIVEN,
         service_tier: Optional[Literal["auto", "default", "flex", "scale", "priority"]] | NotGiven = NOT_GIVEN,
         stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN,
@@ -197,8 +199,10 @@ def parser(raw_completion: ChatCompletion) -> ParsedChatCompletion[ResponseForma
197199 "parallel_tool_calls" : parallel_tool_calls ,
198200 "prediction" : prediction ,
199201 "presence_penalty" : presence_penalty ,
202+ "prompt_cache_key" : prompt_cache_key ,
200203 "reasoning_effort" : reasoning_effort ,
201204 "response_format" : _type_to_response_format (response_format ),
205+ "safety_identifier" : safety_identifier ,
202206 "seed" : seed ,
203207 "service_tier" : service_tier ,
204208 "stop" : stop ,
@@ -1378,7 +1382,9 @@ def stream(
         parallel_tool_calls: bool | NotGiven = NOT_GIVEN,
         prediction: Optional[ChatCompletionPredictionContentParam] | NotGiven = NOT_GIVEN,
         presence_penalty: Optional[float] | NotGiven = NOT_GIVEN,
+        prompt_cache_key: str | NotGiven = NOT_GIVEN,
         reasoning_effort: Optional[ReasoningEffort] | NotGiven = NOT_GIVEN,
+        safety_identifier: str | NotGiven = NOT_GIVEN,
         seed: Optional[int] | NotGiven = NOT_GIVEN,
         service_tier: Optional[Literal["auto", "default", "flex", "scale", "priority"]] | NotGiven = NOT_GIVEN,
         stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN,
@@ -1445,7 +1451,9 @@ def stream(
             parallel_tool_calls=parallel_tool_calls,
             prediction=prediction,
             presence_penalty=presence_penalty,
+            prompt_cache_key=prompt_cache_key,
             reasoning_effort=reasoning_effort,
+            safety_identifier=safety_identifier,
             seed=seed,
             service_tier=service_tier,
             store=store,
@@ -1514,7 +1522,9 @@ async def parse(
         parallel_tool_calls: bool | NotGiven = NOT_GIVEN,
         prediction: Optional[ChatCompletionPredictionContentParam] | NotGiven = NOT_GIVEN,
         presence_penalty: Optional[float] | NotGiven = NOT_GIVEN,
+        prompt_cache_key: str | NotGiven = NOT_GIVEN,
         reasoning_effort: Optional[ReasoningEffort] | NotGiven = NOT_GIVEN,
+        safety_identifier: str | NotGiven = NOT_GIVEN,
         seed: Optional[int] | NotGiven = NOT_GIVEN,
         service_tier: Optional[Literal["auto", "default", "flex", "scale", "priority"]] | NotGiven = NOT_GIVEN,
         stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN,
@@ -1610,8 +1620,10 @@ def parser(raw_completion: ChatCompletion) -> ParsedChatCompletion[ResponseForma
16101620 "parallel_tool_calls" : parallel_tool_calls ,
16111621 "prediction" : prediction ,
16121622 "presence_penalty" : presence_penalty ,
1623+ "prompt_cache_key" : prompt_cache_key ,
16131624 "reasoning_effort" : reasoning_effort ,
16141625 "response_format" : _type_to_response_format (response_format ),
1626+ "safety_identifier" : safety_identifier ,
16151627 "seed" : seed ,
16161628 "service_tier" : service_tier ,
16171629 "store" : store ,
@@ -2791,7 +2803,9 @@ def stream(
         parallel_tool_calls: bool | NotGiven = NOT_GIVEN,
         prediction: Optional[ChatCompletionPredictionContentParam] | NotGiven = NOT_GIVEN,
         presence_penalty: Optional[float] | NotGiven = NOT_GIVEN,
+        prompt_cache_key: str | NotGiven = NOT_GIVEN,
         reasoning_effort: Optional[ReasoningEffort] | NotGiven = NOT_GIVEN,
+        safety_identifier: str | NotGiven = NOT_GIVEN,
         seed: Optional[int] | NotGiven = NOT_GIVEN,
         service_tier: Optional[Literal["auto", "default", "flex", "scale", "priority"]] | NotGiven = NOT_GIVEN,
         stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN,
@@ -2859,7 +2873,9 @@ def stream(
             parallel_tool_calls=parallel_tool_calls,
             prediction=prediction,
             presence_penalty=presence_penalty,
+            prompt_cache_key=prompt_cache_key,
             reasoning_effort=reasoning_effort,
+            safety_identifier=safety_identifier,
             seed=seed,
             service_tier=service_tier,
             stop=stop,
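
For context, a minimal usage sketch (not part of the diff) of how a caller might pass the two new parameters through the parse() helper, assuming the non-beta client.chat.completions.parse() entry point; the model name, the Pydantic schema, and the key values below are illustrative assumptions.

# Illustrative sketch only; model name, schema, and key values are assumptions.
from pydantic import BaseModel

from openai import OpenAI


class CalendarEvent(BaseModel):
    name: str
    date: str


client = OpenAI()

completion = client.chat.completions.parse(
    model="gpt-4o-2024-08-06",
    messages=[
        {"role": "system", "content": "Extract the event information."},
        {"role": "user", "content": "Alice and Bob are going to a science fair on Friday."},
    ],
    response_format=CalendarEvent,
    # Added by this change: an opaque key used to improve prompt-cache hit
    # rates for requests that share a long common prefix.
    prompt_cache_key="event-extractor-v1",
    # Added by this change: a stable end-user identifier intended to replace
    # the legacy `user` field for abuse monitoring.
    safety_identifier="user-1234",
)

event = completion.choices[0].message.parsed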