@@ -103,7 +103,6 @@ def parse(
 presence_penalty: Optional[float] | NotGiven = NOT_GIVEN,
 prompt_cache_key: str | NotGiven = NOT_GIVEN,
 reasoning_effort: Optional[ReasoningEffort] | NotGiven = NOT_GIVEN,
-text: completion_create_params.Text | NotGiven = NOT_GIVEN,
 safety_identifier: str | NotGiven = NOT_GIVEN,
 seed: Optional[int] | NotGiven = NOT_GIVEN,
 service_tier: Optional[Literal["auto", "default", "flex", "scale", "priority"]] | NotGiven = NOT_GIVEN,
@@ -204,7 +203,6 @@ def parser(raw_completion: ChatCompletion) -> ParsedChatCompletion[ResponseForma
204203 "prompt_cache_key" : prompt_cache_key ,
205204 "reasoning_effort" : reasoning_effort ,
206205 "response_format" : _type_to_response_format (response_format ),
207- "text" : text ,
208206 "safety_identifier" : safety_identifier ,
209207 "seed" : seed ,
210208 "service_tier" : service_tier ,
@@ -267,7 +265,6 @@ def create(
 stream: Optional[Literal[False]] | NotGiven = NOT_GIVEN,
 stream_options: Optional[ChatCompletionStreamOptionsParam] | NotGiven = NOT_GIVEN,
 temperature: Optional[float] | NotGiven = NOT_GIVEN,
-text: completion_create_params.Text | NotGiven = NOT_GIVEN,
 tool_choice: ChatCompletionToolChoiceOptionParam | NotGiven = NOT_GIVEN,
 tools: Iterable[ChatCompletionToolUnionParam] | NotGiven = NOT_GIVEN,
 top_logprobs: Optional[int] | NotGiven = NOT_GIVEN,
@@ -459,7 +456,7 @@ def create(
 our [model distillation](https://platform.openai.com/docs/guides/distillation)
 or [evals](https://platform.openai.com/docs/guides/evals) products.
 
-Supports text and image inputs. Note: image inputs over 10MB will be dropped.
+Supports text and image inputs. Note: image inputs over 8MB will be dropped.
 
 stream: If set to true, the model response data will be streamed to the client as it is
 generated using
@@ -556,7 +553,6 @@ def create(
 store: Optional[bool] | NotGiven = NOT_GIVEN,
 stream_options: Optional[ChatCompletionStreamOptionsParam] | NotGiven = NOT_GIVEN,
 temperature: Optional[float] | NotGiven = NOT_GIVEN,
-text: completion_create_params.Text | NotGiven = NOT_GIVEN,
 tool_choice: ChatCompletionToolChoiceOptionParam | NotGiven = NOT_GIVEN,
 tools: Iterable[ChatCompletionToolUnionParam] | NotGiven = NOT_GIVEN,
 top_logprobs: Optional[int] | NotGiven = NOT_GIVEN,
@@ -757,7 +753,7 @@ def create(
 our [model distillation](https://platform.openai.com/docs/guides/distillation)
 or [evals](https://platform.openai.com/docs/guides/evals) products.
 
-Supports text and image inputs. Note: image inputs over 10MB will be dropped.
+Supports text and image inputs. Note: image inputs over 8MB will be dropped.
 
 stream_options: Options for streaming response. Only set this when you set `stream: true`.
 
@@ -845,7 +841,6 @@ def create(
 store: Optional[bool] | NotGiven = NOT_GIVEN,
 stream_options: Optional[ChatCompletionStreamOptionsParam] | NotGiven = NOT_GIVEN,
 temperature: Optional[float] | NotGiven = NOT_GIVEN,
-text: completion_create_params.Text | NotGiven = NOT_GIVEN,
 tool_choice: ChatCompletionToolChoiceOptionParam | NotGiven = NOT_GIVEN,
 tools: Iterable[ChatCompletionToolUnionParam] | NotGiven = NOT_GIVEN,
 top_logprobs: Optional[int] | NotGiven = NOT_GIVEN,
@@ -1046,7 +1041,7 @@ def create(
 our [model distillation](https://platform.openai.com/docs/guides/distillation)
 or [evals](https://platform.openai.com/docs/guides/evals) products.
 
-Supports text and image inputs. Note: image inputs over 10MB will be dropped.
+Supports text and image inputs. Note: image inputs over 8MB will be dropped.
 
 stream_options: Options for streaming response. Only set this when you set `stream: true`.
 
@@ -1134,7 +1129,6 @@ def create(
 stream: Optional[Literal[False]] | Literal[True] | NotGiven = NOT_GIVEN,
 stream_options: Optional[ChatCompletionStreamOptionsParam] | NotGiven = NOT_GIVEN,
 temperature: Optional[float] | NotGiven = NOT_GIVEN,
-text: completion_create_params.Text | NotGiven = NOT_GIVEN,
 tool_choice: ChatCompletionToolChoiceOptionParam | NotGiven = NOT_GIVEN,
 tools: Iterable[ChatCompletionToolUnionParam] | NotGiven = NOT_GIVEN,
 top_logprobs: Optional[int] | NotGiven = NOT_GIVEN,
@@ -1181,7 +1175,6 @@ def create(
11811175 "stream" : stream ,
11821176 "stream_options" : stream_options ,
11831177 "temperature" : temperature ,
1184- "text" : text ,
11851178 "tool_choice" : tool_choice ,
11861179 "tools" : tools ,
11871180 "top_logprobs" : top_logprobs ,
@@ -1404,7 +1397,6 @@ def stream(
 presence_penalty: Optional[float] | NotGiven = NOT_GIVEN,
 prompt_cache_key: str | NotGiven = NOT_GIVEN,
 reasoning_effort: Optional[ReasoningEffort] | NotGiven = NOT_GIVEN,
-text: completion_create_params.Text | NotGiven = NOT_GIVEN,
 safety_identifier: str | NotGiven = NOT_GIVEN,
 seed: Optional[int] | NotGiven = NOT_GIVEN,
 service_tier: Optional[Literal["auto", "default", "flex", "scale", "priority"]] | NotGiven = NOT_GIVEN,
@@ -1475,7 +1467,6 @@ def stream(
 presence_penalty=presence_penalty,
 prompt_cache_key=prompt_cache_key,
 reasoning_effort=reasoning_effort,
-text=text,
 safety_identifier=safety_identifier,
 seed=seed,
 service_tier=service_tier,
@@ -1548,7 +1539,6 @@ async def parse(
 presence_penalty: Optional[float] | NotGiven = NOT_GIVEN,
 prompt_cache_key: str | NotGiven = NOT_GIVEN,
 reasoning_effort: Optional[ReasoningEffort] | NotGiven = NOT_GIVEN,
-text: completion_create_params.Text | NotGiven = NOT_GIVEN,
 safety_identifier: str | NotGiven = NOT_GIVEN,
 seed: Optional[int] | NotGiven = NOT_GIVEN,
 service_tier: Optional[Literal["auto", "default", "flex", "scale", "priority"]] | NotGiven = NOT_GIVEN,
@@ -1649,7 +1639,6 @@ def parser(raw_completion: ChatCompletion) -> ParsedChatCompletion[ResponseForma
16491639 "prompt_cache_key" : prompt_cache_key ,
16501640 "reasoning_effort" : reasoning_effort ,
16511641 "response_format" : _type_to_response_format (response_format ),
1652- "text" : text ,
16531642 "safety_identifier" : safety_identifier ,
16541643 "seed" : seed ,
16551644 "service_tier" : service_tier ,
@@ -1712,7 +1701,6 @@ async def create(
 stream: Optional[Literal[False]] | NotGiven = NOT_GIVEN,
 stream_options: Optional[ChatCompletionStreamOptionsParam] | NotGiven = NOT_GIVEN,
 temperature: Optional[float] | NotGiven = NOT_GIVEN,
-text: completion_create_params.Text | NotGiven = NOT_GIVEN,
 tool_choice: ChatCompletionToolChoiceOptionParam | NotGiven = NOT_GIVEN,
 tools: Iterable[ChatCompletionToolUnionParam] | NotGiven = NOT_GIVEN,
 top_logprobs: Optional[int] | NotGiven = NOT_GIVEN,
@@ -1904,7 +1892,7 @@ async def create(
 our [model distillation](https://platform.openai.com/docs/guides/distillation)
 or [evals](https://platform.openai.com/docs/guides/evals) products.
 
-Supports text and image inputs. Note: image inputs over 10MB will be dropped.
+Supports text and image inputs. Note: image inputs over 8MB will be dropped.
 
 stream: If set to true, the model response data will be streamed to the client as it is
 generated using
@@ -2001,7 +1989,6 @@ async def create(
 store: Optional[bool] | NotGiven = NOT_GIVEN,
 stream_options: Optional[ChatCompletionStreamOptionsParam] | NotGiven = NOT_GIVEN,
 temperature: Optional[float] | NotGiven = NOT_GIVEN,
-text: completion_create_params.Text | NotGiven = NOT_GIVEN,
 tool_choice: ChatCompletionToolChoiceOptionParam | NotGiven = NOT_GIVEN,
 tools: Iterable[ChatCompletionToolUnionParam] | NotGiven = NOT_GIVEN,
 top_logprobs: Optional[int] | NotGiven = NOT_GIVEN,
@@ -2202,7 +2189,7 @@ async def create(
 our [model distillation](https://platform.openai.com/docs/guides/distillation)
 or [evals](https://platform.openai.com/docs/guides/evals) products.
 
-Supports text and image inputs. Note: image inputs over 10MB will be dropped.
+Supports text and image inputs. Note: image inputs over 8MB will be dropped.
 
 stream_options: Options for streaming response. Only set this when you set `stream: true`.
 
@@ -2290,7 +2277,6 @@ async def create(
 store: Optional[bool] | NotGiven = NOT_GIVEN,
 stream_options: Optional[ChatCompletionStreamOptionsParam] | NotGiven = NOT_GIVEN,
 temperature: Optional[float] | NotGiven = NOT_GIVEN,
-text: completion_create_params.Text | NotGiven = NOT_GIVEN,
 tool_choice: ChatCompletionToolChoiceOptionParam | NotGiven = NOT_GIVEN,
 tools: Iterable[ChatCompletionToolUnionParam] | NotGiven = NOT_GIVEN,
 top_logprobs: Optional[int] | NotGiven = NOT_GIVEN,
@@ -2491,7 +2477,7 @@ async def create(
 our [model distillation](https://platform.openai.com/docs/guides/distillation)
 or [evals](https://platform.openai.com/docs/guides/evals) products.
 
-Supports text and image inputs. Note: image inputs over 10MB will be dropped.
+Supports text and image inputs. Note: image inputs over 8MB will be dropped.
 
 stream_options: Options for streaming response. Only set this when you set `stream: true`.
 
@@ -2579,7 +2565,6 @@ async def create(
 stream: Optional[Literal[False]] | Literal[True] | NotGiven = NOT_GIVEN,
 stream_options: Optional[ChatCompletionStreamOptionsParam] | NotGiven = NOT_GIVEN,
 temperature: Optional[float] | NotGiven = NOT_GIVEN,
-text: completion_create_params.Text | NotGiven = NOT_GIVEN,
 tool_choice: ChatCompletionToolChoiceOptionParam | NotGiven = NOT_GIVEN,
 tools: Iterable[ChatCompletionToolUnionParam] | NotGiven = NOT_GIVEN,
 top_logprobs: Optional[int] | NotGiven = NOT_GIVEN,
@@ -2626,7 +2611,6 @@ async def create(
26262611 "stream" : stream ,
26272612 "stream_options" : stream_options ,
26282613 "temperature" : temperature ,
2629- "text" : text ,
26302614 "tool_choice" : tool_choice ,
26312615 "tools" : tools ,
26322616 "top_logprobs" : top_logprobs ,
@@ -2849,7 +2833,6 @@ def stream(
 presence_penalty: Optional[float] | NotGiven = NOT_GIVEN,
 prompt_cache_key: str | NotGiven = NOT_GIVEN,
 reasoning_effort: Optional[ReasoningEffort] | NotGiven = NOT_GIVEN,
-text: completion_create_params.Text | NotGiven = NOT_GIVEN,
 safety_identifier: str | NotGiven = NOT_GIVEN,
 seed: Optional[int] | NotGiven = NOT_GIVEN,
 service_tier: Optional[Literal["auto", "default", "flex", "scale", "priority"]] | NotGiven = NOT_GIVEN,
@@ -2921,7 +2904,6 @@ def stream(
 presence_penalty=presence_penalty,
 prompt_cache_key=prompt_cache_key,
 reasoning_effort=reasoning_effort,
-text=text,
 safety_identifier=safety_identifier,
 seed=seed,
 service_tier=service_tier,
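
For context on how the affected `parse()` path is typically called once the `text` keyword shown in these hunks is gone: structured output is requested solely through `response_format`, which the wrapper converts via `_type_to_response_format`. A minimal sketch follows; the model name and the `WeatherReport` schema are illustrative assumptions, not taken from this diff.

from pydantic import BaseModel

from openai import OpenAI


class WeatherReport(BaseModel):
    # Hypothetical schema used only to illustrate structured parsing.
    city: str
    temperature_c: float


client = OpenAI()  # reads OPENAI_API_KEY from the environment

# With this change, parse() no longer accepts a `text=` keyword; the target
# schema is passed via `response_format` and parsed into the Pydantic model.
completion = client.chat.completions.parse(
    model="gpt-4o-2024-08-06",  # illustrative model name
    messages=[{"role": "user", "content": "What's the weather in Paris?"}],
    response_format=WeatherReport,
)

print(completion.choices[0].message.parsed)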