
Commit 575bbac

fix(streaming): correct indentation
1 parent b4109c5 commit 575bbac

File tree

1 file changed: +72 −72 lines changed

src/openai/_streaming.py

Lines changed: 72 additions & 72 deletions
@@ -60,42 +60,42 @@ def __stream__(self) -> Iterator[_T]:

Lines 63–98 (removed and re-added at the corrected indentation; statement text unchanged):

                if sse.data.startswith("[DONE]"):
                    break

                # we have to special case the Assistants `thread.` events since we won't have an "event" key in the data
                if sse.event and sse.event.startswith("thread."):
                    data = sse.json()

                    if sse.event == "error" and is_mapping(data) and data.get("error"):
                        message = None
                        error = data.get("error")
                        if is_mapping(error):
                            message = error.get("message")
                        if not message or not isinstance(message, str):
                            message = "An error occurred during streaming"

                        raise APIError(
                            message=message,
                            request=self.response.request,
                            body=data["error"],
                        )

                    yield process_data(data={"data": data, "event": sse.event}, cast_to=cast_to, response=response)
                else:
                    data = sse.json()
                    if is_mapping(data) and data.get("error"):
                        message = None
                        error = data.get("error")
                        if is_mapping(error):
                            message = error.get("message")
                        if not message or not isinstance(message, str):
                            message = "An error occurred during streaming"

                        raise APIError(
                            message=message,
                            request=self.response.request,
                            body=data["error"],
                        )

                    yield process_data(data=data, cast_to=cast_to, response=response)

        finally:
            # Ensure the response is closed even if the consumer doesn't read all data
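
Both hunks implement the same dispatch: Assistants `thread.*` events are wrapped so the event name travels with the payload, everything else is passed through, and an `error` payload is raised instead of being yielded. Below is a minimal, self-contained sketch of that control flow; the `FakeSSE` stand-in, the `dispatch` helper, and the use of `RuntimeError` are illustrative only, while the SDK itself uses its internal `ServerSentEvent`, `is_mapping`, `process_data`, and `APIError`.

import json
from typing import Any, Iterator, Optional


class FakeSSE:
    """Stand-in for the SDK's ServerSentEvent; only what the dispatch needs."""

    def __init__(self, data: str, event: Optional[str] = None) -> None:
        self.data = data
        self.event = event

    def json(self) -> Any:
        return json.loads(self.data)


def is_mapping(obj: Any) -> bool:
    # the real helper lives in the SDK's internals; a dict check is enough here
    return isinstance(obj, dict)


def dispatch(events: Iterator[FakeSSE]) -> Iterator[Any]:
    """Condensed mirror of the hunk: wrap `thread.*` events, pass plain data
    through, and raise instead of yielding when the payload carries an error."""
    for sse in events:
        if sse.data.startswith("[DONE]"):
            break

        data = sse.json()
        if is_mapping(data) and data.get("error"):
            error = data["error"]
            message = error.get("message") if is_mapping(error) else None
            if not message or not isinstance(message, str):
                message = "An error occurred during streaming"
            raise RuntimeError(message)  # the SDK raises APIError here

        if sse.event and sse.event.startswith("thread."):
            # thread events keep their event name alongside the data
            yield {"data": data, "event": sse.event}
        else:
            yield data


# thread events are wrapped; plain chunks are yielded as-is
events = [
    FakeSSE('{"id": "msg_1"}', event="thread.message.delta"),
    FakeSSE('{"id": "chunk_1"}'),
    FakeSSE("[DONE]"),
]
assert list(dispatch(iter(events))) == [
    {"data": {"id": "msg_1"}, "event": "thread.message.delta"},
    {"id": "chunk_1"},
]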
@@ -163,42 +163,42 @@ async def __stream__(self) -> AsyncIterator[_T]:

Lines 166–201 (removed and re-added at the corrected indentation; statement text unchanged):

                if sse.data.startswith("[DONE]"):
                    break

                # we have to special case the Assistants `thread.` events since we won't have an "event" key in the data
                if sse.event and sse.event.startswith("thread."):
                    data = sse.json()

                    if sse.event == "error" and is_mapping(data) and data.get("error"):
                        message = None
                        error = data.get("error")
                        if is_mapping(error):
                            message = error.get("message")
                        if not message or not isinstance(message, str):
                            message = "An error occurred during streaming"

                        raise APIError(
                            message=message,
                            request=self.response.request,
                            body=data["error"],
                        )

                    yield process_data(data={"data": data, "event": sse.event}, cast_to=cast_to, response=response)
                else:
                    data = sse.json()
                    if is_mapping(data) and data.get("error"):
                        message = None
                        error = data.get("error")
                        if is_mapping(error):
                            message = error.get("message")
                        if not message or not isinstance(message, str):
                            message = "An error occurred during streaming"

                        raise APIError(
                            message=message,
                            request=self.response.request,
                            body=data["error"],
                        )

                    yield process_data(data=data, cast_to=cast_to, response=response)

        finally:
            # Ensure the response is closed even if the consumer doesn't read all data
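
The async hunk mirrors the sync one and is what drives `async for` over a streamed response. A hedged consumer-side sketch follows; the model name and prompt are placeholders, and any `error` payload in the stream surfaces as `APIError` through the code above rather than as a yielded chunk.

import asyncio

from openai import AsyncOpenAI


async def main() -> None:
    client = AsyncOpenAI()  # reads OPENAI_API_KEY from the environment

    stream = await client.chat.completions.create(
        model="gpt-4o-mini",  # placeholder model name
        messages=[{"role": "user", "content": "Say hello"}],
        stream=True,
    )

    # each chunk has already passed through AsyncStream.__stream__ shown above
    async for chunk in stream:
        if chunk.choices and chunk.choices[0].delta.content:
            print(chunk.choices[0].delta.content, end="", flush=True)
    print()


asyncio.run(main())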

0 commit comments
