Commit ed56d26

clean up

1 parent 8689714 commit ed56d26

1 file changed: 0 additions, 6 deletions


src/embeddedllm/entrypoints/chat_server.py

Lines changed: 0 additions & 6 deletions
@@ -247,12 +247,10 @@ async def create_chat_completion(
         )
         # Streaming response
         if request.stream:
-            logger.error("stream: " + str(request.stream))
             return self.chat_completion_stream_generator(
                 request, result_generator, request_id, conversation
             )
         else:
-            # raise NotImplementedError("Not Yet Implemented Error")
             try:
                 return await self.chat_completion_full_generator(
                     request, raw_request, result_generator, request_id, conversation
@@ -303,8 +301,6 @@ async def chat_completion_stream_generator(
                 model=model_name,
             )
 
-            # logger.debug("chunk: "+ str(chunk))
-
             if request.stream_options and request.stream_options.include_usage:
                 chunk.usage = None
             data = chunk.model_dump_json(exclude_unset=True)
@@ -400,7 +396,6 @@ async def chat_completion_stream_generator(
                     chunk.usage = None
                 data = chunk.model_dump_json(exclude_unset=True)
                 yield f"data: {data}\n\n"
-                # logger.debug("chunk: "+ str(chunk))
                 finish_reason_sent[i] = True
 
         if request.stream_options and request.stream_options.include_usage:
@@ -422,7 +417,6 @@ async def chat_completion_stream_generator(
                 exclude_unset=True, exclude_none=True
             )
             yield f"data: {final_usage_data}\n\n"
-            # logger.debug("final_usage_data: "+ str(final_usage_data))
 
     except ValueError as e:
         # TODO: Use a vllm-specific Validation Error
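
For context, the code this commit touches follows the standard OpenAI-style dispatch: if request.stream is set, the handler returns a server-sent-events (SSE) generator that yields one "data: ...\n\n" frame per chunk; otherwise it awaits a single full response. Below is a minimal illustrative sketch of that pattern, not the project's actual implementation: it assumes FastAPI and plain json in place of EmbeddedLLM's real response models, and ChatRequest, fake_result_generator, and stream_chunks are hypothetical names introduced here for the example.

import json
from typing import AsyncGenerator

from fastapi import FastAPI, Request
from fastapi.responses import JSONResponse, StreamingResponse
from pydantic import BaseModel

app = FastAPI()


class ChatRequest(BaseModel):
    # Hypothetical, flattened stand-in for the fields the diff branches on:
    # request.stream and request.stream_options.include_usage.
    prompt: str
    stream: bool = False
    include_usage: bool = False


async def fake_result_generator(prompt: str) -> AsyncGenerator[str, None]:
    # Hypothetical stand-in for the engine's token stream.
    for token in ("Hello", ", ", "world"):
        yield token


async def stream_chunks(request: ChatRequest) -> AsyncGenerator[str, None]:
    # Serialize each chunk and frame it as a server-sent event, matching
    # the yield f"data: {data}\n\n" pattern in chat_completion_stream_generator.
    async for token in fake_result_generator(request.prompt):
        data = json.dumps({"delta": token, "usage": None})
        yield f"data: {data}\n\n"
    if request.include_usage:
        # Mirrors the final usage-only chunk sent after all content chunks.
        final_usage_data = json.dumps({"delta": None, "usage": {"total_tokens": 3}})
        yield f"data: {final_usage_data}\n\n"
    yield "data: [DONE]\n\n"


@app.post("/v1/chat/completions")
async def create_chat_completion(request: ChatRequest, raw_request: Request):
    # Same dispatch as the first hunk: stream -> SSE generator,
    # otherwise await the full (non-streaming) response.
    if request.stream:
        return StreamingResponse(stream_chunks(request), media_type="text/event-stream")
    text = "".join([token async for token in fake_result_generator(request.prompt)])
    return JSONResponse({"text": text})

The chunk.usage = None assignments and the final usage chunk in the real generator follow the OpenAI stream_options contract: when include_usage is set, every content chunk carries usage as null and a single usage-only chunk is emitted at the end of the stream. Serializing with model_dump_json(exclude_unset=True), as in the diff, keeps unset fields out of each SSE frame.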
