@@ -712,13 +712,17 @@ async def chat_completion(request: ChatCompletionRequest):
 
         if type(last_message.content) == str:
             async_interpreter.messages.append(
-                {"role": "user", "type": "message", "content": last_message.content}
+                {
+                    "role": "user",
+                    "type": "message",
+                    "content": str(last_message.content),
+                }
             )
         if type(last_message.content) == list:
             for content in last_message.content:
                 if content["type"] == "text":
                     async_interpreter.messages.append(
-                        {"role": "user", "type": "message", "content": content}
+                        {"role": "user", "type": "message", "content": str(content)}
                     )
                 elif content["type"] == "image_url":
                     if "url" not in content["image_url"]:
@@ -743,6 +747,13 @@ async def chat_completion(request: ChatCompletionRequest):
                         }
                     )
 
+        if os.getenv("INTERPRETER_SERVER_REQUIRE_START", False):
+            if last_message.content != "{START}":
+                return
+            if async_interpreter.messages[-1]["content"] == "{START}":
+                # Remove that {START} message that would have just been added
+                async_interpreter.messages = async_interpreter.messages[:-1]
+
         if request.stream:
             return StreamingResponse(
                 openai_compatible_generator(), media_type="application/x-ndjson"
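
Net effect of the hunks above: user message content is coerced to `str` before being appended to `async_interpreter.messages`, and an opt-in gate is added behind the `INTERPRETER_SERVER_REQUIRE_START` environment variable. With the gate enabled, ordinary requests only accumulate into the interpreter's message history and return early; a request whose content is exactly `{START}` lets the handler proceed, after stripping the sentinel from history. The sketch below shows how a client might interact with such a gated server; the base URL, route, and payload fields are assumptions about a local setup and are not taken from this diff.

```python
# Hypothetical client for a server started with INTERPRETER_SERVER_REQUIRE_START=1.
# The host, port, and OpenAI-compatible route below are assumed, not from the diff.
import requests

URL = "http://localhost:8000/openai/chat/completions"  # assumed local endpoint


def send(text: str) -> requests.Response:
    # Shape mirrors a standard OpenAI-style chat completion request.
    payload = {
        "model": "default",  # placeholder model name
        "stream": False,
        "messages": [{"role": "user", "content": text}],
    }
    return requests.post(URL, json=payload, timeout=60)


# With the gate enabled, this message is appended to the interpreter's history,
# but the endpoint returns early with no completion.
send("Read data.csv and describe its columns.")

# Sending the sentinel unblocks the handler: the literal "{START}" is removed
# from history and generation runs over the messages accumulated so far.
send("{START}")
```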