1 file changed, 10 insertions(+), 3 deletions(-)

@@ -9,6 +9,7 @@
 litellm.REPEATED_STREAMING_CHUNK_LIMIT = 99999999
 
 import json
+import logging
 import subprocess
 import time
 import uuid
@@ -25,10 +26,9 @@
 from .run_tool_calling_llm import run_tool_calling_llm
 from .utils.convert_to_openai_messages import convert_to_openai_messages
 
-import logging
-
 # Create or get the logger
-logger = logging.getLogger('LiteLLM')
+logger = logging.getLogger("LiteLLM")
+
 
 class SuppressDebugFilter(logging.Filter):
     def filter(self, record):
@@ -37,6 +37,7 @@ def filter(self, record):
             return False  # Suppress this log message
         return True  # Allow all other messages
 
+
 class Llm:
     """
     A stateless LMC-style LLM with some helpful properties.
@@ -265,6 +266,12 @@ def run(self, messages):
 
             pass
 
+        # If there should be a system message, there should be a system message!
+        # Empty system messages appear to be deleted :(
+        if system_message == "":
+            if messages[0]["role"] != "system":
+                messages = [{"role": "system", "content": system_message}] + messages
+
         ## Start forming the request
 
         params = {
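
For context, here is a minimal, self-contained sketch of the guard this last hunk introduces inside Llm.run(). The helper name ensure_system_message and the sample message list are hypothetical, used only for illustration: when the resolved system message is empty and the conversation does not already begin with a system message, an explicit (empty) system message is prepended so that providers which silently drop empty system messages still receive the expected leading entry.

# Hypothetical standalone sketch of the behavior added in the hunk above;
# in the patch itself the check runs inline inside Llm.run().

def ensure_system_message(messages, system_message=""):
    """Prepend an explicit system message when it would otherwise be missing."""
    if system_message == "" and messages[0]["role"] != "system":
        return [{"role": "system", "content": system_message}] + messages
    return messages

if __name__ == "__main__":
    msgs = [{"role": "user", "content": "Hello"}]
    print(ensure_system_message(msgs))
    # [{'role': 'system', 'content': ''}, {'role': 'user', 'content': 'Hello'}]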