@@ -245,17 +245,25 @@ def download_model(models_dir, models, interpreter):
245245 names = [
246246 line .split ()[0 ].replace (":latest" , "" )
247247 for line in lines
248- if line .strip () and not line .startswith ("failed" ) and not line .startswith ("NAME" )
248+ if line .strip ()
249+ and not line .startswith ("failed" )
250+ and not line .startswith ("NAME" )
249251 ] # Extract names, trim out ":latest", skip header
250252
251253 # Models whose name contain one of these keywords will be moved to the front of the list
252- priority_models = ["llama3" ,"codestral" ]
253- priority_models_found = []
254+ priority_models = ["llama3" , "codestral" ]
255+ priority_models_found = []
254256 for word in priority_models :
255- models_to_move = [name for name in names if word .lower () in name .lower ()]
257+ models_to_move = [
258+ name for name in names if word .lower () in name .lower ()
259+ ]
256260 priority_models_found .extend (models_to_move )
257- names = [name for name in names if not any (word .lower () in name .lower () for word in priority_models )]
258- names = priority_models_found + names
261+ names = [
262+ name
263+ for name in names
264+ if not any (word .lower () in name .lower () for word in priority_models )
265+ ]
266+ names = priority_models_found + names
259267
260268 for model in ["llama3.1" , "phi3" , "mistral-nemo" , "gemma2" , "codestral" ]:
261269 if model not in names :
@@ -297,7 +305,6 @@ def download_model(models_dir, models, interpreter):
297305 interpreter .llm .model = f"ollama/{ model } "
298306
299307 # Send a ping, which will actually load the model
300- interpreter .display_message ("Loading model..." )
301308
302309 old_max_tokens = interpreter .llm .max_tokens
303310 old_context_window = interpreter .llm .context_window