Diff summary: 2 files changed, +33 −1 lines changed.
lines changed Original file line number Diff line number Diff line change @@ -273,7 +273,9 @@ def dump_state(self):
273273 "launch_kwargs" ,
274274 "train_kwargs" ,
275275 ]
276- return {key : getattr (self , key ) for key in state_keys } | self .kwargs
276+ # Exclude api_key from kwargs to prevent API keys from being saved in plain text
277+ filtered_kwargs = {k : v for k , v in self .kwargs .items () if k != "api_key" }
278+ return {key : getattr (self , key ) for key in state_keys } | filtered_kwargs
277279
278280 def _check_truncation (self , results ):
279281 if self .model_type != "responses" and any (c .finish_reason == "length" for c in results ["choices" ]):
Original file line number Diff line number Diff line change 11import json
2+ import tempfile
23import time
34import warnings
5+ from pathlib import Path
46from unittest import mock
57from unittest .mock import patch
68
@@ -604,3 +606,31 @@ def test_responses_api_tool_calls(litellm_test_server):
604606
605607 dspy_responses .assert_called_once ()
606608 assert dspy_responses .call_args .kwargs ["model" ] == "openai/dspy-test-model"
609+
610+
def test_api_key_not_saved_in_json():
    """Verify that an LM's ``api_key`` is never persisted when a program is saved.

    Builds an LM with a dummy API key, attaches it to a Predict module, saves
    the program to JSON, and asserts that the serialized ``lm`` state excludes
    the key while retaining the non-sensitive configuration.
    """
    lm = dspy.LM(
        model="openai/gpt-4o-mini",
        model_type="chat",
        temperature=1.0,
        max_tokens=100,
        api_key="sk-test-api-key-12345",
    )

    predict = dspy.Predict("question -> answer")
    predict.lm = lm

    with tempfile.TemporaryDirectory() as tmpdir:
        json_path = Path(tmpdir) / "program.json"
        predict.save(json_path)

        with open(json_path) as f:
            saved_state = json.load(f)

        # The api_key must never be written to disk in plain text.
        assert "api_key" not in saved_state.get("lm", {}), "API key should not be saved in JSON"

        # Non-sensitive LM configuration should still round-trip.
        assert saved_state["lm"]["model"] == "openai/gpt-4o-mini"
        assert saved_state["lm"]["temperature"] == 1.0
        assert saved_state["lm"]["max_tokens"] == 100
(End of diff — 0 comments.)