@@ -5,7 +5,6 @@
 
 from dsp.utils import dotdict
 from typing import Optional, List, Union
-import openai
 import dspy
 import backoff
 
@@ -19,6 +18,17 @@
         "The pinecone library is required to use PineconeRM. Install it with `pip install dspy-ai[pinecone]`"
     )
 
+import openai
+try:
+    OPENAI_LEGACY = int(openai.version.__version__[0]) == 0
+except Exception:
+    OPENAI_LEGACY = True
+
+try:
+    import openai.error
+    ERRORS = (openai.error.RateLimitError, openai.error.ServiceUnavailableError, openai.error.APIError)
+except Exception:
+    ERRORS = (openai.RateLimitError, openai.APIError)
 
 class PineconeRM(dspy.Retrieve):
     """
@@ -164,7 +174,7 @@ def _mean_pooling(
 
     @backoff.on_exception(
         backoff.expo,
-        (openai.RateLimitError),
+        ERRORS,
         max_time=15,
     )
     def _get_embeddings(
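Since openai>=1.0 no longer ships the openai.error module (and drops ServiceUnavailableError entirely), the retry decorator now takes the version-dependent ERRORS tuple built at import time instead of a hard-coded exception. A minimal standalone sketch of the same pattern, using a hypothetical flaky_call function that is not part of this patch:

import backoff
import openai

# Same fallback as in the patch: openai<1.0 exposes openai.error.*,
# openai>=1.0 moved the exception classes to the top-level module.
try:
    import openai.error
    ERRORS = (openai.error.RateLimitError, openai.error.ServiceUnavailableError, openai.error.APIError)
except Exception:
    ERRORS = (openai.RateLimitError, openai.APIError)

@backoff.on_exception(backoff.expo, ERRORS, max_time=15)
def flaky_call():
    # Hypothetical stand-in for the embedding request: any of the ERRORS
    # raised here is retried with exponential backoff for up to 15 seconds
    # before the exception propagates to the caller.
    ...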
@@ -187,10 +197,15 @@ def _get_embeddings(
             ) from exc
 
         if not self.use_local_model:
-            embedding = openai.embeddings.create(
-                input=queries, model=self._openai_embed_model
-            )
-            return [embedding.embedding for embedding in embedding.data]
+            if OPENAI_LEGACY:
+                embedding = openai.Embedding.create(
+                    input=queries, model=self._openai_embed_model
+                )
+            else:
+                embedding = openai.embeddings.create(
+                    input=queries, model=self._openai_embed_model
+                ).model_dump()
+            return [embedding["embedding"] for embedding in embedding["data"]]
 
         # Use local model
         encoded_input = self._local_tokenizer(queries, padding=True, truncation=True, return_tensors="pt").to(self.device)
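Putting the two branches together, a rough sketch of the resulting embedding path as a free-standing function, with an illustrative model name and helper that are not part of this patch:

import openai

try:
    OPENAI_LEGACY = int(openai.version.__version__[0]) == 0
except Exception:
    OPENAI_LEGACY = True

def embed(queries, model="text-embedding-ada-002"):
    # Returns one embedding vector per query on either openai<1.0 or >=1.0.
    if OPENAI_LEGACY:
        # openai<1.0: resource-style API; the response already supports dict access.
        response = openai.Embedding.create(input=queries, model=model)
    else:
        # openai>=1.0: embeddings namespace returns a pydantic model, so
        # model_dump() normalizes it to the same dict shape as the legacy path.
        response = openai.embeddings.create(input=queries, model=model).model_dump()
    return [item["embedding"] for item in response["data"]]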