# Programmatic configuration overrides set via configure().
# None means "not configured": consumers fall back to the corresponding
# OPENLAYER_* environment variables instead.
_configured_api_key: Optional[str] = None
_configured_pipeline_id: Optional[str] = None
_configured_base_url: Optional[str] = None
def configure(
    api_key: Optional[str] = None,
    inference_pipeline_id: Optional[str] = None,
    base_url: Optional[str] = None,
) -> None:
    """Configure the Openlayer tracer with custom settings.

    This function allows you to programmatically set the API key, inference
    pipeline ID, and base URL for the Openlayer client, instead of relying on
    environment variables.

    Args:
        api_key: The Openlayer API key. If not provided, falls back to the
            OPENLAYER_API_KEY environment variable.
        inference_pipeline_id: The default inference pipeline ID to use for
            tracing. If not provided, falls back to the
            OPENLAYER_INFERENCE_PIPELINE_ID environment variable.
        base_url: The base URL for the Openlayer API. If not provided, falls
            back to the OPENLAYER_BASE_URL environment variable or the default.

    Examples:
        >>> import openlayer.lib.tracing.tracer as tracer
        >>> # Configure with API key and pipeline ID
        >>> tracer.configure(api_key="your_api_key_here", inference_pipeline_id="your_pipeline_id_here")
        >>> # Now use the decorators normally
        >>> @tracer.trace()
        >>> def my_function():
        ...     return "result"
    """
    global _configured_api_key, _configured_pipeline_id, _configured_base_url, _client

    _configured_api_key = api_key
    _configured_pipeline_id = inference_pipeline_id
    _configured_base_url = base_url

    # Reset the cached client so it gets recreated with the new configuration
    # the next time it is needed.
    _client = None
@@ -141,101 +130,18 @@ def create_step(
141130 output : Optional [Any ] = None ,
142131 metadata : Optional [Dict [str , Any ]] = None ,
143132 inference_pipeline_id : Optional [str ] = None ,
144- guardrails : Optional [List [Any ]] = None ,
145133) -> Generator [steps .Step , None , None ]:
146- """Starts a trace and yields a Step object with optional guardrails."""
147- # Apply input guardrails if provided
148- original_inputs = inputs
149- guardrail_metadata = {}
150-
151- if guardrails and inputs is not None :
152- try :
153- # Convert inputs to dict format for guardrail processing
154- if not isinstance (inputs , dict ):
155- inputs_dict = {"input" : inputs }
156- else :
157- inputs_dict = inputs
158-
159- modified_inputs_dict , input_guardrail_metadata = _apply_input_guardrails (
160- guardrails ,
161- inputs_dict ,
162- )
163- guardrail_metadata .update (input_guardrail_metadata )
164-
165- # Check if function should be skipped
166- if (
167- hasattr (modified_inputs_dict , "__class__" )
168- and modified_inputs_dict .__class__ .__name__ == "SkipFunctionExecution"
169- ):
170- # For create_step, we'll return None output and skip processing
171- inputs = original_inputs # Keep original for logging
172- else :
173- # Update inputs with guardrail modifications
174- if isinstance (original_inputs , dict ):
175- inputs = modified_inputs_dict
176- else :
177- inputs = modified_inputs_dict .get ("input" , original_inputs )
178-
179- except Exception as e :
180- # Log guardrail errors but don't fail step creation
181- if not hasattr (e , "guardrail_name" ):
182- logger .error ("Error applying guardrails to step creation: %s" , e )
183-
134+ """Starts a trace and yields a Step object."""
184135 new_step , is_root_step , token = _create_and_initialize_step (
185136 step_name = name ,
186137 step_type = step_type ,
187138 inputs = inputs ,
188139 output = output ,
189140 metadata = metadata ,
190141 )
191-
192142 try :
193143 yield new_step
194144 finally :
195- # Apply output guardrails if provided
196- if guardrails and new_step .output is not None :
197- try :
198- final_output , output_guardrail_metadata = _apply_output_guardrails (
199- guardrails ,
200- new_step .output ,
201- inputs if isinstance (inputs , dict ) else {"input" : inputs },
202- )
203- guardrail_metadata .update (output_guardrail_metadata )
204-
205- if final_output != new_step .output :
206- new_step .output = final_output
207-
208- except Exception as e :
209- if not hasattr (e , "guardrail_name" ):
210- logger .error ("Error applying output guardrails to step: %s" , e )
211-
212- # Add guardrail metadata to step
213- if guardrail_metadata :
214- step_metadata = new_step .metadata or {}
215- step_metadata .update (
216- {
217- "guardrails" : guardrail_metadata ,
218- "has_guardrails" : True ,
219- "guardrail_actions" : [
220- metadata .get ("action" )
221- for metadata in guardrail_metadata .values ()
222- ],
223- "guardrail_names" : [
224- key .replace ("input_" , "" ).replace ("output_" , "" )
225- for key in guardrail_metadata .keys ()
226- ],
227- }
228- )
229- # Add action flags
230- actions = step_metadata ["guardrail_actions" ]
231- step_metadata ["guardrail_blocked" ] = "blocked" in actions
232- step_metadata ["guardrail_modified" ] = (
233- "redacted" in actions or "modified" in actions
234- )
235- step_metadata ["guardrail_allowed" ] = "allow" in actions
236-
237- new_step .metadata = step_metadata
238-
239145 if new_step .end_time is None :
240146 new_step .end_time = time .time ()
241147 if new_step .latency is None :
@@ -250,29 +156,11 @@ def create_step(
250156 )
251157
252158
253- def add_chat_completion_step_to_trace (
254- guardrails : Optional [List [Any ]] = None , ** kwargs
255- ) -> None :
256- """Adds a chat completion step to the trace with optional guardrails.
257-
258- Args:
259- guardrails: Optional list of guardrail instances to apply. If None, uses
260- configured global guardrails.
261- **kwargs: Step data including inputs, output, metadata, etc.
262- """
263- # Use provided guardrails or fall back to configured global guardrails
264- effective_guardrails = guardrails or _configured_guardrails
265-
266- # Extract inputs and output for guardrail processing
267- inputs = kwargs .get ("inputs" )
268- output = kwargs .get ("output" )
269-
159+ def add_chat_completion_step_to_trace (** kwargs ) -> None :
160+ """Adds a chat completion step to the trace."""
270161 with create_step (
271162 step_type = enums .StepType .CHAT_COMPLETION ,
272163 name = kwargs .get ("name" , "Chat Completion" ),
273- inputs = inputs ,
274- output = output ,
275- guardrails = effective_guardrails ,
276164 ) as step :
277165 step .log (** kwargs )
278166
@@ -286,12 +174,6 @@ def trace(
286174):
287175 """Decorator to trace a function with optional guardrails.
288176
289- Args:
290- inference_pipeline_id: Optional pipeline ID to override default
291- context_kwarg: Name of function parameter containing context for RAG
292- guardrails: List of guardrail instances to apply to inputs/outputs
293- **step_kwargs: Additional step configuration
294-
295177 Examples
296178 --------
297179
@@ -1192,9 +1074,6 @@ def _apply_input_guardrails(
11921074 Args:
11931075 guardrails: List of guardrail instances
11941076 inputs: Extracted function inputs
1195- func_args: Original function args
1196- func_kwargs: Original function kwargs
1197- func_signature: Function signature
11981077
11991078 Returns:
12001079 Tuple of (modified_inputs, guardrail_metadata)
0 commit comments