@@ -847,7 +847,7 @@ def create_inference_pipeline(
         self,
         project_id: str,
         task_type: TaskType,
-        name: str = "Production",
+        name: str = "production",
         description: Optional[str] = None,
         reference_df: Optional[pd.DataFrame] = None,
         reference_dataset_file_path: Optional[str] = None,
@@ -879,7 +879,7 @@ def create_inference_pipeline(
         except exceptions.OpenlayerResourceNotFound:
             # Validate inference pipeline
             inference_pipeline_config = {
-                "name": name or "Production",
+                "name": name or "production",
                 "description": description or "Monitoring production data.",
                 "storageType": api.STORAGE.value,
             }
@@ -964,7 +964,7 @@ def load_inference_pipeline(
         name: Optional[str] = None,
     ) -> InferencePipeline:
         """Loads an existing inference pipeline from an Openlayer project."""
-        name = name or "Production"
+        name = name or "production"
         endpoint = f"projects/{project_id}/inference-pipelines?name={name}"
         inference_pipeline_data = self.api.get_request(endpoint)
         if len(inference_pipeline_data["items"]) == 0:
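The three hunks above lowercase the fallback inference pipeline name from "Production" to "production" in the default argument, the validated config, and the load-time lookup. A minimal usage sketch of the effect, assuming the client-level methods shown in the diff; the `OpenlayerClient` constructor, the `TaskType` import path, the task-type value, and the `project_id` keyword on `load_inference_pipeline` are illustrative assumptions, not part of this change:

```python
import openlayer
from openlayer.tasks import TaskType  # assumed import path

# Assumed client setup; only the two method names below come from the diff.
client = openlayer.OpenlayerClient("YOUR_API_KEY")

# With no explicit `name`, the pipeline is now created/validated as "production".
pipeline = client.create_inference_pipeline(
    project_id="YOUR_PROJECT_ID",
    task_type=TaskType.TabularClassification,  # assumed task type
)

# Likewise, loading without a `name` now queries
# projects/{project_id}/inference-pipelines?name=production
same_pipeline = client.load_inference_pipeline(
    project_id="YOUR_PROJECT_ID",  # assumed keyword, inferred from the endpoint above
)
```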