@@ -219,6 +219,7 @@ def _run_with_conda_pack(
         )
         if os.path.exists(os.path.join(conda_pack_path, "spark-defaults.conf")):
             env_vars["SPARK_CONF_DIR"] = os.path.join(DEFAULT_IMAGE_CONDA_DIR, slug)
+        logger.info(f"Running with conda pack in a container with command {command}")
         return self._activate_conda_env_and_run(
             image, slug, command, bind_volumes, env_vars
         )
@@ -701,7 +702,8 @@ def predict(self) -> None:
         )

         _download_model(
-            oci_auth=self.oci_auth,
+            auth=self.auth_type,
+            profile=self.profile,
             ocid=ocid,
             artifact_directory=artifact_directory,
             region=region,
@@ -724,32 +726,37 @@ def predict(self) -> None:
         # bind_volumes
         bind_volumes = {}
         SCRIPT = "script.py"
+        dir_path = os.path.dirname(os.path.realpath(__file__))
         if not is_in_notebook_session():
             bind_volumes = {
                 os.path.expanduser(
                     os.path.dirname(self.config["execution"]["oci_config"])
                 ): {"bind": os.path.join(DEFAULT_IMAGE_HOME_DIR, ".oci")}
             }
-            dir_path = os.path.dirname(os.path.realpath(__file__))
-
+
         self.config["execution"]["source_folder"] = os.path.abspath(
             os.path.join(dir_path, "..")
         )
         self.config["execution"]["entrypoint"] = SCRIPT
         bind_volumes[artifact_directory] = {"bind": DEFAULT_MODEL_DEPLOYMENT_FOLDER}

-        # payload
+        # extra cmd
         data = self.config["execution"].get("payload")
+        extra_cmd = f"--payload '{data}' " + f"--auth {self.auth_type} "
+        if self.auth_type != "resource_principal":
+            extra_cmd += f"--profile {self.profile} "

         if is_in_notebook_session() or NO_CONTAINER:
-            script_path = os.path.join(self.config['execution']['source_folder'], SCRIPT)
-            run_command(cmd=f"python {script_path} " + f"{artifact_directory} " + f"'{data}'", shell=True)
+            # _run_with_conda_pack also handles the notebook session case,
+            # but it activates the conda pack before running the script.
+            # For deployment we run against the current conda env, so the
+            # notebook case is handled directly here.
+            script_path = os.path.join(os.path.join(dir_path, ".."), SCRIPT)
+            cmd = f"python {script_path} " + f"--artifact-directory {artifact_directory} " + extra_cmd
+            logger.info(f"Running in a notebook or NO_CONTAINER with command {cmd}")
+            run_command(cmd=cmd, shell=True)
         else:
-            extra_cmd = (
-                DEFAULT_MODEL_DEPLOYMENT_FOLDER
-                + " "
-                + data
-            )
+            extra_cmd = f"--artifact-directory {DEFAULT_MODEL_DEPLOYMENT_FOLDER} " + extra_cmd
             exit_code = self._run_with_conda_pack(
                 bind_volumes, extra_cmd, install=True, conda_uri=conda_path
             )
@@ -758,7 +765,7 @@ def predict(self) -> None:
                 f"`predict` did not complete successfully. Exit code: {exit_code}. "
                 f"Run with the --debug argument to view container logs."
             )
-
+
     def _get_conda_info_from_custom_metadata(self, ocid):
         """
         Get conda env info from custom metadata from model catalog.
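
Note: script.py itself is not part of this diff, only the command line that invokes it. The following is a rough sketch of the argument parsing that command implies, using argparse; the flag names mirror the diff above, but the defaults and helper name are assumptions, not the actual implementation.

# Hypothetical sketch of the CLI that script.py would need to expose for the
# command assembled in predict(); the real script is not included in this commit.
import argparse


def _parse_args():
    parser = argparse.ArgumentParser(
        description="Run a local prediction against a downloaded model artifact."
    )
    parser.add_argument("--artifact-directory", required=True,
                        help="Directory holding the downloaded model artifact.")
    parser.add_argument("--payload", required=True,
                        help="Payload string forwarded to the model's predict().")
    parser.add_argument("--auth", default="api_key",
                        help="Auth type, e.g. api_key or resource_principal.")
    parser.add_argument("--profile", default="DEFAULT",
                        help="OCI config profile; only passed when auth is not resource_principal.")
    return parser.parse_args()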