Skip to content

Commit 2cd078a

Browse files
Merge branch 'main' into ODSC-51216/cleanup_summary_status_updates
2 parents 3a0b38f + 37e116e commit 2cd078a

File tree

12 files changed

+204
-183
lines changed

12 files changed

+204
-183
lines changed

.pre-commit-config.yaml

Lines changed: 51 additions & 51 deletions
Original file line numberDiff line numberDiff line change
@@ -1,52 +1,52 @@
11
repos:
2-
# Standard hooks
3-
- repo: https://github.com/pre-commit/pre-commit-hooks
4-
rev: v4.4.0
5-
hooks:
6-
- id: check-ast
7-
exclude: ^docs/
8-
- id: check-docstring-first
9-
exclude: ^(docs/|tests/)
10-
- id: check-json
11-
- id: check-merge-conflict
12-
- id: check-yaml
13-
exclude: feature_store_*.yaml
14-
args: ['--allow-multiple-documents']
15-
- id: detect-private-key
16-
- id: end-of-file-fixer
17-
exclude: '\.ipynb?$'
18-
- id: pretty-format-json
19-
args: ['--autofix']
20-
- id: trailing-whitespace
21-
args: [--markdown-linebreak-ext=md]
22-
exclude: ^docs/
23-
# Black, the code formatter, natively supports pre-commit
24-
- repo: https://github.com/psf/black
25-
rev: 23.3.0
26-
hooks:
27-
- id: black
28-
exclude: ^docs/
29-
# Regex based rst files common mistakes detector
30-
- repo: https://github.com/pre-commit/pygrep-hooks
31-
rev: v1.10.0
32-
hooks:
33-
- id: rst-backticks
34-
files: ^docs/
35-
- id: rst-inline-touching-normal
36-
files: ^docs/
37-
# Hardcoded secrets and ocids detector
38-
- repo: https://github.com/gitleaks/gitleaks
39-
rev: v8.17.0
40-
hooks:
41-
- id: gitleaks
42-
exclude: .github/workflows/reusable-actions/set-dummy-conf.yml
43-
# Oracle copyright checker
44-
- repo: https://github.com/oracle-samples/oci-data-science-ai-samples/
45-
rev: cbe0136f7aaffe463b31ddf3f34b0e16b4b124ff
46-
hooks:
47-
- id: check-copyright
48-
name: check-copyright
49-
entry: .pre-commit-scripts/check-copyright.py
50-
language: script
51-
types_or: ['python', 'shell', 'bash']
52-
exclude: ^docs/
2+
# Standard hooks
3+
- repo: https://github.com/pre-commit/pre-commit-hooks
4+
rev: v4.4.0
5+
hooks:
6+
- id: check-ast
7+
exclude: ^docs/
8+
- id: check-docstring-first
9+
exclude: ^(docs/|tests/)
10+
- id: check-json
11+
- id: check-merge-conflict
12+
- id: check-yaml
13+
exclude: feature_store_*.yaml
14+
args: ["--allow-multiple-documents"]
15+
- id: detect-private-key
16+
- id: end-of-file-fixer
17+
exclude: '\.ipynb?$'
18+
- id: pretty-format-json
19+
args: ["--autofix"]
20+
- id: trailing-whitespace
21+
args: [--markdown-linebreak-ext=md]
22+
exclude: ^docs/
23+
# Black, the code formatter, natively supports pre-commit
24+
- repo: https://github.com/psf/black
25+
rev: 23.3.0
26+
hooks:
27+
- id: black
28+
exclude: ^docs/
29+
# Regex based rst files common mistakes detector
30+
- repo: https://github.com/pre-commit/pygrep-hooks
31+
rev: v1.10.0
32+
hooks:
33+
- id: rst-backticks
34+
files: ^docs/
35+
- id: rst-inline-touching-normal
36+
files: ^docs/
37+
# Hardcoded secrets and ocids detector
38+
- repo: https://github.com/gitleaks/gitleaks
39+
rev: v8.17.0
40+
hooks:
41+
- id: gitleaks
42+
exclude: .github/workflows/reusable-actions/set-dummy-conf.yml
43+
# Oracle copyright checker
44+
- repo: https://github.com/oracle-samples/oci-data-science-ai-samples/
45+
rev: 1bc5270a443b791c62f634233c0f4966dfcc0dd6
46+
hooks:
47+
- id: check-copyright
48+
name: check-copyright
49+
entry: .pre-commit-scripts/check-copyright.py
50+
language: script
51+
types_or: ["python", "shell", "bash"]
52+
exclude: ^docs/

ads/common/dsc_file_system.py

Lines changed: 0 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -265,8 +265,6 @@ def update_from_dsc_model(cls, dsc_model) -> dict:
265265

266266
class DSCFileSystemManager:
267267

268-
storage_mount_dest = set()
269-
270268
@classmethod
271269
def initialize(cls, arguments: dict) -> dict:
272270
"""Initialize and update arguments to dsc model.
@@ -286,12 +284,6 @@ def initialize(cls, arguments: dict) -> dict:
286284
"Parameter `dest` is required for mounting file storage system."
287285
)
288286

289-
if arguments["dest"] in cls.storage_mount_dest:
290-
raise ValueError(
291-
"Duplicate `dest` found. Please specify different `dest` for each file system to be mounted."
292-
)
293-
cls.storage_mount_dest.add(arguments["dest"])
294-
295287
# case oci://bucket@namespace/prefix
296288
if arguments["src"].startswith("oci://") and "@" in arguments["src"]:
297289
return OCIObjectStorage(**arguments).update_to_dsc_model()

ads/jobs/builders/runtimes/container_runtime.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -23,7 +23,7 @@ class ContainerRuntime(Runtime):
2323
>>> ContainerRuntime()
2424
>>> .with_image(
2525
>>> "iad.ocir.io/<your_tenancy>/<your_image>",
26-
>>> entrypoint=["/bin/sh", -c],
26+
>>> entrypoint=["/bin/sh", "-c"],
2727
>>> cmd="sleep 5 && echo Hello World",
2828
>>> )
2929
>>> .with_environment_variable(MY_ENV="MY_VALUE")

ads/llm/chain.py

Lines changed: 2 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -261,12 +261,8 @@ def load(cls, chain_dict: dict, **kwargs) -> "GuardrailSequence":
261261
from ads.llm.serialize import load
262262

263263
chain_spec = chain_dict[SPEC_CHAIN]
264-
chain = cls()
265-
for config in chain_spec:
266-
step = load(config, **kwargs)
267-
# Chain the step
268-
chain |= step
269-
return chain
264+
steps = [load(config, **kwargs) for config in chain_spec]
265+
return cls(*steps)
270266

271267
def __str__(self) -> str:
272268
return "\n".join([str(step.__class__) for step in self.steps])

ads/model/artifact.py

Lines changed: 20 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -30,21 +30,21 @@
3030
ADS_VERSION = __version__
3131

3232

33-
class ArtifactNestedFolderError(Exception): # pragma: no cover
33+
class ArtifactNestedFolderError(Exception): # pragma: no cover
3434
def __init__(self, folder: str):
3535
self.folder = folder
3636
super().__init__("The required artifact files placed in a nested folder.")
3737

3838

39-
class ArtifactRequiredFilesError(Exception): # pragma: no cover
39+
class ArtifactRequiredFilesError(Exception): # pragma: no cover
4040
def __init__(self, required_files: Tuple[str]):
4141
super().__init__(
4242
"Not all required files presented in artifact folder. "
4343
f"Required files for conda runtime: {required_files}. If you are using container runtime, set `ignore_conda_error=True`."
4444
)
4545

4646

47-
class AritfactFolderStructureError(Exception): # pragma: no cover
47+
class AritfactFolderStructureError(Exception): # pragma: no cover
4848
def __init__(self, required_files: Tuple[str]):
4949
super().__init__(
5050
"The artifact folder has a wrong structure. "
@@ -171,6 +171,7 @@ def __init__(
171171
self.ignore_conda_error = ignore_conda_error
172172
self.model = None
173173
self.auth = auth or authutil.default_signer()
174+
174175
if reload and not ignore_conda_error:
175176
self.reload()
176177
# Extracts the model_file_name from the score.py.
@@ -272,8 +273,9 @@ def prepare_runtime_yaml(
272273
or runtime_info.model_deployment.inference_conda_env.inference_python_version.strip()
273274
== ""
274275
):
275-
warnings.warn(
276-
"Cannot automatically detect the inference python version. `inference_python_version` must be provided."
276+
raise ValueError(
277+
"Cannot automatically detect the inference python version. "
278+
"`inference_python_version` must be provided."
277279
)
278280
runtime_file_path = os.path.join(self.artifact_dir, "runtime.yaml")
279281
if os.path.exists(runtime_file_path) and not force_overwrite:
@@ -416,6 +418,7 @@ def from_uri(
416418
force_overwrite: Optional[bool] = False,
417419
auth: Optional[Dict] = None,
418420
ignore_conda_error: Optional[bool] = False,
421+
reload: Optional[bool] = False,
419422
):
420423
"""Constructs a ModelArtifact object from the existing model artifacts.
421424
@@ -426,16 +429,20 @@ def from_uri(
426429
OCI object storage URI.
427430
artifact_dir: str
428431
The local artifact folder to store the files needed for deployment.
429-
model_file_name: (str, optional). Defaults to `None`
430-
The file name of the serialized model.
431-
force_overwrite: (bool, optional). Defaults to False.
432-
Whether to overwrite existing files or not.
433432
auth: (Dict, optional). Defaults to None.
434433
The default authetication is set using `ads.set_auth` API.
435434
If you need to override the default, use the `ads.common.auth.api_keys`
436435
or `ads.common.auth.resource_principal` to create appropriate
437436
authentication signer and kwargs required to instantiate
438437
IdentityClient object.
438+
force_overwrite: (bool, optional). Defaults to False.
439+
Whether to overwrite existing files or not.
440+
ignore_conda_error: (bool, optional). Defaults to False.
441+
Parameter to ignore error when collecting conda information.
442+
model_file_name: (str, optional). Defaults to `None`
443+
The file name of the serialized model.
444+
reload: (bool, optional). Defaults to False.
445+
Whether to reload the Model into the environment.
439446
440447
Returns
441448
-------
@@ -492,6 +499,8 @@ def from_uri(
492499
utils.copy_from_uri(
493500
uri=temp_dir, to_path=to_path, force_overwrite=True
494501
)
502+
except ArtifactRequiredFilesError as ex:
503+
logger.warning(ex)
495504

496505
if ObjectStorageDetails.is_oci_path(artifact_dir):
497506
for root, dirs, files in os.walk(to_path):
@@ -507,10 +516,10 @@ def from_uri(
507516

508517
return cls(
509518
artifact_dir=artifact_dir,
510-
model_file_name=model_file_name,
511-
reload=True,
512519
ignore_conda_error=ignore_conda_error,
513520
local_copy_dir=to_path,
521+
model_file_name=model_file_name,
522+
reload=reload,
514523
)
515524

516525
def __getattr__(self, item):

ads/model/generic_model.py

Lines changed: 35 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -975,17 +975,20 @@ def prepare(
975975
auth=self.auth,
976976
local_copy_dir=self.local_copy_dir,
977977
)
978-
self.runtime_info = self.model_artifact.prepare_runtime_yaml(
979-
inference_conda_env=self.properties.inference_conda_env,
980-
inference_python_version=self.properties.inference_python_version,
981-
training_conda_env=self.properties.training_conda_env,
982-
training_python_version=self.properties.training_python_version,
983-
force_overwrite=force_overwrite,
984-
namespace=namespace,
985-
bucketname=DEFAULT_CONDA_BUCKET_NAME,
986-
auth=self.auth,
987-
ignore_conda_error=self.ignore_conda_error,
988-
)
978+
try:
979+
self.runtime_info = self.model_artifact.prepare_runtime_yaml(
980+
inference_conda_env=self.properties.inference_conda_env,
981+
inference_python_version=self.properties.inference_python_version,
982+
training_conda_env=self.properties.training_conda_env,
983+
training_python_version=self.properties.training_python_version,
984+
force_overwrite=force_overwrite,
985+
namespace=namespace,
986+
bucketname=DEFAULT_CONDA_BUCKET_NAME,
987+
auth=self.auth,
988+
ignore_conda_error=self.ignore_conda_error,
989+
)
990+
except ValueError as e:
991+
raise e
989992

990993
self._summary_status.update_status(
991994
detail="Generated runtime.yaml", status=ModelState.DONE.value
@@ -1361,13 +1364,15 @@ def from_model_artifact(
13611364
properties.with_dict(local_vars)
13621365
auth = auth or authutil.default_signer()
13631366
artifact_dir = _prepare_artifact_dir(artifact_dir)
1367+
reload = kwargs.pop("reload", False)
13641368
model_artifact = ModelArtifact.from_uri(
13651369
uri=uri,
13661370
artifact_dir=artifact_dir,
1367-
model_file_name=model_file_name,
1368-
force_overwrite=force_overwrite,
13691371
auth=auth,
1372+
force_overwrite=force_overwrite,
13701373
ignore_conda_error=ignore_conda_error,
1374+
model_file_name=model_file_name,
1375+
reload=reload,
13711376
)
13721377
model = cls(
13731378
estimator=model_artifact.model,
@@ -1380,22 +1385,33 @@ def from_model_artifact(
13801385
model.local_copy_dir = model_artifact.local_copy_dir
13811386
model.model_artifact = model_artifact
13821387
model.ignore_conda_error = ignore_conda_error
1383-
model.reload_runtime_info()
1388+
1389+
if reload:
1390+
model.reload_runtime_info()
1391+
model._summary_status.update_action(
1392+
detail="Populated metadata(Custom, Taxonomy and Provenance)",
1393+
action="Call .populate_metadata() to populate metadata.",
1394+
)
1395+
13841396
model._summary_status.update_status(
13851397
detail="Generated score.py",
1386-
status=ModelState.DONE.value,
1398+
status=ModelState.NOTAPPLICABLE.value,
13871399
)
13881400
model._summary_status.update_status(
13891401
detail="Generated runtime.yaml",
1390-
status=ModelState.DONE.value,
1402+
status=ModelState.NOTAPPLICABLE.value,
13911403
)
13921404
model._summary_status.update_status(
1393-
detail="Serialized model", status=ModelState.DONE.value
1405+
detail="Serialized model",
1406+
status=ModelState.NOTAPPLICABLE.value,
13941407
)
1395-
model._summary_status.update_action(
1408+
model._summary_status.update_status(
13961409
detail="Populated metadata(Custom, Taxonomy and Provenance)",
1397-
action=f"Call .populate_metadata() to populate metadata.",
1410+
status=ModelState.AVAILABLE.value
1411+
if reload
1412+
else ModelState.NOTAPPLICABLE.value,
13981413
)
1414+
13991415
return model
14001416

14011417
def download_artifact(

ads/model/runtime/env_info.py

Lines changed: 8 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -106,7 +106,7 @@ def from_slug(
106106
env_path, python_version = service_pack_slug_mapping[env_slug]
107107
else:
108108
warnings.warn(
109-
"The {env_slug} is not a service pack. Use `from_path` method by passing in the object storage path."
109+
f"The {env_slug} is not a service pack. Use `from_path` method by passing in the object storage path."
110110
)
111111

112112
return cls._populate_env_info(
@@ -157,9 +157,13 @@ def from_path(cls, env_path: str, auth: dict = None) -> "EnvInfo":
157157
).fetch_metadata_of_object()
158158
python_version = metadata_json.get("python", None)
159159
env_slug = metadata_json.get("slug", None)
160+
if not python_version:
161+
raise ValueError(
162+
f"The manifest metadata of {env_path} doesn't contains inforamtion for python version."
163+
)
160164
except Exception as e:
161-
logging.warning(e)
162-
logging.warning(
165+
logging.debug(e)
166+
logging.debug(
163167
"python version and slug are not found from the manifest metadata."
164168
)
165169

@@ -227,7 +231,7 @@ def _populate_env_info(
227231
)
228232

229233
@classmethod
230-
def _validate_dict(cls,obj_dict: Dict) -> bool:
234+
def _validate_dict(cls, obj_dict: Dict) -> bool:
231235
"""Validate the content in the dictionary format from the yaml file.
232236
233237
Parameters

docs/source/user_guide/jobs/data_science_job.rst

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -85,6 +85,10 @@ to create the job on OCI. To start a job run, you can call the :py:meth:`~ads.jo
8585
which returns a :py:class:`~ads.jobs.DataScienceJobRun` instance.
8686
Once the job or job run is created, the job OCID can be accessed through ``job.id`` or ``run.id``.
8787

88+
.. note::
89+
90+
Once a job is created, if you change the configuration, you will need to re-create a job for the new configuration.
91+
8892
.. code-block:: python
8993
9094
# Create the job on OCI Data Science

tests/unitary/default_setup/jobs/test_jobs_mount_file_system.py

Lines changed: 0 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -350,14 +350,6 @@ def test_file_manager_process_data_error(self):
350350
):
351351
DSCFileSystemManager.initialize(test_mount_file_system)
352352

353-
test_mount_file_system_list = [test_mount_file_system] * 2
354-
with pytest.raises(
355-
ValueError,
356-
match="Duplicate `dest` found. Please specify different `dest` for each file system to be mounted."
357-
):
358-
for mount_file_system in test_mount_file_system_list:
359-
DSCFileSystemManager.initialize(mount_file_system)
360-
361353
def test_dsc_object_storage(self):
362354
object_storage = OCIObjectStorage(
363355
src="oci://bucket@namespace/prefix",

0 commit comments

Comments (0)