oracle-ads 2.12.11__py3-none-any.whl → 2.13.1rc0__py3-none-any.whl
This diff compares the contents of two publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.
- ads/aqua/app.py +23 -10
- ads/aqua/common/enums.py +19 -14
- ads/aqua/common/errors.py +3 -4
- ads/aqua/common/utils.py +2 -2
- ads/aqua/constants.py +1 -0
- ads/aqua/evaluation/constants.py +7 -7
- ads/aqua/evaluation/errors.py +3 -4
- ads/aqua/extension/model_handler.py +23 -0
- ads/aqua/extension/models/ws_models.py +5 -6
- ads/aqua/finetuning/constants.py +3 -3
- ads/aqua/model/constants.py +7 -7
- ads/aqua/model/enums.py +4 -5
- ads/aqua/model/model.py +22 -0
- ads/aqua/modeldeployment/entities.py +3 -1
- ads/common/auth.py +33 -20
- ads/common/extended_enum.py +52 -44
- ads/llm/__init__.py +11 -8
- ads/llm/langchain/plugins/embeddings/__init__.py +4 -0
- ads/llm/langchain/plugins/embeddings/oci_data_science_model_deployment_endpoint.py +184 -0
- ads/model/artifact_downloader.py +3 -4
- ads/model/datascience_model.py +84 -64
- ads/model/generic_model.py +3 -3
- ads/model/model_metadata.py +17 -11
- ads/model/service/oci_datascience_model.py +12 -14
- ads/opctl/anomaly_detection.py +11 -0
- ads/opctl/backend/marketplace/helm_helper.py +13 -14
- ads/opctl/cli.py +4 -5
- ads/opctl/cmds.py +28 -32
- ads/opctl/config/merger.py +8 -11
- ads/opctl/config/resolver.py +25 -30
- ads/opctl/forecast.py +11 -0
- ads/opctl/operator/cli.py +9 -9
- ads/opctl/operator/common/backend_factory.py +56 -60
- ads/opctl/operator/common/const.py +5 -5
- ads/opctl/operator/lowcode/anomaly/const.py +8 -9
- ads/opctl/operator/lowcode/feature_store_marketplace/operator_utils.py +43 -48
- ads/opctl/operator/lowcode/forecast/__main__.py +5 -5
- ads/opctl/operator/lowcode/forecast/const.py +6 -6
- ads/opctl/operator/lowcode/forecast/model/arima.py +6 -3
- ads/opctl/operator/lowcode/forecast/model/automlx.py +53 -31
- ads/opctl/operator/lowcode/forecast/model/base_model.py +57 -30
- ads/opctl/operator/lowcode/forecast/model/forecast_datasets.py +60 -2
- ads/opctl/operator/lowcode/forecast/model/neuralprophet.py +5 -2
- ads/opctl/operator/lowcode/forecast/model/prophet.py +28 -15
- ads/opctl/operator/lowcode/forecast/whatifserve/score.py +19 -11
- ads/opctl/operator/lowcode/pii/constant.py +6 -7
- ads/opctl/operator/lowcode/recommender/constant.py +12 -7
- ads/opctl/operator/runtime/marketplace_runtime.py +4 -10
- ads/opctl/operator/runtime/runtime.py +4 -6
- ads/pipeline/ads_pipeline_run.py +13 -25
- ads/pipeline/visualizer/graph_renderer.py +3 -4
- {oracle_ads-2.12.11.dist-info → oracle_ads-2.13.1rc0.dist-info}/METADATA +6 -6
- {oracle_ads-2.12.11.dist-info → oracle_ads-2.13.1rc0.dist-info}/RECORD +56 -52
- {oracle_ads-2.12.11.dist-info → oracle_ads-2.13.1rc0.dist-info}/LICENSE.txt +0 -0
- {oracle_ads-2.12.11.dist-info → oracle_ads-2.13.1rc0.dist-info}/WHEEL +0 -0
- {oracle_ads-2.12.11.dist-info → oracle_ads-2.13.1rc0.dist-info}/entry_points.txt +0 -0
ads/opctl/cmds.py
CHANGED
@@ -1,7 +1,6 @@
 #!/usr/bin/env python
-# -*- coding: utf-8; -*-
 
-# Copyright (c) 2022,
+# Copyright (c) 2022, 2025 Oracle and/or its affiliates.
 # Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/
 
 import configparser
@@ -11,13 +10,10 @@ from typing import Dict, List, Union
 import click
 import fsspec
 import yaml
-from ads.opctl.backend.marketplace.local_marketplace import (
-    LocalMarketplaceOperatorBackend,
-)
 
 import ads
 from ads.common.auth import AuthContext, AuthType
-from ads.common.extended_enum import
+from ads.common.extended_enum import ExtendedEnum
 from ads.common.oci_datascience import DSCNotebookSession
 from ads.opctl.backend.ads_dataflow import DataFlowBackend
 from ads.opctl.backend.ads_ml_job import MLJobBackend, MLJobDistributedBackend
@@ -30,6 +26,9 @@ from ads.opctl.backend.local import (
     LocalOperatorBackend,
     LocalPipelineBackend,
 )
+from ads.opctl.backend.marketplace.local_marketplace import (
+    LocalMarketplaceOperatorBackend,
+)
 from ads.opctl.config.base import ConfigProcessor
 from ads.opctl.config.merger import ConfigMerger
 from ads.opctl.config.resolver import ConfigResolver
@@ -64,7 +63,7 @@ from ads.opctl.operator.common.backend_factory import (
 from ads.opctl.utils import get_service_pack_prefix, is_in_notebook_session
 
 
-class DataScienceResource(
+class DataScienceResource(ExtendedEnum):
     JOB = "datasciencejob"
     DATAFLOW = "dataflowapplication"
     PIPELINE = "datasciencepipeline"
@@ -72,7 +71,7 @@ class DataScienceResource(str, metaclass=ExtendedEnumMeta):
     MODEL = "datasciencemodel"
 
 
-class DataScienceResourceRun(
+class DataScienceResourceRun(ExtendedEnum):
     JOB_RUN = "datasciencejobrun"
     DATAFLOW_RUN = "dataflowrun"
     PIPELINE_RUN = "datasciencepipelinerun"
@@ -100,18 +99,18 @@ DATA_SCIENCE_RESOURCE_RUN_BACKEND_MAP = {
 
 class _BackendFactory:
     BACKENDS_MAP = {
-        BACKEND_NAME.JOB
-        BACKEND_NAME.DATAFLOW
-        BACKEND_NAME.PIPELINE
-        BACKEND_NAME.MODEL_DEPLOYMENT
-        BACKEND_NAME.OPERATOR_LOCAL
-        BACKEND_NAME.MARKETPLACE
+        BACKEND_NAME.JOB: MLJobBackend,
+        BACKEND_NAME.DATAFLOW: DataFlowBackend,
+        BACKEND_NAME.PIPELINE: PipelineBackend,
+        BACKEND_NAME.MODEL_DEPLOYMENT: ModelDeploymentBackend,
+        BACKEND_NAME.OPERATOR_LOCAL: LocalOperatorBackend,
+        BACKEND_NAME.MARKETPLACE: LocalMarketplaceOperatorBackend,
     }
 
     LOCAL_BACKENDS_MAP = {
-        BACKEND_NAME.JOB
-        BACKEND_NAME.PIPELINE
-        BACKEND_NAME.MODEL_DEPLOYMENT
+        BACKEND_NAME.JOB: LocalBackend,
+        BACKEND_NAME.PIPELINE: LocalPipelineBackend,
+        BACKEND_NAME.MODEL_DEPLOYMENT: LocalModelDeploymentBackend,
     }
 
     def __init__(self, config: Dict):
@@ -120,14 +119,14 @@ class _BackendFactory:
         if self._backend is None:
             raise RuntimeError("Please specify backend.")
         elif (
-            self._backend != BACKEND_NAME.LOCAL
+            self._backend != BACKEND_NAME.LOCAL
            and self._backend not in self.BACKENDS_MAP
         ):
             raise NotImplementedError(f"backend {self._backend} is not implemented.")
 
     @property
     def backend(self):
-        if self._backend == BACKEND_NAME.LOCAL
+        if self._backend == BACKEND_NAME.LOCAL:
             kind = self.config.get("kind") or self.config["execution"].get("kind")
             if kind not in self.LOCAL_BACKENDS_MAP:
                 options = [backend for backend in self.LOCAL_BACKENDS_MAP.keys()]
@@ -194,10 +193,7 @@ def run(config: Dict, **kwargs) -> Dict:
         except RuntimeError:
             pass
 
-        if
-            p.config["kind"] != BACKEND_NAME.LOCAL.value
-            and p.config["kind"] != "distributed"
-        ):
+        if p.config["kind"] != BACKEND_NAME.LOCAL and p.config["kind"] != "distributed":
             p.config["execution"]["backend"] = p.config["kind"]
             return _BackendFactory(p.config).backend.apply()
     else:
@@ -226,9 +222,9 @@ def run(config: Dict, **kwargs) -> Dict:
             docker_build_cmd(ini)
             config = update_image(config, ini)
 
-        if mode == BACKEND_NAME.LOCAL
+        if mode == BACKEND_NAME.LOCAL:
             print(
-                "\
+                "\u26a0 Docker Image: "
                 + ini.get("main", "registry")
                 + ":"
                 + ini.get("main", "tag")
@@ -238,7 +234,7 @@ def run(config: Dict, **kwargs) -> Dict:
 
             backend = LocalBackendDistributed(config)
             backend.run()
-        elif mode == BACKEND_NAME.DATAFLOW
+        elif mode == BACKEND_NAME.DATAFLOW:
             raise RuntimeError(
                 "backend operator for distributed training can either be local or job"
             )
@@ -283,7 +279,7 @@ def run(config: Dict, **kwargs) -> Dict:
             }
             for r, b in resource_to_backend.items():
                 if r in p.config["execution"]["ocid"]:
-                    p.config["execution"]["backend"] = b
+                    p.config["execution"]["backend"] = b
         else:
             p.step(ConfigResolver).step(ConfigValidator)
             # spec may have changed during validation step (e.g. defaults filled in)
@@ -592,7 +588,7 @@ def configure() -> None:
 
     print("==== Setting configuration for Data Science Jobs ====")
     if click.confirm(
-
+        "Do you want to set up or update Data Science Jobs configuration?",
         default=True,
     ):
         required_fields = [
@@ -622,7 +618,7 @@ def configure() -> None:
 
    print("==== Setting configuration for OCI Data Flow ====")
    if click.confirm(
-
+        "Do you want to set up or update OCI Data Flow configuration?", default=True
    ):
        required_fields = [
            ("compartment_id", ""),
@@ -652,7 +648,7 @@ def configure() -> None:
 
    print("==== Setting configuration for OCI ML Pipeline ====")
    if click.confirm(
-
+        "Do you want to set up or update OCI ML Pipeline configuration?", default=True
    ):
        required_fields = [
            ("compartment_id", ""),
@@ -674,7 +670,7 @@ def configure() -> None:
 
    print("==== Setting configuration for Data Science Model Deployment ====")
    if click.confirm(
-
+        "Do you want to set up or update Data Science Model Deployment configuration?",
        default=True,
    ):
        required_fields = [
@@ -704,7 +700,7 @@ def configure() -> None:
 
    print("==== Setting configuration for local backend ====")
    if click.confirm(
-
+        "Do you want to set up or update local backend configuration?", default=True
    ):
        required_fields = [
            ("max_parallel_containers", str(min(os.cpu_count(), 4))),
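A change that recurs throughout this release is the move from the old `ExtendedEnumMeta`-based classes to a single `ExtendedEnum` base (see `ads/common/extended_enum.py`, +52 -44, in the file list above): call sites in `cmds.py`, `merger.py`, `resolver.py`, and `backend_factory.py` drop the `.value` suffix and use the members directly as strings and dict keys. The sketch below illustrates that pattern under the assumption that the new members behave like plain string constants; the class here is a hypothetical stand-in, not the ads implementation.

# Hypothetical stand-in for an ExtendedEnum subclass such as BACKEND_NAME;
# the real base class lives in ads/common/extended_enum.py.
class BACKEND_NAME:
    LOCAL = "local"
    JOB = "job"
    DATAFLOW = "dataflow"


# Members are plain strings, so the `.value` suffix used with the old
# Enum-style classes is no longer needed at call sites.
assert BACKEND_NAME.LOCAL == "local"
assert BACKEND_NAME.JOB.lower() == "job"

# They can also key mappings directly, as in _BackendFactory.BACKENDS_MAP above.
backends_map = {BACKEND_NAME.JOB: "MLJobBackend"}
assert backends_map["job"] == "MLJobBackend"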
ads/opctl/config/merger.py
CHANGED
@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 
-# Copyright (c) 2022,
+# Copyright (c) 2022, 2025 Oracle and/or its affiliates.
 # Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/
 
 import os
@@ -190,11 +190,11 @@ class ConfigMerger(ConfigProcessor):
     def _get_service_config(self, oci_profile: str, ads_config_folder: str) -> Dict:
         backend = self.config["execution"].get("backend", None)
         backend_config = {
-            BACKEND_NAME.JOB
-            BACKEND_NAME.DATAFLOW
-            BACKEND_NAME.PIPELINE
-            BACKEND_NAME.LOCAL
-            BACKEND_NAME.MODEL_DEPLOYMENT
+            BACKEND_NAME.JOB: ADS_JOBS_CONFIG_FILE_NAME,
+            BACKEND_NAME.DATAFLOW: ADS_DATAFLOW_CONFIG_FILE_NAME,
+            BACKEND_NAME.PIPELINE: ADS_ML_PIPELINE_CONFIG_FILE_NAME,
+            BACKEND_NAME.LOCAL: ADS_LOCAL_BACKEND_CONFIG_FILE_NAME,
+            BACKEND_NAME.MODEL_DEPLOYMENT: ADS_MODEL_DEPLOYMENT_CONFIG_FILE_NAME,
         }
         config_file = backend_config.get(backend, ADS_JOBS_CONFIG_FILE_NAME)
 
@@ -213,10 +213,7 @@ class ConfigMerger(ConfigProcessor):
     def _config_flex_shape_details(self):
         infrastructure = self.config["infrastructure"]
         backend = self.config["execution"].get("backend", None)
-        if
-            backend == BACKEND_NAME.JOB.value
-            or backend == BACKEND_NAME.MODEL_DEPLOYMENT.value
-        ):
+        if backend == BACKEND_NAME.JOB or backend == BACKEND_NAME.MODEL_DEPLOYMENT:
             shape_name = infrastructure.get("shape_name", "")
             if shape_name.endswith(".Flex"):
                 if (
@@ -231,7 +228,7 @@ class ConfigMerger(ConfigProcessor):
                     "ocpus": infrastructure.pop("ocpus"),
                     "memory_in_gbs": infrastructure.pop("memory_in_gbs"),
                 }
-        elif backend == BACKEND_NAME.DATAFLOW
+        elif backend == BACKEND_NAME.DATAFLOW:
             executor_shape = infrastructure.get("executor_shape", "")
             driver_shape = infrastructure.get("driver_shape", "")
             data_flow_shape_config_details = [
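For context on the flex-shape branch above: when the backend is Jobs or Model Deployment and a *.Flex shape is configured, `ocpus` and `memory_in_gbs` are popped out of the flat infrastructure section and nested under a shape-configuration block. A rough sketch of that reshaping follows; the shape name is hypothetical and the `shape_config_details` target key is an assumption, since it is not visible in this hunk.

# Simplified illustration of the Jobs / Model Deployment case in
# _config_flex_shape_details; only ocpus/memory_in_gbs come from the hunk above.
infrastructure = {
    "shape_name": "VM.Standard.E4.Flex",  # hypothetical flex shape
    "ocpus": 2,
    "memory_in_gbs": 32,
}

if infrastructure.get("shape_name", "").endswith(".Flex"):
    infrastructure["shape_config_details"] = {  # assumed target key
        "ocpus": infrastructure.pop("ocpus"),
        "memory_in_gbs": infrastructure.pop("memory_in_gbs"),
    }

print(infrastructure)
# {'shape_name': 'VM.Standard.E4.Flex',
#  'shape_config_details': {'ocpus': 2, 'memory_in_gbs': 32}}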
ads/opctl/config/resolver.py
CHANGED
@@ -1,36 +1,34 @@
 #!/usr/bin/env python
-# -*- coding: utf-8; -*-
 
-# Copyright (c) 2022,
+# Copyright (c) 2022, 2025 Oracle and/or its affiliates.
 # Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/
 
 import base64
+import glob
 import json
 import os
-from typing import Dict
-from typing import Tuple
+from typing import Dict, Tuple
 
 import yaml
-import glob
 
 from ads.common.auth import create_signer
+from ads.common.decorator.runtime_dependency import (
+    OptionalDependency,
+    runtime_dependency,
+)
 from ads.opctl import logger
 from ads.opctl.config.base import ConfigProcessor
 from ads.opctl.config.utils import NotSupportedError, convert_notebook
 from ads.opctl.constants import (
+    BACKEND_NAME,
     ML_JOB_GPU_IMAGE,
     ML_JOB_IMAGE,
-    BACKEND_NAME,
 )
 from ads.opctl.utils import (
+    get_namespace,
+    get_region_key,
     list_ads_operators,
     parse_conda_uri,
-    get_region_key,
-    get_namespace,
-)
-from ads.common.decorator.runtime_dependency import (
-    runtime_dependency,
-    OptionalDependency,
 )
 
 
@@ -91,7 +89,7 @@ class ConfigResolver(ConfigProcessor):
         if not (
             self.config["execution"].get("conda_slug")
             or self.config["execution"].get("image")
-            or self.config["execution"]["backend"] == BACKEND_NAME.DATAFLOW
+            or self.config["execution"]["backend"] == BACKEND_NAME.DATAFLOW
         ):
             raise ValueError(
                 "If not running an operator, conda pack info or image name needs to be given."
@@ -134,21 +132,18 @@ class ConfigResolver(ConfigProcessor):
                 raise FileNotFoundError(
                     f"{self.config['execution']['source_folder']} is not found."
                 )
-
-
-
-
-
-
-
-
-                    inflection.underscore(
-                        self.config["execution"]["operator_name"]
-                    ),
-                )
+        elif self._is_ads_operator():
+            curr_dir = os.path.dirname(os.path.abspath(__file__))
+            self.config["execution"]["source_folder"] = os.path.normpath(
+                os.path.join(
+                    curr_dir,
+                    "..",
+                    "operators",
+                    inflection.underscore(self.config["execution"]["operator_name"]),
                 )
-
-
+            )
+        else:
+            self.config["execution"]["source_folder"] = None
 
     def _resolve_entry_script(self) -> None:
         # this should be run after _resolve_source_folder_path
@@ -263,9 +258,9 @@ class ConfigResolver(ConfigProcessor):
                 self.config["infrastructure"]["docker_registry"], image
             )
         else:
-            self.config["execution"][
-                "image"
-
+            self.config["execution"]["image"] = (
+                f"{region_key}.ocir.io/{namespace}/{image}"
+            )
 
     def _resolve_env_vars(self) -> None:
         env_vars = self.config["execution"].get("env_vars", {})
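The reconstructed `elif self._is_ads_operator():` branch above resolves the bundled operator sources relative to the installed `ads/opctl` package by snake-casing the operator name with the third-party `inflection` package already used in this module. A small, standalone sketch of that path arithmetic, with a hypothetical install prefix and operator name:

import os

import inflection

# Hypothetical location of ads/opctl/config/resolver.py inside an installed package.
curr_dir = "/site-packages/ads/opctl/config"

# Mirrors the source-folder resolution: "MyOperator" -> ".../operators/my_operator".
source_folder = os.path.normpath(
    os.path.join(curr_dir, "..", "operators", inflection.underscore("MyOperator"))
)
print(source_folder)  # /site-packages/ads/opctl/operators/my_operator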
ads/opctl/forecast.py
ADDED
@@ -0,0 +1,11 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2025 Oracle and/or its affiliates.
+# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/
+
+from ads.opctl.operator.lowcode.forecast.__main__ import operate
+from ads.opctl.operator.lowcode.forecast.operator_config import ForecastOperatorConfig
+
+if __name__ == "__main__":
+    config = ForecastOperatorConfig()
+    operate(config)
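The new `ads/opctl/forecast.py` (like the new `ads/opctl/anomaly_detection.py` listed above, presumably its counterpart) is a thin entry point that builds a default `ForecastOperatorConfig` and hands it to the forecast operator's `operate` function, so the operator can be launched directly, for example with `python -m ads.opctl.forecast`. A programmatic equivalent of what the guard does, for illustration only:

# Whether a default-constructed ForecastOperatorConfig is sufficient on its own
# depends on the operator's own validation; this simply mirrors the new module.
from ads.opctl.operator.lowcode.forecast.__main__ import operate
from ads.opctl.operator.lowcode.forecast.operator_config import ForecastOperatorConfig

operate(ForecastOperatorConfig())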
ads/opctl/operator/cli.py
CHANGED
@@ -1,25 +1,24 @@
 #!/usr/bin/env python
-# -*- coding: utf-8 -*--
 
-# Copyright (c) 2023 Oracle and/or its affiliates.
+# Copyright (c) 2023, 2025 Oracle and/or its affiliates.
 # Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/
 
+import logging
 from typing import Any, Dict
 
 import click
 import fsspec
 import yaml
-
-from ads.opctl.operator.common.utils import default_signer
+
 from ads.common.auth import AuthType
 from ads.common.object_storage_details import ObjectStorageDetails
+from ads.opctl import logger
 from ads.opctl.constants import BACKEND_NAME, RUNTIME_TYPE
 from ads.opctl.decorator.common import click_options, with_auth, with_click_unknown_args
+from ads.opctl.operator.common.utils import default_signer
 from ads.opctl.utils import suppress_traceback
-from ads.opctl import logger
 
 from .__init__ import __operators__
-from .cmd import run as cmd_run
 from .cmd import build_conda as cmd_build_conda
 from .cmd import build_image as cmd_build_image
 from .cmd import create as cmd_create
@@ -28,6 +27,7 @@ from .cmd import init as cmd_init
 from .cmd import list as cmd_list
 from .cmd import publish_conda as cmd_publish_conda
 from .cmd import publish_image as cmd_publish_image
+from .cmd import run as cmd_run
 from .cmd import verify as cmd_verify
 
 DEBUG_OPTION = (
@@ -113,7 +113,7 @@ def info(debug: bool, **kwargs: Dict[str, Any]) -> None:
 )
 @click.option(
     "--output",
-    help=
+    help="The folder name to save the resulting specification templates.",
     required=False,
     default=None,
 )
@@ -292,9 +292,9 @@ def publish_conda(debug: bool, **kwargs: Dict[str, Any]) -> None:
     "-b",
     help=(
         "Backend name or the path to the operator's backend config YAML file. "
-        f"\n\nExample 1:\n\n`ads operator run -f operator.yaml -b {BACKEND_NAME.LOCAL
+        f"\n\nExample 1:\n\n`ads operator run -f operator.yaml -b {BACKEND_NAME.LOCAL}`\n\n"
         "Supported backend names: "
-        f"{(BACKEND_NAME.JOB
+        f"{(BACKEND_NAME.JOB,BACKEND_NAME.JOB + '.' + RUNTIME_TYPE.CONTAINER,BACKEND_NAME.DATAFLOW,BACKEND_NAME.LOCAL,BACKEND_NAME.LOCAL + '.'+ RUNTIME_TYPE.CONTAINER,)}. "
         "However some operators may support only a subset of these backends."
         "\n\nExample 2:\n\n`ads operator run -f operator.yaml -b backend.yaml`\n\n"
         "Use the `ads operator init --help` command to generate the operator's specification "
ads/opctl/operator/common/backend_factory.py
CHANGED
@@ -1,7 +1,6 @@
 #!/usr/bin/env python
-# -*- coding: utf-8 -*--
 
-# Copyright (c) 2023,
+# Copyright (c) 2023, 2025 Oracle and/or its affiliates.
 # Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/
 
 """
@@ -12,11 +11,6 @@ The factory validates the backend type and runtime type before creating the back
 from typing import Dict, List, Tuple, Union
 
 import yaml
-from ads.opctl.operator.common.utils import print_traceback
-
-from ads.opctl.backend.marketplace.local_marketplace import (
-    LocalMarketplaceOperatorBackend,
-)
 
 from ads.opctl import logger
 from ads.opctl.backend.ads_dataflow import DataFlowOperatorBackend
@@ -25,6 +19,9 @@ from ads.opctl.backend.base import Backend
 from ads.opctl.backend.local import (
     LocalOperatorBackend,
 )
+from ads.opctl.backend.marketplace.local_marketplace import (
+    LocalMarketplaceOperatorBackend,
+)
 from ads.opctl.config.base import ConfigProcessor
 from ads.opctl.config.merger import ConfigMerger
 from ads.opctl.constants import (
@@ -34,9 +31,10 @@ from ads.opctl.constants import (
     RESOURCE_TYPE,
     RUNTIME_TYPE,
 )
-from ads.opctl.operator.common.const import
+from ads.opctl.operator.common.const import OPERATOR_BACKEND_SECTION_NAME, PACK_TYPE
 from ads.opctl.operator.common.dictionary_merger import DictionaryMerger
 from ads.opctl.operator.common.operator_loader import OperatorInfo, OperatorLoader
+from ads.opctl.operator.common.utils import print_traceback
 
 
 class BackendFactory:
@@ -46,57 +44,57 @@ class BackendFactory:
     """
 
     BACKENDS = (
-        BACKEND_NAME.JOB
-        BACKEND_NAME.DATAFLOW
-        BACKEND_NAME.MARKETPLACE
+        BACKEND_NAME.JOB,
+        BACKEND_NAME.DATAFLOW,
+        BACKEND_NAME.MARKETPLACE,
     )
 
     LOCAL_BACKENDS = (
-        BACKEND_NAME.OPERATOR_LOCAL
-        BACKEND_NAME.LOCAL
+        BACKEND_NAME.OPERATOR_LOCAL,
+        BACKEND_NAME.LOCAL,
     )
 
     BACKEND_RUNTIME_MAP = {
-        BACKEND_NAME.JOB.
-            RUNTIME_TYPE.PYTHON.
-                BACKEND_NAME.JOB.
-                RUNTIME_TYPE.PYTHON.
+        BACKEND_NAME.JOB.lower(): {
+            RUNTIME_TYPE.PYTHON.lower(): (
+                BACKEND_NAME.JOB.lower(),
+                RUNTIME_TYPE.PYTHON.lower(),
             ),
-            RUNTIME_TYPE.CONTAINER.
-                BACKEND_NAME.JOB.
-                RUNTIME_TYPE.CONTAINER.
+            RUNTIME_TYPE.CONTAINER.lower(): (
+                BACKEND_NAME.JOB.lower(),
+                RUNTIME_TYPE.CONTAINER.lower(),
            ),
        },
-        BACKEND_NAME.DATAFLOW.
-            RUNTIME_TYPE.DATAFLOW.
-                BACKEND_NAME.DATAFLOW.
-                RUNTIME_TYPE.DATAFLOW.
+        BACKEND_NAME.DATAFLOW.lower(): {
+            RUNTIME_TYPE.DATAFLOW.lower(): (
+                BACKEND_NAME.DATAFLOW.lower(),
+                RUNTIME_TYPE.DATAFLOW.lower(),
            )
        },
-        BACKEND_NAME.OPERATOR_LOCAL.
-            RUNTIME_TYPE.PYTHON.
-                BACKEND_NAME.OPERATOR_LOCAL.
-                RUNTIME_TYPE.PYTHON.
+        BACKEND_NAME.OPERATOR_LOCAL.lower(): {
+            RUNTIME_TYPE.PYTHON.lower(): (
+                BACKEND_NAME.OPERATOR_LOCAL.lower(),
+                RUNTIME_TYPE.PYTHON.lower(),
            ),
-            RUNTIME_TYPE.CONTAINER.
-                BACKEND_NAME.OPERATOR_LOCAL.
-                RUNTIME_TYPE.CONTAINER.
+            RUNTIME_TYPE.CONTAINER.lower(): (
+                BACKEND_NAME.OPERATOR_LOCAL.lower(),
+                RUNTIME_TYPE.CONTAINER.lower(),
            ),
        },
-        BACKEND_NAME.MARKETPLACE.
-            RUNTIME_TYPE.PYTHON.
-                BACKEND_NAME.MARKETPLACE.
-                RUNTIME_TYPE.PYTHON.
+        BACKEND_NAME.MARKETPLACE.lower(): {
+            RUNTIME_TYPE.PYTHON.lower(): (
+                BACKEND_NAME.MARKETPLACE.lower(),
+                RUNTIME_TYPE.PYTHON.lower(),
            )
        },
    }
 
    BACKEND_MAP = {
-        BACKEND_NAME.JOB.
-        BACKEND_NAME.DATAFLOW.
-        BACKEND_NAME.OPERATOR_LOCAL.
-        BACKEND_NAME.LOCAL.
-        BACKEND_NAME.MARKETPLACE.
+        BACKEND_NAME.JOB.lower(): MLJobOperatorBackend,
+        BACKEND_NAME.DATAFLOW.lower(): DataFlowOperatorBackend,
+        BACKEND_NAME.OPERATOR_LOCAL.lower(): LocalOperatorBackend,
+        BACKEND_NAME.LOCAL.lower(): LocalOperatorBackend,
+        BACKEND_NAME.MARKETPLACE.lower(): LocalMarketplaceOperatorBackend,
    }
 
    @classmethod
@@ -135,15 +133,15 @@ class BackendFactory:
        # validation
        if not operator_type:
            raise RuntimeError(
-
+                "The `type` attribute must be specified in the operator's config."
            )
 
        if not backend and not config.config.get(OPERATOR_BACKEND_SECTION_NAME):
            logger.info(
-                f"Backend config is not provided, the {BACKEND_NAME.LOCAL
+                f"Backend config is not provided, the {BACKEND_NAME.LOCAL} "
                "will be used by default. "
            )
-            backend = BACKEND_NAME.LOCAL
+            backend = BACKEND_NAME.LOCAL
        elif not backend:
            backend = config.config.get(OPERATOR_BACKEND_SECTION_NAME)
 
@@ -164,8 +162,8 @@ class BackendFactory:
            backend = {"kind": backend_kind}
 
        backend_kind = (
-            BACKEND_NAME.OPERATOR_LOCAL
-            if backend.get("kind").lower() == BACKEND_NAME.LOCAL
+            BACKEND_NAME.OPERATOR_LOCAL
+            if backend.get("kind").lower() == BACKEND_NAME.LOCAL
            else backend.get("kind").lower()
        )
        backend["kind"] = backend_kind
@@ -174,11 +172,11 @@ class BackendFactory:
        # This is necessary, because Jobs and DataFlow have similar kind,
        # The only difference would be in the infrastructure kind.
        # This is a temporary solution, the logic needs to be placed in the ConfigMerger instead.
-        if backend_kind == BACKEND_NAME.JOB
+        if backend_kind == BACKEND_NAME.JOB:
            if (backend.get("spec", {}) or {}).get("infrastructure", {}).get(
                "type", ""
-            ).lower() == BACKEND_NAME.DATAFLOW
-                backend_kind = BACKEND_NAME.DATAFLOW
+            ).lower() == BACKEND_NAME.DATAFLOW:
+                backend_kind = BACKEND_NAME.DATAFLOW
 
        runtime_type = runtime_type or (
            backend.get("type")
@@ -247,17 +245,17 @@ class BackendFactory:
            If the backend type is not supported.
        """
        supported_backends = supported_backends or (cls.BACKENDS + cls.LOCAL_BACKENDS)
-        backend = (backend or BACKEND_NAME.OPERATOR_LOCAL
+        backend = (backend or BACKEND_NAME.OPERATOR_LOCAL).lower()
        backend_kind, runtime_type = backend, None
 
-        if backend.lower() != BACKEND_NAME.OPERATOR_LOCAL
+        if backend.lower() != BACKEND_NAME.OPERATOR_LOCAL and "." in backend:
            backend_kind, runtime_type = backend.split(".")
        else:
            backend_kind = backend
 
        backend_kind = (
-            BACKEND_NAME.OPERATOR_LOCAL
-            if backend_kind == BACKEND_NAME.LOCAL
+            BACKEND_NAME.OPERATOR_LOCAL
+            if backend_kind == BACKEND_NAME.LOCAL
            else backend_kind
        )
 
@@ -357,7 +355,7 @@ class BackendFactory:
 
        # generate supported backend specifications templates YAML
        RUNTIME_TYPE_MAP = {
-            RESOURCE_TYPE.JOB
+            RESOURCE_TYPE.JOB: [
                {
                    RUNTIME_TYPE.PYTHON: {
                        "conda_slug": operator_info.conda
@@ -373,7 +371,7 @@ class BackendFactory:
                    }
                },
            ],
-            RESOURCE_TYPE.DATAFLOW
+            RESOURCE_TYPE.DATAFLOW: [
                {
                    RUNTIME_TYPE.DATAFLOW: {
                        "conda_slug": operator_info.conda_prefix,
@@ -381,7 +379,7 @@ class BackendFactory:
                    }
                }
            ],
-            BACKEND_NAME.OPERATOR_LOCAL
+            BACKEND_NAME.OPERATOR_LOCAL: [
                {
                    RUNTIME_TYPE.CONTAINER: {
                        "kind": "operator",
@@ -397,7 +395,7 @@ class BackendFactory:
                    }
                },
            ],
-            BACKEND_NAME.MARKETPLACE
+            BACKEND_NAME.MARKETPLACE: [
                {
                    RUNTIME_TYPE.PYTHON: {
                        "kind": "marketplace",
@@ -445,11 +443,9 @@ class BackendFactory:
            )
 
            # generate YAML specification template
-            result[
-                (resource_type.lower(), runtime_type.value.lower())
-            ] = yaml.load(
+            result[(resource_type.lower(), runtime_type.lower())] = yaml.load(
                _BackendFactory(p.config).backend.init(
-                    runtime_type=runtime_type
+                    runtime_type=runtime_type,
                    **{**kwargs, **runtime_kwargs},
                ),
                Loader=yaml.FullLoader,
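The last large hunk above splits the backend argument into a kind and an optional runtime (`backend_kind, runtime_type = backend.split(".")`), with `OPERATOR_LOCAL` special-cased before the split and a bare `local` normalized to the operator-local backend. A standalone sketch of that parsing, assuming `BACKEND_NAME.OPERATOR_LOCAL` is itself a dotted value such as "operator.local" (which would explain the extra guard) and `BACKEND_NAME.LOCAL` is "local":

from typing import Optional, Tuple

# Assumed string values; the real ones live in ads/opctl/constants.BACKEND_NAME.
OPERATOR_LOCAL = "operator.local"
LOCAL = "local"


def split_backend(backend: Optional[str]) -> Tuple[str, Optional[str]]:
    """Split a backend argument like "job.container" into (kind, runtime)."""
    backend = (backend or OPERATOR_LOCAL).lower()
    # The added `and "." in backend` guard avoids unpacking errors for plain
    # names, and OPERATOR_LOCAL is excluded explicitly because, under this
    # assumption, it contains a dot of its own.
    if backend != OPERATOR_LOCAL and "." in backend:
        kind, runtime = backend.split(".")
    else:
        kind, runtime = backend, None
    # A bare "local" backend is normalized to the operator-local backend.
    return (OPERATOR_LOCAL if kind == LOCAL else kind, runtime)


assert split_backend("job.container") == ("job", "container")
assert split_backend("local") == (OPERATOR_LOCAL, None)
assert split_backend(None) == (OPERATOR_LOCAL, None)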
ads/opctl/operator/common/const.py
CHANGED
@@ -1,10 +1,9 @@
 #!/usr/bin/env python
-# -*- coding: utf-8 -*--
 
-# Copyright (c) 2023 Oracle and/or its affiliates.
+# Copyright (c) 2023, 2025 Oracle and/or its affiliates.
 # Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/
 
-from ads.common.extended_enum import
+from ads.common.extended_enum import ExtendedEnum
 
 # Env variable representing the operator input arguments.
 # This variable is used when operator run on the OCI resources.
@@ -17,11 +16,12 @@ OPERATOR_BASE_DOCKER_GPU_FILE = "Dockerfile.gpu"
 
 OPERATOR_BACKEND_SECTION_NAME = "backend"
 
-
+
+class PACK_TYPE(ExtendedEnum):
     SERVICE = "service"
     CUSTOM = "published"
 
 
-class ARCH_TYPE(
+class ARCH_TYPE(ExtendedEnum):
     CPU = "cpu"
     GPU = "gpu"