oracle-ads 2.11.17__py3-none-any.whl → 2.11.19__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44)
  1. ads/aqua/common/utils.py +20 -3
  2. ads/aqua/config/__init__.py +4 -0
  3. ads/aqua/config/config.py +28 -0
  4. ads/aqua/config/evaluation/__init__.py +4 -0
  5. ads/aqua/config/evaluation/evaluation_service_config.py +282 -0
  6. ads/aqua/config/evaluation/evaluation_service_model_config.py +8 -0
  7. ads/aqua/config/utils/__init__.py +4 -0
  8. ads/aqua/config/utils/serializer.py +339 -0
  9. ads/aqua/constants.py +1 -1
  10. ads/aqua/evaluation/entities.py +1 -0
  11. ads/aqua/evaluation/evaluation.py +56 -88
  12. ads/aqua/extension/common_handler.py +2 -3
  13. ads/aqua/extension/common_ws_msg_handler.py +2 -2
  14. ads/aqua/extension/evaluation_handler.py +4 -3
  15. ads/aqua/extension/model_handler.py +26 -1
  16. ads/aqua/extension/utils.py +12 -1
  17. ads/aqua/modeldeployment/deployment.py +31 -51
  18. ads/aqua/ui.py +27 -25
  19. ads/common/auth.py +4 -4
  20. ads/jobs/builders/infrastructure/dsc_job.py +11 -5
  21. ads/jobs/builders/infrastructure/dsc_job_runtime.py +12 -25
  22. ads/jobs/builders/runtimes/artifact.py +0 -5
  23. ads/jobs/builders/runtimes/container_runtime.py +26 -3
  24. ads/opctl/conda/cmds.py +100 -42
  25. ads/opctl/conda/pack.py +3 -2
  26. ads/opctl/operator/lowcode/anomaly/const.py +1 -0
  27. ads/opctl/operator/lowcode/anomaly/model/base_model.py +58 -37
  28. ads/opctl/operator/lowcode/anomaly/model/factory.py +2 -0
  29. ads/opctl/operator/lowcode/anomaly/model/randomcutforest.py +116 -0
  30. ads/opctl/operator/lowcode/anomaly/schema.yaml +1 -0
  31. ads/opctl/operator/lowcode/forecast/const.py +1 -1
  32. ads/opctl/operator/lowcode/forecast/model/arima.py +6 -2
  33. ads/opctl/operator/lowcode/forecast/model/automlx.py +6 -1
  34. ads/opctl/operator/lowcode/forecast/model/autots.py +3 -1
  35. ads/opctl/operator/lowcode/forecast/model/factory.py +1 -1
  36. ads/opctl/operator/lowcode/forecast/model/ml_forecast.py +24 -15
  37. ads/opctl/operator/lowcode/forecast/model/neuralprophet.py +6 -1
  38. ads/opctl/operator/lowcode/forecast/model/prophet.py +3 -1
  39. ads/opctl/operator/lowcode/forecast/schema.yaml +1 -1
  40. {oracle_ads-2.11.17.dist-info → oracle_ads-2.11.19.dist-info}/METADATA +5 -1
  41. {oracle_ads-2.11.17.dist-info → oracle_ads-2.11.19.dist-info}/RECORD +44 -37
  42. {oracle_ads-2.11.17.dist-info → oracle_ads-2.11.19.dist-info}/LICENSE.txt +0 -0
  43. {oracle_ads-2.11.17.dist-info → oracle_ads-2.11.19.dist-info}/WHEEL +0 -0
  44. {oracle_ads-2.11.17.dist-info → oracle_ads-2.11.19.dist-info}/entry_points.txt +0 -0
ads/aqua/extension/utils.py CHANGED
@@ -1,12 +1,15 @@
  #!/usr/bin/env python
- # -*- coding: utf-8 -*-
  # Copyright (c) 2024 Oracle and/or its affiliates.
  # Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/
  from dataclasses import fields
+ from datetime import datetime, timedelta
  from typing import Dict, Optional

+ from cachetools import TTLCache, cached
  from tornado.web import HTTPError

+ from ads.aqua import ODSC_MODEL_COMPARTMENT_OCID
+ from ads.aqua.common.utils import fetch_service_compartment
  from ads.aqua.extension.errors import Errors


@@ -21,3 +24,11 @@ def validate_function_parameters(data_class, input_data: Dict):
              raise HTTPError(
                  400, Errors.MISSING_REQUIRED_PARAMETER.format(required_parameter)
              )
+
+
+ @cached(cache=TTLCache(maxsize=1, ttl=timedelta(minutes=1), timer=datetime.now))
+ def ui_compatability_check():
+     """This method caches the service compartment OCID details that is set by either the environment variable or if
+     fetched from the configuration. The cached result is returned when multiple calls are made in quick succession
+     from the UI to avoid multiple config file loads."""
+     return ODSC_MODEL_COMPARTMENT_OCID or fetch_service_compartment()
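The new ui_compatability_check() helper relies on cachetools' TTL caching. A minimal standalone sketch of the same pattern, not taken from the package (expensive_lookup is a made-up stand-in for fetch_service_compartment): passing timer=datetime.now lets the ttl be expressed as a timedelta, and with maxsize=1 the single cached result is reused for about a minute before the wrapped function runs again.

from datetime import datetime, timedelta

from cachetools import TTLCache, cached


@cached(cache=TTLCache(maxsize=1, ttl=timedelta(minutes=1), timer=datetime.now))
def expensive_lookup() -> str:
    # Stand-in for fetch_service_compartment(); pretend this reloads a config file.
    print("loading configuration ...")
    return "ocid1.compartment.oc1..example"


expensive_lookup()  # runs the function and caches the result
expensive_lookup()  # served from the cache; nothing is printed for ~1 minute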
ads/aqua/modeldeployment/deployment.py CHANGED
@@ -146,7 +146,7 @@ class AquaDeploymentApp(AquaApp):
          env_var : dict, optional
              Environment variable for the deployment, by default None.
          container_family: str
-             The image family of model deployment container runtime. Required for unverified Aqua models.
+             The image family of model deployment container runtime.
          memory_in_gbs: float
              The memory in gbs for the shape selected.
          ocpus: float
@@ -230,41 +230,14 @@ class AquaDeploymentApp(AquaApp):

              env_var.update({"FT_MODEL": f"{fine_tune_output_path}"})

-         is_custom_container = False
-         try:
-             container_type_key = aqua_model.custom_metadata_list.get(
-                 AQUA_DEPLOYMENT_CONTAINER_METADATA_NAME
-             ).value
-         except ValueError as err:
-             message = (
-                 f"{AQUA_DEPLOYMENT_CONTAINER_METADATA_NAME} key is not available in the custom metadata field "
-                 f"for model {aqua_model.id}."
-             )
-             logger.debug(message)
-             if not container_family:
-                 raise AquaValueError(
-                     f"{message}. For unverified Aqua models, container_family parameter should be "
-                     f"set and value can be one of {', '.join(InferenceContainerTypeFamily.values())}."
-                 ) from err
-             container_type_key = container_family
-         try:
-             # Check if the container override flag is set. If set, then the user has chosen custom image
-             if aqua_model.custom_metadata_list.get(
-                 AQUA_DEPLOYMENT_CONTAINER_OVERRIDE_FLAG_METADATA_NAME
-             ).value:
-                 is_custom_container = True
-         except Exception:
-             pass
+         container_type_key = self._get_container_type_key(
+             model=aqua_model,
+             container_family=container_family
+         )

          # fetch image name from config
-         # If the image is of type custom, then `container_type_key` is the inference image
-         container_image = (
-             get_container_image(
-                 container_type=container_type_key,
-             )
-             if not is_custom_container
-             else container_type_key
-         )
+         container_image = get_container_image(container_type=container_type_key)
+
          logging.info(
              f"Aqua Image used for deploying {aqua_model.id} : {container_image}"
          )
@@ -433,6 +406,26 @@ class AquaDeploymentApp(AquaApp):
              deployment.dsc_model_deployment, self.region
          )

+     @staticmethod
+     def _get_container_type_key(model: DataScienceModel, container_family: str) -> str:
+         container_type_key = UNKNOWN
+         if container_family:
+             container_type_key = container_family
+         else:
+             try:
+                 container_type_key = model.custom_metadata_list.get(
+                     AQUA_DEPLOYMENT_CONTAINER_METADATA_NAME
+                 ).value
+             except ValueError as err:
+                 raise AquaValueError(
+                     f"{AQUA_DEPLOYMENT_CONTAINER_METADATA_NAME} key is not available in the custom metadata field "
+                     f"for model {model.id}. For unverified Aqua models, {AQUA_DEPLOYMENT_CONTAINER_METADATA_NAME} should be"
+                     f"set and value can be one of {', '.join(InferenceContainerTypeFamily.values())}."
+                 ) from err
+
+         return container_type_key
+
+
      @telemetry(entry_point="plugin=deployment&action=list", name="aqua")
      def list(self, **kwargs) -> List["AquaDeployment"]:
          """List Aqua model deployments in a given compartment and under certain project.
@@ -672,23 +665,10 @@ class AquaDeploymentApp(AquaApp):
          restricted_params = []
          if params:
              model = DataScienceModel.from_id(model_id)
-             try:
-                 container_type_key = model.custom_metadata_list.get(
-                     AQUA_DEPLOYMENT_CONTAINER_METADATA_NAME
-                 ).value
-             except ValueError as err:
-                 message = (
-                     f"{AQUA_DEPLOYMENT_CONTAINER_METADATA_NAME} key is not available in the custom metadata field "
-                     f"for model {model_id}."
-                 )
-                 logger.debug(message)
-
-                 if not container_family:
-                     raise AquaValueError(
-                         f"{message}. For unverified Aqua models, container_family parameter should be "
-                         f"set and value can be one of {', '.join(InferenceContainerTypeFamily.values())}."
-                     ) from err
-                 container_type_key = container_family
+             container_type_key = self._get_container_type_key(
+                 model=model,
+                 container_family=container_family
+             )

              container_config = get_container_config()
              container_spec = container_config.get(ContainerSpec.CONTAINER_SPEC, {}).get(
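The deployment.py changes above fold two duplicated metadata lookups into the single _get_container_type_key helper. A hedged standalone sketch of the resolution order it implements; the names below (resolve_container_family, the "deployment-container" key, and the family values) are illustrative, not the package's API:

from typing import Optional


def resolve_container_family(explicit_family: Optional[str], custom_metadata: dict) -> str:
    """Prefer an explicitly supplied family, then the model's custom metadata;
    otherwise fail, because the container cannot be inferred."""
    if explicit_family:
        return explicit_family
    try:
        return custom_metadata["deployment-container"]
    except KeyError as err:
        raise ValueError(
            "No deployment container recorded for this model; "
            "pass container_family explicitly for unverified models."
        ) from err


print(resolve_container_family(None, {"deployment-container": "odsc-vllm-serving"}))
print(resolve_container_family("odsc-tgi-serving", {}))  # an explicit value wins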
ads/aqua/ui.py CHANGED
@@ -84,9 +84,6 @@ class AquaContainerConfigSpec(DataClassSerializable):
      health_check_port: str = None
      env_vars: List[dict] = None
      restricted_params: List[str] = None
-     evaluation_configuration: AquaContainerEvaluationConfig = field(
-         default_factory=AquaContainerEvaluationConfig
-     )


  @dataclass(repr=False)
@@ -184,32 +181,37 @@ class AquaContainerConfig(DataClassSerializable):
                          family=container_type,
                          platforms=platforms,
                          model_formats=model_formats,
-                         spec=AquaContainerConfigSpec(
-                             cli_param=container_spec.get(ContainerSpec.CLI_PARM, ""),
-                             server_port=container_spec.get(
-                                 ContainerSpec.SERVER_PORT, ""
-                             ),
-                             health_check_port=container_spec.get(
-                                 ContainerSpec.HEALTH_CHECK_PORT, ""
-                             ),
-                             env_vars=container_spec.get(ContainerSpec.ENV_VARS, []),
-                             restricted_params=container_spec.get(
-                                 ContainerSpec.RESTRICTED_PARAMS, []
-                             ),
-                             evaluation_configuration=AquaContainerEvaluationConfig.from_config(
-                                 container_spec.get(
-                                     ContainerSpec.EVALUATION_CONFIGURATION, {}
-                                 )
-                             ),
-                         )
-                         if container_spec
-                         else None,
+                         spec=(
+                             AquaContainerConfigSpec(
+                                 cli_param=container_spec.get(
+                                     ContainerSpec.CLI_PARM, ""
+                                 ),
+                                 server_port=container_spec.get(
+                                     ContainerSpec.SERVER_PORT, ""
+                                 ),
+                                 health_check_port=container_spec.get(
+                                     ContainerSpec.HEALTH_CHECK_PORT, ""
+                                 ),
+                                 env_vars=container_spec.get(ContainerSpec.ENV_VARS, []),
+                                 restricted_params=container_spec.get(
+                                     ContainerSpec.RESTRICTED_PARAMS, []
+                                 ),
+                             )
+                             if container_spec
+                             else None
+                         ),
                      )
                      if container.get("type") == "inference":
                          inference_items[container_type] = container_item
-                     elif container_type == "odsc-llm-fine-tuning":
+                     elif (
+                         container.get("type") == "fine-tune"
+                         or container_type == "odsc-llm-fine-tuning"
+                     ):
                          finetune_items[container_type] = container_item
-                     elif container_type == "odsc-llm-evaluate":
+                     elif (
+                         container.get("type") == "evaluate"
+                         or container_type == "odsc-llm-evaluate"
+                     ):
                          evaluate_items[container_type] = container_item

              return AquaContainerConfig(
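The classification change above routes containers primarily by the entry's "type" field and keeps the legacy family names as a fallback. A small illustrative sketch of that routing; the config entries below are invented, not real index content:

containers = [
    {"name": "odsc-vllm-serving", "type": "inference"},
    {"name": "odsc-llm-fine-tuning", "type": "fine-tune"},
    {"name": "odsc-llm-evaluate", "type": "evaluate"},
]

inference_items, finetune_items, evaluate_items = {}, {}, {}
for container in containers:
    family = container["name"]
    if container.get("type") == "inference":
        inference_items[family] = container
    elif container.get("type") == "fine-tune" or family == "odsc-llm-fine-tuning":
        finetune_items[family] = container
    elif container.get("type") == "evaluate" or family == "odsc-llm-evaluate":
        evaluate_items[family] = container

print(sorted(finetune_items), sorted(evaluate_items))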
ads/common/auth.py CHANGED
@@ -73,7 +73,7 @@ class AuthState(metaclass=SingletonMeta):
          self.oci_key_profile = self.oci_key_profile or os.environ.get(
              "OCI_CONFIG_PROFILE", DEFAULT_PROFILE
          )
-         self.oci_config = self.oci_config or {}
+         self.oci_config = self.oci_config or {"region": os.environ["OCI_RESOURCE_REGION"]} if os.environ.get("OCI_RESOURCE_REGION") else {}
          self.oci_signer_kwargs = self.oci_signer_kwargs or {}
          self.oci_client_kwargs = self.oci_client_kwargs or {}

@@ -82,7 +82,7 @@ def set_auth(
      auth: Optional[str] = AuthType.API_KEY,
      oci_config_location: Optional[str] = DEFAULT_LOCATION,
      profile: Optional[str] = DEFAULT_PROFILE,
-     config: Optional[Dict] = {},
+     config: Optional[Dict] = {"region": os.environ["OCI_RESOURCE_REGION"]} if os.environ.get("OCI_RESOURCE_REGION") else {},
      signer: Optional[Any] = None,
      signer_callable: Optional[Callable] = None,
      signer_kwargs: Optional[Dict] = {},
@@ -678,7 +678,7 @@ class ResourcePrincipal(AuthSignerGenerator):
          >>> signer_generator = AuthFactory().signerGenerator(AuthType.RESOURCE_PRINCIPAL)
          >>> signer_generator(signer_args).create_signer()
          """
-         configuration = ads.telemetry.update_oci_client_config()
+         configuration = ads.telemetry.update_oci_client_config(AuthState().oci_config)
          signer_dict = {
              "config": configuration,
              "signer": oci.auth.signers.get_resource_principals_signer(),
@@ -739,7 +739,7 @@ class InstancePrincipal(AuthSignerGenerator):
          >>> signer_generator = AuthFactory().signerGenerator(AuthType.INSTANCE_PRINCIPAL)
          >>> signer_generator(signer_args).create_signer()
          """
-         configuration = ads.telemetry.update_oci_client_config()
+         configuration = ads.telemetry.update_oci_client_config(AuthState().oci_config)
          signer_dict = {
              "config": configuration,
              "signer": oci.auth.signers.InstancePrincipalsSecurityTokenSigner(
ads/jobs/builders/infrastructure/dsc_job.py CHANGED
@@ -312,7 +312,7 @@ class DSCJob(OCIDataScienceMixin, oci.data_science.models.Job):
          logger.debug(oci_model)
          res = self.client.create_job(oci_model)
          self.update_from_oci_model(res.data)
-         if self.lifecycle_state == "ACTIVE":
+         if not self.artifact:
              return
          try:
              if issubclass(self.artifact.__class__, Artifact):
@@ -487,7 +487,9 @@ class DSCJob(OCIDataScienceMixin, oci.data_science.models.Job):
              oci.data_science.models.DefaultJobConfigurationDetails().swagger_types.keys()
          )
          env_config_swagger_types = {}
-         if hasattr(oci.data_science.models, "OcirContainerJobEnvironmentConfigurationDetails"):
+         if hasattr(
+             oci.data_science.models, "OcirContainerJobEnvironmentConfigurationDetails"
+         ):
              env_config_swagger_types = (
                  oci.data_science.models.OcirContainerJobEnvironmentConfigurationDetails().swagger_types.keys()
              )
@@ -501,7 +503,7 @@ class DSCJob(OCIDataScienceMixin, oci.data_science.models.Job):
              value = kwargs.pop(key)
              if key in [
                  ContainerRuntime.CONST_CMD,
-                 ContainerRuntime.CONST_ENTRYPOINT
+                 ContainerRuntime.CONST_ENTRYPOINT,
              ] and isinstance(value, str):
                  value = ContainerRuntimeHandler.split_args(value)
              env_config_kwargs[key] = value
@@ -535,9 +537,13 @@ class DSCJob(OCIDataScienceMixin, oci.data_science.models.Job):

          if env_config_kwargs:
              env_config_kwargs["jobEnvironmentType"] = "OCIR_CONTAINER"
-             env_config_override = kwargs.get("job_environment_configuration_override_details", {})
+             env_config_override = kwargs.get(
+                 "job_environment_configuration_override_details", {}
+             )
              env_config_override.update(env_config_kwargs)
-             kwargs["job_environment_configuration_override_details"] = env_config_override
+             kwargs["job_environment_configuration_override_details"] = (
+                 env_config_override
+             )

          wait = kwargs.pop("wait", False)
          run = DataScienceJobRun(**kwargs, **self.auth).create()
ads/jobs/builders/infrastructure/dsc_job_runtime.py CHANGED
@@ -181,9 +181,9 @@ class RuntimeHandler:
              "jobType": self.data_science_job.job_type,
          }
          if runtime.maximum_runtime_in_minutes:
-             job_configuration_details[
-                 "maximum_runtime_in_minutes"
-             ] = runtime.maximum_runtime_in_minutes
+             job_configuration_details["maximum_runtime_in_minutes"] = (
+                 runtime.maximum_runtime_in_minutes
+             )
          job_configuration_details["environment_variables"] = self._translate_env(
              runtime
          )
@@ -310,7 +310,7 @@ class RuntimeHandler:
          for extraction in extractions:
              runtime_spec.update(extraction(dsc_job))
          return self.RUNTIME_CLASS(self._format_env_var(runtime_spec))
- 
+
      def _extract_properties(self, dsc_job) -> dict:
          """Extract the job runtime properties from data science job.

@@ -968,23 +968,10 @@ class ContainerRuntimeHandler(RuntimeHandler):
          payload["job_environment_configuration_details"] = job_env_config
          return payload

-     def _translate_artifact(self, runtime: Runtime):
-         """Specifies a dummy script as the job artifact.
-         runtime is not used in this method.
-
-         Parameters
-         ----------
-         runtime : Runtime
-             This is not used.
-
-         Returns
-         -------
-         str
-             Path to the dummy script.
-         """
-         return os.path.join(
-             os.path.dirname(__file__), "../../templates", "container.py"
-         )
+     def _translate_artifact(self, runtime: ContainerRuntime):
+         """Additional artifact for the container"""
+         if runtime.artifact_uri:
+             return ScriptArtifact(runtime.artifact_uri, runtime)

      def _translate_env_config(self, runtime: Runtime) -> dict:
          """Converts runtime properties to ``OcirContainerJobEnvironmentConfigurationDetails`` payload required by OCI Data Science job.
@@ -1007,7 +994,7 @@ class ContainerRuntimeHandler(RuntimeHandler):
              property = runtime.get_spec(key, None)
              if key in [
                  ContainerRuntime.CONST_CMD,
-                 ContainerRuntime.CONST_ENTRYPOINT
+                 ContainerRuntime.CONST_ENTRYPOINT,
              ] and isinstance(property, str):
                  property = self.split_args(property)
              if property is not None:
@@ -1063,7 +1050,7 @@ class ContainerRuntimeHandler(RuntimeHandler):
          spec[ContainerRuntime.CONST_ENV_VAR] = envs

          return spec
- 
+
      def _extract_properties(self, dsc_job) -> dict:
          """Extract the runtime properties from data science job.

@@ -1078,10 +1065,10 @@ class ContainerRuntimeHandler(RuntimeHandler):
              A runtime specification dictionary for initializing a runtime.
          """
          spec = super()._extract_envs(dsc_job)
- 
+
          job_env_config = getattr(dsc_job, "job_environment_configuration_details", None)
          job_env_type = getattr(job_env_config, "job_environment_type", None)
- 
+
          if not (job_env_config and job_env_type == "OCIR_CONTAINER"):
              raise IncompatibleRuntime()

ads/jobs/builders/runtimes/artifact.py CHANGED
@@ -183,11 +183,6 @@ class ScriptArtifact(Artifact):
          if os.path.isdir(source):
              basename = os.path.basename(str(source).rstrip("/"))
              source = str(source).rstrip("/")
-             # Runtime must have entrypoint if the source is a directory
-             if self.runtime and not self.runtime.entrypoint:
-                 raise ValueError(
-                     "Please specify entrypoint when script source is a directory."
-                 )
              output = os.path.join(self.temp_dir.name, basename)
              shutil.make_archive(
                  output, "zip", os.path.dirname(source), base_dir=basename
ads/jobs/builders/runtimes/container_runtime.py CHANGED
@@ -56,6 +56,7 @@ class ContainerRuntime(MultiNodeRuntime):
      CONST_CMD = "cmd"
      CONST_IMAGE_DIGEST = "imageDigest"
      CONST_IMAGE_SIGNATURE_ID = "imageSignatureId"
+     CONST_SCRIPT_PATH = "scriptPathURI"
      attribute_map = {
          CONST_IMAGE: CONST_IMAGE,
          CONST_ENTRYPOINT: CONST_ENTRYPOINT,
@@ -121,7 +122,7 @@ class ContainerRuntime(MultiNodeRuntime):
      def image_digest(self) -> str:
          """The container image digest."""
          return self.get_spec(self.CONST_IMAGE_DIGEST)
- 
+
      def with_image_digest(self, image_digest: str) -> "ContainerRuntime":
          """Sets the digest of custom image.

@@ -136,12 +137,12 @@ class ContainerRuntime(MultiNodeRuntime):
              The runtime instance.
          """
          return self.set_spec(self.CONST_IMAGE_DIGEST, image_digest)
- 
+
      @property
      def image_signature_id(self) -> str:
          """The container image signature id."""
          return self.get_spec(self.CONST_IMAGE_SIGNATURE_ID)
- 
+
      def with_image_signature_id(self, image_signature_id: str) -> "ContainerRuntime":
          """Sets the signature id of custom image.

@@ -217,3 +218,25 @@ class ContainerRuntime(MultiNodeRuntime):
              entrypoint=["bash", "--login", "-c"],
              cmd="{Container CMD. For MLflow and Operator will be auto generated}",
          )
+
+     @property
+     def artifact_uri(self) -> str:
+         """The URI of the source code"""
+         return self.get_spec(self.CONST_SCRIPT_PATH)
+
+     def with_artifact(self, uri: str):
+         """Specifies the artifact to be added to the container.
+
+         Parameters
+         ----------
+         uri : str
+             URI to the source code script, which can be any URI supported by fsspec,
+             including http://, https:// and OCI object storage.
+             For example: oci://your_bucket@your_namespace/path/to/script.py
+
+         Returns
+         -------
+         self
+             The runtime instance.
+         """
+         return self.set_spec(self.CONST_SCRIPT_PATH, uri)
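Together with the ContainerRuntimeHandler change above, the new with_artifact()/artifact_uri pair lets a container job runtime carry a script artifact alongside the image. A hedged usage sketch; the image, bucket, and command values are placeholders rather than working resources:

from ads.jobs import ContainerRuntime

runtime = (
    ContainerRuntime()
    .with_image("iad.ocir.io/<tenancy>/<repo>:<tag>")  # placeholder container image
    .with_artifact("oci://<bucket>@<namespace>/path/to/script.py")  # added in 2.11.19
    .with_entrypoint(["bash", "--login", "-c"])
    .with_cmd("python /home/datascience/script.py")  # placeholder command
)

print(runtime.artifact_uri)  # oci://<bucket>@<namespace>/path/to/script.py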