oracle-ads 2.12.9__py3-none-any.whl → 2.12.10__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (74)
  1. ads/aqua/__init__.py +4 -3
  2. ads/aqua/app.py +28 -16
  3. ads/aqua/client/__init__.py +3 -0
  4. ads/aqua/client/client.py +799 -0
  5. ads/aqua/common/enums.py +3 -0
  6. ads/aqua/common/utils.py +62 -2
  7. ads/aqua/data.py +2 -19
  8. ads/aqua/evaluation/evaluation.py +20 -12
  9. ads/aqua/extension/aqua_ws_msg_handler.py +14 -7
  10. ads/aqua/extension/base_handler.py +12 -9
  11. ads/aqua/extension/finetune_handler.py +8 -14
  12. ads/aqua/extension/model_handler.py +24 -2
  13. ads/aqua/finetuning/constants.py +5 -2
  14. ads/aqua/finetuning/entities.py +67 -17
  15. ads/aqua/finetuning/finetuning.py +69 -54
  16. ads/aqua/model/entities.py +3 -1
  17. ads/aqua/model/model.py +196 -98
  18. ads/aqua/modeldeployment/deployment.py +22 -10
  19. ads/cli.py +16 -8
  20. ads/common/auth.py +9 -9
  21. ads/llm/autogen/__init__.py +2 -0
  22. ads/llm/autogen/constants.py +15 -0
  23. ads/llm/autogen/reports/__init__.py +2 -0
  24. ads/llm/autogen/reports/base.py +67 -0
  25. ads/llm/autogen/reports/data.py +103 -0
  26. ads/llm/autogen/reports/session.py +526 -0
  27. ads/llm/autogen/reports/templates/chat_box.html +13 -0
  28. ads/llm/autogen/reports/templates/chat_box_lt.html +5 -0
  29. ads/llm/autogen/reports/templates/chat_box_rt.html +6 -0
  30. ads/llm/autogen/reports/utils.py +56 -0
  31. ads/llm/autogen/v02/__init__.py +4 -0
  32. ads/llm/autogen/{client_v02.py → v02/client.py} +23 -10
  33. ads/llm/autogen/v02/log_handlers/__init__.py +2 -0
  34. ads/llm/autogen/v02/log_handlers/oci_file_handler.py +83 -0
  35. ads/llm/autogen/v02/loggers/__init__.py +6 -0
  36. ads/llm/autogen/v02/loggers/metric_logger.py +320 -0
  37. ads/llm/autogen/v02/loggers/session_logger.py +580 -0
  38. ads/llm/autogen/v02/loggers/utils.py +86 -0
  39. ads/llm/autogen/v02/runtime_logging.py +163 -0
  40. ads/llm/langchain/plugins/chat_models/oci_data_science.py +12 -11
  41. ads/model/__init__.py +11 -13
  42. ads/model/artifact.py +47 -8
  43. ads/model/extractor/embedding_onnx_extractor.py +80 -0
  44. ads/model/framework/embedding_onnx_model.py +438 -0
  45. ads/model/generic_model.py +26 -24
  46. ads/model/model_metadata.py +8 -7
  47. ads/opctl/config/merger.py +13 -14
  48. ads/opctl/operator/common/operator_config.py +4 -4
  49. ads/opctl/operator/lowcode/common/transformations.py +50 -8
  50. ads/opctl/operator/lowcode/common/utils.py +22 -6
  51. ads/opctl/operator/lowcode/forecast/__main__.py +10 -0
  52. ads/opctl/operator/lowcode/forecast/const.py +2 -0
  53. ads/opctl/operator/lowcode/forecast/model/arima.py +19 -13
  54. ads/opctl/operator/lowcode/forecast/model/automlx.py +129 -36
  55. ads/opctl/operator/lowcode/forecast/model/autots.py +1 -0
  56. ads/opctl/operator/lowcode/forecast/model/base_model.py +61 -14
  57. ads/opctl/operator/lowcode/forecast/model/forecast_datasets.py +1 -1
  58. ads/opctl/operator/lowcode/forecast/model/neuralprophet.py +10 -3
  59. ads/opctl/operator/lowcode/forecast/model/prophet.py +25 -18
  60. ads/opctl/operator/lowcode/forecast/operator_config.py +31 -0
  61. ads/opctl/operator/lowcode/forecast/schema.yaml +76 -0
  62. ads/opctl/operator/lowcode/forecast/utils.py +4 -3
  63. ads/opctl/operator/lowcode/forecast/whatifserve/__init__.py +7 -0
  64. ads/opctl/operator/lowcode/forecast/whatifserve/deployment_manager.py +233 -0
  65. ads/opctl/operator/lowcode/forecast/whatifserve/score.py +238 -0
  66. ads/telemetry/base.py +18 -11
  67. ads/telemetry/client.py +33 -13
  68. ads/templates/schemas/openapi.json +1740 -0
  69. ads/templates/score_embedding_onnx.jinja2 +202 -0
  70. {oracle_ads-2.12.9.dist-info → oracle_ads-2.12.10.dist-info}/METADATA +9 -8
  71. {oracle_ads-2.12.9.dist-info → oracle_ads-2.12.10.dist-info}/RECORD +74 -48
  72. {oracle_ads-2.12.9.dist-info → oracle_ads-2.12.10.dist-info}/LICENSE.txt +0 -0
  73. {oracle_ads-2.12.9.dist-info → oracle_ads-2.12.10.dist-info}/WHEEL +0 -0
  74. {oracle_ads-2.12.9.dist-info → oracle_ads-2.12.10.dist-info}/entry_points.txt +0 -0
@@ -1,10 +1,11 @@
1
1
  #!/usr/bin/env python
2
- # Copyright (c) 2024 Oracle and/or its affiliates.
2
+ # Copyright (c) 2024, 2025 Oracle and/or its affiliates.
3
3
  # Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/
4
4
 
5
5
  import json
6
6
  import os
7
- from dataclasses import MISSING, asdict, fields
7
+ import time
8
+ import traceback
8
9
  from typing import Dict
9
10
 
10
11
  from oci.data_science.models import (
@@ -12,12 +13,14 @@ from oci.data_science.models import (
12
13
  UpdateModelDetails,
13
14
  UpdateModelProvenanceDetails,
14
15
  )
16
+ from pydantic import ValidationError
15
17
 
16
18
  from ads.aqua import logger
17
19
  from ads.aqua.app import AquaApp
18
20
  from ads.aqua.common.enums import Resource, Tags
19
21
  from ads.aqua.common.errors import AquaFileExistsError, AquaValueError
20
22
  from ads.aqua.common.utils import (
23
+ build_pydantic_error_message,
21
24
  get_container_image,
22
25
  upload_local_to_os,
23
26
  )
@@ -104,24 +107,12 @@ class AquaFineTuningApp(AquaApp):
104
107
  if not create_fine_tuning_details:
105
108
  try:
106
109
  create_fine_tuning_details = CreateFineTuningDetails(**kwargs)
107
- except Exception as ex:
108
- allowed_create_fine_tuning_details = ", ".join(
109
- field.name for field in fields(CreateFineTuningDetails)
110
- ).rstrip()
110
+ except ValidationError as ex:
111
+ custom_errors = build_pydantic_error_message(ex)
111
112
  raise AquaValueError(
112
- "Invalid create fine tuning parameters. Allowable parameters are: "
113
- f"{allowed_create_fine_tuning_details}."
113
+ f"Invalid parameters for creating a fine-tuned model. Error details: {custom_errors}."
114
114
  ) from ex
115
115
 
116
- source = self.get_source(create_fine_tuning_details.ft_source_id)
117
-
118
- # todo: revisit validation for fine tuned models
119
- # if source.compartment_id != ODSC_MODEL_COMPARTMENT_OCID:
120
- # raise AquaValueError(
121
- # f"Fine tuning is only supported for Aqua service models in {ODSC_MODEL_COMPARTMENT_OCID}. "
122
- # "Use a valid Aqua service model id instead."
123
- # )
124
-
125
116
  target_compartment = (
126
117
  create_fine_tuning_details.compartment_id or COMPARTMENT_OCID
127
118
  )
@@ -160,19 +151,18 @@ class AquaFineTuningApp(AquaApp):
160
151
  f"Logging is required for fine tuning if replica is larger than {DEFAULT_FT_REPLICA}."
161
152
  )
162
153
 
163
- ft_parameters = None
164
- try:
165
- ft_parameters = AquaFineTuningParams(
166
- **create_fine_tuning_details.ft_parameters,
167
- )
168
- except Exception as ex:
169
- allowed_fine_tuning_parameters = ", ".join(
170
- field.name for field in fields(AquaFineTuningParams)
171
- ).rstrip()
154
+ if create_fine_tuning_details.watch_logs and not (
155
+ create_fine_tuning_details.log_id
156
+ and create_fine_tuning_details.log_group_id
157
+ ):
172
158
  raise AquaValueError(
173
- "Invalid fine tuning parameters. Fine tuning parameters should "
174
- f"be a dictionary with keys: {allowed_fine_tuning_parameters}."
175
- ) from ex
159
+ "Logging is required for fine tuning if watch_logs is set to True. "
160
+ "Please provide log_id and log_group_id with the request parameters."
161
+ )
162
+
163
+ ft_parameters = self._get_finetuning_params(
164
+ create_fine_tuning_details.ft_parameters
165
+ )
176
166
 
177
167
  experiment_model_version_set_id = create_fine_tuning_details.experiment_id
178
168
  experiment_model_version_set_name = create_fine_tuning_details.experiment_name
@@ -229,6 +219,8 @@ class AquaFineTuningApp(AquaApp):
229
219
  defined_tags=create_fine_tuning_details.defined_tags,
230
220
  )
231
221
 
222
+ source = self.get_source(create_fine_tuning_details.ft_source_id)
223
+
232
224
  ft_model_custom_metadata = ModelCustomMetadata()
233
225
  ft_model_custom_metadata.add(
234
226
  key=FineTuneCustomMetadata.FINE_TUNE_SOURCE,
@@ -401,6 +393,9 @@ class AquaFineTuningApp(AquaApp):
401
393
  defined_tags=model_defined_tags,
402
394
  ),
403
395
  )
396
+ logger.debug(
397
+ f"Successfully updated model custom metadata list and freeform tags for the model {ft_model.id}."
398
+ )
404
399
 
405
400
  self.update_model_provenance(
406
401
  model_id=ft_model.id,
@@ -408,6 +403,9 @@ class AquaFineTuningApp(AquaApp):
408
403
  training_id=ft_job_run.id
409
404
  ),
410
405
  )
406
+ logger.debug(
407
+ f"Successfully updated model provenance for the model {ft_model.id}."
408
+ )
411
409
 
412
410
  # tracks the shape and replica used for fine-tuning the service models
413
411
  telemetry_kwargs = (
@@ -435,6 +433,20 @@ class AquaFineTuningApp(AquaApp):
435
433
  value=source.display_name,
436
434
  )
437
435
 
436
+ if create_fine_tuning_details.watch_logs:
437
+ logger.info(
438
+ f"Watching fine-tuning job run logs for {ft_job_run.id}. Press Ctrl+C to stop watching logs.\n"
439
+ )
440
+ try:
441
+ ft_job_run.watch()
442
+ except KeyboardInterrupt:
443
+ logger.info(f"\nStopped watching logs for {ft_job_run.id}.\n")
444
+ time.sleep(1)
445
+ except Exception:
446
+ logger.debug(
447
+ f"Something unexpected occurred while watching logs.\n{traceback.format_exc()}"
448
+ )
449
+
438
450
  return AquaFineTuningSummary(
439
451
  id=ft_model.id,
440
452
  name=ft_model.display_name,
@@ -481,11 +493,7 @@ class AquaFineTuningApp(AquaApp):
481
493
  **model_freeform_tags,
482
494
  **model_defined_tags,
483
495
  },
484
- parameters={
485
- key: value
486
- for key, value in asdict(ft_parameters).items()
487
- if value is not None
488
- },
496
+ parameters=ft_parameters,
489
497
  )
490
498
 
491
499
  def _build_fine_tuning_runtime(
@@ -548,7 +556,7 @@ class AquaFineTuningApp(AquaApp):
548
556
  ) -> str:
549
557
  """Builds the oci launch cmd for fine tuning container runtime."""
550
558
  oci_launch_cmd = f"--training_data {dataset_path} --output_dir {report_path} --val_set_size {val_set_size} "
551
- for key, value in asdict(parameters).items():
559
+ for key, value in parameters.to_dict().items():
552
560
  if value is not None:
553
561
  if key == "batch_size":
554
562
  oci_launch_cmd += f"--micro_{key} {value} "
@@ -587,7 +595,7 @@ class AquaFineTuningApp(AquaApp):
587
595
  config = self.get_config(model_id, AQUA_MODEL_FINETUNING_CONFIG)
588
596
  if not config:
589
597
  logger.debug(
590
- f"Fine-tuning config for custom model: {model_id} is not available."
598
+ f"Fine-tuning config for custom model: {model_id} is not available. Use defaults."
591
599
  )
592
600
  return config
593
601
 
@@ -613,15 +621,36 @@ class AquaFineTuningApp(AquaApp):
613
621
  default_params = {"params": {}}
614
622
  finetuning_config = self.get_finetuning_config(model_id)
615
623
  config_parameters = finetuning_config.get("configuration", UNKNOWN_DICT)
616
- dataclass_fields = {field.name for field in fields(AquaFineTuningParams)}
624
+ dataclass_fields = self._get_finetuning_params(
625
+ config_parameters, validate=False
626
+ ).to_dict()
617
627
  for name, value in config_parameters.items():
618
- if name == "micro_batch_size":
619
- name = "batch_size"
620
628
  if name in dataclass_fields:
629
+ if name == "micro_batch_size":
630
+ name = "batch_size"
621
631
  default_params["params"][name] = value
622
632
 
623
633
  return default_params
624
634
 
635
+ @staticmethod
636
+ def _get_finetuning_params(
637
+ params: Dict = None, validate: bool = True
638
+ ) -> AquaFineTuningParams:
639
+ """
640
+ Get and validate the fine-tuning params, and return an error message if validation fails. In order to skip
641
+ @model_validator decorator's validation, pass validate=False.
642
+ """
643
+ try:
644
+ finetuning_params = AquaFineTuningParams(
645
+ **{**params, **{"_validate": validate}}
646
+ )
647
+ except ValidationError as ex:
648
+ custom_errors = build_pydantic_error_message(ex)
649
+ raise AquaValueError(
650
+ f"Invalid finetuning parameters. Error details: {custom_errors}."
651
+ ) from ex
652
+ return finetuning_params
653
+
625
654
  def validate_finetuning_params(self, params: Dict = None) -> Dict:
626
655
  """Validate if the fine-tuning parameters passed by the user can be overridden. Parameter values are not
627
656
  validated, only param keys are validated.
@@ -635,19 +664,5 @@ class AquaFineTuningApp(AquaApp):
635
664
  -------
636
665
  Return a list of restricted params.
637
666
  """
638
- try:
639
- AquaFineTuningParams(
640
- **params,
641
- )
642
- except Exception as e:
643
- logger.debug(str(e))
644
- allowed_fine_tuning_parameters = ", ".join(
645
- f"{field.name} (required)" if field.default is MISSING else field.name
646
- for field in fields(AquaFineTuningParams)
647
- ).rstrip()
648
- raise AquaValueError(
649
- f"Invalid fine tuning parameters. Allowable parameters are: "
650
- f"{allowed_fine_tuning_parameters}."
651
- ) from e
652
-
667
+ self._get_finetuning_params(params or {})
653
668
  return {"valid": True}
@@ -1,5 +1,5 @@
1
1
  #!/usr/bin/env python
2
- # Copyright (c) 2024 Oracle and/or its affiliates.
2
+ # Copyright (c) 2024, 2025 Oracle and/or its affiliates.
3
3
  # Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/
4
4
 
5
5
  """
@@ -283,6 +283,7 @@ class ImportModelDetails(CLIBuilderMixin):
283
283
  os_path: str
284
284
  download_from_hf: Optional[bool] = True
285
285
  local_dir: Optional[str] = None
286
+ cleanup_model_cache: Optional[bool] = False
286
287
  inference_container: Optional[str] = None
287
288
  finetuning_container: Optional[str] = None
288
289
  compartment_id: Optional[str] = None
@@ -293,6 +294,7 @@ class ImportModelDetails(CLIBuilderMixin):
293
294
  ignore_patterns: Optional[List[str]] = None
294
295
  freeform_tags: Optional[dict] = None
295
296
  defined_tags: Optional[dict] = None
297
+ ignore_model_artifact_check: Optional[bool] = None
296
298
 
297
299
  def __post_init__(self):
298
300
  self._command = "model register"