oracle-ads 2.10.0__py3-none-any.whl → 2.11.0__py3-none-any.whl

This diff compares the contents of two publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (117)
  1. ads/aqua/__init__.py +12 -0
  2. ads/aqua/base.py +324 -0
  3. ads/aqua/cli.py +19 -0
  4. ads/aqua/config/deployment_config_defaults.json +9 -0
  5. ads/aqua/config/resource_limit_names.json +7 -0
  6. ads/aqua/constants.py +45 -0
  7. ads/aqua/data.py +40 -0
  8. ads/aqua/decorator.py +101 -0
  9. ads/aqua/deployment.py +643 -0
  10. ads/aqua/dummy_data/icon.txt +1 -0
  11. ads/aqua/dummy_data/oci_model_deployments.json +56 -0
  12. ads/aqua/dummy_data/oci_models.json +1 -0
  13. ads/aqua/dummy_data/readme.md +26 -0
  14. ads/aqua/evaluation.py +1751 -0
  15. ads/aqua/exception.py +82 -0
  16. ads/aqua/extension/__init__.py +40 -0
  17. ads/aqua/extension/base_handler.py +138 -0
  18. ads/aqua/extension/common_handler.py +21 -0
  19. ads/aqua/extension/deployment_handler.py +202 -0
  20. ads/aqua/extension/evaluation_handler.py +135 -0
  21. ads/aqua/extension/finetune_handler.py +66 -0
  22. ads/aqua/extension/model_handler.py +59 -0
  23. ads/aqua/extension/ui_handler.py +201 -0
  24. ads/aqua/extension/utils.py +23 -0
  25. ads/aqua/finetune.py +579 -0
  26. ads/aqua/job.py +29 -0
  27. ads/aqua/model.py +819 -0
  28. ads/aqua/training/__init__.py +4 -0
  29. ads/aqua/training/exceptions.py +459 -0
  30. ads/aqua/ui.py +453 -0
  31. ads/aqua/utils.py +715 -0
  32. ads/cli.py +37 -6
  33. ads/common/auth.py +7 -0
  34. ads/common/decorator/__init__.py +7 -3
  35. ads/common/decorator/require_nonempty_arg.py +65 -0
  36. ads/common/object_storage_details.py +166 -7
  37. ads/common/oci_client.py +18 -1
  38. ads/common/oci_logging.py +2 -2
  39. ads/common/oci_mixin.py +4 -5
  40. ads/common/serializer.py +34 -5
  41. ads/common/utils.py +75 -10
  42. ads/config.py +40 -1
  43. ads/dataset/correlation_plot.py +10 -12
  44. ads/jobs/ads_job.py +43 -25
  45. ads/jobs/builders/infrastructure/base.py +4 -2
  46. ads/jobs/builders/infrastructure/dsc_job.py +49 -39
  47. ads/jobs/builders/runtimes/base.py +71 -1
  48. ads/jobs/builders/runtimes/container_runtime.py +4 -4
  49. ads/jobs/builders/runtimes/pytorch_runtime.py +10 -63
  50. ads/jobs/templates/driver_pytorch.py +27 -10
  51. ads/model/artifact_downloader.py +84 -14
  52. ads/model/artifact_uploader.py +25 -23
  53. ads/model/datascience_model.py +388 -38
  54. ads/model/deployment/model_deployment.py +10 -2
  55. ads/model/generic_model.py +8 -0
  56. ads/model/model_file_description_schema.json +68 -0
  57. ads/model/model_metadata.py +1 -1
  58. ads/model/service/oci_datascience_model.py +34 -5
  59. ads/opctl/config/merger.py +2 -2
  60. ads/opctl/operator/__init__.py +3 -1
  61. ads/opctl/operator/cli.py +7 -1
  62. ads/opctl/operator/cmd.py +3 -3
  63. ads/opctl/operator/common/errors.py +2 -1
  64. ads/opctl/operator/common/operator_config.py +22 -3
  65. ads/opctl/operator/common/utils.py +16 -0
  66. ads/opctl/operator/lowcode/anomaly/MLoperator +15 -0
  67. ads/opctl/operator/lowcode/anomaly/README.md +209 -0
  68. ads/opctl/operator/lowcode/anomaly/__init__.py +5 -0
  69. ads/opctl/operator/lowcode/anomaly/__main__.py +104 -0
  70. ads/opctl/operator/lowcode/anomaly/cmd.py +35 -0
  71. ads/opctl/operator/lowcode/anomaly/const.py +88 -0
  72. ads/opctl/operator/lowcode/anomaly/environment.yaml +12 -0
  73. ads/opctl/operator/lowcode/anomaly/model/__init__.py +5 -0
  74. ads/opctl/operator/lowcode/anomaly/model/anomaly_dataset.py +147 -0
  75. ads/opctl/operator/lowcode/anomaly/model/automlx.py +89 -0
  76. ads/opctl/operator/lowcode/anomaly/model/autots.py +103 -0
  77. ads/opctl/operator/lowcode/anomaly/model/base_model.py +354 -0
  78. ads/opctl/operator/lowcode/anomaly/model/factory.py +67 -0
  79. ads/opctl/operator/lowcode/anomaly/model/tods.py +119 -0
  80. ads/opctl/operator/lowcode/anomaly/operator_config.py +105 -0
  81. ads/opctl/operator/lowcode/anomaly/schema.yaml +359 -0
  82. ads/opctl/operator/lowcode/anomaly/utils.py +81 -0
  83. ads/opctl/operator/lowcode/common/__init__.py +5 -0
  84. ads/opctl/operator/lowcode/common/const.py +10 -0
  85. ads/opctl/operator/lowcode/common/data.py +96 -0
  86. ads/opctl/operator/lowcode/common/errors.py +41 -0
  87. ads/opctl/operator/lowcode/common/transformations.py +191 -0
  88. ads/opctl/operator/lowcode/common/utils.py +250 -0
  89. ads/opctl/operator/lowcode/forecast/README.md +3 -2
  90. ads/opctl/operator/lowcode/forecast/__main__.py +18 -2
  91. ads/opctl/operator/lowcode/forecast/cmd.py +8 -7
  92. ads/opctl/operator/lowcode/forecast/const.py +17 -1
  93. ads/opctl/operator/lowcode/forecast/environment.yaml +3 -2
  94. ads/opctl/operator/lowcode/forecast/model/arima.py +106 -117
  95. ads/opctl/operator/lowcode/forecast/model/automlx.py +204 -180
  96. ads/opctl/operator/lowcode/forecast/model/autots.py +144 -253
  97. ads/opctl/operator/lowcode/forecast/model/base_model.py +326 -259
  98. ads/opctl/operator/lowcode/forecast/model/forecast_datasets.py +325 -176
  99. ads/opctl/operator/lowcode/forecast/model/neuralprophet.py +293 -237
  100. ads/opctl/operator/lowcode/forecast/model/prophet.py +191 -208
  101. ads/opctl/operator/lowcode/forecast/operator_config.py +24 -33
  102. ads/opctl/operator/lowcode/forecast/schema.yaml +116 -29
  103. ads/opctl/operator/lowcode/forecast/utils.py +186 -356
  104. ads/opctl/operator/lowcode/pii/model/guardrails.py +18 -15
  105. ads/opctl/operator/lowcode/pii/model/report.py +7 -7
  106. ads/opctl/operator/lowcode/pii/operator_config.py +1 -8
  107. ads/opctl/operator/lowcode/pii/utils.py +0 -82
  108. ads/opctl/operator/runtime/runtime.py +3 -2
  109. ads/telemetry/base.py +62 -0
  110. ads/telemetry/client.py +105 -0
  111. ads/telemetry/telemetry.py +6 -3
  112. {oracle_ads-2.10.0.dist-info → oracle_ads-2.11.0.dist-info}/METADATA +44 -7
  113. {oracle_ads-2.10.0.dist-info → oracle_ads-2.11.0.dist-info}/RECORD +116 -59
  114. ads/opctl/operator/lowcode/forecast/model/transformations.py +0 -125
  115. {oracle_ads-2.10.0.dist-info → oracle_ads-2.11.0.dist-info}/LICENSE.txt +0 -0
  116. {oracle_ads-2.10.0.dist-info → oracle_ads-2.11.0.dist-info}/WHEEL +0 -0
  117. {oracle_ads-2.10.0.dist-info → oracle_ads-2.11.0.dist-info}/entry_points.txt +0 -0
ads/opctl/operator/lowcode/pii/model/guardrails.py CHANGED
@@ -1,7 +1,7 @@
  #!/usr/bin/env python
  # -*- coding: utf-8 -*--

- # Copyright (c) 2023 Oracle and/or its affiliates.
+ # Copyright (c) 2023, 2024 Oracle and/or its affiliates.
  # Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/

  import os
@@ -19,11 +19,11 @@ from ads.opctl.operator.lowcode.pii.model.report import (
  )
  from ads.opctl.operator.lowcode.pii.operator_config import PiiOperatorConfig
  from ads.opctl.operator.lowcode.pii.utils import (
-     _load_data,
-     _write_data,
      default_signer,
      get_output_name,
  )
+ from ads.opctl.operator.lowcode.common.utils import load_data, write_data
+ from ads.opctl.operator.lowcode.common.errors import InvalidParameterError


  class PIIGuardrail:
@@ -68,15 +68,20 @@ class PIIGuardrail:
          )
          self.datasets = None

-     def load_data(self, uri=None, storage_options=None):
+     def _load_data(self, uri=None, storage_options=None):
          """Loads input data."""
          input_data_uri = uri or self.spec.input_data.url
          logger.info(f"Loading input data from `{input_data_uri}` ...")

-         self.datasets = _load_data(
-             filename=input_data_uri,
-             storage_options=storage_options or self.storage_options,
-         )
+         try:
+             self.datasets = load_data(
+                 data_spec=self.spec.input_data,
+                 storage_options=storage_options or self.storage_options,
+             )
+         except InvalidParameterError as e:
+             e.args = e.args + ("Invalid Parameter: input_data",)
+             raise e
+
          return self

      def process(self, **kwargs):
@@ -92,12 +97,10 @@

          if not data:
              try:
-                 self.load_data()
+                 self._load_data()
                  data = self.datasets
-             except Exception as e:
-                 logger.warning(
-                     f"Failed to load data from `{self.spec.input_data.url}`."
-                 )
+             except InvalidParameterError as e:
+                 e.args = e.args + ("Invalid Parameter: input_data",)
                  raise e

          # process user data
@@ -110,10 +113,10 @@
          # save output data
          if dst_uri:
              logger.info(f"Saving data into `{dst_uri}` ...")
-
-             _write_data(
+             write_data(
                  data=data.loc[:, data.columns != self.spec.target_column],
                  filename=dst_uri,
+                 format=None,
                  storage_options=kwargs.pop("storage_options", None)
                  or self.storage_options,
              )
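The guardrails changes above delegate I/O to the shared `load_data`/`write_data` helpers in `ads.opctl.operator.lowcode.common.utils` and annotate `InvalidParameterError` before re-raising it. A minimal, self-contained sketch of that annotate-and-re-raise pattern; the loader and exception below are placeholder stand-ins, not the actual `common` implementations:

```python
# Sketch only: InvalidParameterError and load_data stand in for the real
# imports from ads.opctl.operator.lowcode.common (errors / utils).
class InvalidParameterError(Exception):
    pass


def load_data(data_spec=None, storage_options=None):
    # Placeholder loader that always rejects its input.
    raise InvalidParameterError(f"Unsupported data spec: {data_spec!r}")


try:
    load_data(data_spec=None)
except InvalidParameterError as e:
    # Same pattern as PIIGuardrail._load_data: append the offending spec field
    # to the exception args, then re-raise so the caller sees the full context.
    e.args = e.args + ("Invalid Parameter: input_data",)
    print(e.args)  # ("Unsupported data spec: None", "Invalid Parameter: input_data")
```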
ads/opctl/operator/lowcode/pii/model/report.py CHANGED
@@ -22,6 +22,11 @@ from ads.common.decorator.runtime_dependency import (
  )
  from ads.common.serializer import DataClassSerializable
  from ads.opctl import logger
+ from ads.opctl.operator.lowcode.common.utils import (
+     disable_print,
+     enable_print,
+     human_time_friendly,
+ )
  from ads.opctl.operator.lowcode.pii.constant import (
      DEFAULT_COLOR,
      DEFAULT_SHOW_ROWS,
@@ -31,12 +36,7 @@ from ads.opctl.operator.lowcode.pii.constant import (
      PII_REPORT_DESCRIPTION,
  )
  from ads.opctl.operator.lowcode.pii.operator_config import PiiOperatorConfig
- from ads.opctl.operator.lowcode.pii.utils import (
-     block_print,
-     compute_rate,
-     enable_print,
-     human_time_friendly,
- )
+ from ads.opctl.operator.lowcode.pii.utils import compute_rate

  try:
      import datapane as dp
@@ -321,7 +321,7 @@ class PIIOperatorReport:
      def save_report(self, report_sections=None, report_uri=None, storage_options={}):
          with tempfile.TemporaryDirectory() as temp_dir:
              report_local_path = os.path.join(temp_dir, "___report.html")
-             block_print()
+             disable_print()
              dp.save_report(
                  report_sections or self.report_sections,
                  path=report_local_path,
ads/opctl/operator/lowcode/pii/operator_config.py CHANGED
@@ -9,7 +9,7 @@ from dataclasses import dataclass, field
  from typing import Dict, List

  from ads.common.serializer import DataClassSerializable
- from ads.opctl.operator.common.operator_config import OperatorConfig
+ from ads.opctl.operator.common.operator_config import OperatorConfig, InputData
  from ads.opctl.operator.common.utils import _load_yaml_from_uri
  from ads.opctl.operator.lowcode.pii.constant import (
      DEFAULT_SHOW_ROWS,
@@ -18,13 +18,6 @@ from ads.opctl.operator.lowcode.pii.constant import (
  )


- @dataclass(repr=True)
- class InputData(DataClassSerializable):
-     """Class representing operator specification input data details."""
-
-     url: str = None
-
-
  @dataclass(repr=True)
  class OutputDirectory(DataClassSerializable):
      """Class representing operator specification output directory details."""
ads/opctl/operator/lowcode/pii/utils.py CHANGED
@@ -5,14 +5,6 @@
  # Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/

  import os
- import sys
-
- import fsspec
- import pandas as pd
-
- from ads.common.object_storage_details import ObjectStorageDetails
-
- from .errors import PIIInputDataError


  def default_signer(**kwargs):
@@ -22,50 +14,6 @@ def default_signer(**kwargs):
      return default_signer(**kwargs)


- def _call_pandas_fsspec(pd_fn, filename, storage_options, **kwargs):
-     if fsspec.utils.get_protocol(filename) == "file":
-         return pd_fn(filename, **kwargs)
-
-     storage_options = storage_options or (
-         default_signer() if ObjectStorageDetails.is_oci_path(filename) else {}
-     )
-
-     return pd_fn(filename, storage_options=storage_options, **kwargs)
-
-
- def _load_data(filename, format=None, storage_options=None, columns=None, **kwargs):
-     if not format:
-         _, format = os.path.splitext(filename)
-         format = format[1:]
-     if format in ["json", "csv"]:
-         read_fn = getattr(pd, f"read_{format}")
-         data = _call_pandas_fsspec(read_fn, filename, storage_options=storage_options)
-     elif format in ["tsv"]:
-         data = _call_pandas_fsspec(
-             pd.read_csv, filename, storage_options=storage_options, sep="\t"
-         )
-     else:
-         raise PIIInputDataError(f"Unrecognized format: {format}")
-     if columns:
-         # keep only these columns, done after load because only CSV supports stream filtering
-         data = data[columns]
-     return data
-
-
- def _write_data(
-     data, filename, format=None, storage_options=None, index=False, **kwargs
- ):
-     if not format:
-         _, format = os.path.splitext(filename)
-         format = format[1:]
-     if format in ["json", "csv"]:
-         write_fn = getattr(data, f"to_{format}")
-         return _call_pandas_fsspec(
-             write_fn, filename, index=index, storage_options=storage_options
-         )
-     raise PIIInputDataError(f"Unrecognized format: {format}")
-
-
  def get_output_name(given_name, target_name=None):
      """Add ``-out`` suffix to the src filename."""
      if not target_name:
@@ -93,33 +41,3 @@ def construct_filth_cls_name(name: str) -> str:
  ################
  def compute_rate(elapsed_time, num_unit):
      return elapsed_time / num_unit
-
-
- def human_time_friendly(seconds):
-     TIME_DURATION_UNITS = (
-         ("week", 60 * 60 * 24 * 7),
-         ("day", 60 * 60 * 24),
-         ("hour", 60 * 60),
-         ("min", 60),
-     )
-     if seconds == 0:
-         return "inf"
-     accumulator = []
-     for unit, div in TIME_DURATION_UNITS:
-         amount, seconds = divmod(float(seconds), div)
-         if amount > 0:
-             accumulator.append(
-                 "{} {}{}".format(int(amount), unit, "" if amount == 1 else "s")
-             )
-     accumulator.append("{} secs".format(round(seconds, 2)))
-     return ", ".join(accumulator)
-
-
- # Disable
- def block_print():
-     sys.stdout = open(os.devnull, "w")
-
-
- # Restore
- def enable_print():
-     sys.stdout = sys.__stdout__
ads/opctl/operator/runtime/runtime.py CHANGED
@@ -15,7 +15,8 @@ from cerberus import Validator
  from ads.common.extended_enum import ExtendedEnum
  from ads.common.serializer import DataClassSerializable
  from ads.opctl.operator.common.utils import _load_yaml_from_uri
- from ads.opctl.operator.common.errors import OperatorSchemaYamlError
+ from ads.opctl.operator.common.errors import InvalidParameterError
+


  class OPERATOR_LOCAL_RUNTIME_TYPE(ExtendedEnum):
@@ -60,7 +61,7 @@ class Runtime(DataClassSerializable):
          result = validator.validate(obj_dict)

          if not result:
-             raise OperatorSchemaYamlError(json.dumps(validator.errors, indent=2))
+             raise InvalidParameterError(json.dumps(validator.errors, indent=2))
          return True


ads/telemetry/base.py ADDED
@@ -0,0 +1,62 @@
+ #!/usr/bin/env python
+ # -*- coding: utf-8 -*-
+ # Copyright (c) 2024 Oracle and/or its affiliates.
+ # Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/
+
+ import logging
+
+ from ads import set_auth
+ from ads.common import oci_client as oc
+ from ads.common.auth import default_signer
+ from ads.config import OCI_RESOURCE_PRINCIPAL_VERSION
+
+
+ logger = logging.getLogger(__name__)
+ class TelemetryBase:
+     """Base class for Telemetry Client."""
+
+     def __init__(self, bucket: str, namespace: str = None) -> None:
+         """Initializes the telemetry client.
+
+         Parameters
+         ----------
+         bucket : str
+             OCI object storage bucket name storing the telemetry objects.
+         namespace : str, optional
+             Namespace of the OCI object storage bucket, by default None.
+         """
+         if OCI_RESOURCE_PRINCIPAL_VERSION:
+             set_auth("resource_principal")
+         self._auth = default_signer()
+         self.os_client = oc.OCIClientFactory(**self._auth).object_storage
+         self.bucket = bucket
+         self._namespace = namespace
+         self._service_endpoint = None
+         logger.debug(f"Initialized Telemetry. Namespace: {self.namespace}, Bucket: {self.bucket}")
+
+
+     @property
+     def namespace(self) -> str:
+         """Gets the namespace of the object storage from the tenancy.
+
+         Returns
+         -------
+         str
+             The namespace of the tenancy.
+         """
+         if not self._namespace:
+             self._namespace = self.os_client.get_namespace().data
+         return self._namespace
+
+     @property
+     def service_endpoint(self):
+         """Gets the tenancy-specific endpoint.
+
+         Returns
+         -------
+         str
+             Tenancy-specific endpoint.
+         """
+         if not self._service_endpoint:
+             self._service_endpoint = self.os_client.base_client.endpoint
+         return self._service_endpoint
ads/telemetry/client.py ADDED
@@ -0,0 +1,105 @@
+ #!/usr/bin/env python
+ # -*- coding: utf-8 -*-
+ # Copyright (c) 2024 Oracle and/or its affiliates.
+ # Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/
+
+
+ import logging
+ import threading
+ import urllib.parse
+ import requests
+ from requests import Response
+ from .base import TelemetryBase
+ from ads.config import DEBUG_TELEMETRY
+
+
+ logger = logging.getLogger(__name__)
+
+
+ class TelemetryClient(TelemetryBase):
+     """Represents a telemetry python client providing functions to record an event.
+
+     Methods
+     -------
+     record_event(category: str = None, action: str = None, path: str = None, **kwargs) -> None
+         Send a head request to generate an event record.
+     record_event_async(category: str = None, action: str = None, path: str = None, **kwargs)
+         Starts thread to send a head request to generate an event record.
+
+     Examples
+     --------
+     >>> import os
+     >>> import traceback
+     >>> from ads.telemetry.client import TelemetryClient
+     >>> AQUA_BUCKET = os.environ.get("AQUA_BUCKET", "service-managed-models")
+     >>> AQUA_BUCKET_NS = os.environ.get("AQUA_BUCKET_NS", "ociodscdev")
+     >>> telemetry = TelemetryClient(bucket=AQUA_BUCKET, namespace=AQUA_BUCKET_NS)
+     >>> telemetry.record_event_async(category="aqua/service/model", action="create") # records create action
+     >>> telemetry.record_event_async(category="aqua/service/model/create", action="shape", detail="VM.GPU.A10.1")
+     """
+
+     @staticmethod
+     def _encode_user_agent(**kwargs):
+         message = urllib.parse.urlencode(kwargs)
+         return message
+
+     def record_event(
+         self, category: str = None, action: str = None, detail: str = None, **kwargs
+     ) -> Response:
+         """Send a head request to generate an event record.
+
+         Parameters
+         ----------
+         category: (str)
+             Category of the event, which is also the path to the directory containing the object representing the event.
+         action: (str)
+             Filename of the object representing the event.
+         detail: (str)
+             Can be used to pass additional values, if required. When set, detail is converted to an action,
+             category and action are grouped together for telemetry parsing in the backend.
+         **kwargs:
+             Can be used to pass additional attributes like value that will be passed in the headers of the request.
+
+         Returns
+         -------
+         Response
+         """
+         try:
+             if not category or not action:
+                 raise ValueError("Please specify the category and the action.")
+             if detail:
+                 category, action = f"{category}/{action}", detail
+             endpoint = f"{self.service_endpoint}/n/{self.namespace}/b/{self.bucket}/o/telemetry/{category}/{action}"
+             headers = {"User-Agent": self._encode_user_agent(**kwargs)}
+             logger.debug(f"Sending telemetry to endpoint: {endpoint}")
+             signer = self._auth["signer"]
+             response = requests.head(endpoint, auth=signer, headers=headers)
+             logger.debug(f"Telemetry status code: {response.status_code}")
+             return response
+         except Exception as e:
+             if DEBUG_TELEMETRY:
+                 logger.error(f"There is an error recording telemetry: {e}")
+
+     def record_event_async(
+         self, category: str = None, action: str = None, detail: str = None, **kwargs
+     ):
+         """Send a head request to generate an event record.
+
+         Parameters
+         ----------
+         category (str)
+             Category of the event, which is also the path to the directory containing the object representing the event.
+         action (str)
+             Filename of the object representing the event.
+
+         Returns
+         -------
+         Thread
+             A started thread to send a head request to generate an event record.
+         """
+         thread = threading.Thread(
+             target=self.record_event, args=(category, action, detail), kwargs=kwargs
+         )
+         thread.daemon = True
+         thread.start()
+         return thread
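As the docstrings above describe, `record_event` issues an authenticated HEAD request against an object path under the bucket's `telemetry/` prefix; a non-empty `detail` folds `category/action` into the prefix and `detail` becomes the object name. A small sketch of just that URL construction, with placeholder endpoint, namespace, and bucket values (not the values the SDK resolves at runtime):

```python
# Sketch of the object path targeted by TelemetryClient.record_event.
# SERVICE_ENDPOINT, NAMESPACE, and BUCKET are illustrative placeholders.
SERVICE_ENDPOINT = "https://objectstorage.us-ashburn-1.oraclecloud.com"
NAMESPACE = "example-namespace"
BUCKET = "service-managed-models"


def telemetry_object_url(category: str, action: str, detail: str = None) -> str:
    """Mirror the endpoint construction shown in record_event above."""
    if detail:
        # detail becomes the object name; category/action collapse into the prefix
        category, action = f"{category}/{action}", detail
    return f"{SERVICE_ENDPOINT}/n/{NAMESPACE}/b/{BUCKET}/o/telemetry/{category}/{action}"


print(telemetry_object_url("aqua/service/model", "create"))
print(telemetry_object_url("aqua/service/model/create", "shape", detail="VM.GPU.A10.1"))
```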
ads/telemetry/telemetry.py CHANGED
@@ -1,7 +1,7 @@
  #!/usr/bin/env python
  # -*- coding: utf-8 -*--

- # Copyright (c) 2022, 2023 Oracle and/or its affiliates.
+ # Copyright (c) 2022, 2024 Oracle and/or its affiliates.
  # Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/

  import os
@@ -102,7 +102,10 @@ def telemetry(
                  entry_point
              )
              try:
-                 return func(*args, **{**kwargs, **{TELEMETRY_ARGUMENT_NAME: telemetry}})
+                 # todo: inject telemetry arg and later update all functions that use the @telemetry
+                 # decorator to accept **kwargs. Comment the below line as some aqua apis don't support kwargs.
+                 # return func(*args, **{**kwargs, **{TELEMETRY_ARGUMENT_NAME: telemetry}})
+                 return func(*args, **kwargs)
              except:
                  raise
              finally:
@@ -178,7 +181,7 @@ class Telemetry:
          self: Telemetry
              An instance of the Telemetry.
          """
-         os.environ[self.environ_variable] = self._original_value
+         os.environ[self.environ_variable] = self._original_value or ""
          return self

      def clean(self) -> "Telemetry":
{oracle_ads-2.10.0.dist-info → oracle_ads-2.11.0.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: oracle_ads
- Version: 2.10.0
+ Version: 2.11.0
  Summary: Oracle Accelerated Data Science SDK
  Keywords: Oracle Cloud Infrastructure,OCI,Machine Learning,ML,Artificial Intelligence,AI,Data Science,Cloud,Oracle
  Author: Oracle Data Science
@@ -24,13 +24,23 @@ Requires-Dist: matplotlib>=3.1.3
  Requires-Dist: numpy>=1.19.2
  Requires-Dist: oci>=2.113.0
  Requires-Dist: ocifs>=1.1.3
- Requires-Dist: pandas>1.2.1,<2.1
+ Requires-Dist: pandas>1.2.1; python_version<'3.9'
+ Requires-Dist: pandas>=2.2.0; python_version>='3.9'
  Requires-Dist: psutil>=5.7.2
  Requires-Dist: python_jsonschema_objects>=0.3.13
  Requires-Dist: requests
  Requires-Dist: scikit-learn>=1.0
  Requires-Dist: tabulate>=0.8.9
  Requires-Dist: tqdm>=4.59.0
+ Requires-Dist: oracle_ads[opctl] ; extra == "anomaly"
+ Requires-Dist: autots ; extra == "anomaly"
+ Requires-Dist: oracle-automlx[forecasting]==23.4.1 ; extra == "anomaly"
+ Requires-Dist: oracle-automlx[classic]==23.4.1 ; extra == "anomaly"
+ Requires-Dist: oracledb ; extra == "anomaly"
+ Requires-Dist: datapane ; extra == "anomaly"
+ Requires-Dist: fire ; extra == "aqua"
+ Requires-Dist: cachetools ; extra == "aqua"
+ Requires-Dist: jupyter_server ; extra == "aqua"
  Requires-Dist: hdfs[kerberos] ; extra == "bds"
  Requires-Dist: ibis-framework[impala] ; extra == "bds"
  Requires-Dist: sqlalchemy ; extra == "bds"
@@ -58,19 +68,21 @@ Requires-Dist: holidays==0.21.13 ; extra == "forecast"
  Requires-Dist: neuralprophet ; extra == "forecast"
  Requires-Dist: numpy ; extra == "forecast"
  Requires-Dist: oci-cli ; extra == "forecast"
- Requires-Dist: optuna==2.9.0 ; extra == "forecast"
+ Requires-Dist: optuna==3.1.0 ; extra == "forecast"
  Requires-Dist: oracle-ads ; extra == "forecast"
- Requires-Dist: oracle-automlx[forecasting]==23.2.3 ; extra == "forecast"
+ Requires-Dist: oracle-automlx[forecasting]==23.4.1 ; extra == "forecast"
+ Requires-Dist: oracle-automlx[classic]==23.4.1 ; extra == "forecast"
  Requires-Dist: pmdarima ; extra == "forecast"
  Requires-Dist: prophet ; extra == "forecast"
  Requires-Dist: shap ; extra == "forecast"
  Requires-Dist: sktime ; extra == "forecast"
  Requires-Dist: statsmodels ; extra == "forecast"
  Requires-Dist: plotly ; extra == "forecast"
+ Requires-Dist: oracledb ; extra == "forecast"
  Requires-Dist: geopandas ; extra == "geo"
  Requires-Dist: oracle_ads[viz] ; extra == "geo"
  Requires-Dist: transformers ; extra == "huggingface"
- Requires-Dist: langchain>=0.0.295 ; extra == "llm"
+ Requires-Dist: langchain>=0.1.10 ; extra == "llm"
  Requires-Dist: evaluate>=0.4.0 ; extra == "llm"
  Requires-Dist: ipython>=7.23.1, <8.0 ; extra == "notebook"
  Requires-Dist: ipywidgets~=7.6.3 ; extra == "notebook"
@@ -106,18 +118,42 @@ Requires-Dist: spacy==3.6.1 ; extra == "pii"
  Requires-Dist: pyspark>=3.0.0 ; extra == "spark"
  Requires-Dist: oracle_ads[viz] ; extra == "tensorflow"
  Requires-Dist: tensorflow ; extra == "tensorflow"
+ Requires-Dist: dask==2023.5.0 ; extra == "testsuite" and ( python_version=='3.8')
+ Requires-Dist: dask==2023.10.1 ; extra == "testsuite" and ( python_version>='3.9')
+ Requires-Dist: arff ; extra == "testsuite"
+ Requires-Dist: category_encoders==2.6.3 ; extra == "testsuite"
+ Requires-Dist: cohere==4.53 ; extra == "testsuite"
+ Requires-Dist: dask==2023.10.1 ; extra == "testsuite" and ( python_version>='3.9')
+ Requires-Dist: dask==2023.5.0 ; extra == "testsuite" and ( python_version=='3.8')
+ Requires-Dist: faiss-cpu ; extra == "testsuite"
+ Requires-Dist: fastparquet==2024.2.0 ; extra == "testsuite"
+ Requires-Dist: imbalanced-learn ; extra == "testsuite"
+ Requires-Dist: lxml ; extra == "testsuite"
+ Requires-Dist: mysql-connector-python ; extra == "testsuite"
+ Requires-Dist: nltk ; extra == "testsuite"
+ Requires-Dist: notebook==6.4.12 ; extra == "testsuite"
+ Requires-Dist: opensearch-py ; extra == "testsuite"
+ Requires-Dist: pdfplumber ; extra == "testsuite"
+ Requires-Dist: py4j ; extra == "testsuite"
+ Requires-Dist: pyarrow ; extra == "testsuite"
+ Requires-Dist: statsmodels ; extra == "testsuite" and ( python_version=='3.8')
+ Requires-Dist: statsmodels>=0.14.1 ; extra == "testsuite" and ( python_version>='3.9')
+ Requires-Dist: tables ; extra == "testsuite"
+ Requires-Dist: xlrd>=1.2.0 ; extra == "testsuite"
  Requires-Dist: spacy ; extra == "text"
  Requires-Dist: wordcloud>=1.8.1 ; extra == "text"
  Requires-Dist: oracle_ads[viz] ; extra == "torch"
  Requires-Dist: torch ; extra == "torch"
  Requires-Dist: torchvision ; extra == "torch"
- Requires-Dist: bokeh>=2.3.0, <=2.4.3 ; extra == "viz"
+ Requires-Dist: bokeh>=3.0.0, <3.2.0 ; extra == "viz"
  Requires-Dist: folium>=0.12.1 ; extra == "viz"
  Requires-Dist: graphviz<0.17 ; extra == "viz"
  Requires-Dist: scipy>=1.5.4 ; extra == "viz"
  Requires-Dist: seaborn>=0.11.0 ; extra == "viz"
  Project-URL: Documentation, https://accelerated-data-science.readthedocs.io/en/latest/index.html
  Project-URL: Github, https://github.com/oracle/accelerated-data-science
+ Provides-Extra: anomaly
+ Provides-Extra: aqua
  Provides-Extra: bds
  Provides-Extra: boosted
  Provides-Extra: data
@@ -133,6 +169,7 @@ Provides-Extra: optuna
  Provides-Extra: pii
  Provides-Extra: spark
  Provides-Extra: tensorflow
+ Provides-Extra: testsuite
  Provides-Extra: text
  Provides-Extra: torch
  Provides-Extra: viz
@@ -171,7 +208,7 @@ You have various options when installing ADS.
  To use the AI Forecast Operator, install the "forecast" dependencies using the following command:

  ```bash
- python3 -m pip install 'oracle_ads[forecast]==2.9.0'
+ python3 -m pip install 'oracle_ads[forecast]>=2.9.0'
  ```

  ### Installing extras libraries