oracle-ads 2.12.6__py3-none-any.whl → 2.12.8__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (29)
  1. ads/aqua/common/utils.py +4 -1
  2. ads/aqua/constants.py +1 -0
  3. ads/aqua/evaluation/entities.py +2 -2
  4. ads/aqua/evaluation/evaluation.py +2 -6
  5. ads/aqua/extension/model_handler.py +4 -0
  6. ads/aqua/model/entities.py +2 -0
  7. ads/aqua/model/model.py +25 -19
  8. ads/aqua/modeldeployment/deployment.py +15 -9
  9. ads/llm/autogen/__init__.py +0 -0
  10. ads/llm/autogen/client_v02.py +282 -0
  11. ads/opctl/operator/common/utils.py +6 -4
  12. ads/opctl/operator/lowcode/anomaly/model/base_model.py +2 -3
  13. ads/opctl/operator/lowcode/anomaly/model/factory.py +2 -2
  14. ads/opctl/operator/lowcode/common/transformations.py +14 -10
  15. ads/opctl/operator/lowcode/common/utils.py +37 -37
  16. ads/opctl/operator/lowcode/forecast/model/automlx.py +10 -2
  17. ads/opctl/operator/lowcode/forecast/model/base_model.py +2 -1
  18. ads/opctl/operator/lowcode/forecast/model/factory.py +3 -2
  19. ads/opctl/operator/lowcode/forecast/model/prophet.py +4 -1
  20. ads/opctl/operator/lowcode/forecast/schema.yaml +1 -1
  21. ads/opctl/operator/lowcode/pii/model/factory.py +7 -5
  22. ads/opctl/operator/lowcode/recommender/model/base_model.py +2 -1
  23. ads/opctl/operator/lowcode/recommender/model/factory.py +4 -6
  24. ads/opctl/operator/lowcode/recommender/model/svd.py +5 -5
  25. {oracle_ads-2.12.6.dist-info → oracle_ads-2.12.8.dist-info}/METADATA +3 -2
  26. {oracle_ads-2.12.6.dist-info → oracle_ads-2.12.8.dist-info}/RECORD +29 -27
  27. {oracle_ads-2.12.6.dist-info → oracle_ads-2.12.8.dist-info}/LICENSE.txt +0 -0
  28. {oracle_ads-2.12.6.dist-info → oracle_ads-2.12.8.dist-info}/WHEEL +0 -0
  29. {oracle_ads-2.12.6.dist-info → oracle_ads-2.12.8.dist-info}/entry_points.txt +0 -0
ads/aqua/common/utils.py CHANGED
@@ -788,13 +788,14 @@ def get_ocid_substring(ocid: str, key_len: int) -> str:
788
788
  return ocid[-key_len:] if ocid and len(ocid) > key_len else ""
789
789
 
790
790
 
791
- def upload_folder(os_path: str, local_dir: str, model_name: str) -> str:
791
+ def upload_folder(os_path: str, local_dir: str, model_name: str, exclude_pattern: str = None) -> str:
792
792
  """Upload the local folder to the object storage
793
793
 
794
794
  Args:
795
795
  os_path (str): object storage URI with prefix. This is the path to upload
796
796
  local_dir (str): Local directory where the object is downloaded
797
797
  model_name (str): Name of the huggingface model
798
+ exclude_pattern (optional, str): The matching pattern of files to be excluded from uploading.
798
799
  Retuns:
799
800
  str: Object name inside the bucket
800
801
  """
@@ -804,6 +805,8 @@ def upload_folder(os_path: str, local_dir: str, model_name: str) -> str:
804
805
  auth_state = AuthState()
805
806
  object_path = os_details.filepath.rstrip("/") + "/" + model_name + "/"
806
807
  command = f"oci os object bulk-upload --src-dir {local_dir} --prefix {object_path} -bn {os_details.bucket} -ns {os_details.namespace} --auth {auth_state.oci_iam_type} --profile {auth_state.oci_key_profile} --no-overwrite"
808
+ if exclude_pattern:
809
+ command += f" --exclude {exclude_pattern}"
807
810
  try:
808
811
  logger.info(f"Running: {command}")
809
812
  subprocess.check_call(shlex.split(command))
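Editor's note: the hunk above threads an optional exclude_pattern into the `oci os object bulk-upload` command. A minimal sketch of calling the updated helper; the bucket, namespace, local path, and model name below are placeholders:

    from ads.aqua.common.utils import upload_folder

    # Upload a locally cached model, skipping the Hugging Face metadata folder.
    object_name = upload_folder(
        os_path="oci://my-bucket@my-namespace/models",   # placeholder destination URI
        local_dir="/tmp/cached-model/my-org/my-model",   # placeholder local directory
        model_name="my-org/my-model",                    # placeholder model name
        exclude_pattern=".cache/*",                      # forwarded as --exclude to bulk-upload
    )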
ads/aqua/constants.py CHANGED
@@ -35,6 +35,7 @@ AQUA_MODEL_ARTIFACT_CONFIG = "config.json"
35
35
  AQUA_MODEL_ARTIFACT_CONFIG_MODEL_NAME = "_name_or_path"
36
36
  AQUA_MODEL_ARTIFACT_CONFIG_MODEL_TYPE = "model_type"
37
37
  AQUA_MODEL_ARTIFACT_FILE = "model_file"
38
+ HF_METADATA_FOLDER = ".cache/"
38
39
  HF_LOGIN_DEFAULT_TIMEOUT = 2
39
40
 
40
41
  TRAINING_METRICS_FINAL = "training_metrics_final"
ads/aqua/evaluation/entities.py CHANGED
@@ -83,7 +83,7 @@ class CreateAquaEvaluationDetails(Serializable):
83
83
  ocpus: Optional[float] = None
84
84
  log_group_id: Optional[str] = None
85
85
  log_id: Optional[str] = None
86
- metrics: Optional[List[str]] = None
86
+ metrics: Optional[List[Dict[str, Any]]] = None
87
87
  force_overwrite: Optional[bool] = False
88
88
 
89
89
  class Config:
@@ -140,7 +140,7 @@ class AquaEvaluationCommands(Serializable):
140
140
  evaluation_id: str
141
141
  evaluation_target_id: str
142
142
  input_data: Dict[str, Any]
143
- metrics: List[str]
143
+ metrics: List[Dict[str, Any]]
144
144
  output_dir: str
145
145
  params: Dict[str, Any]
146
146
 
ads/aqua/evaluation/evaluation.py CHANGED
@@ -159,7 +159,8 @@ class AquaEvaluationApp(AquaApp):
159
159
  create_aqua_evaluation_details = CreateAquaEvaluationDetails(**kwargs)
160
160
  except Exception as ex:
161
161
  custom_errors = {
162
- ".".join(map(str, e["loc"])): e["msg"] for e in json.loads(ex.json())
162
+ ".".join(map(str, e["loc"])): e["msg"]
163
+ for e in json.loads(ex.json())
163
164
  }
164
165
  raise AquaValueError(
165
166
  f"Invalid create evaluation parameters. Error details: {custom_errors}."
@@ -619,11 +620,6 @@ class AquaEvaluationApp(AquaApp):
619
620
  evaluation_id=evaluation_id,
620
621
  evaluation_target_id=evaluation_source_id,
621
622
  input_data={
622
- "columns": {
623
- "prompt": "prompt",
624
- "completion": "completion",
625
- "category": "category",
626
- },
627
623
  "format": Path(dataset_path).suffix,
628
624
  "url": dataset_path,
629
625
  },
ads/aqua/extension/model_handler.py CHANGED
@@ -129,6 +129,8 @@ class AquaModelHandler(AquaAPIhandler):
129
129
  str(input_data.get("download_from_hf", "false")).lower() == "true"
130
130
  )
131
131
  inference_container_uri = input_data.get("inference_container_uri")
132
+ allow_patterns = input_data.get("allow_patterns")
133
+ ignore_patterns = input_data.get("ignore_patterns")
132
134
 
133
135
  return self.finish(
134
136
  AquaModelApp().register(
@@ -141,6 +143,8 @@ class AquaModelHandler(AquaAPIhandler):
141
143
  project_id=project_id,
142
144
  model_file=model_file,
143
145
  inference_container_uri=inference_container_uri,
146
+ allow_patterns=allow_patterns,
147
+ ignore_patterns=ignore_patterns,
144
148
  )
145
149
  )
146
150
 
ads/aqua/model/entities.py CHANGED
@@ -289,6 +289,8 @@ class ImportModelDetails(CLIBuilderMixin):
289
289
  project_id: Optional[str] = None
290
290
  model_file: Optional[str] = None
291
291
  inference_container_uri: Optional[str] = None
292
+ allow_patterns: Optional[List[str]] = None
293
+ ignore_patterns: Optional[List[str]] = None
292
294
 
293
295
  def __post_init__(self):
294
296
  self._command = "model register"
ads/aqua/model/model.py CHANGED
@@ -40,6 +40,7 @@ from ads.aqua.constants import (
40
40
  AQUA_MODEL_ARTIFACT_CONFIG_MODEL_TYPE,
41
41
  AQUA_MODEL_ARTIFACT_FILE,
42
42
  AQUA_MODEL_TYPE_CUSTOM,
43
+ HF_METADATA_FOLDER,
43
44
  LICENSE_TXT,
44
45
  MODEL_BY_REFERENCE_OSS_PATH_KEY,
45
46
  README,
@@ -1274,6 +1275,8 @@ class AquaModelApp(AquaApp):
1274
1275
  model_name: str,
1275
1276
  os_path: str,
1276
1277
  local_dir: str = None,
1278
+ allow_patterns: List[str] = None,
1279
+ ignore_patterns: List[str] = None,
1277
1280
  ) -> str:
1278
1281
  """This helper function downloads the model artifact from Hugging Face to a local folder, then uploads
1279
1282
  to object storage location.
@@ -1283,6 +1286,12 @@ class AquaModelApp(AquaApp):
1283
1286
  model_name (str): The huggingface model name.
1284
1287
  os_path (str): The OS path where the model files are located.
1285
1288
  local_dir (str): The local temp dir to store the huggingface model.
1289
+ allow_patterns (list): Model files matching at least one pattern are downloaded.
1290
+ Example: ["*.json"] will download all .json files. ["folder/*"] will download all files under `folder`.
1291
+ Patterns are Standard Wildcards (globbing patterns) and rules can be found here: https://docs.python.org/3/library/fnmatch.html
1292
+ ignore_patterns (list): Model files matching any of the patterns are not downloaded.
1293
+ Example: ["*.json"] will ignore all .json files. ["folder/*"] will ignore all files under `folder`.
1294
+ Patterns are Standard Wildcards (globbing patterns) and rules can be found here: https://docs.python.org/3/library/fnmatch.html
1286
1295
 
1287
1296
  Returns
1288
1297
  -------
@@ -1293,30 +1302,19 @@ class AquaModelApp(AquaApp):
1293
1302
  if not local_dir:
1294
1303
  local_dir = os.path.join(os.path.expanduser("~"), "cached-model")
1295
1304
  local_dir = os.path.join(local_dir, model_name)
1296
- retry = 10
1297
- i = 0
1298
- huggingface_download_err_message = None
1299
- while i < retry:
1300
- try:
1301
- # Download to cache folder. The while loop retries when there is a network failure
1302
- snapshot_download(repo_id=model_name)
1303
- except Exception as e:
1304
- huggingface_download_err_message = str(e)
1305
- i += 1
1306
- else:
1307
- break
1308
- if i == retry:
1309
- raise Exception(
1310
- f"Could not download the model {model_name} from https://huggingface.co with message {huggingface_download_err_message}"
1311
- )
1312
1305
  os.makedirs(local_dir, exist_ok=True)
1313
- # Copy the model from the cache to destination
1314
- snapshot_download(repo_id=model_name, local_dir=local_dir)
1315
- # Upload to object storage
1306
+ snapshot_download(
1307
+ repo_id=model_name,
1308
+ local_dir=local_dir,
1309
+ allow_patterns=allow_patterns,
1310
+ ignore_patterns=ignore_patterns,
1311
+ )
1312
+ # Upload to object storage and skip .cache/huggingface/ folder
1316
1313
  model_artifact_path = upload_folder(
1317
1314
  os_path=os_path,
1318
1315
  local_dir=local_dir,
1319
1316
  model_name=model_name,
1317
+ exclude_pattern=f"{HF_METADATA_FOLDER}*"
1320
1318
  )
1321
1319
 
1322
1320
  return model_artifact_path
@@ -1335,6 +1333,12 @@ class AquaModelApp(AquaApp):
1335
1333
  os_path (str): Object storage destination URI to store the downloaded model. Format: oci://bucket-name@namespace/prefix
1336
1334
  inference_container (str): selects service defaults
1337
1335
  finetuning_container (str): selects service defaults
1336
+ allow_patterns (list): Model files matching at least one pattern are downloaded.
1337
+ Example: ["*.json"] will download all .json files. ["folder/*"] will download all files under `folder`.
1338
+ Patterns are Standard Wildcards (globbing patterns) and rules can be found here: https://docs.python.org/3/library/fnmatch.html
1339
+ ignore_patterns (list): Model files matching any of the patterns are not downloaded.
1340
+ Example: ["*.json"] will ignore all .json files. ["folder/*"] will ignore all files under `folder`.
1341
+ Patterns are Standard Wildcards (globbing patterns) and rules can be found here: https://docs.python.org/3/library/fnmatch.html
1338
1342
 
1339
1343
  Returns:
1340
1344
  AquaModel:
@@ -1381,6 +1385,8 @@ class AquaModelApp(AquaApp):
1381
1385
  model_name=model_name,
1382
1386
  os_path=import_model_details.os_path,
1383
1387
  local_dir=import_model_details.local_dir,
1388
+ allow_patterns=import_model_details.allow_patterns,
1389
+ ignore_patterns=import_model_details.ignore_patterns,
1384
1390
  ).rstrip("/")
1385
1391
  else:
1386
1392
  artifact_path = import_model_details.os_path.rstrip("/")
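Editor's note: taken together, the model_handler.py, entities.py, and model.py hunks let callers limit which Hugging Face files are downloaded before upload. A sketch of the end-to-end registration call; keyword names other than allow_patterns/ignore_patterns are inferred from the surrounding handler code and may not match the full register() signature, and the model name and bucket are placeholders:

    from ads.aqua.model.model import AquaModelApp

    AquaModelApp().register(
        model="my-org/my-model",                          # placeholder Hugging Face model name
        os_path="oci://my-bucket@my-namespace/models",    # placeholder destination
        download_from_hf=True,
        allow_patterns=["*.safetensors", "*.json"],       # only matching files are downloaded
        ignore_patterns=["original/*"],                   # matching files are skipped
    )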
ads/aqua/modeldeployment/deployment.py CHANGED
@@ -185,7 +185,7 @@ class AquaDeploymentApp(AquaApp):
185
185
  tags[tag] = aqua_model.freeform_tags[tag]
186
186
 
187
187
  tags.update({Tags.AQUA_MODEL_NAME_TAG: aqua_model.display_name})
188
- tags.update({Tags.TASK: aqua_model.freeform_tags.get(Tags.TASK, None)})
188
+ tags.update({Tags.TASK: aqua_model.freeform_tags.get(Tags.TASK, UNKNOWN)})
189
189
 
190
190
  # Set up info to get deployment config
191
191
  config_source_id = model_id
@@ -533,16 +533,22 @@ class AquaDeploymentApp(AquaApp):
533
533
  return results
534
534
 
535
535
  @telemetry(entry_point="plugin=deployment&action=delete", name="aqua")
536
- def delete(self,model_deployment_id:str):
537
- return self.ds_client.delete_model_deployment(model_deployment_id=model_deployment_id).data
536
+ def delete(self, model_deployment_id: str):
537
+ return self.ds_client.delete_model_deployment(
538
+ model_deployment_id=model_deployment_id
539
+ ).data
538
540
 
539
- @telemetry(entry_point="plugin=deployment&action=deactivate",name="aqua")
540
- def deactivate(self,model_deployment_id:str):
541
- return self.ds_client.deactivate_model_deployment(model_deployment_id=model_deployment_id).data
541
+ @telemetry(entry_point="plugin=deployment&action=deactivate", name="aqua")
542
+ def deactivate(self, model_deployment_id: str):
543
+ return self.ds_client.deactivate_model_deployment(
544
+ model_deployment_id=model_deployment_id
545
+ ).data
542
546
 
543
- @telemetry(entry_point="plugin=deployment&action=activate",name="aqua")
544
- def activate(self,model_deployment_id:str):
545
- return self.ds_client.activate_model_deployment(model_deployment_id=model_deployment_id).data
547
+ @telemetry(entry_point="plugin=deployment&action=activate", name="aqua")
548
+ def activate(self, model_deployment_id: str):
549
+ return self.ds_client.activate_model_deployment(
550
+ model_deployment_id=model_deployment_id
551
+ ).data
546
552
 
547
553
  @telemetry(entry_point="plugin=deployment&action=get", name="aqua")
548
554
  def get(self, model_deployment_id: str, **kwargs) -> "AquaDeploymentDetail":
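Editor's note: the reformatted lifecycle methods keep the same call shape. A brief sketch; the OCID is a placeholder:

    from ads.aqua.modeldeployment.deployment import AquaDeploymentApp

    app = AquaDeploymentApp()
    md_id = "ocid1.datasciencemodeldeployment.oc1..<unique_id>"  # placeholder OCID
    app.deactivate(model_deployment_id=md_id)  # stop the deployment
    app.activate(model_deployment_id=md_id)    # start it again
    app.delete(model_deployment_id=md_id)      # tear it down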
ads/llm/autogen/__init__.py ADDED
File without changes
ads/llm/autogen/client_v02.py ADDED
@@ -0,0 +1,282 @@
1
+ # coding: utf-8
2
+ # Copyright (c) 2016, 2024, Oracle and/or its affiliates. All rights reserved.
3
+ # This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
4
+
5
+ """This module contains the custom LLM client for AutoGen v0.2 to use LangChain chat models.
6
+ https://microsoft.github.io/autogen/0.2/blog/2024/01/26/Custom-Models/
7
+
8
+ To use the custom client:
9
+ 1. Prepare the LLM config, including the parameters for initializing the LangChain client.
10
+ 2. Register the custom LLM
11
+
12
+ The LLM config should config the following keys:
13
+ * model_client_cls: Required by AutoGen to identify the custom client. It should be "LangChainModelClient"
14
+ * langchain_cls: LangChain class including the full import path.
15
+ * model: Name of the model to be used by AutoGen
16
+ * client_params: A dictionary containing the parameters to initialize the LangChain chat model.
17
+
18
+ Although the `LangChainModelClient` is designed to be generic and can potentially support any LangChain chat model,
19
+ the invocation depends on the server API spec and it may not be compatible with some implementations.
20
+
21
+ Following is an example config for OCI Generative AI service:
22
+ {
23
+ "model_client_cls": "LangChainModelClient",
24
+ "langchain_cls": "langchain_community.chat_models.oci_generative_ai.ChatOCIGenAI",
25
+ "model": "cohere.command-r-plus",
26
+ # client_params will be used to initialize the LangChain ChatOCIGenAI class.
27
+ "client_params": {
28
+ "model_id": "cohere.command-r-plus",
29
+ "compartment_id": COMPARTMENT_OCID,
30
+ "model_kwargs": {"temperature": 0, "max_tokens": 2048},
31
+ # Update the authentication method as needed
32
+ "auth_type": "SECURITY_TOKEN",
33
+ "auth_profile": "DEFAULT",
34
+ # You may need to specify `service_endpoint` if the service is in a different region.
35
+ },
36
+ }
37
+
38
+ Following is an example config for OCI Data Science Model Deployment:
39
+ {
40
+ "model_client_cls": "LangChainModelClient",
41
+ "langchain_cls": "ads.llm.ChatOCIModelDeploymentVLLM",
42
+ "model": "odsc-llm",
43
+ "endpoint": "https://MODEL_DEPLOYMENT_URL/predict",
44
+ "model_kwargs": {"temperature": 0.1, "max_tokens": 2048},
45
+ # function_call_params will only be added to the API call when function/tools are added.
46
+ "function_call_params": {
47
+ "tool_choice": "auto",
48
+ "chat_template": ChatTemplates.mistral(),
49
+ },
50
+ }
51
+
52
+ Note that if `client_params` is not specified in the config, all arguments from the config except
53
+ `model_client_cls` and `langchain_cls`, and `function_call_params`, will be used to initialize
54
+ the LangChain chat model.
55
+
56
+ The `function_call_params` will only be used for function/tool calling when tools are specified.
57
+
58
+ To register the custom client:
59
+
60
+ from ads.llm.autogen.client_v02 import LangChainModelClient, register_custom_client
61
+ register_custom_client(LangChainModelClient)
62
+
63
+ Once registered with ADS, the custom LLM class will be auto-registered for all new agents.
64
+ There is no need to call `register_model_client()` on each agent.
65
+
66
+ References:
67
+ https://microsoft.github.io/autogen/0.2/docs/notebooks/agentchat_huggingface_langchain/
68
+ https://github.com/microsoft/autogen/blob/0.2/notebook/agentchat_custom_model.ipynb
69
+
70
+ """
71
+ import copy
72
+ import importlib
73
+ import json
74
+ import logging
75
+ from typing import Any, Dict, List, Union
76
+ from types import SimpleNamespace
77
+
78
+ from autogen import ModelClient
79
+ from autogen.oai.client import OpenAIWrapper, PlaceHolderClient
80
+ from langchain_core.messages import AIMessage
81
+
82
+
83
+ logger = logging.getLogger(__name__)
84
+
85
+ # custom_clients is a dictionary mapping the name of the class to the actual class
86
+ custom_clients = {}
87
+
88
+ # There is a bug in GroupChat when using custom client:
89
+ # https://github.com/microsoft/autogen/issues/2956
90
+ # Here we will be patching the OpenAIWrapper to fix the issue.
91
+ # With this patch, you only need to register the client once with ADS.
92
+ # For example:
93
+ #
94
+ # from ads.llm.autogen.client_v02 import LangChainModelClient, register_custom_client
95
+ # register_custom_client(LangChainModelClient)
96
+ #
97
+ # This patch will auto-register the custom LLM to all new agents.
98
+ # So there is no need to call `register_model_client()` on each agent.
99
+ OpenAIWrapper._original_register_default_client = OpenAIWrapper._register_default_client
100
+
101
+
102
+ def _new_register_default_client(
103
+ self: OpenAIWrapper, config: Dict[str, Any], openai_config: Dict[str, Any]
104
+ ) -> None:
105
+ """This is a patched version of the _register_default_client() method
106
+ to automatically register custom client for agents.
107
+ """
108
+ model_client_cls_name = config.get("model_client_cls")
109
+ if model_client_cls_name in custom_clients:
110
+ self._clients.append(PlaceHolderClient(config))
111
+ self.register_model_client(custom_clients[model_client_cls_name])
112
+ else:
113
+ self._original_register_default_client(
114
+ config=config, openai_config=openai_config
115
+ )
116
+
117
+
118
+ # Patch the _register_default_client() method
119
+ OpenAIWrapper._register_default_client = _new_register_default_client
120
+
121
+
122
+ def register_custom_client(client_class):
123
+ """Registers custom client for AutoGen."""
124
+ if client_class.__name__ not in custom_clients:
125
+ custom_clients[client_class.__name__] = client_class
126
+
127
+
128
+ def _convert_to_langchain_tool(tool):
129
+ """Converts the OpenAI tool spec to LangChain tool spec."""
130
+ if tool["type"] == "function":
131
+ tool = tool["function"]
132
+ required = tool["parameters"].get("required", [])
133
+ properties = copy.deepcopy(tool["parameters"]["properties"])
134
+ for key in properties.keys():
135
+ val = properties[key]
136
+ val["default"] = key in required
137
+ return {
138
+ "title": tool["name"],
139
+ "description": tool["description"],
140
+ "properties": properties,
141
+ }
142
+ raise NotImplementedError(f"Type {tool['type']} is not supported.")
143
+
144
+
145
+ def _convert_to_openai_tool_call(tool_call):
146
+ """Converts the LangChain tool call in AI message to OpenAI tool call."""
147
+ return {
148
+ "id": tool_call.get("id"),
149
+ "function": {
150
+ "name": tool_call.get("name"),
151
+ "arguments": (
152
+ ""
153
+ if tool_call.get("args") is None
154
+ else json.dumps(tool_call.get("args"))
155
+ ),
156
+ },
157
+ "type": "function",
158
+ }
159
+
160
+
161
+ class Message(AIMessage):
162
+ """Represents message returned from the LLM."""
163
+
164
+ @classmethod
165
+ def from_message(cls, message: AIMessage):
166
+ """Converts from LangChain AIMessage."""
167
+ message = copy.deepcopy(message)
168
+ message.__class__ = cls
169
+ message.tool_calls = [
170
+ _convert_to_openai_tool_call(tool) for tool in message.tool_calls
171
+ ]
172
+ return message
173
+
174
+ @property
175
+ def function_call(self):
176
+ """Function calls."""
177
+ return self.tool_calls
178
+
179
+
180
+ class LangChainModelClient(ModelClient):
181
+ """Represents a model client wrapping a LangChain chat model."""
182
+
183
+ def __init__(self, config: dict, **kwargs) -> None:
184
+ super().__init__()
185
+ logger.info("LangChain model client config: %s", str(config))
186
+ # Make a copy of the config since we are popping some keys
187
+ config = copy.deepcopy(config)
188
+ # model_client_cls will always be LangChainModelClient
189
+ self.client_class = config.pop("model_client_cls")
190
+
191
+ # model_name is used in constructing the response.
192
+ self.model_name = config.get("model", "")
193
+
194
+ # If the config specified function_call_params,
195
+ # Pop the params and use them only for tool calling.
196
+ self.function_call_params = config.pop("function_call_params", {})
197
+
198
+ # If the config specified invoke_params,
199
+ # Pop the params and use them only for invoking.
200
+ self.invoke_params = config.pop("invoke_params", {})
201
+
202
+ # Import the LangChain class
203
+ if "langchain_cls" not in config:
204
+ raise ValueError("Missing langchain_cls in LangChain Model Client config.")
205
+ module_cls = config.pop("langchain_cls")
206
+ module_name, cls_name = str(module_cls).rsplit(".", 1)
207
+ langchain_module = importlib.import_module(module_name)
208
+ langchain_cls = getattr(langchain_module, cls_name)
209
+
210
+ # If the config specified client_params,
211
+ # Only use the client_params to initialize the LangChain model.
212
+ # Otherwise, use the config
213
+ self.client_params = config.get("client_params", config)
214
+
215
+ # Initialize the LangChain client
216
+ self.model = langchain_cls(**self.client_params)
217
+
218
+ def create(self, params) -> ModelClient.ModelClientResponseProtocol:
219
+ """Creates a LLM completion for a given config.
220
+
221
+ Parameters
222
+ ----------
223
+ params : dict
224
+ OpenAI API compatible parameters, including all the keys from llm_config.
225
+
226
+ Returns
227
+ -------
228
+ ModelClientResponseProtocol
229
+ Response from LLM
230
+
231
+ """
232
+ streaming = params.get("stream", False)
233
+ # TODO: num_of_responses
234
+ num_of_responses = params.get("n", 1)
235
+ messages = params.pop("messages", [])
236
+
237
+ invoke_params = copy.deepcopy(self.invoke_params)
238
+
239
+ tools = params.get("tools")
240
+ if tools:
241
+ model = self.model.bind_tools(
242
+ [_convert_to_langchain_tool(tool) for tool in tools]
243
+ )
244
+ # invoke_params["tools"] = tools
245
+ invoke_params.update(self.function_call_params)
246
+ else:
247
+ model = self.model
248
+
249
+ response = SimpleNamespace()
250
+ response.choices = []
251
+ response.model = self.model_name
252
+
253
+ if streaming and messages:
254
+ # If streaming is enabled and has messages, then iterate over the chunks of the response.
255
+ raise NotImplementedError()
256
+ else:
257
+ # If streaming is not enabled, send a regular chat completion request
258
+ ai_message = model.invoke(messages, **invoke_params)
259
+ choice = SimpleNamespace()
260
+ choice.message = Message.from_message(ai_message)
261
+ response.choices.append(choice)
262
+ return response
263
+
264
+ def message_retrieval(
265
+ self, response: ModelClient.ModelClientResponseProtocol
266
+ ) -> Union[List[str], List[ModelClient.ModelClientResponseProtocol.Choice.Message]]:
267
+ """
268
+ Retrieve and return a list of strings or a list of Choice.Message from the response.
269
+
270
+ NOTE: if a list of Choice.Message is returned, it currently needs to contain the fields of OpenAI's ChatCompletion Message object,
271
+ since that is expected for function or tool calling in the rest of the codebase at the moment, unless a custom agent is being used.
272
+ """
273
+ return [choice.message for choice in response.choices]
274
+
275
+ def cost(self, response: ModelClient.ModelClientResponseProtocol) -> float:
276
+ response.cost = 0
277
+ return 0
278
+
279
+ @staticmethod
280
+ def get_usage(response: ModelClient.ModelClientResponseProtocol) -> Dict:
281
+ """Return usage summary of the response using RESPONSE_USAGE_KEYS."""
282
+ return {}
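Editor's note: a short usage sketch based on the module docstring above; the compartment OCID is a placeholder and the agent setup assumes the standard AutoGen v0.2 API:

    import autogen
    from ads.llm.autogen.client_v02 import LangChainModelClient, register_custom_client

    # Register once; the patched OpenAIWrapper auto-registers the client on every new agent.
    register_custom_client(LangChainModelClient)

    config = {
        "model_client_cls": "LangChainModelClient",
        "langchain_cls": "langchain_community.chat_models.oci_generative_ai.ChatOCIGenAI",
        "model": "cohere.command-r-plus",
        "client_params": {
            "model_id": "cohere.command-r-plus",
            "compartment_id": "<COMPARTMENT_OCID>",  # placeholder
            "model_kwargs": {"temperature": 0, "max_tokens": 2048},
        },
    }

    assistant = autogen.AssistantAgent("assistant", llm_config={"config_list": [config]})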
ads/opctl/operator/common/utils.py CHANGED
@@ -1,5 +1,4 @@
1
1
  #!/usr/bin/env python
2
- # -*- coding: utf-8 -*--
3
2
 
4
3
  # Copyright (c) 2023, 2024 Oracle and/or its affiliates.
5
4
  # Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/
@@ -18,7 +17,6 @@ import yaml
18
17
  from cerberus import Validator
19
18
 
20
19
  from ads.opctl import logger, utils
21
- from ads.opctl.operator import __operators__
22
20
 
23
21
  CONTAINER_NETWORK = "CONTAINER_NETWORK"
24
22
 
@@ -26,7 +24,11 @@ CONTAINER_NETWORK = "CONTAINER_NETWORK"
26
24
  class OperatorValidator(Validator):
27
25
  """The custom validator class."""
28
26
 
29
- pass
27
+ def validate(self, obj_dict, **kwargs):
28
+ # Model should be case insensitive
29
+ if "model" in obj_dict["spec"]:
30
+ obj_dict["spec"]["model"] = str(obj_dict["spec"]["model"]).lower()
31
+ return super().validate(obj_dict, **kwargs)
30
32
 
31
33
 
32
34
  def create_output_folder(name):
@@ -34,7 +36,7 @@ def create_output_folder(name):
34
36
  protocol = fsspec.utils.get_protocol(output_folder)
35
37
  storage_options = {}
36
38
  if protocol != "file":
37
- storage_options = auth or default_signer()
39
+ storage_options = default_signer()
38
40
 
39
41
  fs = fsspec.filesystem(protocol, **storage_options)
40
42
  name_suffix = 1
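Editor's note: the OperatorValidator hunk above makes the spec's model name case-insensitive by lowercasing it before validation. A minimal sketch with a toy cerberus schema (the schema itself is illustrative):

    from ads.opctl.operator.common.utils import OperatorValidator

    schema = {"spec": {"type": "dict", "schema": {"model": {"type": "string"}}}}
    validator = OperatorValidator(schema)
    spec = {"spec": {"model": "Prophet"}}
    assert validator.validate(spec)              # passes; "Prophet" is lowercased in place
    assert spec["spec"]["model"] == "prophet"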
ads/opctl/operator/lowcode/anomaly/model/base_model.py CHANGED
@@ -166,9 +166,8 @@ class AnomalyOperatorBaseModel(ABC):
166
166
  yaml_appendix = rc.Yaml(self.config.to_dict())
167
167
  summary = rc.Block(
168
168
  rc.Group(
169
- rc.Text(
170
- f"You selected the **`{self.spec.model}`** model.\n{model_description.text}\n"
171
- ),
169
+ rc.Text(f"You selected the **`{self.spec.model}`** model.\n"),
170
+ model_description,
172
171
  rc.Text(
173
172
  "Based on your dataset, you could have also selected "
174
173
  f"any of the models: `{'`, `'.join(SupportedModels.keys() if self.spec.datetime_column else NonTimeADSupportedModels.keys())}`."
ads/opctl/operator/lowcode/anomaly/model/factory.py CHANGED
@@ -26,9 +26,9 @@ class UnSupportedModelError(Exception):
26
26
 
27
27
  def __init__(self, operator_config: AnomalyOperatorConfig, model_type: str):
28
28
  supported_models = (
29
- SupportedModels.values
29
+ SupportedModels.values()
30
30
  if operator_config.spec.datetime_column
31
- else NonTimeADSupportedModels.values
31
+ else NonTimeADSupportedModels.values()
32
32
  )
33
33
  message = (
34
34
  f"Model: `{model_type}` is not supported. "
ads/opctl/operator/lowcode/common/transformations.py CHANGED
@@ -1,18 +1,19 @@
1
1
  #!/usr/bin/env python
2
- # -*- coding: utf-8 -*--
3
2
 
4
- # Copyright (c) 2023 Oracle and/or its affiliates.
3
+ # Copyright (c) 2023, 2024 Oracle and/or its affiliates.
5
4
  # Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/
6
5
 
6
+ from abc import ABC
7
+
8
+ import pandas as pd
9
+
7
10
  from ads.opctl import logger
11
+ from ads.opctl.operator.lowcode.common.const import DataColumns
8
12
  from ads.opctl.operator.lowcode.common.errors import (
9
- InvalidParameterError,
10
13
  DataMismatchError,
14
+ InvalidParameterError,
11
15
  )
12
- from ads.opctl.operator.lowcode.common.const import DataColumns
13
16
  from ads.opctl.operator.lowcode.common.utils import merge_category_columns
14
- import pandas as pd
15
- from abc import ABC
16
17
 
17
18
 
18
19
  class Transformations(ABC):
@@ -58,6 +59,7 @@ class Transformations(ABC):
58
59
 
59
60
  """
60
61
  clean_df = self._remove_trailing_whitespace(data)
62
+ # clean_df = self._normalize_column_names(clean_df)
61
63
  if self.name == "historical_data":
62
64
  self._check_historical_dataset(clean_df)
63
65
  clean_df = self._set_series_id_column(clean_df)
@@ -95,8 +97,11 @@ class Transformations(ABC):
95
97
  def _remove_trailing_whitespace(self, df):
96
98
  return df.apply(lambda x: x.str.strip() if x.dtype == "object" else x)
97
99
 
100
+ # def _normalize_column_names(self, df):
101
+ # return df.rename(columns=lambda x: re.sub("[^A-Za-z0-9_]+", "", x))
102
+
98
103
  def _set_series_id_column(self, df):
99
- self._target_category_columns_map = dict()
104
+ self._target_category_columns_map = {}
100
105
  if not self.target_category_columns:
101
106
  df[DataColumns.Series] = "Series 1"
102
107
  self.has_artificial_series = True
@@ -125,10 +130,10 @@ class Transformations(ABC):
125
130
  df[self.dt_column_name] = pd.to_datetime(
126
131
  df[self.dt_column_name], format=self.dt_column_format
127
132
  )
128
- except:
133
+ except Exception as ee:
129
134
  raise InvalidParameterError(
130
135
  f"Unable to determine the datetime type for column: {self.dt_column_name} in dataset: {self.name}. Please specify the format explicitly. (For example adding 'format: %d/%m/%Y' underneath 'name: {self.dt_column_name}' in the datetime_column section of the yaml file if you haven't already. For reference, here is the first datetime given: {df[self.dt_column_name].values[0]}"
131
- )
136
+ ) from ee
132
137
  return df
133
138
 
134
139
  def _set_multi_index(self, df):
@@ -242,7 +247,6 @@ class Transformations(ABC):
242
247
  "Class": "A",
243
248
  "Num": 2
244
249
  },
245
-
246
250
  }
247
251
  """
248
252
 
ads/opctl/operator/lowcode/common/utils.py CHANGED
@@ -1,42 +1,32 @@
1
1
  #!/usr/bin/env python
2
- # -*- coding: utf-8 -*--
3
2
 
4
3
  # Copyright (c) 2024 Oracle and/or its affiliates.
5
4
  # Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/
6
5
 
7
- import argparse
8
6
  import logging
9
7
  import os
10
8
  import shutil
11
9
  import sys
12
10
  import tempfile
13
- import time
14
- from string import Template
15
- from typing import Any, Dict, List, Tuple
16
- import pandas as pd
17
- from ads.opctl import logger
18
- import oracledb
11
+ from typing import List, Union
19
12
 
20
13
  import fsspec
21
- import yaml
22
- from typing import Union
14
+ import oracledb
15
+ import pandas as pd
23
16
 
17
+ from ads.common.object_storage_details import ObjectStorageDetails
24
18
  from ads.opctl import logger
19
+ from ads.opctl.operator.common.operator_config import OutputDirectory
25
20
  from ads.opctl.operator.lowcode.common.errors import (
26
- InputDataError,
27
21
  InvalidParameterError,
28
- PermissionsError,
29
- DataMismatchError,
30
22
  )
31
- from ads.opctl.operator.common.operator_config import OutputDirectory
32
- from ads.common.object_storage_details import ObjectStorageDetails
33
23
  from ads.secrets import ADBSecretKeeper
34
24
 
35
25
 
36
26
  def call_pandas_fsspec(pd_fn, filename, storage_options, **kwargs):
37
- if fsspec.utils.get_protocol(filename) == "file":
38
- return pd_fn(filename, **kwargs)
39
- elif fsspec.utils.get_protocol(filename) in ["http", "https"]:
27
+ if fsspec.utils.get_protocol(filename) == "file" or fsspec.utils.get_protocol(
28
+ filename
29
+ ) in ["http", "https"]:
40
30
  return pd_fn(filename, **kwargs)
41
31
 
42
32
  storage_options = storage_options or (
@@ -48,7 +38,7 @@ def call_pandas_fsspec(pd_fn, filename, storage_options, **kwargs):
48
38
 
49
39
  def load_data(data_spec, storage_options=None, **kwargs):
50
40
  if data_spec is None:
51
- raise InvalidParameterError(f"No details provided for this data source.")
41
+ raise InvalidParameterError("No details provided for this data source.")
52
42
  filename = data_spec.url
53
43
  format = data_spec.format
54
44
  columns = data_spec.columns
@@ -67,7 +57,7 @@ def load_data(data_spec, storage_options=None, **kwargs):
67
57
  if not format:
68
58
  _, format = os.path.splitext(filename)
69
59
  format = format[1:]
70
- if format in ["json", "clipboard", "excel", "csv", "feather", "hdf"]:
60
+ if format in ["json", "clipboard", "excel", "csv", "feather", "hdf", "parquet"]:
71
61
  read_fn = getattr(pd, f"read_{format}")
72
62
  data = call_pandas_fsspec(
73
63
  read_fn, filename, storage_options=storage_options
@@ -84,19 +74,31 @@ def load_data(data_spec, storage_options=None, **kwargs):
84
74
  with tempfile.TemporaryDirectory() as temp_dir:
85
75
  if vault_secret_id is not None:
86
76
  try:
87
- with ADBSecretKeeper.load_secret(vault_secret_id, wallet_dir=temp_dir) as adwsecret:
88
- if 'wallet_location' in adwsecret and 'wallet_location' not in connect_args:
89
- shutil.unpack_archive(adwsecret["wallet_location"], temp_dir)
90
- connect_args['wallet_location'] = temp_dir
91
- if 'user_name' in adwsecret and 'user' not in connect_args:
92
- connect_args['user'] = adwsecret['user_name']
93
- if 'password' in adwsecret and 'password' not in connect_args:
94
- connect_args['password'] = adwsecret['password']
95
- if 'service_name' in adwsecret and 'service_name' not in connect_args:
96
- connect_args['service_name'] = adwsecret['service_name']
77
+ with ADBSecretKeeper.load_secret(
78
+ vault_secret_id, wallet_dir=temp_dir
79
+ ) as adwsecret:
80
+ if (
81
+ "wallet_location" in adwsecret
82
+ and "wallet_location" not in connect_args
83
+ ):
84
+ shutil.unpack_archive(
85
+ adwsecret["wallet_location"], temp_dir
86
+ )
87
+ connect_args["wallet_location"] = temp_dir
88
+ if "user_name" in adwsecret and "user" not in connect_args:
89
+ connect_args["user"] = adwsecret["user_name"]
90
+ if "password" in adwsecret and "password" not in connect_args:
91
+ connect_args["password"] = adwsecret["password"]
92
+ if (
93
+ "service_name" in adwsecret
94
+ and "service_name" not in connect_args
95
+ ):
96
+ connect_args["service_name"] = adwsecret["service_name"]
97
97
 
98
98
  except Exception as e:
99
- raise Exception(f"Could not retrieve database credentials from vault {vault_secret_id}: {e}")
99
+ raise Exception(
100
+ f"Could not retrieve database credentials from vault {vault_secret_id}: {e}"
101
+ )
100
102
 
101
103
  con = oracledb.connect(**connect_args)
102
104
  if table_name is not None:
@@ -105,11 +107,11 @@ def load_data(data_spec, storage_options=None, **kwargs):
105
107
  data = pd.read_sql(sql, con)
106
108
  else:
107
109
  raise InvalidParameterError(
108
- f"Database `connect_args` provided without sql query or table name. Please specify either `sql` or `table_name`."
110
+ "Database `connect_args` provided without sql query or table name. Please specify either `sql` or `table_name`."
109
111
  )
110
112
  else:
111
113
  raise InvalidParameterError(
112
- f"No filename/url provided, and no connect_args provided. Please specify one of these if you want to read data from a file or a database respectively."
114
+ "No filename/url provided, and no connect_args provided. Please specify one of these if you want to read data from a file or a database respectively."
113
115
  )
114
116
  if columns:
115
117
  # keep only these columns, done after load because only CSV supports stream filtering
@@ -232,7 +234,7 @@ def human_time_friendly(seconds):
232
234
  accumulator.append(
233
235
  "{} {}{}".format(int(amount), unit, "" if amount == 1 else "s")
234
236
  )
235
- accumulator.append("{} secs".format(round(seconds, 2)))
237
+ accumulator.append(f"{round(seconds, 2)} secs")
236
238
  return ", ".join(accumulator)
237
239
 
238
240
 
@@ -248,9 +250,7 @@ def find_output_dirname(output_dir: OutputDirectory):
248
250
  unique_output_dir = f"{output_dir}_{counter}"
249
251
  counter += 1
250
252
  logger.warn(
251
- "Since the output directory was not specified, the output will be saved to {} directory.".format(
252
- unique_output_dir
253
- )
253
+ f"Since the output directory was not specified, the output will be saved to {unique_output_dir} directory."
254
254
  )
255
255
  return unique_output_dir
256
256
 
ads/opctl/operator/lowcode/forecast/model/automlx.py CHANGED
@@ -2,6 +2,7 @@
2
2
  # Copyright (c) 2023, 2024 Oracle and/or its affiliates.
3
3
  # Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/
4
4
  import logging
5
+ import os
5
6
  import traceback
6
7
 
7
8
  import numpy as np
@@ -80,10 +81,17 @@ class AutoMLXOperatorModel(ForecastOperatorBaseModel):
80
81
 
81
82
  from automlx import Pipeline, init
82
83
 
84
+ cpu_count = os.cpu_count()
83
85
  try:
86
+ if cpu_count < 4:
87
+ engine = "local"
88
+ engine_opts = None
89
+ else:
90
+ engine = "ray"
91
+ engine_opts = ({"ray_setup": {"_temp_dir": "/tmp/ray-temp"}},)
84
92
  init(
85
- engine="ray",
86
- engine_opts={"ray_setup": {"_temp_dir": "/tmp/ray-temp"}},
93
+ engine=engine,
94
+ engine_opts=engine_opts,
87
95
  loglevel=logging.CRITICAL,
88
96
  )
89
97
  except Exception as e:
ads/opctl/operator/lowcode/forecast/model/base_model.py CHANGED
@@ -148,8 +148,9 @@ class ForecastOperatorBaseModel(ABC):
148
148
  header_section = rc.Block(
149
149
  rc.Heading("Forecast Report", level=1),
150
150
  rc.Text(
151
- f"You selected the {self.spec.model} model.\n{model_description}\nBased on your dataset, you could have also selected any of the models: {SupportedModels.keys()}."
151
+ f"You selected the {self.spec.model} model.\nBased on your dataset, you could have also selected any of the models: {SupportedModels.keys()}."
152
152
  ),
153
+ model_description,
153
154
  rc.Group(
154
155
  rc.Metric(
155
156
  heading="Analysis was completed in ",
ads/opctl/operator/lowcode/forecast/model/factory.py CHANGED
@@ -11,6 +11,7 @@ from .automlx import AutoMLXOperatorModel
11
11
  from .autots import AutoTSOperatorModel
12
12
  from .base_model import ForecastOperatorBaseModel
13
13
  from .forecast_datasets import ForecastDatasets
14
+ from .ml_forecast import MLForecastOperatorModel
14
15
  from .neuralprophet import NeuralProphetOperatorModel
15
16
  from .prophet import ProphetOperatorModel
16
17
 
@@ -19,7 +20,7 @@ class UnSupportedModelError(Exception):
19
20
  def __init__(self, model_type: str):
20
21
  super().__init__(
21
22
  f"Model: `{model_type}` "
22
- f"is not supported. Supported models: {SupportedModels.values}"
23
+ f"is not supported. Supported models: {SupportedModels.values()}"
23
24
  )
24
25
 
25
26
 
@@ -32,7 +33,7 @@ class ForecastOperatorModelFactory:
32
33
  SupportedModels.Prophet: ProphetOperatorModel,
33
34
  SupportedModels.Arima: ArimaOperatorModel,
34
35
  SupportedModels.NeuralProphet: NeuralProphetOperatorModel,
35
- # SupportedModels.LGBForecast: MLForecastOperatorModel,
36
+ SupportedModels.LGBForecast: MLForecastOperatorModel,
36
37
  SupportedModels.AutoMLX: AutoMLXOperatorModel,
37
38
  SupportedModels.AutoTS: AutoTSOperatorModel,
38
39
  }
ads/opctl/operator/lowcode/forecast/model/prophet.py CHANGED
@@ -142,6 +142,9 @@ class ProphetOperatorModel(ForecastOperatorBaseModel):
142
142
  dt_column=self.spec.datetime_column.name,
143
143
  )
144
144
 
145
+ # if os.environ["OCI__IS_SPARK"]:
146
+ # pass
147
+ # else:
145
148
  Parallel(n_jobs=-1, require="sharedmem")(
146
149
  delayed(ProphetOperatorModel._train_model)(
147
150
  self, i, series_id, df, model_kwargs.copy()
@@ -354,7 +357,7 @@ class ProphetOperatorModel(ForecastOperatorBaseModel):
354
357
  logger.warn(f"Failed to generate Explanations with error: {e}.")
355
358
  logger.debug(f"Full Traceback: {traceback.format_exc()}")
356
359
 
357
- model_description = (
360
+ model_description = rc.Text(
358
361
  "Prophet is a procedure for forecasting time series data based on an additive "
359
362
  "model where non-linear trends are fit with yearly, weekly, and daily seasonality, "
360
363
  "plus holiday effects. It works best with time series that have strong seasonal "
ads/opctl/operator/lowcode/forecast/schema.yaml CHANGED
@@ -311,7 +311,7 @@ spec:
311
311
  missing_value_imputation:
312
312
  type: boolean
313
313
  required: false
314
- default: false
314
+ default: true
315
315
  outlier_treatment:
316
316
  type: boolean
317
317
  required: false
ads/opctl/operator/lowcode/pii/model/factory.py CHANGED
@@ -1,7 +1,6 @@
1
1
  #!/usr/bin/env python
2
- # -*- coding: utf-8 -*--
3
2
 
4
- # Copyright (c) 2023 Oracle and/or its affiliates.
3
+ # Copyright (c) 2023, 2024 Oracle and/or its affiliates.
5
4
  # Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/
6
5
 
7
6
  import uuid
@@ -18,7 +17,7 @@ class UnSupportedDetectorError(Exception):
18
17
  def __init__(self, dtype: str):
19
18
  super().__init__(
20
19
  f"Detector: `{dtype}` "
21
- f"is not supported. Supported models: {SupportedDetector.values}"
20
+ f"is not supported. Supported models: {SupportedDetector.values()}"
22
21
  )
23
22
 
24
23
 
@@ -42,7 +41,10 @@ class SpacyDetector(PiiBaseDetector):
42
41
  @runtime_dependency(module="scrubadub", install_from=OptionalDependency.PII)
43
42
  @runtime_dependency(module="scrubadub_spacy", install_from=OptionalDependency.PII)
44
43
  def construct(cls, entity, model, **kwargs):
45
- spacy_entity_detector = scrubadub_spacy.detectors.spacy.SpacyEntityDetector(
44
+ from scrubadub.filth import Filth
45
+ from scrubadub_spacy.detectors.spacy import SpacyEntityDetector
46
+
47
+ spacy_entity_detector = SpacyEntityDetector(
46
48
  named_entities=[entity],
47
49
  name=f"spacy_{uuid.uuid4()}",
48
50
  model=model,
@@ -50,7 +52,7 @@ class SpacyDetector(PiiBaseDetector):
50
52
  if entity.upper() not in cls.DEFAULT_SPACY_NAMED_ENTITIES:
51
53
  filth_cls = type(
52
54
  construct_filth_cls_name(entity),
53
- (scrubadub.filth.Filth,),
55
+ (Filth,),
54
56
  {"type": entity.upper()},
55
57
  )
56
58
  spacy_entity_detector.filth_cls_map[entity.upper()] = filth_cls
ads/opctl/operator/lowcode/recommender/model/base_model.py CHANGED
@@ -61,8 +61,9 @@ class RecommenderOperatorBaseModel(ABC):
61
61
  header_section = rc.Block(
62
62
  rc.Heading("Recommender Report", level=1),
63
63
  rc.Text(
64
- f"The recommendations was generated using {SupportedModels.SVD.upper()}. {model_description}"
64
+ f"The recommendations was generated using {SupportedModels.SVD.upper()}."
65
65
  ),
66
+ model_description,
66
67
  rc.Group(
67
68
  rc.Metric(
68
69
  heading="Recommendations was generated in ",
ads/opctl/operator/lowcode/recommender/model/factory.py CHANGED
@@ -1,7 +1,6 @@
1
1
  #!/usr/bin/env python
2
- # -*- coding: utf-8 -*--
3
2
 
4
- # Copyright (c) 2023 Oracle and/or its affiliates.
3
+ # Copyright (c) 2023, 2024 Oracle and/or its affiliates.
5
4
  # Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/
6
5
 
7
6
  from ..constant import SupportedModels
@@ -10,11 +9,12 @@ from .base_model import RecommenderOperatorBaseModel
10
9
  from .recommender_dataset import RecommenderDatasets
11
10
  from .svd import SVDOperatorModel
12
11
 
12
+
13
13
  class UnSupportedModelError(Exception):
14
14
  def __init__(self, model_type: str):
15
15
  super().__init__(
16
16
  f"Model: `{model_type}` "
17
- f"is not supported. Supported models: {SupportedModels.values}"
17
+ f"is not supported. Supported models: {SupportedModels.values()}"
18
18
  )
19
19
 
20
20
 
@@ -23,9 +23,7 @@ class RecommenderOperatorModelFactory:
23
23
  The factory class helps to instantiate proper model operator based on the model type.
24
24
  """
25
25
 
26
- _MAP = {
27
- SupportedModels.SVD: SVDOperatorModel
28
- }
26
+ _MAP = {SupportedModels.SVD: SVDOperatorModel}
29
27
 
30
28
  @classmethod
31
29
  def get_model(
ads/opctl/operator/lowcode/recommender/model/svd.py CHANGED
@@ -78,11 +78,11 @@ class SVDOperatorModel(RecommenderOperatorBaseModel):
78
78
  return recommendations_df, metric
79
79
 
80
80
  def _generate_report(self):
81
- model_description = """
82
- Singular Value Decomposition (SVD) is a matrix factorization technique used in recommendation systems to
83
- decompose a user-item interaction matrix into three constituent matrices. These matrices capture the
84
- latent factors that explain the observed interactions.
85
- """
81
+ model_description = rc.Text(
82
+ "Singular Value Decomposition (SVD) is a matrix factorization technique used in recommendation systems to \
83
+ decompose a user-item interaction matrix into three constituent matrices. These matrices capture the \
84
+ latent factors that explain the observed interactions."
85
+ )
86
86
  new_user_recommendations = self._get_recommendations(
87
87
  "__new_user__", self.spec.top_k
88
88
  )
{oracle_ads-2.12.6.dist-info → oracle_ads-2.12.8.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.3
2
2
  Name: oracle_ads
3
- Version: 2.12.6
3
+ Version: 2.12.8
4
4
  Summary: Oracle Accelerated Data Science SDK
5
5
  Keywords: Oracle Cloud Infrastructure,OCI,Machine Learning,ML,Artificial Intelligence,AI,Data Science,Cloud,Oracle
6
6
  Author: Oracle Data Science
@@ -109,7 +109,7 @@ Requires-Dist: py-cpuinfo ; extra == "opctl"
109
109
  Requires-Dist: rich ; extra == "opctl"
110
110
  Requires-Dist: fire ; extra == "opctl"
111
111
  Requires-Dist: cachetools ; extra == "opctl"
112
- Requires-Dist: huggingface_hub==0.23.4 ; extra == "opctl"
112
+ Requires-Dist: huggingface_hub==0.26.2 ; extra == "opctl"
113
113
  Requires-Dist: optuna==2.9.0 ; extra == "optuna"
114
114
  Requires-Dist: oracle_ads[viz] ; extra == "optuna"
115
115
  Requires-Dist: aiohttp ; extra == "pii"
@@ -130,6 +130,7 @@ Requires-Dist: pyspark>=3.0.0 ; extra == "spark"
130
130
  Requires-Dist: oracle_ads[viz] ; extra == "tensorflow"
131
131
  Requires-Dist: tensorflow<=2.15.1 ; extra == "tensorflow"
132
132
  Requires-Dist: arff ; extra == "testsuite"
133
+ Requires-Dist: autogen-agentchat~=0.2 ; extra == "testsuite"
133
134
  Requires-Dist: category_encoders==2.6.3 ; extra == "testsuite"
134
135
  Requires-Dist: cohere==4.53 ; extra == "testsuite"
135
136
  Requires-Dist: faiss-cpu ; extra == "testsuite"
{oracle_ads-2.12.6.dist-info → oracle_ads-2.12.8.dist-info}/RECORD CHANGED
@@ -4,7 +4,7 @@ ads/config.py,sha256=WGFgS5-dxqC9_iRJKakn-mh9545gHJpWB_Y0hT5O3ec,8016
4
4
  ads/aqua/__init__.py,sha256=IUKZAsxUGVicsyeSwsGwK6rAUJ1vIUW9ywduA3U22xc,1015
5
5
  ads/aqua/app.py,sha256=BQuQ9RERU0rKmn3N3xicKzYaXOd7xBwX1aVuVLNgw98,11993
6
6
  ads/aqua/cli.py,sha256=W-0kswzRDEilqHyw5GSMOrARgvOyPRtkEtpy54ew0Jo,3907
7
- ads/aqua/constants.py,sha256=UAfB1aQXMDJ4OQ98IeZb4l5TYhmCsnwXbS4Uylgnfro,2947
7
+ ads/aqua/constants.py,sha256=fTPrRuWaZB1_THZ2I1nOrwW1pQGpvMC44--Ok5Myr5Y,2978
8
8
  ads/aqua/data.py,sha256=7T7kdHGnEH6FXL_7jv_Da0CjEWXfjQZTFkaZWQikis4,932
9
9
  ads/aqua/ui.py,sha256=hGl4btUsMImkpzZ-Ae_WVVaRqfpdG_gUeHKD9E1nKbE,26195
10
10
  ads/aqua/common/__init__.py,sha256=rZrmh1nho40OCeabXCNWtze-mXi-PGKetcZdxZSn3_0,204
@@ -12,7 +12,7 @@ ads/aqua/common/decorator.py,sha256=JEN6Cy4DYgQbmIR3ShCjTuBMCnilDxq7jkYMJse1rcM,
12
12
  ads/aqua/common/entities.py,sha256=UsP8CczuifLOLr_gAhulh8VmgGSFir3rli1MMQ-CZhk,537
13
13
  ads/aqua/common/enums.py,sha256=HnaraHfkYmuqC5mEF7gyvQmqbOc6r_9EI2MF-cieb5o,2991
14
14
  ads/aqua/common/errors.py,sha256=Ev2xbaqkDqeCYDx4ZgOKOoM0sXsOXP3GIV6N1lhIUxM,3085
15
- ads/aqua/common/utils.py,sha256=n8Da5PO-28xj9WG7w8zSQDoLCRs_kQRIrk_yXW-W85o,37310
15
+ ads/aqua/common/utils.py,sha256=ipWRenYo3x_N9QN9pyverZXfxxd9fBIk4acmpZclwzY,37516
16
16
  ads/aqua/config/__init__.py,sha256=2a_1LI4jWtJpbic5_v4EoOUTXCAH7cmsy9BW5prDHjU,179
17
17
  ads/aqua/config/config.py,sha256=MNY4ttccaQdhxUyS1o367YIDl-U_AiSLVlgvzSd7JE4,944
18
18
  ads/aqua/config/evaluation/__init__.py,sha256=2a_1LI4jWtJpbic5_v4EoOUTXCAH7cmsy9BW5prDHjU,179
@@ -26,9 +26,9 @@ ads/aqua/dummy_data/oci_models.json,sha256=mxUU8o3plmAFfr06fQmIQuiGe2qFFBlUB7QNP
26
26
  ads/aqua/dummy_data/readme.md,sha256=AlBPt0HBSOFA5HbYVsFsdTm-BC3R5NRpcKrTxdjEnlI,1256
27
27
  ads/aqua/evaluation/__init__.py,sha256=Fd7WL7MpQ1FtJjlftMY2KHli5cz1wr5MDu3hGmV89a0,298
28
28
  ads/aqua/evaluation/constants.py,sha256=GvcXvPIw-VDKw4a8WNKs36uWdT-f7VJrWSpnnRnthGg,1533
29
- ads/aqua/evaluation/entities.py,sha256=3Ni4AIULLZ79rcaGdcZGx4HUxR2QjyJza6auYohPcFM,5466
29
+ ads/aqua/evaluation/entities.py,sha256=OqD2AfCO31ZO88hfORsjLdmJRqOjZrep2zVESEj6qJc,5488
30
30
  ads/aqua/evaluation/errors.py,sha256=qzR63YEIA8haCh4HcBHFFm7j4g6jWDfGszqrPkXx9zQ,4564
31
- ads/aqua/evaluation/evaluation.py,sha256=iopL7A6RNfqsaLg19xsg9lRnSZ0aI6mkx8kooDyulio,58016
31
+ ads/aqua/evaluation/evaluation.py,sha256=UGo6Ly148qw3br1tNo-fagvyipDi4P-2AEZ8T4m6GR4,57856
32
32
  ads/aqua/extension/__init__.py,sha256=mRArjU6UZpZYVr0qHSSkPteA_CKcCZIczOFaK421m9o,1453
33
33
  ads/aqua/extension/aqua_ws_msg_handler.py,sha256=soSRnIFx93JCFf6HsuF_BQEpJ2mre-IVQDUDKUKPijY,3392
34
34
  ads/aqua/extension/base_handler.py,sha256=Zbb-uSNLljRU5NPOndn3_lx8MN_1yxlF2GHVpBT-kWk,5233
@@ -40,7 +40,7 @@ ads/aqua/extension/errors.py,sha256=ojDolyr3_0UCCwKqPtiZZyMQuX35jr8h8MQRP6HcBs4,
40
40
  ads/aqua/extension/evaluation_handler.py,sha256=fJH73fa0xmkEiP8SxKL4A4dJgj-NoL3z_G-w_WW2zJs,4353
41
41
  ads/aqua/extension/evaluation_ws_msg_handler.py,sha256=dv0iwOSTxYj1kQ1rPEoDmGgFBzLUCLXq5h7rpmY2T1M,2098
42
42
  ads/aqua/extension/finetune_handler.py,sha256=abiDXNhkhtoV9hrYhCzwhDjdQKlqQ_KSqxKWntkvh3E,3288
43
- ads/aqua/extension/model_handler.py,sha256=Mlx12n8cssb7Cti0zpDNRHzIDk-xPC7pXzeHf8eY66E,10398
43
+ ads/aqua/extension/model_handler.py,sha256=Ec7NiU3Xvp_sZEvCvN6aVqeoiFrOpJMhDI5xtP_pSuw,10612
44
44
  ads/aqua/extension/models_ws_msg_handler.py,sha256=3CPfzWl1xfrE2Dpn_WYP9zY0kY5zlsAE8tU_6Y2-i18,1801
45
45
  ads/aqua/extension/ui_handler.py,sha256=3TibTMeqcsSWfPsorspFrhIV0PRh8_4FoWpudycT80g,10664
46
46
  ads/aqua/extension/ui_websocket_handler.py,sha256=oLFjaDrqkSERbhExdvxjLJX0oRcP-DVJ_aWn0qy0uvo,5084
@@ -53,12 +53,12 @@ ads/aqua/finetuning/entities.py,sha256=S7Ll_0WyWGh23my-6ow3vwHLDZqTel8CMCoE9oLow
53
53
  ads/aqua/finetuning/finetuning.py,sha256=mwKl8KA2Artp0dXzjXxxKn_UBnkYpNXMYN7ykrZcyEM,25145
54
54
  ads/aqua/model/__init__.py,sha256=j2iylvERdANxgrEDp7b_mLcKMz1CF5Go0qgYCiMwdos,278
55
55
  ads/aqua/model/constants.py,sha256=H239zDu3koa3UTdw-uQveXHX2NDwidclVcS4QIrCTJo,1593
56
- ads/aqua/model/entities.py,sha256=9SsdJfoBH7fDKGXQYs8pKLiZ-SqFnXaZrJod4FWU3mI,9670
56
+ ads/aqua/model/entities.py,sha256=wv1j18OG8NrmKLwIevyJ1ZVw965n3_3titOfwqyzlI8,9765
57
57
  ads/aqua/model/enums.py,sha256=t8GbK2nblIPm3gClR8W31RmbtTuqpoSzoN4W3JfD6AI,1004
58
- ads/aqua/model/model.py,sha256=IwfN9I3p7KDzhM5moiEBh9sxU6pGtIARKxJcyDOGslA,62711
58
+ ads/aqua/model/model.py,sha256=pFG4lkaqtovSpiu3BOCGT7scMtXt4rwup9Rof6Hl_CU,63908
59
59
  ads/aqua/modeldeployment/__init__.py,sha256=RJCfU1yazv3hVWi5rS08QVLTpTwZLnlC8wU8diwFjnM,391
60
60
  ads/aqua/modeldeployment/constants.py,sha256=lJF77zwxmlECljDYjwFAMprAUR_zctZHmawiP-4alLg,296
61
- ads/aqua/modeldeployment/deployment.py,sha256=8qx4cxzuln5FZpAXTZlvaHCio2fzFJxO4PrrAS1_b6A,30652
61
+ ads/aqua/modeldeployment/deployment.py,sha256=bk58MfjnrUiDUFwjBRJwBR_8b-6z8IuzTts2T0-pK3E,30729
62
62
  ads/aqua/modeldeployment/entities.py,sha256=7aoE2HemsFEvkQynAI4PCfZBcfPJrvbyZeEYvc7OIAA,5111
63
63
  ads/aqua/modeldeployment/inference.py,sha256=JPqzbHJoM-PpIU_Ft9lHudO9_1vFr7OPQ2GHjPoAufU,2142
64
64
  ads/aqua/training/__init__.py,sha256=w2DNWltXtASQgbrHyvKo0gMs5_chZoG-CSDMI4qe7i0,202
@@ -450,6 +450,8 @@ ads/llm/chat_template.py,sha256=t2QRfLLR_c_cq3JqABghWqiCSWjjuVc_mfEN-yVYG10,934
450
450
  ads/llm/deploy.py,sha256=5oZipFWU6q_9dCyt3WE4ic-n5rNZgQsYU_3lS_Vp_nY,2275
451
451
  ads/llm/requirements.txt,sha256=vaVwhWCteqmo0fRsEk6M8S1LQMjULU_Bt_syBAa2G-s,55
452
452
  ads/llm/serialize.py,sha256=WjQNMPACyR8nIh1dB7BLFUmqUrumld6vt91lg1DWzWI,7281
453
+ ads/llm/autogen/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
454
+ ads/llm/autogen/client_v02.py,sha256=-8fH-u769txu9eCfGi8XDkQ09DMPl5cCOmmywOFUguc,11127
453
455
  ads/llm/guardrails/__init__.py,sha256=sAqmLhogrLXb3xI7dPOj9HmSkpTnLh9wkzysuGd8AXk,204
454
456
  ads/llm/guardrails/base.py,sha256=scli_YSqDbArIJW5sA5PLjCd6G8_-dNUcpTybvQvZnk,16468
455
457
  ads/llm/guardrails/huggingface.py,sha256=4DFanCYb3R1SKYSFdcEyGH2ywQgf2yFDDZGJtOcoph0,1304
@@ -630,7 +632,7 @@ ads/opctl/operator/common/operator_config.py,sha256=1OEWqNOj7w4vpnSQ9lLxkGDOM4lI
630
632
  ads/opctl/operator/common/operator_loader.py,sha256=fpdrqDyOF9h4lsnGOsdDQsZl1xbdRFtqU6haaQJ15Ls,24146
631
633
  ads/opctl/operator/common/operator_schema.yaml,sha256=kIXKI9GCkwGhkby6THJR2zY6YK0waIgPfPxw85I7aG4,3046
632
634
  ads/opctl/operator/common/operator_yaml_generator.py,sha256=hH6wYj7oDYeAsE1grcIF4K1EE_RhguLXltxPbmB65iQ,5108
633
- ads/opctl/operator/common/utils.py,sha256=KQMTVimdm2A1igbE4r-u_aT_EQw7DkVQvDNFouYLmME,4971
635
+ ads/opctl/operator/common/utils.py,sha256=VC9DLNxipUzwEhkIIL50aQKhh2cjKNm8Nl_pwfXvVM4,5142
634
636
  ads/opctl/operator/common/data/synthetic.csv,sha256=zAxZ7NsWn0CKRWTW6IUKWWwdJs2OY_-yO1Nme_peFY4,769681
635
637
  ads/opctl/operator/lowcode/__init__.py,sha256=sAqmLhogrLXb3xI7dPOj9HmSkpTnLh9wkzysuGd8AXk,204
636
638
  ads/opctl/operator/lowcode/anomaly/MLoperator,sha256=mkf13TlGl64AZtgeNy4PVi81Z-0XEvntW2y7ME8wikw,509
@@ -648,8 +650,8 @@ ads/opctl/operator/lowcode/anomaly/model/anomaly_dataset.py,sha256=zpRRAtbjRgX9H
648
650
  ads/opctl/operator/lowcode/anomaly/model/anomaly_merlion.py,sha256=IT0g6wf2rZI-GFuuOgtESWYTE_D77P8y9YeRZ6ucguQ,5836
649
651
  ads/opctl/operator/lowcode/anomaly/model/automlx.py,sha256=40rY-mVYoLBmDw5uagayRoyYSkjsIY4U4LfyeU11AoA,3469
650
652
  ads/opctl/operator/lowcode/anomaly/model/autots.py,sha256=Ft6bLEXdpIMMDv4lLBzLhC2kRZki7zD9Jnu-LIPDDbw,4154
651
- ads/opctl/operator/lowcode/anomaly/model/base_model.py,sha256=Lbwyt0bCVaF80mSbZPq_05-Dw4oqX3RK6lF7S8QJeEI,15562
652
- ads/opctl/operator/lowcode/anomaly/model/factory.py,sha256=yld9CI-ZZJO2dDB24aOm6SLXbibNMeK1NQEZHpGNdfY,4144
653
+ ads/opctl/operator/lowcode/anomaly/model/base_model.py,sha256=RUNyoGPKi09h4zMQ__NNyremGVZKvllH4_5_8ftNVDA,15533
654
+ ads/opctl/operator/lowcode/anomaly/model/factory.py,sha256=EVYgEGvVTMNFt-tDP6SH3qDoVBAZD3D_Jlw6Xu9zdQU,4148
653
655
  ads/opctl/operator/lowcode/anomaly/model/isolationforest.py,sha256=e_C_I6d6PVojPoHz_D5r8nC_JctTYooVVKFlcX5kkls,2657
654
656
  ads/opctl/operator/lowcode/anomaly/model/oneclasssvm.py,sha256=eejgAtxwjGzWJBVdgp0oZHM4NCLAQh-AksGE0YuM7D4,2557
655
657
  ads/opctl/operator/lowcode/anomaly/model/randomcutforest.py,sha256=K8fVcG952bSUkgoXm7uU1jUUyBd8jvHprkbM4a7i_Xs,4329
@@ -658,8 +660,8 @@ ads/opctl/operator/lowcode/common/__init__.py,sha256=rZrmh1nho40OCeabXCNWtze-mXi
658
660
  ads/opctl/operator/lowcode/common/const.py,sha256=1dUhgup4L_U0s6BSYmgLPpZAe6xqfSHPPoLqW0j46U8,265
659
661
  ads/opctl/operator/lowcode/common/data.py,sha256=nKwE0ubF9fTHFOls5uQ3BBpcPNRtwvGW3UGK-JjAm84,4107
660
662
  ads/opctl/operator/lowcode/common/errors.py,sha256=LvQ_Qzh6cqD6uP91DMFFVXPrcc3010EE8LfBH-CH0ho,1534
661
- ads/opctl/operator/lowcode/common/transformations.py,sha256=Minukbv9Ja1yNJYgTQICU9kykIdbBELhrFFyWECgtes,9630
662
- ads/opctl/operator/lowcode/common/utils.py,sha256=jQIyjtg4i4hfrhBIGhSOzkry2-ziZrn8cBj8lcTv66E,9292
663
+ ads/opctl/operator/lowcode/common/transformations.py,sha256=WQsVKmYmPecZTsGvabUDCBzuNJfzpQVSe93nzElRnIc,9804
664
+ ads/opctl/operator/lowcode/common/utils.py,sha256=XadRZMiIgAUdXw7rDXl4xUPfta9Z_NQsQbDQIR-L73Q,9327
663
665
  ads/opctl/operator/lowcode/feature_store_marketplace/MLoperator,sha256=JO5ulr32WsFnbpk1KN97h8-D70jcFt1kRQ08UMkP4rU,346
664
666
  ads/opctl/operator/lowcode/feature_store_marketplace/README.md,sha256=fN9ROzOPdEZdRgSP_uYvAmD5bD983NC7Irfe_D-mvrw,1356
665
667
  ads/opctl/operator/lowcode/feature_store_marketplace/__init__.py,sha256=rZrmh1nho40OCeabXCNWtze-mXi-PGKetcZdxZSn3_0,204
@@ -684,18 +686,18 @@ ads/opctl/operator/lowcode/forecast/environment.yaml,sha256=eVMf9pcjADI14_GRGdZO
684
686
  ads/opctl/operator/lowcode/forecast/errors.py,sha256=X9zuV2Lqb5N9FuBHHshOFYyhvng5r9KGLHnQijZ5b8c,911
685
687
  ads/opctl/operator/lowcode/forecast/model_evaluator.py,sha256=HssIlfJlJt5HetwzT87rDeRYRwJAXG1yoSjT4SUB8D0,9266
686
688
  ads/opctl/operator/lowcode/forecast/operator_config.py,sha256=vG7n-RIiazujH0UtJ0uarx9IKDIAS0b4WcCo1dNLVL0,6422
687
- ads/opctl/operator/lowcode/forecast/schema.yaml,sha256=twmsn0wPPkgdVk8tKPZL3zBlxqecuXL0GSlIz3I8ZEI,10136
689
+ ads/opctl/operator/lowcode/forecast/schema.yaml,sha256=r9vll4zNn3maiEXO0aQdt4bQ9l9DmK_Jy7lpidhVubc,10135
688
690
  ads/opctl/operator/lowcode/forecast/utils.py,sha256=B7X3vLxmbx3MyUQxoplhQCMb0bgmPk2g-KN-OY768E8,13908
689
691
  ads/opctl/operator/lowcode/forecast/model/__init__.py,sha256=sAqmLhogrLXb3xI7dPOj9HmSkpTnLh9wkzysuGd8AXk,204
690
692
  ads/opctl/operator/lowcode/forecast/model/arima.py,sha256=lU7NlpXI1-g-O_1rGJLlEL17_ruGXAdzzY7H8nFRvGQ,10943
691
- ads/opctl/operator/lowcode/forecast/model/automlx.py,sha256=5_mVPpGqXUXSIKW9dM3fh0mYv-B_7XZu03yqFPrzHdc,14740
693
+ ads/opctl/operator/lowcode/forecast/model/automlx.py,sha256=lrNktixdaJJHHXqIrSmgzCZKEzB_CirQcuquf73AYUQ,14978
692
694
  ads/opctl/operator/lowcode/forecast/model/autots.py,sha256=Y9_EAfDD5r6SPZq7iGp7YMh-vH0lwAGNpyNT2sm7cqo,13027
693
- ads/opctl/operator/lowcode/forecast/model/base_model.py,sha256=b7ZVnGKTIULBWE5W_pQQGzcLM4g2YZIUEH3P94L41aQ,30988
694
- ads/opctl/operator/lowcode/forecast/model/factory.py,sha256=RrE6JJcUmkypjD6IQOR53I9GCg7jQO380r53oLmVK6A,3439
695
+ ads/opctl/operator/lowcode/forecast/model/base_model.py,sha256=H9yQ1DzyfGqnggEaSWgUJjW_bxml6Kto62gohuEO9y4,31006
696
+ ads/opctl/operator/lowcode/forecast/model/factory.py,sha256=hSRPPWdpIRSMYPUFMIUuxc2TPZt-SG18MiqhtdfL3mg,3488
695
697
  ads/opctl/operator/lowcode/forecast/model/forecast_datasets.py,sha256=GCwX9Udh4U79wBNG5bjSYabgRDO0u-ElVJkSC_HcBeA,16563
696
698
  ads/opctl/operator/lowcode/forecast/model/ml_forecast.py,sha256=NSZ2L6gRw4S68BUF0Vyu-cUPSsq8LRxgoVajW9Ra63k,9640
697
699
  ads/opctl/operator/lowcode/forecast/model/neuralprophet.py,sha256=rt4106o9qIKwoHnYICB9sOnQ8ujXyI83eoFY26KzsOU,18774
698
- ads/opctl/operator/lowcode/forecast/model/prophet.py,sha256=s9tWZdD1g50lnu5YgER2SNiXsQ3y51Q-XwYxIsWmmiQ,14284
700
+ ads/opctl/operator/lowcode/forecast/model/prophet.py,sha256=h3So9XYBZPRNqMvYNpU5bxbHgvwgpspuATALCuIWHeM,14368
699
701
  ads/opctl/operator/lowcode/pii/MLoperator,sha256=GKCuiXRwfGLyBqELbtgtg-kJPtNWNVA-kSprYTqhF64,6406
700
702
  ads/opctl/operator/lowcode/pii/README.md,sha256=2P3tpKv6v__Eehj6iLfTXgyDhS4lmi1BTfEdmJhT0K4,9237
701
703
  ads/opctl/operator/lowcode/pii/__init__.py,sha256=sAqmLhogrLXb3xI7dPOj9HmSkpTnLh9wkzysuGd8AXk,204
@@ -708,7 +710,7 @@ ads/opctl/operator/lowcode/pii/operator_config.py,sha256=pXCo97VCq6S7AvfeXW6BUUD
708
710
  ads/opctl/operator/lowcode/pii/schema.yaml,sha256=bgXBVJdOZWff_tthQvkuDE1P0KqeYlqbGyGkHNdln0Y,2735
709
711
  ads/opctl/operator/lowcode/pii/utils.py,sha256=z8VBPRURj_62xifxzijDElxvQNszNUmP694MQ2ErLLw,1130
710
712
  ads/opctl/operator/lowcode/pii/model/__init__.py,sha256=sAqmLhogrLXb3xI7dPOj9HmSkpTnLh9wkzysuGd8AXk,204
711
- ads/opctl/operator/lowcode/pii/model/factory.py,sha256=Fuq8iiN_GkyGBJlGvJJcN0byAfy0bSqKVgkgOE9B2XQ,2452
713
+ ads/opctl/operator/lowcode/pii/model/factory.py,sha256=mM-xifHwVa1tGHcTcvgySUrGsPmIqmOavf8i_dVbkRQ,2502
712
714
  ads/opctl/operator/lowcode/pii/model/guardrails.py,sha256=--GUFt-zlVyJY5WQZNMHjQDlVfVy-tYeXubgvYN-H-U,6246
713
715
  ads/opctl/operator/lowcode/pii/model/pii.py,sha256=hbOomsCNgj7uZNOdUIja3rE-iTGhh9P2hKh8xrtpXR4,5110
714
716
  ads/opctl/operator/lowcode/pii/model/report.py,sha256=vDivP5dWWBoIzDpT1ww2WMBZKybX6DigaPSCW46F__Q,16361
@@ -728,10 +730,10 @@ ads/opctl/operator/lowcode/recommender/environment.yaml,sha256=m3jYkrFpkQfL1dpiA
728
730
  ads/opctl/operator/lowcode/recommender/operator_config.py,sha256=HE30TuiXbVrC6Uy7G2mw4KU_xRSjzgTQHlMNumQauqE,2920
729
731
  ads/opctl/operator/lowcode/recommender/schema.yaml,sha256=OvaQRc56sOO-NNrF2hYU7JEsD-fNkr2LJwP7Nzj_bo8,6029
730
732
  ads/opctl/operator/lowcode/recommender/utils.py,sha256=-DgqObJ3G54wZw04aLvA9zwI_NUqwgQ7jaPHQP_6Q9g,401
731
- ads/opctl/operator/lowcode/recommender/model/base_model.py,sha256=wraH55srwQ9FfWfXTse1vOVTG-OOH8XZRdSDKP91DYM,7395
732
- ads/opctl/operator/lowcode/recommender/model/factory.py,sha256=CHCXR3-6HRSKJG3tCjdgBvUODtQ9C2zU0Nq-0zVb6p8,1798
733
+ ads/opctl/operator/lowcode/recommender/model/base_model.py,sha256=FwEadpqyETznrrRt1hvgVz_Rn7JiYbiBkCUFaIewajQ,7410
734
+ ads/opctl/operator/lowcode/recommender/model/factory.py,sha256=L6llG5EUP-yuCbqh8SSk08GONt-91nsplGnynV7DkCs,1768
733
735
  ads/opctl/operator/lowcode/recommender/model/recommender_dataset.py,sha256=QzcfA4Dzp412NCiNhFrJY2Rqbzlmneb1SAb98m_L_ms,870
734
- ads/opctl/operator/lowcode/recommender/model/svd.py,sha256=unPfnyvZk3wllN07syTjJAvVck3WpQ10XHc3a5_hPQY,4367
736
+ ads/opctl/operator/lowcode/recommender/model/svd.py,sha256=tGi6QccsU_OqrxTjvf6oWY5I3KfVrdjtzM5RwiNzTr0,4372
735
737
  ads/opctl/operator/runtime/__init__.py,sha256=sAqmLhogrLXb3xI7dPOj9HmSkpTnLh9wkzysuGd8AXk,204
736
738
  ads/opctl/operator/runtime/const.py,sha256=FSgllXcXKIRCbYSJiVAP8gZGpH7hGrEf3enYmUBrAIk,522
737
739
  ads/opctl/operator/runtime/container_runtime_schema.yaml,sha256=FU8Jjq1doq1eYW8b5YjlfSmWKnBN-lAuEk289_P9QFU,1235
@@ -813,8 +815,8 @@ ads/type_discovery/unknown_detector.py,sha256=yZuYQReO7PUyoWZE7onhhtYaOg6088wf1y
813
815
  ads/type_discovery/zipcode_detector.py,sha256=3AlETg_ZF4FT0u914WXvTT3F3Z6Vf51WiIt34yQMRbw,1421
814
816
  ads/vault/__init__.py,sha256=x9tMdDAOdF5iDHk9u2di_K-ze5Nq068x25EWOBoWwqY,245
815
817
  ads/vault/vault.py,sha256=hFBkpYE-Hfmzu1L0sQwUfYcGxpWmgG18JPndRl0NOXI,8624
816
- oracle_ads-2.12.6.dist-info/entry_points.txt,sha256=9VFnjpQCsMORA4rVkvN8eH6D3uHjtegb9T911t8cqV0,35
817
- oracle_ads-2.12.6.dist-info/LICENSE.txt,sha256=zoGmbfD1IdRKx834U0IzfFFFo5KoFK71TND3K9xqYqo,1845
818
- oracle_ads-2.12.6.dist-info/WHEEL,sha256=CpUCUxeHQbRN5UGRQHYRJorO5Af-Qy_fHMctcQ8DSGI,82
819
- oracle_ads-2.12.6.dist-info/METADATA,sha256=pHpTpBQerpdiKY6McAZy4M1hQPHGZMKFLWEaFL1YrO0,16221
820
- oracle_ads-2.12.6.dist-info/RECORD,,
818
+ oracle_ads-2.12.8.dist-info/entry_points.txt,sha256=9VFnjpQCsMORA4rVkvN8eH6D3uHjtegb9T911t8cqV0,35
819
+ oracle_ads-2.12.8.dist-info/LICENSE.txt,sha256=zoGmbfD1IdRKx834U0IzfFFFo5KoFK71TND3K9xqYqo,1845
820
+ oracle_ads-2.12.8.dist-info/WHEEL,sha256=CpUCUxeHQbRN5UGRQHYRJorO5Af-Qy_fHMctcQ8DSGI,82
821
+ oracle_ads-2.12.8.dist-info/METADATA,sha256=aKjc1EqBFSoyK7K30kgMzGfqvemx9-25hmyl5mZZ-xU,16282
822
+ oracle_ads-2.12.8.dist-info/RECORD,,