datarobot-genai 0.2.13__py3-none-any.whl → 0.2.19__py3-none-any.whl

This diff shows the changes between publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.
@@ -14,51 +14,79 @@
 
  import logging
  import os
+ from typing import Annotated
+
+ from fastmcp.exceptions import ToolError
+ from fastmcp.tools.tool import ToolResult
 
  from datarobot_genai.drmcp.core.clients import get_sdk_client
  from datarobot_genai.drmcp.core.mcp_instance import dr_mcp_tool
+ from datarobot_genai.drmcp.core.utils import is_valid_url
 
  logger = logging.getLogger(__name__)
 
 
- @dr_mcp_tool(tags={"data", "management", "upload"})
- async def upload_dataset_to_ai_catalog(file_path: str) -> str:
-     """
-     Upload a dataset to the DataRobot AI Catalog.
-
-     Args:
-         file_path: Path to the file to upload.
+ @dr_mcp_tool(tags={"predictive", "data", "write", "upload", "catalog"})
+ async def upload_dataset_to_ai_catalog(
+     file_path: Annotated[str, "The path to the dataset file to upload."] | None = None,
+     file_url: Annotated[str, "The URL to the dataset file to upload."] | None = None,
+ ) -> ToolError | ToolResult:
+     """Upload a dataset to the DataRobot AI Catalog / Data Registry."""
+     if not file_path and not file_url:
+         return ToolError("Either file_path or file_url must be provided.")
+     if file_path and file_url:
+         return ToolError("Please provide either file_path or file_url, not both.")
 
-     Returns
-     -------
-     A string summary of the upload result.
-     """
+     # Get client
      client = get_sdk_client()
-     if not os.path.exists(file_path):
-         logger.error(f"File not found: {file_path}")
-         return f"File not found: {file_path}"
-     catalog_item = client.Dataset.create_from_file(file_path)
-     logger.info(f"Successfully uploaded dataset: {catalog_item.id}")
-     return f"AI Catalog ID: {catalog_item.id}"
-
-
- @dr_mcp_tool(tags={"data", "management", "list"})
- async def list_ai_catalog_items() -> str:
-     """
-     List all AI Catalog items (datasets) for the authenticated user.
-
-     Returns
-     -------
-     A string summary of the AI Catalog items with their IDs and names.
-     """
+     catalog_item = None
+     # If file path is provided, create dataset from file.
+     if file_path:
+         # Does file exist?
+         if not os.path.exists(file_path):
+             logger.error("File not found: %s", file_path)
+             return ToolError(f"File not found: {file_path}")
+         catalog_item = client.Dataset.create_from_file(file_path)
+     else:
+         # Does URL exist?
+         if file_url is None or not is_valid_url(file_url):
+             logger.error("Invalid file URL: %s", file_url)
+             return ToolError(f"Invalid file URL: {file_url}")
+         catalog_item = client.Dataset.create_from_url(file_url)
+
+     if not catalog_item:
+         return ToolError("Failed to upload dataset.")
+
+     return ToolResult(
+         content=f"Successfully uploaded dataset: {catalog_item.id}",
+         structured_content={
+             "dataset_id": catalog_item.id,
+             "dataset_version_id": catalog_item.version_id,
+             "dataset_name": catalog_item.name,
+         },
+     )
+
+
+ @dr_mcp_tool(tags={"predictive", "data", "read", "list", "catalog"})
+ async def list_ai_catalog_items() -> ToolResult:
+     """List all AI Catalog items (datasets) for the authenticated user."""
      client = get_sdk_client()
      datasets = client.Dataset.list()
+
      if not datasets:
          logger.info("No AI Catalog items found")
-         return "No AI Catalog items found."
-     result = "\n".join(f"{ds.id}: {ds.name}" for ds in datasets)
-     logger.info(f"Found {len(datasets)} AI Catalog items")
-     return result
+         return ToolResult(
+             content="No AI Catalog items found.",
+             structured_content={"datasets": []},
+         )
+
+     return ToolResult(
+         content=f"Found {len(datasets)} AI Catalog items.",
+         structured_content={
+             "datasets": [{"id": ds.id, "name": ds.name} for ds in datasets],
+             "count": len(datasets),
+         },
+     )
 
 
  # from fastmcp import Context
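The upload tool now accepts either a local `file_path` or a `file_url` and returns a `ToolResult` with structured content (or a `ToolError`). Below is a minimal client-side sketch of calling it over MCP with fastmcp's `Client`; the server URL and the registered tool name are assumptions for illustration, not part of the package.

```python
# Hypothetical caller, not part of the package: exercises the updated upload tool.
import asyncio

from fastmcp import Client


async def main() -> None:
    # Assumed MCP endpoint; replace with the real server address.
    async with Client("http://localhost:8080/mcp") as client:
        # Pass exactly one of file_path / file_url; both or neither now yields a ToolError.
        result = await client.call_tool(
            "upload_dataset_to_ai_catalog",
            {"file_url": "https://example.com/data/churn.csv"},
        )
        # On success the structured content carries dataset_id, dataset_version_id, dataset_name.
        print(result)


asyncio.run(main())
```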
@@ -21,7 +21,6 @@ from nat.data_models.api_server import ChatRequest
  from nat.data_models.api_server import ChatResponse
  from nat.data_models.intermediate_step import IntermediateStep
  from nat.data_models.intermediate_step import IntermediateStepType
- from nat.runtime.loader import load_workflow
  from nat.utils.type_utils import StrPath
  from openai.types.chat import CompletionCreateParams
  from ragas import MultiTurnSample
@@ -34,6 +33,8 @@ from datarobot_genai.core.agents.base import InvokeReturn
  from datarobot_genai.core.agents.base import UsageMetrics
  from datarobot_genai.core.agents.base import extract_user_prompt_content
  from datarobot_genai.core.agents.base import is_streaming
+ from datarobot_genai.core.mcp.common import MCPConfig
+ from datarobot_genai.nat.helpers import load_workflow
 
  logger = logging.getLogger(__name__)
 
@@ -166,17 +167,24 @@ class NatAgent(BaseAgent[None]):
          # Print commands may need flush=True to ensure they are displayed in real-time.
          print("Running agent with user prompt:", chat_request.messages[0].content, flush=True)
 
+         mcp_config = MCPConfig(
+             authorization_context=self.authorization_context,
+             forwarded_headers=self.forwarded_headers,
+         )
+         server_config = mcp_config.server_config
+         headers = server_config["headers"] if server_config else None
+
          if is_streaming(completion_create_params):
 
              async def stream_generator() -> AsyncGenerator[
                  tuple[str, MultiTurnSample | None, UsageMetrics], None
              ]:
-                 usage_metrics: UsageMetrics = {
+                 default_usage_metrics: UsageMetrics = {
                      "completion_tokens": 0,
                      "prompt_tokens": 0,
                      "total_tokens": 0,
                  }
-                 async with load_workflow(self.workflow_path) as workflow:
+                 async with load_workflow(self.workflow_path, headers=headers) as workflow:
                      async with workflow.run(chat_request) as runner:
                          intermediate_future = pull_intermediate_structured()
                          async for result in runner.result_stream():
@@ -188,7 +196,7 @@ class NatAgent(BaseAgent[None]):
                              yield (
                                  result_text,
                                  None,
-                                 usage_metrics,
+                                 default_usage_metrics,
                              )
 
                          steps = await intermediate_future
@@ -197,6 +205,11 @@ class NatAgent(BaseAgent[None]):
                              for step in steps
                              if step.event_type == IntermediateStepType.LLM_END
                          ]
+                         usage_metrics: UsageMetrics = {
+                             "completion_tokens": 0,
+                             "prompt_tokens": 0,
+                             "total_tokens": 0,
+                         }
                          for step in llm_end_steps:
                              if step.usage_info:
                                  token_usage = step.usage_info.token_usage
@@ -210,7 +223,7 @@ class NatAgent(BaseAgent[None]):
              return stream_generator()
 
          # Create and invoke the NAT (Nemo Agent Toolkit) Agentic Workflow with the inputs
-         result, steps = await self.run_nat_workflow(self.workflow_path, chat_request)
+         result, steps = await self.run_nat_workflow(self.workflow_path, chat_request, headers)
 
          llm_end_steps = [step for step in steps if step.event_type == IntermediateStepType.LLM_END]
          usage_metrics: UsageMetrics = {
@@ -234,7 +247,7 @@ class NatAgent(BaseAgent[None]):
          return result_text, pipeline_interactions, usage_metrics
 
      async def run_nat_workflow(
-         self, workflow_path: StrPath, chat_request: ChatRequest
+         self, workflow_path: StrPath, chat_request: ChatRequest, headers: dict[str, str] | None
      ) -> tuple[ChatResponse | str, list[IntermediateStep]]:
          """Run the NAT workflow with the provided config file and input string.
 
@@ -247,7 +260,7 @@ class NatAgent(BaseAgent[None]):
          ChatResponse | str: The result from the NAT workflow
          list[IntermediateStep]: The list of intermediate steps
          """
-         async with load_workflow(workflow_path) as workflow:
+         async with load_workflow(workflow_path, headers=headers) as workflow:
              async with workflow.run(chat_request) as runner:
                  intermediate_future = pull_intermediate_structured()
                  runner_outputs = await runner.result()
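Taken together, the agent changes thread headers forwarded with the incoming request into the NAT workflow. A rough sketch of that flow, assuming (as in the hunks above) that `MCPConfig.server_config` returns a dict with a `"headers"` entry; the `invoke` wrapper is hypothetical:

```python
# Sketch of the header-injection flow implemented by the agent changes above.
from datarobot_genai.core.mcp.common import MCPConfig
from datarobot_genai.nat.helpers import load_workflow


async def invoke(agent, workflow_path, chat_request):
    # Forwarded request headers are folded into the MCP server config ...
    mcp_config = MCPConfig(
        authorization_context=agent.authorization_context,
        forwarded_headers=agent.forwarded_headers,
    )
    server_config = mcp_config.server_config
    headers = server_config["headers"] if server_config else None

    # ... and injected into the NAT workflow when it is loaded.
    async with load_workflow(workflow_path, headers=headers) as workflow:
        async with workflow.run(chat_request) as runner:
            return await runner.result()
```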
@@ -0,0 +1,87 @@
1
+ # Copyright 2025 DataRobot, Inc. and its affiliates.
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+ from collections.abc import AsyncGenerator
16
+ from contextlib import asynccontextmanager
17
+
18
+ from nat.builder.workflow import Workflow
19
+ from nat.builder.workflow_builder import WorkflowBuilder
20
+ from nat.data_models.config import Config
21
+ from nat.runtime.loader import PluginTypes
22
+ from nat.runtime.loader import discover_and_register_plugins
23
+ from nat.runtime.session import SessionManager
24
+ from nat.utils.data_models.schema_validator import validate_schema
25
+ from nat.utils.io.yaml_tools import yaml_load
26
+ from nat.utils.type_utils import StrPath
27
+
28
+
29
+ def load_config(config_file: StrPath, headers: dict[str, str] | None = None) -> Config:
30
+ """
31
+ Load a NAT configuration file with injected headers. It ensures that all plugins are
32
+ loaded and then validates the configuration file against the Config schema.
33
+
34
+ Parameters
35
+ ----------
36
+ config_file : StrPath
37
+ The path to the configuration file
38
+
39
+ Returns
40
+ -------
41
+ Config
42
+ The validated Config object
43
+ """
44
+ # Ensure all of the plugins are loaded
45
+ discover_and_register_plugins(PluginTypes.CONFIG_OBJECT)
46
+
47
+ config_yaml = yaml_load(config_file)
48
+
49
+ add_headers_to_datarobot_mcp_auth(config_yaml, headers)
50
+
51
+ # Validate configuration adheres to NAT schemas
52
+ validated_nat_config = validate_schema(config_yaml, Config)
53
+
54
+ return validated_nat_config
55
+
56
+
57
+ def add_headers_to_datarobot_mcp_auth(config_yaml: dict, headers: dict[str, str] | None) -> None:
58
+ if headers:
59
+ if authentication := config_yaml.get("authentication"):
60
+ for auth_name in authentication:
61
+ auth_config = authentication[auth_name]
62
+ if auth_config.get("_type") == "datarobot_mcp_auth":
63
+ auth_config["headers"] = headers
64
+
65
+
66
+ @asynccontextmanager
67
+ async def load_workflow(
68
+ config_file: StrPath, max_concurrency: int = -1, headers: dict[str, str] | None = None
69
+ ) -> AsyncGenerator[Workflow, None]:
70
+ """
71
+ Load the NAT configuration file and create a Runner object. This is the primary entry point for
72
+ running NAT workflows with injected headers.
73
+
74
+ Parameters
75
+ ----------
76
+ config_file : StrPath
77
+ The path to the configuration file
78
+ max_concurrency : int, optional
79
+ The maximum number of parallel workflow invocations to support. Specifying 0 or -1 will
80
+ allow an unlimited count, by default -1
81
+ """
82
+ # Load the config object
83
+ config = load_config(config_file, headers=headers)
84
+
85
+ # Must yield the workflow function otherwise it cleans up
86
+ async with WorkflowBuilder.from_config(config=config) as workflow:
87
+ yield SessionManager(await workflow.build(), max_concurrency=max_concurrency)
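In the new helper module, `add_headers_to_datarobot_mcp_auth` mutates the parsed config in place, attaching the forwarded headers only to authentication providers whose `_type` is `datarobot_mcp_auth`. A minimal sketch with a made-up in-memory config dict (the `"other"` entry and its `api_key` type are illustrative assumptions):

```python
# Minimal sketch of the in-place header injection performed by the helper above.
from datarobot_genai.nat.helpers import add_headers_to_datarobot_mcp_auth

config_yaml = {
    "authentication": {
        "datarobot": {"_type": "datarobot_mcp_auth"},
        "other": {"_type": "api_key"},  # hypothetical, untouched entry
    }
}
add_headers_to_datarobot_mcp_auth(config_yaml, {"Authorization": "Bearer <token>"})

# Only the datarobot_mcp_auth entry picks up the forwarded headers.
assert config_yaml["authentication"]["datarobot"]["headers"] == {
    "Authorization": "Bearer <token>"
}
assert "headers" not in config_yaml["authentication"]["other"]
```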
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: datarobot-genai
- Version: 0.2.13
+ Version: 0.2.19
  Summary: Generic helpers for GenAI
  Project-URL: Homepage, https://github.com/datarobot-oss/datarobot-genai
  Author: DataRobot, Inc.
@@ -43,7 +43,7 @@ datarobot_genai/drmcp/core/server_life_cycle.py,sha256=WKGJWGxalvqxupzJ2y67Kklc_
  datarobot_genai/drmcp/core/telemetry.py,sha256=NEkSTC1w6uQgtukLHI-sWvR4EMgInysgATcvfQ5CplM,15378
  datarobot_genai/drmcp/core/tool_config.py,sha256=5OkC3e-vekZtdqg-DbwcyadGSrDxmZqSDey2YyGVn1M,2978
  datarobot_genai/drmcp/core/tool_filter.py,sha256=tLOcG50QBvS48cOVHM6OqoODYiiS6KeM_F-2diaHkW0,2858
- datarobot_genai/drmcp/core/utils.py,sha256=dSjrayWVcnC5GxQcvOIOSHaoEymPIVtG_s2ZBMlmSOw,4336
+ datarobot_genai/drmcp/core/utils.py,sha256=EvfpqKZ3tECMoxpIQ_tA_3rOgy6KJEYKC0lWZo_Daag,4517
  datarobot_genai/drmcp/core/dynamic_prompts/__init__.py,sha256=y4yapzp3KnFMzSR6HlNDS4uSuyNT7I1iPBvaCLsS0sU,577
  datarobot_genai/drmcp/core/dynamic_prompts/controllers.py,sha256=AGJlKqgHRO0Kd7Gl-Ulw9KYBgzjTTFXWBvOUF-SuKUI,5454
  datarobot_genai/drmcp/core/dynamic_prompts/dr_lib.py,sha256=4j33AKmq7kQX_EE2_RWAbP8-K5KPVEvpUginTWn_MHs,2701
@@ -67,24 +67,26 @@ datarobot_genai/drmcp/core/memory_management/__init__.py,sha256=y4yapzp3KnFMzSR6
  datarobot_genai/drmcp/core/memory_management/manager.py,sha256=gmc_SQs12YQFMWl2UbfWR40QmLV9XuCnwPZgQwKWrbA,30552
  datarobot_genai/drmcp/core/memory_management/memory_tools.py,sha256=AxzpwOlldmhhDfKZcAxaGs7Xih2SCe0XbQuXX5nQczI,6397
  datarobot_genai/drmcp/test_utils/__init__.py,sha256=y4yapzp3KnFMzSR6HlNDS4uSuyNT7I1iPBvaCLsS0sU,577
- datarobot_genai/drmcp/test_utils/integration_mcp_server.py,sha256=MdoR7r3m9uT7crodyhY69yhkrM7Thpe__BBD9lB_2oA,3328
- datarobot_genai/drmcp/test_utils/mcp_utils_ete.py,sha256=rgZkPF26YCHX2FGppWE4v22l_NQ3kLSPSUimO0tD4nM,4402
- datarobot_genai/drmcp/test_utils/mcp_utils_integration.py,sha256=0sU29Khal0CelnHBDInyTRiuPKrFFbTbIomOoUbyMhs,3271
- datarobot_genai/drmcp/test_utils/openai_llm_mcp_client.py,sha256=TvTkDBcHscLDmqge9NhHxVo1ABtb0n4NmmG2318mQHU,9088
- datarobot_genai/drmcp/test_utils/tool_base_ete.py,sha256=-mKHBkGkyOKQCVS2LHFhSnRofIqJBbeAPRkwizBDtTg,6104
+ datarobot_genai/drmcp/test_utils/elicitation_test_tool.py,sha256=UVKwy39nl3XcVAh6IATcN-cWL2bfrprgRQ7fbK82jTI,3287
+ datarobot_genai/drmcp/test_utils/integration_mcp_server.py,sha256=YSk19tbaka_0ziqi7LoXie4SJs-cvi9-H00Go0ZtQWE,3575
+ datarobot_genai/drmcp/test_utils/mcp_utils_ete.py,sha256=46rH0fYYmUj7ygf968iRbdSp5u95v23BEw3Ts_c431Y,4788
+ datarobot_genai/drmcp/test_utils/mcp_utils_integration.py,sha256=sHA_BWtpgIAFp9IXiNkUeBartBMjLAauqkV9bYtCr-g,3874
+ datarobot_genai/drmcp/test_utils/openai_llm_mcp_client.py,sha256=YgyqHK09MB-PBaqT34heqvmvYYFtLpzzSJt7xuTJmDg,11224
+ datarobot_genai/drmcp/test_utils/test_interactive.py,sha256=guXvR8q2H6VUdmvIjEJcElQJCC6lQ-oTrzbD2EkHeCs,8025
+ datarobot_genai/drmcp/test_utils/tool_base_ete.py,sha256=wmI-xcL0rSr56-ZoGNB8np0CZHAU583o8-Kw7fRwtMw,6232
  datarobot_genai/drmcp/test_utils/utils.py,sha256=esGKFv8aO31-Qg3owayeWp32BYe1CdYOEutjjdbweCw,3048
  datarobot_genai/drmcp/tools/__init__.py,sha256=0kq9vMkF7EBsS6lkEdiLibmUrghTQqosHbZ5k-V9a5g,578
  datarobot_genai/drmcp/tools/clients/__init__.py,sha256=0kq9vMkF7EBsS6lkEdiLibmUrghTQqosHbZ5k-V9a5g,578
  datarobot_genai/drmcp/tools/clients/atlassian.py,sha256=__M_uz7FrcbKCYRzeMn24DCEYD6OmFx_LuywHCxgXsA,6472
- datarobot_genai/drmcp/tools/clients/confluence.py,sha256=gbVxeBe7RDEEQt5UMGGW6GoAXsYLhL009dOejYIaIiQ,6325
- datarobot_genai/drmcp/tools/clients/jira.py,sha256=aSDmw07SqpoE5fMQchb_y3Ggn4WcTUZU_1M8TwvZ3-E,6498
+ datarobot_genai/drmcp/tools/clients/confluence.py,sha256=gDzy8t5t3b1mwEr-CuZ5BwXXQ52AXke8J_Ra7i_8T1g,13692
+ datarobot_genai/drmcp/tools/clients/jira.py,sha256=Rm91JAyrNIqxu66-9rU1YqoRXVnWbEy-Ahvy6f6HlVg,9823
  datarobot_genai/drmcp/tools/clients/s3.py,sha256=GmwzvurFdNfvxOooA8g5S4osRysHYU0S9ypg_177Glg,953
  datarobot_genai/drmcp/tools/confluence/__init__.py,sha256=0kq9vMkF7EBsS6lkEdiLibmUrghTQqosHbZ5k-V9a5g,578
- datarobot_genai/drmcp/tools/confluence/tools.py,sha256=t5OqXIhUm6y9bAWymyqwEMElwTxGw1xRnkW2MgJrNF8,3106
+ datarobot_genai/drmcp/tools/confluence/tools.py,sha256=jSF7yXGFqqlMcavkRIY4HbMxb7tCeunA2ST41wa2vGI,7219
  datarobot_genai/drmcp/tools/jira/__init__.py,sha256=0kq9vMkF7EBsS6lkEdiLibmUrghTQqosHbZ5k-V9a5g,578
- datarobot_genai/drmcp/tools/jira/tools.py,sha256=LBJkK9yjgRNZJHaqgJ3bknNnvLKpr2RLLtQYAs-O-oA,4034
+ datarobot_genai/drmcp/tools/jira/tools.py,sha256=dfkqTU2HH-7n44hX80ODFacKq0p0LOchFcZtIIKFNMM,9687
  datarobot_genai/drmcp/tools/predictive/__init__.py,sha256=WuOHlNNEpEmcF7gVnhckruJRKU2qtmJLE3E7zoCGLDo,1030
- datarobot_genai/drmcp/tools/predictive/data.py,sha256=k4EJxJrl8DYVGVfJ0DM4YTfnZlC_K3OUHZ0eRUzfluI,3165
+ datarobot_genai/drmcp/tools/predictive/data.py,sha256=mt3PvIel4IUAZo0HLIs6QsTtRkuzX1qg-5PHI3IJ7E8,4455
  datarobot_genai/drmcp/tools/predictive/deployment.py,sha256=lm02Ayuo11L1hP41fgi3QpR1Eyty-Wc16rM0c8SgliM,3277
  datarobot_genai/drmcp/tools/predictive/deployment_info.py,sha256=BGEF_dmbxOBJR0n1Tt9TO2-iNTQSBTr-oQUyaxLZ0ZI,15297
  datarobot_genai/drmcp/tools/predictive/model.py,sha256=Yih5-KedJ-1yupPLXCJsCXOdyWWi9pRvgapXDlgXWJA,4891
@@ -100,14 +102,15 @@ datarobot_genai/llama_index/agent.py,sha256=V6ZsD9GcBDJS-RJo1tJtIHhyW69_78gM6_fO
  datarobot_genai/llama_index/base.py,sha256=ovcQQtC-djD_hcLrWdn93jg23AmD6NBEj7xtw4a6K6c,14481
  datarobot_genai/llama_index/mcp.py,sha256=leXqF1C4zhuYEKFwNEfZHY4dsUuGZk3W7KArY-zxVL8,2645
  datarobot_genai/nat/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- datarobot_genai/nat/agent.py,sha256=jDeIS9f-8vGbeLy5gQkSjeuHINx5Fh_4BvXYERsgIIk,10516
+ datarobot_genai/nat/agent.py,sha256=DuGrgqt1FzvAE-cRH_P3LTFUlwuClvbVurdwA-RsbuY,11177
  datarobot_genai/nat/datarobot_auth_provider.py,sha256=Z4NSsrHxK8hUeiqtK_lryHsUuZC74ziNo_FHbsZgtiM,4230
  datarobot_genai/nat/datarobot_llm_clients.py,sha256=Yu208Ed_p_4P3HdpuM7fYnKcXtimORHpKlWVPyijpU8,11356
  datarobot_genai/nat/datarobot_llm_providers.py,sha256=aDoQcTeGI-odqydPXEX9OGGNFbzAtpqzTvHHEkmJuEQ,4963
  datarobot_genai/nat/datarobot_mcp_client.py,sha256=35FzilxNp4VqwBYI0NsOc91-xZm1C-AzWqrOdDy962A,9612
- datarobot_genai-0.2.13.dist-info/METADATA,sha256=RJZ6ozRm3L6oreEu4D9gGKZLzlf3xoC7tZ3RrppBc_U,6301
- datarobot_genai-0.2.13.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
- datarobot_genai-0.2.13.dist-info/entry_points.txt,sha256=jEW3WxDZ8XIK9-ISmTyt5DbmBb047rFlzQuhY09rGrM,284
- datarobot_genai-0.2.13.dist-info/licenses/AUTHORS,sha256=isJGUXdjq1U7XZ_B_9AH8Qf0u4eX0XyQifJZ_Sxm4sA,80
- datarobot_genai-0.2.13.dist-info/licenses/LICENSE,sha256=U2_VkLIktQoa60Nf6Tbt7E4RMlfhFSjWjcJJfVC-YCE,11341
- datarobot_genai-0.2.13.dist-info/RECORD,,
+ datarobot_genai/nat/helpers.py,sha256=Q7E3ADZdtFfS8E6OQPyw2wgA6laQ58N3bhLj5CBWwJs,3265
+ datarobot_genai-0.2.19.dist-info/METADATA,sha256=rywu2LteAnHoLxfq84xckJSU10XX_sKMqli7pqsuCgg,6301
+ datarobot_genai-0.2.19.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+ datarobot_genai-0.2.19.dist-info/entry_points.txt,sha256=jEW3WxDZ8XIK9-ISmTyt5DbmBb047rFlzQuhY09rGrM,284
+ datarobot_genai-0.2.19.dist-info/licenses/AUTHORS,sha256=isJGUXdjq1U7XZ_B_9AH8Qf0u4eX0XyQifJZ_Sxm4sA,80
+ datarobot_genai-0.2.19.dist-info/licenses/LICENSE,sha256=U2_VkLIktQoa60Nf6Tbt7E4RMlfhFSjWjcJJfVC-YCE,11341
+ datarobot_genai-0.2.19.dist-info/RECORD,,