datarobot-genai 0.2.34__py3-none-any.whl → 0.2.37__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only and reflects the changes between those package versions as published.
- datarobot_genai/drmcp/tools/clients/microsoft_graph.py +126 -0
- datarobot_genai/drmcp/tools/microsoft_graph/tools.py +79 -0
- datarobot_genai/drmcp/tools/predictive/deployment.py +52 -46
- datarobot_genai/drmcp/tools/predictive/training.py +38 -10
- {datarobot_genai-0.2.34.dist-info → datarobot_genai-0.2.37.dist-info}/METADATA +1 -1
- {datarobot_genai-0.2.34.dist-info → datarobot_genai-0.2.37.dist-info}/RECORD +10 -10
- {datarobot_genai-0.2.34.dist-info → datarobot_genai-0.2.37.dist-info}/WHEEL +0 -0
- {datarobot_genai-0.2.34.dist-info → datarobot_genai-0.2.37.dist-info}/entry_points.txt +0 -0
- {datarobot_genai-0.2.34.dist-info → datarobot_genai-0.2.37.dist-info}/licenses/AUTHORS +0 -0
- {datarobot_genai-0.2.34.dist-info → datarobot_genai-0.2.37.dist-info}/licenses/LICENSE +0 -0
datarobot_genai/drmcp/tools/clients/microsoft_graph.py

@@ -16,6 +16,7 @@
 
 import logging
 from typing import Any
+from urllib.parse import quote
 
 import httpx
 from datarobot.auth.datarobot.exceptions import OAuthServiceClientErr
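The new `urllib.parse.quote` import supports the path-based addressing used by `create_file` below: the file name is embedded in the request URL, so reserved characters must be percent-encoded. A small illustration (the sample file name is made up):

```python
# Illustration only: file names are percent-encoded before being placed in the
# /drives/{drive-id}/items/{parent-id}:/{name}:/content path segment.
from urllib.parse import quote

print(quote("Q3 report #final.txt", safe=""))  # Q3%20report%20%23final.txt
```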
@@ -415,6 +416,131 @@ class MicrosoftGraphClient:
 
         return base_resource
 
+    async def get_personal_drive_id(self) -> str:
+        """Get the current user's personal OneDrive drive ID.
+
+        Returns
+        -------
+        The drive ID string for the user's personal OneDrive.
+
+        Raises
+        ------
+        MicrosoftGraphError: If the drive cannot be retrieved.
+        """
+        try:
+            response = await self._client.get(f"{GRAPH_API_BASE}/me/drive")
+            response.raise_for_status()
+            data = response.json()
+            return data["id"]
+        except httpx.HTTPStatusError as e:
+            status_code = e.response.status_code
+            if status_code == 401:
+                raise MicrosoftGraphError(
+                    "Authentication failed. Access token may be expired or invalid."
+                ) from e
+            if status_code == 403:
+                raise MicrosoftGraphError(
+                    "Permission denied: cannot access personal OneDrive. "
+                    "Requires Files.Read or Files.ReadWrite permission."
+                ) from e
+            raise MicrosoftGraphError(f"Failed to get personal OneDrive: HTTP {status_code}") from e
+
+    async def create_file(
+        self,
+        drive_id: str,
+        file_name: str,
+        content: str,
+        parent_folder_id: str = "root",
+        conflict_behavior: str = "rename",
+    ) -> MicrosoftGraphItem:
+        """Create a text file in a drive (SharePoint document library or OneDrive).
+
+        Uses Microsoft Graph's simple upload endpoint for files < 4MB.
+        Files are created as text/plain content.
+
+        Args:
+            drive_id: The ID of the drive (document library) where the file will be created.
+            file_name: The name of the file to create (e.g., 'report.txt').
+            content: The text content to store in the file.
+            parent_folder_id: ID of the parent folder. Defaults to "root" (drive root folder).
+            conflict_behavior: How to handle name conflicts. Options:
+                - "rename" (default): Auto-renames to 'filename (1).txt', etc.
+                - "fail": Returns 409 Conflict error
+                - "replace": Overwrites existing file
+
+        Returns
+        -------
+        MicrosoftGraphItem representing the created file.
+
+        Raises
+        ------
+        MicrosoftGraphError: If file creation fails.
+        """
+        if not drive_id or not drive_id.strip():
+            raise MicrosoftGraphError("drive_id cannot be empty")
+        if not file_name or not file_name.strip():
+            raise MicrosoftGraphError("file_name cannot be empty")
+
+        # URL encode the filename for path-based addressing
+        encoded_name = quote(file_name, safe="")
+
+        # Simple upload endpoint for files < 4MB
+        # Reference: https://learn.microsoft.com/en-us/graph/api/driveitem-put-content
+        upload_url = (
+            f"{GRAPH_API_BASE}/drives/{drive_id}/items/{parent_folder_id}:/{encoded_name}:/content"
+        )
+
+        try:
+            response = await self._client.put(
+                upload_url,
+                content=content.encode("utf-8"),
+                headers={"Content-Type": "text/plain"},
+                params={"@microsoft.graph.conflictBehavior": conflict_behavior},
+            )
+            response.raise_for_status()
+        except httpx.HTTPStatusError as e:
+            raise self._handle_create_file_error(e, drive_id, file_name, parent_folder_id) from e
+
+        return MicrosoftGraphItem.from_api_response(response.json())
+
+    def _handle_create_file_error(
+        self,
+        error: httpx.HTTPStatusError,
+        drive_id: str,
+        file_name: str,
+        parent_folder_id: str,
+    ) -> MicrosoftGraphError:
+        """Handle HTTP errors for file creation and return appropriate MicrosoftGraphError."""
+        status_code = error.response.status_code
+        error_msg = f"Failed to create file: HTTP {status_code}"
+
+        if status_code == 400:
+            try:
+                error_data = error.response.json()
+                api_message = error_data.get("error", {}).get("message", "Invalid request")
+                error_msg = f"Bad request creating file: {api_message}"
+            except Exception:
+                error_msg = "Bad request: invalid parameters for file creation."
+        elif status_code == 401:
+            error_msg = "Authentication failed. Access token may be expired or invalid."
+        elif status_code == 403:
+            error_msg = (
+                f"Permission denied: you don't have permission to create files in drive "
+                f"'{drive_id}'. Requires Files.ReadWrite.All permission."
+            )
+        elif status_code == 404:
+            error_msg = (
+                f"Parent folder '{parent_folder_id}' not found in drive '{drive_id}'."
+                if parent_folder_id != "root"
+                else f"Drive '{drive_id}' not found."
+            )
+        elif status_code == 409:
+            error_msg = f"File '{file_name}' already exists and conflict behavior is set to 'fail'."
+        elif status_code == 429:
+            error_msg = "Rate limit exceeded. Please try again later."
+
+        return MicrosoftGraphError(error_msg)
+
     async def __aenter__(self) -> "MicrosoftGraphClient":
         """Async context manager entry."""
         return self
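A minimal usage sketch of the two new client methods, assuming a valid Microsoft Graph access token is already available; the import path follows the file listed above, and the token value and file contents are placeholders:

```python
# Sketch: create a small text file in the caller's personal OneDrive using the
# new client methods. Assumes `access_token` is a valid Microsoft Graph token.
import asyncio

from datarobot_genai.drmcp.tools.clients.microsoft_graph import MicrosoftGraphClient


async def upload_note(access_token: str) -> None:
    async with MicrosoftGraphClient(access_token=access_token) as client:
        drive_id = await client.get_personal_drive_id()
        item = await client.create_file(
            drive_id=drive_id,
            file_name="notes.txt",
            content="Hello from datarobot-genai 0.2.37.",
            parent_folder_id="root",
            conflict_behavior="rename",  # auto-rename on name collisions
        )
        print(item.name, item.web_url)


# asyncio.run(upload_note("<access-token>"))
```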
datarobot_genai/drmcp/tools/microsoft_graph/tools.py

@@ -196,3 +196,82 @@ async def microsoft_graph_search_content(
             "count": n,
         },
     )
+
+
+@dr_mcp_tool(
+    tags={
+        "microsoft",
+        "graph api",
+        "sharepoint",
+        "onedrive",
+        "document library",
+        "create",
+        "file",
+        "write",
+    }
+)
+async def microsoft_create_file(
+    *,
+    file_name: Annotated[str, "The name of the file to create (e.g., 'report.txt')."],
+    content_text: Annotated[str, "The raw text content to be stored in the file."],
+    document_library_id: Annotated[
+        str | None,
+        "The ID of the document library (Drive). If not provided, saves to personal OneDrive.",
+    ] = None,
+    parent_folder_id: Annotated[
+        str | None,
+        "ID of the parent folder. Defaults to 'root' (root of the drive).",
+    ] = "root",
+) -> ToolResult | ToolError:
+    """
+    Create a new text file in SharePoint or OneDrive.
+
+    **Personal OneDrive:** Just provide file_name and content_text.
+    The file saves to your personal OneDrive root folder.
+
+    **SharePoint:** Provide document_library_id to save to a specific
+    SharePoint site. Get the ID from microsoft_graph_search_content
+    results ('documentLibraryId' field).
+
+    **Conflict Resolution:** If a file with the same name exists,
+    it will be automatically renamed (e.g., 'report (1).txt').
+    """
+    if not file_name or not file_name.strip():
+        raise ToolError("Error: file_name is required.")
+    if not content_text:
+        raise ToolError("Error: content_text is required.")
+
+    access_token = await get_microsoft_graph_access_token()
+    if isinstance(access_token, ToolError):
+        raise access_token
+
+    folder_id = parent_folder_id if parent_folder_id else "root"
+
+    async with MicrosoftGraphClient(access_token=access_token) as client:
+        # Auto-fetch personal OneDrive if no library specified
+        if document_library_id is None:
+            drive_id = await client.get_personal_drive_id()
+            is_personal_onedrive = True
+        else:
+            drive_id = document_library_id
+            is_personal_onedrive = False
+
+        created_file = await client.create_file(
+            drive_id=drive_id,
+            file_name=file_name.strip(),
+            content=content_text,
+            parent_folder_id=folder_id,
+            conflict_behavior="rename",
+        )
+
+    return ToolResult(
+        content=f"File '{created_file.name}' created successfully.",
+        structured_content={
+            "file_name": created_file.name,
+            "destination": "onedrive" if is_personal_onedrive else "sharepoint",
+            "driveId": drive_id,
+            "id": created_file.id,
+            "webUrl": created_file.web_url,
+            "parentFolderId": created_file.parent_folder_id,
+        },
+    )
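For reference, a sketch of calling the new tool directly as a coroutine (for example from a test), assuming the dr_mcp_tool decorator leaves the function directly awaitable; argument values are illustrative:

```python
# Sketch: invoke the new MCP tool directly. Omitting document_library_id makes
# the tool resolve the personal OneDrive and write to its root folder.
# Assumes the decorated function can still be awaited like a plain coroutine.
import asyncio

from datarobot_genai.drmcp.tools.microsoft_graph.tools import microsoft_create_file


async def demo() -> None:
    result = await microsoft_create_file(
        file_name="report.txt",
        content_text="Quarterly summary goes here.",
    )
    print(result.content)
    print(result.structured_content["destination"])  # "onedrive" or "sharepoint"


# asyncio.run(demo())
```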
datarobot_genai/drmcp/tools/predictive/deployment.py

@@ -14,6 +14,10 @@
 
 import json
 import logging
+from typing import Annotated
+
+from fastmcp.exceptions import ToolError
+from fastmcp.tools.tool import ToolResult
 
 from datarobot_genai.drmcp.core.clients import get_sdk_client
 from datarobot_genai.drmcp.core.mcp_instance import dr_mcp_tool
@@ -21,71 +25,73 @@ from datarobot_genai.drmcp.core.mcp_instance import dr_mcp_tool
 logger = logging.getLogger(__name__)
 
 
-@dr_mcp_tool(tags={"deployment", "management", "list"})
-async def list_deployments() ->
-    """
-    List all DataRobot deployments for the authenticated user.
-
-    Returns
-    -------
-    A string summary of the user's DataRobot deployments.
-    """
+@dr_mcp_tool(tags={"predictive", "deployment", "read", "management", "list"})
+async def list_deployments() -> ToolResult:
+    """List all DataRobot deployments for the authenticated user."""
     client = get_sdk_client()
     deployments = client.Deployment.list()
     if not deployments:
-
-
-
-
-
-
+        return ToolResult(
+            content="No deployments found.",
+            structured_content={"deployments": []},
+        )
+    deployments_dict = {d.id: d.label for d in deployments}
+    return ToolResult(
+        content="\n".join(f"{d.id}: {d.label}" for d in deployments),
+        structured_content={"deployments": deployments_dict},
+    )
 
-@dr_mcp_tool(tags={"deployment", "model", "info"})
-async def get_model_info_from_deployment(deployment_id: str) -> str:
-    """
-    Get model info associated with a given deployment ID.
 
-
-
+@dr_mcp_tool(tags={"predictive", "deployment", "read", "model", "info"})
+async def get_model_info_from_deployment(
+    *,
+    deployment_id: Annotated[str, "The ID of the DataRobot deployment"] | None = None,
+) -> ToolError | ToolResult:
+    """Retrieve model info associated with a given deployment ID."""
+    if not deployment_id:
+        raise ToolError("Deployment ID must be provided")
 
-    Returns
-    -------
-    The model info associated with the deployment as a JSON string.
-    """
     client = get_sdk_client()
     deployment = client.Deployment.get(deployment_id)
-
-
-
+    return ToolResult(
+        content=(
+            f"Retrieved model info for deployment {deployment_id}, here are the details:\n"
+            f"{json.dumps(deployment.model, indent=2)}"
+        ),
+        structured_content=deployment.model,
+    )
 
-@dr_mcp_tool(tags={"deployment", "model", "create"})
-async def deploy_model(model_id: str, label: str, description: str = "") -> str:
-    """
-    Deploy a model by creating a new DataRobot deployment.
 
-
-
-
-
+@dr_mcp_tool(tags={"predictive", "deployment", "write", "model", "create"})
+async def deploy_model(
+    *,
+    model_id: Annotated[str, "The ID of the DataRobot model to deploy"] | None = None,
+    label: Annotated[str, "The label/name for the deployment"] | None = None,
+    description: Annotated[str, "Optional description for the deployment"] | None = None,
+) -> ToolError | ToolResult:
+    """Deploy a model by creating a new DataRobot deployment."""
+    if not model_id:
+        raise ToolError("Model ID must be provided")
+    if not label:
+        raise ToolError("Model label must be provided")
 
-    Returns
-    -------
-    JSON string with deployment ID and label, or error message.
-    """
     client = get_sdk_client()
     try:
         prediction_servers = client.PredictionServer.list()
         if not prediction_servers:
-
-            return json.dumps({"error": "No prediction servers available"})
+            raise ToolError("No prediction servers available for deployment.")
         deployment = client.Deployment.create_from_learning_model(
             model_id=model_id,
             label=label,
             description=description,
             default_prediction_server_id=prediction_servers[0].id,
         )
-
-
+        return ToolResult(
+            content=f"Created deployment {deployment.id} with label {label}",
+            structured_content={
+                "deployment_id": deployment.id,
+                "label": label,
+            },
+        )
     except Exception as e:
-
-        return json.dumps({"error": f"Error deploying model {model_id}: {type(e).__name__}: {e}"})
+        raise ToolError(f"Error deploying model {model_id}: {type(e).__name__}: {e}")
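The deployment tools now return ToolResult objects (a human-readable summary plus structured content) and raise ToolError instead of returning JSON error strings. A consumption sketch, again assuming the decorated functions remain directly awaitable:

```python
# Sketch: read the structured payload now returned by list_deployments.
from datarobot_genai.drmcp.tools.predictive.deployment import list_deployments


async def show_deployments() -> None:
    result = await list_deployments()
    print(result.content)  # "No deployments found." or one "id: label" line per deployment
    deployments = result.structured_content["deployments"]
    if deployments:  # dict of {deployment_id: label}; an empty list when none exist
        for dep_id, label in deployments.items():
            print(dep_id, "->", label)
```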
datarobot_genai/drmcp/tools/predictive/training.py

@@ -19,6 +19,7 @@ import logging
 from dataclasses import asdict
 from dataclasses import dataclass
 from typing import Annotated
+from typing import Any
 
 import pandas as pd
 from fastmcp.exceptions import ToolError
@@ -56,6 +57,36 @@ class DatasetInsight:
     missing_data_summary: dict[str, float]
 
 
+def _get_dataset_or_raise(client: Any, dataset_id: str) -> tuple[Any, pd.DataFrame]:
+    """Fetch dataset and return it with its dataframe, with proper error handling.
+
+    Args:
+        client: DataRobot SDK client instance
+        dataset_id: The ID of the dataset to fetch
+
+    Returns
+    -------
+    Tuple of (dataset object, dataframe)
+
+    Raises
+    ------
+    ToolError: If dataset is not found (404) or other error occurs
+    """
+    try:
+        dataset = client.Dataset.get(dataset_id)
+        return dataset, dataset.get_as_dataframe()
+    except Exception as e:
+        error_str = str(e)
+        # Check if it's a 404 error (dataset not found)
+        if "404" in error_str or "Not Found" in error_str:
+            raise ToolError(
+                f"Dataset '{dataset_id}' not found. Please verify the dataset ID exists "
+                "and you have access to it."
+            )
+        # For other errors, provide context
+        raise ToolError(f"Failed to retrieve dataset '{dataset_id}': {error_str}")
+
+
 @dr_mcp_tool(tags={"predictive", "training", "read", "analysis", "dataset"})
 async def analyze_dataset(
     *,
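A small sketch of the error mapping performed by the new helper, using a stubbed SDK client; the stub classes are invented for illustration, the helper is module-private and imported here only to exercise the 404-to-ToolError translation:

```python
# Sketch: the helper converts SDK "404"/"Not Found" failures into a ToolError
# with a user-facing message. The client below is a stand-in, not the real SDK.
from fastmcp.exceptions import ToolError

from datarobot_genai.drmcp.tools.predictive.training import _get_dataset_or_raise


class _FakeDatasetAPI:
    def get(self, dataset_id: str):
        # Mimic the SDK raising a not-found error for an unknown dataset ID.
        raise RuntimeError(f"404 Not Found: dataset {dataset_id}")


class _FakeClient:
    Dataset = _FakeDatasetAPI()


try:
    _get_dataset_or_raise(_FakeClient(), "missing-id")
except ToolError as err:
    print(err)  # Dataset 'missing-id' not found. Please verify the dataset ID exists ...
```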
@@ -66,8 +97,7 @@ async def analyze_dataset(
         raise ToolError("Dataset ID must be provided")
 
     client = get_sdk_client()
-    dataset = client
-    df = dataset.get_as_dataframe()
+    dataset, df = _get_dataset_or_raise(client, dataset_id)
 
     # Analyze dataset structure
     numerical_cols = df.select_dtypes(include=["int64", "float64"]).columns.tolist()
@@ -119,12 +149,11 @@ async def suggest_use_cases(
         raise ToolError("Dataset ID must be provided")
 
     client = get_sdk_client()
-    dataset = client
-    df = dataset.get_as_dataframe()
+    dataset, df = _get_dataset_or_raise(client, dataset_id)
 
     # Get dataset insights first
-
-    insights =
+    insights_result = await analyze_dataset(dataset_id=dataset_id)
+    insights = insights_result.structured_content
 
     suggestions = []
     for target_col in insights["potential_targets"]:
@@ -151,12 +180,11 @@ async def get_exploratory_insights(
         raise ToolError("Dataset ID must be provided")
 
     client = get_sdk_client()
-    dataset = client
-    df = dataset.get_as_dataframe()
+    dataset, df = _get_dataset_or_raise(client, dataset_id)
 
     # Get dataset insights first
-
-    insights =
+    insights_result = await analyze_dataset(dataset_id=dataset_id)
+    insights = insights_result.structured_content
 
     eda_insights = {
         "dataset_summary": {
{datarobot_genai-0.2.34.dist-info → datarobot_genai-0.2.37.dist-info}/RECORD

@@ -84,7 +84,7 @@ datarobot_genai/drmcp/tools/clients/atlassian.py,sha256=__M_uz7FrcbKCYRzeMn24DCE
 datarobot_genai/drmcp/tools/clients/confluence.py,sha256=h_G0By_kDnJeWDT_d-IREsaZ5-0xB5GoLXOqblYP5MA,20706
 datarobot_genai/drmcp/tools/clients/gdrive.py,sha256=RK4IISpYb99aK6WgDthesDoglaZxwGpG_PPAAe6xsVM,33064
 datarobot_genai/drmcp/tools/clients/jira.py,sha256=Rm91JAyrNIqxu66-9rU1YqoRXVnWbEy-Ahvy6f6HlVg,9823
-datarobot_genai/drmcp/tools/clients/microsoft_graph.py,sha256
+datarobot_genai/drmcp/tools/clients/microsoft_graph.py,sha256=-g0EhaBVElKbujaO2cHdgc86hwFEkkyEyZVAw8pq7yM,24468
 datarobot_genai/drmcp/tools/clients/s3.py,sha256=GmwzvurFdNfvxOooA8g5S4osRysHYU0S9ypg_177Glg,953
 datarobot_genai/drmcp/tools/confluence/__init__.py,sha256=0kq9vMkF7EBsS6lkEdiLibmUrghTQqosHbZ5k-V9a5g,578
 datarobot_genai/drmcp/tools/confluence/tools.py,sha256=_-ws65WLK8KZP_mKkf4yJ7ZunR8qdyoiMwHQX47MSMw,12362
@@ -93,16 +93,16 @@ datarobot_genai/drmcp/tools/gdrive/tools.py,sha256=7bNrp7E3opKwsBDYfLIOsOGfPXW-A
 datarobot_genai/drmcp/tools/jira/__init__.py,sha256=0kq9vMkF7EBsS6lkEdiLibmUrghTQqosHbZ5k-V9a5g,578
 datarobot_genai/drmcp/tools/jira/tools.py,sha256=dfkqTU2HH-7n44hX80ODFacKq0p0LOchFcZtIIKFNMM,9687
 datarobot_genai/drmcp/tools/microsoft_graph/__init__.py,sha256=CuOaMt1AJo7cHx_GuhO3s_aqxZas_wlDsoBorBsvbeU,577
-datarobot_genai/drmcp/tools/microsoft_graph/tools.py,sha256=
+datarobot_genai/drmcp/tools/microsoft_graph/tools.py,sha256=cNctozv_4lRC5Kva3D2j4taZfeQHDE6LTAjcmeQXwWA,10446
 datarobot_genai/drmcp/tools/predictive/__init__.py,sha256=WuOHlNNEpEmcF7gVnhckruJRKU2qtmJLE3E7zoCGLDo,1030
 datarobot_genai/drmcp/tools/predictive/data.py,sha256=VbGs8ERP8vNFtTTryGhI61JItNVaJsx1gxpRX1ZFZcg,4626
-datarobot_genai/drmcp/tools/predictive/deployment.py,sha256=
+datarobot_genai/drmcp/tools/predictive/deployment.py,sha256=Pc6lz9V2JOw3Ufw-SsGAhMKf6-YhvbjGoNLRFOIcSSY,3670
 datarobot_genai/drmcp/tools/predictive/deployment_info.py,sha256=BGEF_dmbxOBJR0n1Tt9TO2-iNTQSBTr-oQUyaxLZ0ZI,15297
 datarobot_genai/drmcp/tools/predictive/model.py,sha256=BVxOMHh3--liwBU4VB1OWRrqkhJ4y_Rq053f7y94TF8,6276
 datarobot_genai/drmcp/tools/predictive/predict.py,sha256=Qoob2_t2crfWtyPzkXMRz2ITZumnczU6Dq4C7q9RBMI,9370
 datarobot_genai/drmcp/tools/predictive/predict_realtime.py,sha256=urq6rPyZFsAP-bPyclSNzrkvb6FTamdlFau8q0IWWJ0,13472
 datarobot_genai/drmcp/tools/predictive/project.py,sha256=Mzf7rQogBV6h1-MWQYTwtDHOsMWfjOyyJpSYmmvNNuc,3253
-datarobot_genai/drmcp/tools/predictive/training.py,sha256=
+datarobot_genai/drmcp/tools/predictive/training.py,sha256=jeZGPWJ69PPOd2MhUbACgbllQ0CK7Kz-hNl596mJujQ,25021
 datarobot_genai/langgraph/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 datarobot_genai/langgraph/agent.py,sha256=DRnywmS9KDywyChtuIZZwNKbJs8BpC259EG_kxYbiQ8,15828
 datarobot_genai/langgraph/mcp.py,sha256=iA2_j46mZAaNaL7ntXT-LW6C-NMJkzr3VfKDDfe7mh8,2851
@@ -117,9 +117,9 @@ datarobot_genai/nat/datarobot_llm_clients.py,sha256=-_q_KlKOVQecIYJd8YRiYnS4ZNaz
 datarobot_genai/nat/datarobot_llm_providers.py,sha256=aDoQcTeGI-odqydPXEX9OGGNFbzAtpqzTvHHEkmJuEQ,4963
 datarobot_genai/nat/datarobot_mcp_client.py,sha256=jL8sXb8g4gvt0VYgB2tfMGsMjpB1GV2XIbN0iv_LxVU,10701
 datarobot_genai/nat/helpers.py,sha256=Q7E3ADZdtFfS8E6OQPyw2wgA6laQ58N3bhLj5CBWwJs,3265
-datarobot_genai-0.2.
-datarobot_genai-0.2.
-datarobot_genai-0.2.
-datarobot_genai-0.2.
-datarobot_genai-0.2.
-datarobot_genai-0.2.
+datarobot_genai-0.2.37.dist-info/METADATA,sha256=JFxYZKfbfrbePIywe1WQ1ZfjQ5W9HAq_GNYAuXxTcB8,6301
+datarobot_genai-0.2.37.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+datarobot_genai-0.2.37.dist-info/entry_points.txt,sha256=jEW3WxDZ8XIK9-ISmTyt5DbmBb047rFlzQuhY09rGrM,284
+datarobot_genai-0.2.37.dist-info/licenses/AUTHORS,sha256=isJGUXdjq1U7XZ_B_9AH8Qf0u4eX0XyQifJZ_Sxm4sA,80
+datarobot_genai-0.2.37.dist-info/licenses/LICENSE,sha256=U2_VkLIktQoa60Nf6Tbt7E4RMlfhFSjWjcJJfVC-YCE,11341
+datarobot_genai-0.2.37.dist-info/RECORD,,
The remaining files (WHEEL, entry_points.txt, licenses/AUTHORS, licenses/LICENSE) are unchanged.