datarobot-genai 0.2.29__py3-none-any.whl → 0.2.34__py3-none-any.whl
This diff compares two publicly available versions of the package as released to a supported registry. It is provided for informational purposes only and reflects the changes between the versions as they appear in their public registry.
- datarobot_genai/core/cli/agent_kernel.py +4 -1
- datarobot_genai/drmcp/__init__.py +2 -2
- datarobot_genai/drmcp/core/exceptions.py +0 -4
- datarobot_genai/drmcp/core/logging.py +2 -2
- datarobot_genai/drmcp/test_utils/clients/__init__.py +0 -0
- datarobot_genai/drmcp/test_utils/clients/anthropic.py +68 -0
- datarobot_genai/drmcp/test_utils/{openai_llm_mcp_client.py → clients/base.py} +38 -40
- datarobot_genai/drmcp/test_utils/clients/dr_gateway.py +58 -0
- datarobot_genai/drmcp/test_utils/clients/openai.py +68 -0
- datarobot_genai/drmcp/test_utils/mcp_utils_ete.py +20 -0
- datarobot_genai/drmcp/test_utils/test_interactive.py +16 -16
- datarobot_genai/drmcp/test_utils/tool_base_ete.py +1 -1
- datarobot_genai/drmcp/test_utils/utils.py +1 -1
- datarobot_genai/drmcp/tools/clients/gdrive.py +187 -1
- datarobot_genai/drmcp/tools/gdrive/tools.py +186 -10
- datarobot_genai/drmcp/tools/predictive/data.py +5 -5
- datarobot_genai/drmcp/tools/predictive/model.py +87 -52
- datarobot_genai/drmcp/tools/predictive/project.py +2 -2
- datarobot_genai/drmcp/tools/predictive/training.py +14 -14
- {datarobot_genai-0.2.29.dist-info → datarobot_genai-0.2.34.dist-info}/METADATA +1 -1
- {datarobot_genai-0.2.29.dist-info → datarobot_genai-0.2.34.dist-info}/RECORD +25 -21
- {datarobot_genai-0.2.29.dist-info → datarobot_genai-0.2.34.dist-info}/WHEEL +0 -0
- {datarobot_genai-0.2.29.dist-info → datarobot_genai-0.2.34.dist-info}/entry_points.txt +0 -0
- {datarobot_genai-0.2.29.dist-info → datarobot_genai-0.2.34.dist-info}/licenses/AUTHORS +0 -0
- {datarobot_genai-0.2.29.dist-info → datarobot_genai-0.2.34.dist-info}/licenses/LICENSE +0 -0

datarobot_genai/drmcp/tools/clients/gdrive.py

@@ -20,6 +20,7 @@ import logging
 import uuid
 from typing import Annotated
 from typing import Any
+from typing import Literal

 import httpx
 from datarobot.auth.datarobot.exceptions import OAuthServiceClientErr
@@ -33,7 +34,17 @@ from datarobot_genai.drmcp.core.auth import get_access_token

 logger = logging.getLogger(__name__)

-SUPPORTED_FIELDS = {
+SUPPORTED_FIELDS = {
+    "id",
+    "name",
+    "size",
+    "mimeType",
+    "webViewLink",
+    "createdTime",
+    "modifiedTime",
+    "starred",
+    "trashed",
+}
 SUPPORTED_FIELDS_STR = ",".join(SUPPORTED_FIELDS)
 DEFAULT_FIELDS = f"nextPageToken,files({SUPPORTED_FIELDS_STR})"
 GOOGLE_DRIVE_FOLDER_MIME = "application/vnd.google-apps.folder"
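Since SUPPORTED_FIELDS is now a set, the joined field string has no guaranteed order. A small illustrative sketch of what the derived constants expand to at import time (the example values are not copied from the package):

# Illustrative only: the order of the joined fields depends on set iteration order.
SUPPORTED_FIELDS_STR = ",".join(SUPPORTED_FIELDS)
# e.g. "id,name,size,mimeType,webViewLink,createdTime,modifiedTime,starred,trashed"
DEFAULT_FIELDS = f"nextPageToken,files({SUPPORTED_FIELDS_STR})"
# e.g. "nextPageToken,files(id,name,...)" -- shaped like a Drive v3 `fields` selector
# that limits the metadata returned for each file in a files.list response.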
@@ -119,6 +130,8 @@ class GoogleDriveFile(BaseModel):
     web_view_link: Annotated[str | None, Field(alias="webViewLink")] = None
     created_time: Annotated[str | None, Field(alias="createdTime")] = None
     modified_time: Annotated[str | None, Field(alias="modifiedTime")] = None
+    starred: bool | None = None
+    trashed: bool | None = None

     model_config = ConfigDict(populate_by_name=True)

@@ -133,8 +146,31 @@ class GoogleDriveFile(BaseModel):
             web_view_link=data.get("webViewLink"),
             created_time=data.get("createdTime"),
             modified_time=data.get("modifiedTime"),
+            starred=data.get("starred"),
+            trashed=data.get("trashed"),
         )

+    def as_flat_dict(self) -> dict[str, Any]:
+        """Return a flat dictionary representation of the file."""
+        result: dict[str, Any] = {
+            "id": self.id,
+            "name": self.name,
+            "mimeType": self.mime_type,
+        }
+        if self.size is not None:
+            result["size"] = self.size
+        if self.web_view_link is not None:
+            result["webViewLink"] = self.web_view_link
+        if self.created_time is not None:
+            result["createdTime"] = self.created_time
+        if self.modified_time is not None:
+            result["modifiedTime"] = self.modified_time
+        if self.starred is not None:
+            result["starred"] = self.starred
+        if self.trashed is not None:
+            result["trashed"] = self.trashed
+        return result
+

 class PaginatedResult(BaseModel):
     """Result of a paginated API call."""
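A quick sketch of how the new helper behaves, assuming the model can be constructed by field name (it sets populate_by_name=True); the values here are made up:

file = GoogleDriveFile(id="1AbC", name="report.txt", mime_type="text/plain", starred=True)
file.as_flat_dict()
# -> {"id": "1AbC", "name": "report.txt", "mimeType": "text/plain", "starred": True}
# Optional fields left as None (size, webViewLink, createdTime, modifiedTime, trashed)
# are omitted, which keeps the ToolResult structured_content compact.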
@@ -440,6 +476,66 @@ class GoogleDriveClient:
         response.raise_for_status()
         return GoogleDriveFile.from_api_response(response.json())

+    async def update_file_metadata(
+        self,
+        file_id: str,
+        new_name: str | None = None,
+        starred: bool | None = None,
+        trashed: bool | None = None,
+    ) -> GoogleDriveFile:
+        """Update file metadata in Google Drive.
+
+        Args:
+            file_id: The ID of the file to update.
+            new_name: A new name to rename the file. Must not be empty or whitespace.
+            starred: Set to True to star the file or False to unstar it.
+            trashed: Set to True to trash the file or False to restore it.
+
+        Returns
+        -------
+        GoogleDriveFile with updated metadata.
+
+        Raises
+        ------
+        GoogleDriveError: If no update fields are provided, file is not found,
+            access is denied, or the request is invalid.
+        """
+        if new_name is None and starred is None and trashed is None:
+            raise GoogleDriveError(
+                "At least one of new_name, starred, or trashed must be provided."
+            )
+
+        if new_name is not None and not new_name.strip():
+            raise GoogleDriveError("new_name cannot be empty or whitespace.")
+
+        body: dict[str, Any] = {}
+        if new_name is not None:
+            body["name"] = new_name
+        if starred is not None:
+            body["starred"] = starred
+        if trashed is not None:
+            body["trashed"] = trashed
+
+        response = await self._client.patch(
+            f"/{file_id}",
+            json=body,
+            params={"fields": SUPPORTED_FIELDS_STR, "supportsAllDrives": "true"},
+        )
+
+        if response.status_code == 404:
+            raise GoogleDriveError(f"File with ID '{file_id}' not found.")
+        if response.status_code == 403:
+            raise GoogleDriveError(
+                f"Permission denied: you don't have permission to update file '{file_id}'."
+            )
+        if response.status_code == 400:
+            raise GoogleDriveError("Bad request: invalid parameters for file update.")
+        if response.status_code == 429:
+            raise GoogleDriveError("Rate limit exceeded. Please try again later.")
+
+        response.raise_for_status()
+        return GoogleDriveFile.from_api_response(response.json())
+
     async def _export_workspace_file(self, file_id: str, export_mime_type: str) -> str:
         """Export a Google Workspace file to the specified format.

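A minimal usage sketch for the new client method; the access token and file ID are placeholders, and the async-context-manager pattern mirrors the tool code later in this diff:

async def rename_and_star(access_token: str) -> None:
    async with GoogleDriveClient(access_token) as client:
        # Rename the file and star it in a single PATCH request.
        updated = await client.update_file_metadata(
            file_id="1AbC...",
            new_name="Q3 report.txt",
            starred=True,
        )
        print(updated.as_flat_dict())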
@@ -726,6 +822,96 @@ class GoogleDriveClient:
             headers={"Content-Type": f"multipart/related; boundary={boundary}"},
         )

+    async def manage_access(
+        self,
+        *,
+        file_id: str,
+        action: Literal["add", "update", "remove"],
+        role: Literal["reader", "commenter", "writer", "fileOrganizer", "organizer", "owner"]
+        | None = None,
+        email_address: str | None = None,
+        permission_id: str | None = None,
+        transfer_ownership: bool = False,
+    ) -> str:
+        """Manage access permissions for a Google Drive file or folder.
+
+        Adds, updates, or removes sharing permissions on an existing Google Drive
+        file or folder using the Google Drive Permissions API.
+
+        This method supports granting access to users or groups, changing access
+        roles, and revoking permissions. Ownership transfer is supported for files
+        in "My Drive" when explicitly requested.
+
+        Args:
+            file_id: The ID of the Google Drive file or folder whose permissions
+                are being managed.
+            action: The permission operation to perform.
+            role: The access role to assign or update. Valid values include
+                Required for "add" and "update" actions.
+            email_address: The email address of the user or group to grant access to.
+                Required for the "add" action.
+            permission_id: The ID of the permission to update or remove.
+                Required for "update" and "remove" actions.
+            transfer_ownership: Whether to transfer ownership of the file.
+                Only applicable when action="update" and role="owner".
+
+        Returns
+        -------
+        Permission id.
+        For "add" its newly added permission.
+        For "update"/"remove" its previous permission.
+
+        Raises
+        ------
+        GoogleDriveError: If the permission operation fails (invalid arguments,
+            insufficient permissions, resource not found, ownership transfer
+            not allowed, rate limited, etc.).
+        """
+        if not file_id.strip():
+            raise GoogleDriveError("Argument validation error: 'file_id' cannot be empty.")
+
+        if action == "add" and not email_address:
+            raise GoogleDriveError("'email_address' is required for action 'add'.")
+
+        if action in ("update", "remove") and not permission_id:
+            raise GoogleDriveError("'permission_id' is required for action 'update' or 'remove'.")
+
+        if action != "remove" and not role:
+            raise GoogleDriveError("'role' is required for action 'add' or 'update'.")
+
+        if action == "add":
+            response = await self._client.post(
+                url=f"/{file_id}/permissions",
+                json={
+                    "type": "user",
+                    "role": role,
+                    "emailAddress": email_address,
+                },
+                params={"sendNotificationEmail": False, "supportsAllDrives": True},
+            )
+
+        elif action == "update":
+            response = await self._client.patch(
+                url=f"/{file_id}/permissions/{permission_id}",
+                json={"role": role},
+                params={"transferOwnership": transfer_ownership, "supportsAllDrives": True},
+            )
+
+        elif action == "remove":
+            response = await self._client.delete(url=f"/{file_id}/permissions/{permission_id}")
+
+        else:
+            raise GoogleDriveError(f"Invalid action '{action}'")
+
+        if response.status_code not in (200, 201, 204):
+            raise GoogleDriveError(f"Drive API error {response.status_code}: {response.text}")
+
+        if action == "add":
+            return response.json()["id"]
+
+        # Cannot be null here because of above validators
+        return permission_id  # type: ignore
+
     async def __aenter__(self) -> "GoogleDriveClient":
         """Async context manager entry."""
         return self
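And a matching sketch for manage_access, again with placeholder identifiers; as documented above, "add" returns the id of the newly created permission, while "update" and "remove" echo back the permission_id that was passed in:

async def share_read_only(access_token: str) -> str:
    async with GoogleDriveClient(access_token) as client:
        # Grant read-only access; the returned string is the new permission id.
        return await client.manage_access(
            file_id="1AbC...",
            action="add",
            role="reader",
            email_address="teammate@example.com",
        )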

datarobot_genai/drmcp/tools/gdrive/tools.py

@@ -16,6 +16,7 @@

 import logging
 from typing import Annotated
+from typing import Literal

 from fastmcp.exceptions import ToolError
 from fastmcp.tools.tool import ToolResult
@@ -33,7 +34,9 @@ from datarobot_genai.drmcp.tools.clients.gdrive import get_gdrive_access_token
 logger = logging.getLogger(__name__)


-@dr_mcp_tool(
+@dr_mcp_tool(
+    tags={"google", "gdrive", "list", "search", "files", "find", "contents"}, enabled=False
+)
 async def gdrive_find_contents(
     *,
     page_size: Annotated[
@@ -164,7 +167,6 @@ async def gdrive_read_content(
            f"An unexpected error occurred while reading Google Drive file content: {str(e)}"
        )

-    # Provide helpful context about the conversion
     export_info = ""
     if file_content.was_exported:
         export_info = f" (exported from {file_content.original_mime_type})"
@@ -252,7 +254,6 @@ async def gdrive_create_file(
         logger.error(f"Unexpected error creating Google Drive file: {e}")
         raise ToolError(f"An unexpected error occurred while creating Google Drive file: {str(e)}")

-    # Build response message
     file_type = "folder" if mime_type == GOOGLE_DRIVE_FOLDER_MIME else "file"
     content_info = ""
     if initial_content and mime_type != GOOGLE_DRIVE_FOLDER_MIME:
@@ -260,11 +261,186 @@

     return ToolResult(
         content=f"Successfully created {file_type} '{created_file.name}'{content_info}.",
-        structured_content=
-            "id": created_file.id,
-            "name": created_file.name,
-            "mimeType": created_file.mime_type,
-            "webViewLink": created_file.web_view_link,
-            "createdTime": created_file.created_time,
-        },
+        structured_content=created_file.as_flat_dict(),
     )
+
+
+@dr_mcp_tool(
+    tags={"google", "gdrive", "update", "metadata", "rename", "star", "trash"}, enabled=False
+)
+async def gdrive_update_metadata(
+    *,
+    file_id: Annotated[str, "The ID of the file or folder to update."],
+    new_name: Annotated[str | None, "A new name to rename the file."] = None,
+    starred: Annotated[bool | None, "Set to True to star the file or False to unstar it."] = None,
+    trash: Annotated[bool | None, "Set to True to trash the file or False to restore it."] = None,
+) -> ToolResult:
+    """
+    Update non-content metadata fields of a Google Drive file or folder.
+
+    This tool allows you to:
+    - Rename files and folders by setting new_name
+    - Star or unstar files (per-user preference) with starred
+    - Move files to trash or restore them with trash
+
+    Usage:
+    - Rename: gdrive_update_metadata(file_id="1ABC...", new_name="New Name.txt")
+    - Star: gdrive_update_metadata(file_id="1ABC...", starred=True)
+    - Unstar: gdrive_update_metadata(file_id="1ABC...", starred=False)
+    - Trash: gdrive_update_metadata(file_id="1ABC...", trash=True)
+    - Restore: gdrive_update_metadata(file_id="1ABC...", trash=False)
+    - Multiple: gdrive_update_metadata(file_id="1ABC...", new_name="New.txt", starred=True)
+
+    Note:
+    - At least one of new_name, starred, or trash must be provided.
+    - Starring is per-user: starring a shared file only affects your view.
+    - Trashing a folder trashes all contents recursively.
+    - Trashing requires permissions (owner for My Drive, organizer for Shared Drives).
+    """
+    if not file_id or not file_id.strip():
+        raise ToolError("Argument validation error: 'file_id' cannot be empty.")
+
+    if new_name is None and starred is None and trash is None:
+        raise ToolError(
+            "Argument validation error: at least one of 'new_name', 'starred', or 'trash' "
+            "must be provided."
+        )
+
+    if new_name is not None and not new_name.strip():
+        raise ToolError("Argument validation error: 'new_name' cannot be empty or whitespace.")
+
+    access_token = await get_gdrive_access_token()
+    if isinstance(access_token, ToolError):
+        raise access_token
+
+    try:
+        async with GoogleDriveClient(access_token) as client:
+            updated_file = await client.update_file_metadata(
+                file_id=file_id,
+                new_name=new_name,
+                starred=starred,
+                trashed=trash,
+            )
+    except GoogleDriveError as e:
+        logger.error(f"Google Drive error updating file metadata: {e}")
+        raise ToolError(str(e))
+    except Exception as e:
+        logger.error(f"Unexpected error updating Google Drive file metadata: {e}")
+        raise ToolError(
+            f"An unexpected error occurred while updating Google Drive file metadata: {str(e)}"
+        )
+
+    changes: list[str] = []
+    if new_name is not None:
+        changes.append(f"renamed to '{new_name}'")
+    if starred is True:
+        changes.append("starred")
+    elif starred is False:
+        changes.append("unstarred")
+    if trash is True:
+        changes.append("moved to trash")
+    elif trash is False:
+        changes.append("restored from trash")
+
+    changes_description = ", ".join(changes)
+
+    return ToolResult(
+        content=f"Successfully updated file '{updated_file.name}': {changes_description}.",
+        structured_content=updated_file.as_flat_dict(),
+    )
+
+
+@dr_mcp_tool(tags={"google", "gdrive", "manage", "access", "acl"})
+async def gdrive_manage_access(
+    *,
+    file_id: Annotated[str, "The ID of the file or folder."],
+    action: Annotated[Literal["add", "update", "remove"], "The operation to perform."],
+    role: Annotated[
+        Literal["reader", "commenter", "writer", "fileOrganizer", "organizer", "owner"] | None,
+        "The access level.",
+    ] = None,
+    email_address: Annotated[
+        str | None, "The email of the user or group (required for 'add')."
+    ] = None,
+    permission_id: Annotated[
+        str | None, "The specific permission ID (required for 'update' or 'remove')."
+    ] = None,
+    transfer_ownership: Annotated[
+        bool, "Whether to transfer ownership (only for 'update' to 'owner' role)."
+    ] = False,
+) -> ToolResult:
+    """
+    Consolidated tool for sharing files and managing permissions.
+    Pushes all logic to the Google Drive API permissions resource (create, update, delete).
+
+    Usage:
+    - Add role: gdrive_manage_access(
+        file_id="SomeFileId",
+        action="add",
+        role="reader",
+        email_address="dummy@user.com"
+    )
+    - Update role: gdrive_manage_access(
+        file_id="SomeFileId",
+        action="update",
+        role="reader",
+        permission_id="SomePermissionId"
+    )
+    - Remove permission: gdrive_manage_access(
+        file_id="SomeFileId",
+        action="remove",
+        permission_id="SomePermissionId"
+    )
+    """
+    if not file_id or not file_id.strip():
+        raise ToolError("Argument validation error: 'file_id' cannot be empty.")
+
+    if action == "add" and not email_address:
+        raise ToolError("'email_address' is required for action 'add'.")
+
+    if action in ("update", "remove") and not permission_id:
+        raise ToolError("'permission_id' is required for action 'update' or 'remove'.")
+
+    if action != "remove" and not role:
+        raise ToolError("'role' is required for action 'add' or 'update'.")
+
+    access_token = await get_gdrive_access_token()
+    if isinstance(access_token, ToolError):
+        raise access_token
+
+    try:
+        async with GoogleDriveClient(access_token) as client:
+            permission_id = await client.manage_access(
+                file_id=file_id,
+                action=action,
+                role=role,
+                email_address=email_address,
+                permission_id=permission_id,
+                transfer_ownership=transfer_ownership,
+            )
+    except GoogleDriveError as e:
+        logger.error(f"Google Drive permission operation failed: {e}")
+        raise ToolError(str(e))
+    except Exception as e:
+        logger.error(f"Unexpected error changing permissions for Google Drive file {file_id}: {e}")
+        raise ToolError(
+            f"Unexpected error changing permissions for Google Drive file {file_id}: {str(e)}"
+        )
+
+    # Build response
+    structured_content = {"affectedFileId": file_id}
+    if action == "add":
+        content = (
+            f"Successfully added role '{role}' for '{email_address}' for gdrive file '{file_id}'. "
+            f"New permission id '{permission_id}'."
+        )
+        structured_content["newPermissionId"] = permission_id
+    elif action == "update":
+        content = (
+            f"Successfully updated role '{role}' (permission '{permission_id}') "
+            f"for gdrive file '{file_id}'."
+        )
+    else:  # action == "remove":
+        content = f"Successfully removed permission '{permission_id}' for gdrive file '{file_id}'."
+
+    return ToolResult(content=content, structured_content=structured_content)
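For orientation, the structured_content payloads returned by the two new tools look roughly like the following; the values are illustrative, and the gdrive_update_metadata payload is simply whatever GoogleDriveFile.as_flat_dict() produces:

# gdrive_update_metadata (rename + star)
{"id": "1AbC...", "name": "New Name.txt", "mimeType": "text/plain", "starred": True}

# gdrive_manage_access with action="add"
{"affectedFileId": "1AbC...", "newPermissionId": "SomePermissionId"}

# gdrive_manage_access with action="update" or "remove"
{"affectedFileId": "1AbC..."}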

datarobot_genai/drmcp/tools/predictive/data.py

@@ -35,9 +35,9 @@ async def upload_dataset_to_ai_catalog(
 ) -> ToolError | ToolResult:
     """Upload a dataset to the DataRobot AI Catalog / Data Registry."""
     if not file_path and not file_url:
-
+        raise ToolError("Either file_path or file_url must be provided.")
     if file_path and file_url:
-
+        raise ToolError("Please provide either file_path or file_url, not both.")

     # Get client
     client = get_sdk_client()
@@ -47,17 +47,17 @@ async def upload_dataset_to_ai_catalog(
         # Does file exist?
         if not os.path.exists(file_path):
             logger.error("File not found: %s", file_path)
-
+            raise ToolError(f"File not found: {file_path}")
         catalog_item = client.Dataset.create_from_file(file_path)
     else:
         # Does URL exist?
         if file_url is None or not is_valid_url(file_url):
             logger.error("Invalid file URL: %s", file_url)
-
+            raise ToolError(f"Invalid file URL: {file_url}")
         catalog_item = client.Dataset.create_from_url(file_url)

     if not catalog_item:
-
+        raise ToolError("Failed to upload dataset.")

     return ToolResult(
         content=f"Successfully uploaded dataset: {catalog_item.id}",

datarobot_genai/drmcp/tools/predictive/model.py

@@ -14,9 +14,12 @@

 import json
 import logging
+from typing import Annotated
 from typing import Any

 from datarobot.models.model import Model
+from fastmcp.exceptions import ToolError
+from fastmcp.tools.tool import ToolResult

 from datarobot_genai.drmcp.core.clients import get_sdk_client
 from datarobot_genai.drmcp.core.mcp_instance import dr_mcp_tool
@@ -50,33 +53,25 @@ class ModelEncoder(json.JSONEncoder):
         return super().default(obj)


-@dr_mcp_tool(tags={"model", "management", "info"})
-async def get_best_model(
-
-
+@dr_mcp_tool(tags={"predictive", "model", "read", "management", "info"})
+async def get_best_model(
+    *,
+    project_id: Annotated[str, "The DataRobot project ID"] | None = None,
+    metric: Annotated[str, "The metric to use for best model selection (e.g., 'AUC', 'LogLoss')"]
+    | None = None,
+) -> ToolError | ToolResult:
+    """Get the best model for a DataRobot project, optionally by a specific metric."""
+    if not project_id:
+        raise ToolError("Project ID must be provided")

-    Args:
-        project_id: The ID of the DataRobot project.
-        metric: (Optional) The metric to use for best model selection (e.g., 'AUC', 'LogLoss').
-
-    Returns
-    -------
-    A formatted string describing the best model.
-
-    Raises
-    ------
-    Exception: If project not found or no models exist in the project.
-    """
     client = get_sdk_client()
     project = client.Project.get(project_id)
     if not project:
-
-        raise Exception(f"Project with ID {project_id} not found.")
+        raise ToolError(f"Project with ID {project_id} not found.")

     leaderboard = project.get_models()
     if not leaderboard:
-
-        raise Exception("No models found for this project.")
+        raise ToolError("No models found for this project.")

     if metric:
         reverse_sort = metric.upper() in [
@@ -98,51 +93,91 @@ async def get_best_model(project_id: str, metric: str | None = None) -> str:
     best_model = leaderboard[0]
     logger.info(f"Found best model {best_model.id} for project {project_id}")

-    # Format the response as a human-readable string
     metric_info = ""
+    metric_value = None
+
     if metric and best_model.metrics and metric in best_model.metrics:
         metric_value = best_model.metrics[metric].get("validation")
         if metric_value is not None:
             metric_info = f" with {metric}: {metric_value:.2f}"

-
-
-
-
-
-
-
+    # Include full metrics in the response
+    best_model_dict = model_to_dict(best_model)
+    best_model_dict["metric"] = metric
+    best_model_dict["metric_value"] = metric_value
+
+    # Format metrics for human-readable content
+    metrics_text = ""
+    if best_model.metrics:
+        metrics_list = []
+        for metric_name, metric_data in best_model.metrics.items():
+            if isinstance(metric_data, dict) and "validation" in metric_data:
+                val = metric_data["validation"]
+                if val is not None:
+                    metrics_list.append(f"{metric_name}: {val:.4f}")
+        if metrics_list:
+            metrics_text = "\nPerformance metrics:\n" + "\n".join(f" - {m}" for m in metrics_list)
+
+    return ToolResult(
+        content=f"Best model: {best_model.model_type}{metric_info}{metrics_text}",
+        structured_content={
+            "project_id": project_id,
+            "best_model": best_model_dict,
+        },
+    )
+
+
+@dr_mcp_tool(tags={"predictive", "model", "read", "scoring", "dataset"})
+async def score_dataset_with_model(
+    *,
+    project_id: Annotated[str, "The DataRobot project ID"] | None = None,
+    model_id: Annotated[str, "The DataRobot model ID"] | None = None,
+    dataset_url: Annotated[str, "The dataset URL"] | None = None,
+) -> ToolError | ToolResult:
+    """Score a dataset using a specific DataRobot model."""
+    if not project_id:
+        raise ToolError("Project ID must be provided")
+    if not model_id:
+        raise ToolError("Model ID must be provided")
+    if not dataset_url:
+        raise ToolError("Dataset URL must be provided")

-    Args:
-        project_id: The ID of the DataRobot project.
-        model_id: The ID of the DataRobot model to use for scoring.
-        dataset_url: The URL to the dataset to score (must be accessible to DataRobot).
-
-    Returns
-    -------
-    A string summary of the scoring job or a meaningful error message.
-    """
     client = get_sdk_client()
     project = client.Project.get(project_id)
     model = client.Model.get(project, model_id)
     job = model.score(dataset_url)
-    logger.info(f"Started scoring job {job.id} for model {model_id}")
-    return f"Scoring job started: {job.id}"
-

-
-
-
-
+    return ToolResult(
+        content=f"Scoring job started: {job.id}",
+        structured_content={
+            "scoring_job_id": job.id,
+            "project_id": project_id,
+            "model_id": model_id,
+            "dataset_url": dataset_url,
+        },
+    )
+
+
+@dr_mcp_tool(tags={"predictive", "model", "read", "management", "list"})
+async def list_models(
+    *,
+    project_id: Annotated[str, "The DataRobot project ID"] | None = None,
+) -> ToolError | ToolResult:
+    """List all models in a project."""
+    if not project_id:
+        raise ToolError("Project ID must be provided")

-    Args:
-        project_id: The ID of the DataRobot project.
-
-    Returns
-    -------
-    A string summary of the models in the project.
-    """
     client = get_sdk_client()
     project = client.Project.get(project_id)
     models = project.get_models()
-
+
+    return ToolResult(
+        content=(
+            f"Found {len(models)} models in project {project_id}, here are the details:\n"
+            f"{json.dumps(models, indent=2, cls=ModelEncoder)}"
+        ),
+        structured_content={
+            "project_id": project_id,
+            "models": [model_to_dict(model) for model in models],
+        },
+    )
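These predictive-model tools now return ToolResult objects with structured_content instead of bare strings. A rough sketch of the payload shapes, with placeholder values; the keys inside each model dict come from model_to_dict, which is not shown in this diff:

# get_best_model -> structured_content:
#   {"project_id": "proj-123", "best_model": {..., "metric": "AUC", "metric_value": 0.87}}
# score_dataset_with_model -> structured_content:
#   {"scoring_job_id": "42", "project_id": "proj-123", "model_id": "mod-456",
#    "dataset_url": "https://example.com/scoring.csv"}
# list_models -> structured_content:
#   {"project_id": "proj-123", "models": [<model_to_dict(m) for each model>]}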

datarobot_genai/drmcp/tools/predictive/project.py

@@ -54,9 +54,9 @@ async def get_project_dataset_by_name(
     The dataset ID and the dataset type (source or prediction) as a string, or an error message.
     """
     if not project_id:
-
+        raise ToolError("Project ID is required.")
     if not dataset_name:
-
+        raise ToolError("Dataset name is required.")

     client = get_sdk_client()
     project = client.Project.get(project_id)