datarobot-genai 0.2.27__py3-none-any.whl → 0.2.28__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- datarobot_genai/drmcp/core/config.py +121 -83
- datarobot_genai/drmcp/core/tool_config.py +17 -9
- datarobot_genai/drmcp/test_utils/tool_base_ete.py +68 -1
- datarobot_genai/drmcp/tools/clients/microsoft_graph.py +479 -0
- datarobot_genai/drmcp/tools/microsoft_graph/__init__.py +13 -0
- datarobot_genai/drmcp/tools/microsoft_graph/tools.py +198 -0
- datarobot_genai/drmcp/tools/predictive/data.py +11 -3
- datarobot_genai/drmcp/tools/predictive/training.py +1 -0
- {datarobot_genai-0.2.27.dist-info → datarobot_genai-0.2.28.dist-info}/METADATA +1 -1
- {datarobot_genai-0.2.27.dist-info → datarobot_genai-0.2.28.dist-info}/RECORD +14 -11
- {datarobot_genai-0.2.27.dist-info → datarobot_genai-0.2.28.dist-info}/WHEEL +0 -0
- {datarobot_genai-0.2.27.dist-info → datarobot_genai-0.2.28.dist-info}/entry_points.txt +0 -0
- {datarobot_genai-0.2.27.dist-info → datarobot_genai-0.2.28.dist-info}/licenses/AUTHORS +0 -0
- {datarobot_genai-0.2.27.dist-info → datarobot_genai-0.2.28.dist-info}/licenses/LICENSE +0 -0
|
@@ -31,6 +31,124 @@ from .constants import DEFAULT_DATAROBOT_ENDPOINT
|
|
|
31
31
|
from .constants import RUNTIME_PARAM_ENV_VAR_NAME_PREFIX
|
|
32
32
|
|
|
33
33
|
|
|
34
|
+
class MCPToolConfig(BaseSettings):
|
|
35
|
+
"""Tool configuration for MCP server."""
|
|
36
|
+
|
|
37
|
+
enable_predictive_tools: bool = Field(
|
|
38
|
+
default=True,
|
|
39
|
+
validation_alias=AliasChoices(
|
|
40
|
+
RUNTIME_PARAM_ENV_VAR_NAME_PREFIX + "ENABLE_PREDICTIVE_TOOLS",
|
|
41
|
+
"ENABLE_PREDICTIVE_TOOLS",
|
|
42
|
+
),
|
|
43
|
+
description="Enable/disable predictive tools",
|
|
44
|
+
)
|
|
45
|
+
|
|
46
|
+
enable_jira_tools: bool = Field(
|
|
47
|
+
default=False,
|
|
48
|
+
validation_alias=AliasChoices(
|
|
49
|
+
RUNTIME_PARAM_ENV_VAR_NAME_PREFIX + "ENABLE_JIRA_TOOLS",
|
|
50
|
+
"ENABLE_JIRA_TOOLS",
|
|
51
|
+
),
|
|
52
|
+
description="Enable/disable Jira tools",
|
|
53
|
+
)
|
|
54
|
+
|
|
55
|
+
enable_confluence_tools: bool = Field(
|
|
56
|
+
default=False,
|
|
57
|
+
validation_alias=AliasChoices(
|
|
58
|
+
RUNTIME_PARAM_ENV_VAR_NAME_PREFIX + "ENABLE_CONFLUENCE_TOOLS",
|
|
59
|
+
"ENABLE_CONFLUENCE_TOOLS",
|
|
60
|
+
),
|
|
61
|
+
description="Enable/disable Confluence tools",
|
|
62
|
+
)
|
|
63
|
+
|
|
64
|
+
enable_gdrive_tools: bool = Field(
|
|
65
|
+
default=False,
|
|
66
|
+
validation_alias=AliasChoices(
|
|
67
|
+
RUNTIME_PARAM_ENV_VAR_NAME_PREFIX + "ENABLE_GDRIVE_TOOLS",
|
|
68
|
+
"ENABLE_GDRIVE_TOOLS",
|
|
69
|
+
),
|
|
70
|
+
description="Enable/disable GDrive tools",
|
|
71
|
+
)
|
|
72
|
+
|
|
73
|
+
enable_microsoft_graph_tools: bool = Field(
|
|
74
|
+
default=False,
|
|
75
|
+
validation_alias=AliasChoices(
|
|
76
|
+
RUNTIME_PARAM_ENV_VAR_NAME_PREFIX + "ENABLE_MICROSOFT_GRAPH_TOOLS",
|
|
77
|
+
"ENABLE_MICROSOFT_GRAPH_TOOLS",
|
|
78
|
+
),
|
|
79
|
+
description="Enable/disable Sharepoint tools",
|
|
80
|
+
)
|
|
81
|
+
|
|
82
|
+
is_atlassian_oauth_provider_configured: bool = Field(
|
|
83
|
+
default=False,
|
|
84
|
+
validation_alias=AliasChoices(
|
|
85
|
+
RUNTIME_PARAM_ENV_VAR_NAME_PREFIX + "IS_ATLASSIAN_OAUTH_PROVIDER_CONFIGURED",
|
|
86
|
+
"IS_ATLASSIAN_OAUTH_PROVIDER_CONFIGURED",
|
|
87
|
+
),
|
|
88
|
+
description="Whether Atlassian OAuth provider is configured for Atlassian integration",
|
|
89
|
+
)
|
|
90
|
+
|
|
91
|
+
@property
|
|
92
|
+
def is_atlassian_oauth_configured(self) -> bool:
|
|
93
|
+
"""Check if Atlassian OAuth is configured via provider flag or environment variables."""
|
|
94
|
+
return self.is_atlassian_oauth_provider_configured or bool(
|
|
95
|
+
os.getenv("ATLASSIAN_CLIENT_ID") and os.getenv("ATLASSIAN_CLIENT_SECRET")
|
|
96
|
+
)
|
|
97
|
+
|
|
98
|
+
is_google_oauth_provider_configured: bool = Field(
|
|
99
|
+
default=False,
|
|
100
|
+
validation_alias=AliasChoices(
|
|
101
|
+
RUNTIME_PARAM_ENV_VAR_NAME_PREFIX + "IS_GOOGLE_OAUTH_PROVIDER_CONFIGURED",
|
|
102
|
+
"IS_GOOGLE_OAUTH_PROVIDER_CONFIGURED",
|
|
103
|
+
),
|
|
104
|
+
description="Whether Google OAuth provider is configured for Google integration",
|
|
105
|
+
)
|
|
106
|
+
|
|
107
|
+
@property
|
|
108
|
+
def is_google_oauth_configured(self) -> bool:
|
|
109
|
+
return self.is_google_oauth_provider_configured or bool(
|
|
110
|
+
os.getenv("GOOGLE_CLIENT_ID") and os.getenv("GOOGLE_CLIENT_SECRET")
|
|
111
|
+
)
|
|
112
|
+
|
|
113
|
+
is_microsoft_oauth_provider_configured: bool = Field(
|
|
114
|
+
default=False,
|
|
115
|
+
validation_alias=AliasChoices(
|
|
116
|
+
RUNTIME_PARAM_ENV_VAR_NAME_PREFIX + "IS_MICROSOFT_OAUTH_PROVIDER_CONFIGURED",
|
|
117
|
+
"IS_MICROSOFT_OAUTH_PROVIDER_CONFIGURED",
|
|
118
|
+
),
|
|
119
|
+
description="Whether Microsoft OAuth provider is configured for Microsoft integration",
|
|
120
|
+
)
|
|
121
|
+
|
|
122
|
+
@property
|
|
123
|
+
def is_microsoft_oauth_configured(self) -> bool:
|
|
124
|
+
return self.is_microsoft_oauth_provider_configured or bool(
|
|
125
|
+
os.getenv("MICROSOFT_CLIENT_ID") and os.getenv("MICROSOFT_CLIENT_SECRET")
|
|
126
|
+
)
|
|
127
|
+
|
|
128
|
+
@field_validator(
|
|
129
|
+
"enable_predictive_tools",
|
|
130
|
+
"enable_jira_tools",
|
|
131
|
+
"enable_confluence_tools",
|
|
132
|
+
"enable_gdrive_tools",
|
|
133
|
+
"enable_microsoft_graph_tools",
|
|
134
|
+
"is_atlassian_oauth_provider_configured",
|
|
135
|
+
"is_google_oauth_provider_configured",
|
|
136
|
+
"is_microsoft_oauth_provider_configured",
|
|
137
|
+
mode="before",
|
|
138
|
+
)
|
|
139
|
+
@classmethod
|
|
140
|
+
def validate_runtime_params(cls, v: Any) -> Any:
|
|
141
|
+
"""Validate runtime parameters."""
|
|
142
|
+
return extract_datarobot_runtime_param_payload(v)
|
|
143
|
+
|
|
144
|
+
model_config = SettingsConfigDict(
|
|
145
|
+
env_file=".env",
|
|
146
|
+
case_sensitive=False,
|
|
147
|
+
env_file_encoding="utf-8",
|
|
148
|
+
extra="ignore",
|
|
149
|
+
)
|
|
150
|
+
|
|
151
|
+
|
|
34
152
|
class MCPServerConfig(BaseSettings):
|
|
35
153
|
"""MCP Server configuration using pydantic settings."""
|
|
36
154
|
|
|
@@ -188,86 +306,11 @@ class MCPServerConfig(BaseSettings):
|
|
|
188
306
|
),
|
|
189
307
|
description="Enable/disable memory management",
|
|
190
308
|
)
|
|
191
|
-
enable_predictive_tools: bool = Field(
|
|
192
|
-
default=True,
|
|
193
|
-
validation_alias=AliasChoices(
|
|
194
|
-
RUNTIME_PARAM_ENV_VAR_NAME_PREFIX + "ENABLE_PREDICTIVE_TOOLS",
|
|
195
|
-
"ENABLE_PREDICTIVE_TOOLS",
|
|
196
|
-
),
|
|
197
|
-
description="Enable/disable predictive tools",
|
|
198
|
-
)
|
|
199
309
|
|
|
200
|
-
|
|
201
|
-
|
|
202
|
-
|
|
203
|
-
validation_alias=AliasChoices(
|
|
204
|
-
RUNTIME_PARAM_ENV_VAR_NAME_PREFIX + "ENABLE_JIRA_TOOLS",
|
|
205
|
-
"ENABLE_JIRA_TOOLS",
|
|
206
|
-
),
|
|
207
|
-
description="Enable/disable Jira tools",
|
|
310
|
+
tool_config: MCPToolConfig = Field(
|
|
311
|
+
default_factory=MCPToolConfig,
|
|
312
|
+
description="Tool configuration",
|
|
208
313
|
)
|
|
209
|
-
is_jira_oauth_provider_configured: bool = Field(
|
|
210
|
-
default=False,
|
|
211
|
-
validation_alias=AliasChoices(
|
|
212
|
-
RUNTIME_PARAM_ENV_VAR_NAME_PREFIX + "IS_JIRA_OAUTH_PROVIDER_CONFIGURED",
|
|
213
|
-
"IS_JIRA_OAUTH_PROVIDER_CONFIGURED",
|
|
214
|
-
),
|
|
215
|
-
description="Whether Jira OAuth provider is configured for Jira integration",
|
|
216
|
-
)
|
|
217
|
-
|
|
218
|
-
@property
|
|
219
|
-
def is_jira_oauth_configured(self) -> bool:
|
|
220
|
-
return self.is_jira_oauth_provider_configured or bool(
|
|
221
|
-
os.getenv("JIRA_CLIENT_ID") and os.getenv("JIRA_CLIENT_SECRET")
|
|
222
|
-
)
|
|
223
|
-
|
|
224
|
-
# Confluence tools
|
|
225
|
-
enable_confluence_tools: bool = Field(
|
|
226
|
-
default=False,
|
|
227
|
-
validation_alias=AliasChoices(
|
|
228
|
-
RUNTIME_PARAM_ENV_VAR_NAME_PREFIX + "ENABLE_CONFLUENCE_TOOLS",
|
|
229
|
-
"ENABLE_CONFLUENCE_TOOLS",
|
|
230
|
-
),
|
|
231
|
-
description="Enable/disable Confluence tools",
|
|
232
|
-
)
|
|
233
|
-
is_confluence_oauth_provider_configured: bool = Field(
|
|
234
|
-
default=False,
|
|
235
|
-
validation_alias=AliasChoices(
|
|
236
|
-
RUNTIME_PARAM_ENV_VAR_NAME_PREFIX + "IS_CONFLUENCE_OAUTH_PROVIDER_CONFIGURED",
|
|
237
|
-
"IS_CONFLUENCE_OAUTH_PROVIDER_CONFIGURED",
|
|
238
|
-
),
|
|
239
|
-
description="Whether Confluence OAuth provider is configured for Confluence integration",
|
|
240
|
-
)
|
|
241
|
-
|
|
242
|
-
@property
|
|
243
|
-
def is_confluence_oauth_configured(self) -> bool:
|
|
244
|
-
return self.is_confluence_oauth_provider_configured or bool(
|
|
245
|
-
os.getenv("CONFLUENCE_CLIENT_ID") and os.getenv("CONFLUENCE_CLIENT_SECRET")
|
|
246
|
-
)
|
|
247
|
-
|
|
248
|
-
# Gdrive tools
|
|
249
|
-
enable_gdrive_tools: bool = Field(
|
|
250
|
-
default=False,
|
|
251
|
-
validation_alias=AliasChoices(
|
|
252
|
-
RUNTIME_PARAM_ENV_VAR_NAME_PREFIX + "ENABLE_GDRIVE_TOOLS",
|
|
253
|
-
"ENABLE_GDRIVE_TOOLS",
|
|
254
|
-
),
|
|
255
|
-
description="Enable/disable GDrive tools",
|
|
256
|
-
)
|
|
257
|
-
is_gdrive_oauth_provider_configured: bool = Field(
|
|
258
|
-
default=False,
|
|
259
|
-
validation_alias=AliasChoices(
|
|
260
|
-
RUNTIME_PARAM_ENV_VAR_NAME_PREFIX + "IS_GDRIVE_OAUTH_PROVIDER_CONFIGURED",
|
|
261
|
-
"IS_GDRIVE_OAUTH_PROVIDER_CONFIGURED",
|
|
262
|
-
),
|
|
263
|
-
description="Whether GDrive OAuth provider is configured for GDrive integration",
|
|
264
|
-
)
|
|
265
|
-
|
|
266
|
-
@property
|
|
267
|
-
def is_gdrive_oauth_configured(self) -> bool:
|
|
268
|
-
return self.is_gdrive_oauth_provider_configured or bool(
|
|
269
|
-
os.getenv("GDRIVE_CLIENT_ID") and os.getenv("GDRIVE_CLIENT_SECRET")
|
|
270
|
-
)
|
|
271
314
|
|
|
272
315
|
@field_validator(
|
|
273
316
|
"otel_attributes",
|
|
@@ -291,11 +334,6 @@ class MCPServerConfig(BaseSettings):
|
|
|
291
334
|
"mcp_server_register_dynamic_tools_on_startup",
|
|
292
335
|
"tool_registration_duplicate_behavior",
|
|
293
336
|
"mcp_server_register_dynamic_prompts_on_startup",
|
|
294
|
-
"enable_predictive_tools",
|
|
295
|
-
"enable_jira_tools",
|
|
296
|
-
"is_jira_oauth_provider_configured",
|
|
297
|
-
"enable_confluence_tools",
|
|
298
|
-
"is_confluence_oauth_provider_configured",
|
|
299
337
|
mode="before",
|
|
300
338
|
)
|
|
301
339
|
@classmethod
|
|
@@ -30,6 +30,7 @@ class ToolType(str, Enum):
|
|
|
30
30
|
JIRA = "jira"
|
|
31
31
|
CONFLUENCE = "confluence"
|
|
32
32
|
GDRIVE = "gdrive"
|
|
33
|
+
MICROSOFT_GRAPH = "microsoft_graph"
|
|
33
34
|
|
|
34
35
|
|
|
35
36
|
class ToolConfig(TypedDict):
|
|
@@ -39,7 +40,7 @@ class ToolConfig(TypedDict):
|
|
|
39
40
|
oauth_check: Callable[["MCPServerConfig"], bool] | None
|
|
40
41
|
directory: str
|
|
41
42
|
package_prefix: str
|
|
42
|
-
config_field_name: str
|
|
43
|
+
config_field_name: str
|
|
43
44
|
|
|
44
45
|
|
|
45
46
|
# Tool configuration registry
|
|
@@ -53,25 +54,32 @@ TOOL_CONFIGS: dict[ToolType, ToolConfig] = {
|
|
|
53
54
|
),
|
|
54
55
|
ToolType.JIRA: ToolConfig(
|
|
55
56
|
name="jira",
|
|
56
|
-
oauth_check=lambda config: config.
|
|
57
|
+
oauth_check=lambda config: config.tool_config.is_atlassian_oauth_configured,
|
|
57
58
|
directory="jira",
|
|
58
59
|
package_prefix="datarobot_genai.drmcp.tools.jira",
|
|
59
60
|
config_field_name="enable_jira_tools",
|
|
60
61
|
),
|
|
61
62
|
ToolType.CONFLUENCE: ToolConfig(
|
|
62
63
|
name="confluence",
|
|
63
|
-
oauth_check=lambda config: config.
|
|
64
|
+
oauth_check=lambda config: config.tool_config.is_atlassian_oauth_configured,
|
|
64
65
|
directory="confluence",
|
|
65
66
|
package_prefix="datarobot_genai.drmcp.tools.confluence",
|
|
66
67
|
config_field_name="enable_confluence_tools",
|
|
67
68
|
),
|
|
68
69
|
ToolType.GDRIVE: ToolConfig(
|
|
69
70
|
name="gdrive",
|
|
70
|
-
oauth_check=lambda config: config.
|
|
71
|
+
oauth_check=lambda config: config.tool_config.is_google_oauth_configured,
|
|
71
72
|
directory="gdrive",
|
|
72
73
|
package_prefix="datarobot_genai.drmcp.tools.gdrive",
|
|
73
74
|
config_field_name="enable_gdrive_tools",
|
|
74
75
|
),
|
|
76
|
+
ToolType.MICROSOFT_GRAPH: ToolConfig(
|
|
77
|
+
name="microsoft_graph",
|
|
78
|
+
oauth_check=lambda config: config.tool_config.is_microsoft_oauth_configured,
|
|
79
|
+
directory="microsoft_graph",
|
|
80
|
+
package_prefix="datarobot_genai.drmcp.tools.microsoft_graph",
|
|
81
|
+
config_field_name="enable_microsoft_graph_tools",
|
|
82
|
+
),
|
|
75
83
|
}
|
|
76
84
|
|
|
77
85
|
|
|
@@ -92,12 +100,12 @@ def is_tool_enabled(tool_type: ToolType, config: "MCPServerConfig") -> bool:
|
|
|
92
100
|
-------
|
|
93
101
|
True if the tool is enabled, False otherwise
|
|
94
102
|
"""
|
|
95
|
-
|
|
96
|
-
enable_config_name =
|
|
97
|
-
is_enabled = getattr(config, enable_config_name)
|
|
103
|
+
tool_config_registry = TOOL_CONFIGS[tool_type]
|
|
104
|
+
enable_config_name = tool_config_registry["config_field_name"]
|
|
105
|
+
is_enabled = getattr(config.tool_config, enable_config_name)
|
|
98
106
|
|
|
99
107
|
# If tool is enabled, check OAuth requirements if needed
|
|
100
|
-
if is_enabled and
|
|
101
|
-
return
|
|
108
|
+
if is_enabled and tool_config_registry["oauth_check"] is not None:
|
|
109
|
+
return tool_config_registry["oauth_check"](config)
|
|
102
110
|
|
|
103
111
|
return is_enabled
|
|
@@ -39,6 +39,54 @@ class ETETestExpectations(BaseModel):
|
|
|
39
39
|
SHOULD_NOT_BE_EMPTY = "SHOULD_NOT_BE_EMPTY"
|
|
40
40
|
|
|
41
41
|
|
|
42
|
+
def _extract_structured_content(tool_result: str) -> Any:
|
|
43
|
+
r"""
|
|
44
|
+
Extract and parse structured content from tool result string.
|
|
45
|
+
|
|
46
|
+
Tool results are formatted as:
|
|
47
|
+
"Content: {content}\nStructured content: {structured_content}"
|
|
48
|
+
|
|
49
|
+
Structured content can be:
|
|
50
|
+
1. A JSON object with a "result" key: {"result": "..."} or {"result": "{...}"}
|
|
51
|
+
2. A direct JSON object: {"key": "value", ...}
|
|
52
|
+
3. Empty or missing
|
|
53
|
+
|
|
54
|
+
Args:
|
|
55
|
+
tool_result: The tool result string
|
|
56
|
+
|
|
57
|
+
Returns
|
|
58
|
+
-------
|
|
59
|
+
Parsed structured content, or None if not available
|
|
60
|
+
"""
|
|
61
|
+
# Early returns for invalid inputs
|
|
62
|
+
if not tool_result or "Structured content: " not in tool_result:
|
|
63
|
+
return None
|
|
64
|
+
|
|
65
|
+
structured_part = tool_result.split("Structured content: ", 1)[1].strip()
|
|
66
|
+
# Parse JSON, return None on failure or empty structured_part
|
|
67
|
+
if not structured_part:
|
|
68
|
+
return None
|
|
69
|
+
try:
|
|
70
|
+
structured_data = json.loads(structured_part)
|
|
71
|
+
except json.JSONDecodeError:
|
|
72
|
+
return None
|
|
73
|
+
|
|
74
|
+
# If structured data has a "result" key, extract and parse that
|
|
75
|
+
if isinstance(structured_data, dict) and "result" in structured_data:
|
|
76
|
+
result_value = structured_data["result"]
|
|
77
|
+
# If result is a JSON string (starts with { or [), try to parse it
|
|
78
|
+
if isinstance(result_value, str) and result_value.strip().startswith(("{", "[")):
|
|
79
|
+
try:
|
|
80
|
+
parsed_result = json.loads(result_value)
|
|
81
|
+
except json.JSONDecodeError:
|
|
82
|
+
parsed_result = result_value # Return string as-is if parsing fails
|
|
83
|
+
return parsed_result
|
|
84
|
+
return result_value # Return result value directly
|
|
85
|
+
|
|
86
|
+
# If it's a direct JSON object (not wrapped in {"result": ...}), return it as-is
|
|
87
|
+
return structured_data
|
|
88
|
+
|
|
89
|
+
|
|
42
90
|
def _check_dict_has_keys(
|
|
43
91
|
expected: dict[str, Any],
|
|
44
92
|
actual: dict[str, Any] | list[dict[str, Any]],
|
|
@@ -130,7 +178,26 @@ class ToolBaseE2E:
|
|
|
130
178
|
f"result, but got: {response.tool_results[i]}"
|
|
131
179
|
)
|
|
132
180
|
else:
|
|
133
|
-
actual_result =
|
|
181
|
+
actual_result = _extract_structured_content(response.tool_results[i])
|
|
182
|
+
if actual_result is None:
|
|
183
|
+
# Fallback: try to parse the entire tool result as JSON
|
|
184
|
+
try:
|
|
185
|
+
actual_result = json.loads(response.tool_results[i])
|
|
186
|
+
except json.JSONDecodeError:
|
|
187
|
+
# If that fails, try to extract content part
|
|
188
|
+
if "Content: " in response.tool_results[i]:
|
|
189
|
+
content_part = response.tool_results[i].split("Content: ", 1)[1]
|
|
190
|
+
if "\nStructured content: " in content_part:
|
|
191
|
+
content_part = content_part.split(
|
|
192
|
+
"\nStructured content: ", 1
|
|
193
|
+
)[0]
|
|
194
|
+
try:
|
|
195
|
+
actual_result = json.loads(content_part.strip())
|
|
196
|
+
except json.JSONDecodeError:
|
|
197
|
+
raise AssertionError(
|
|
198
|
+
f"Could not parse tool result for "
|
|
199
|
+
f"{tool_call.tool_name}: {response.tool_results[i]}"
|
|
200
|
+
)
|
|
134
201
|
assert _check_dict_has_keys(expected_result, actual_result), (
|
|
135
202
|
f"Should have called {tool_call.tool_name} tool with the correct "
|
|
136
203
|
f"result structure, but got: {response.tool_results[i]}"
|
|
@@ -0,0 +1,479 @@
|
|
|
1
|
+
# Copyright 2026 DataRobot, Inc.
|
|
2
|
+
#
|
|
3
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
4
|
+
# you may not use this file except in compliance with the License.
|
|
5
|
+
# You may obtain a copy of the License at
|
|
6
|
+
#
|
|
7
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
8
|
+
#
|
|
9
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
10
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
11
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
12
|
+
# See the License for the specific language governing permissions and
|
|
13
|
+
# limitations under the License.
|
|
14
|
+
|
|
15
|
+
"""Microsoft Graph API Client for searching SharePoint and OneDrive content."""
|
|
16
|
+
|
|
17
|
+
import logging
|
|
18
|
+
from typing import Any
|
|
19
|
+
|
|
20
|
+
import httpx
|
|
21
|
+
from datarobot.auth.datarobot.exceptions import OAuthServiceClientErr
|
|
22
|
+
from fastmcp.exceptions import ToolError
|
|
23
|
+
from pydantic import BaseModel
|
|
24
|
+
from pydantic import Field
|
|
25
|
+
|
|
26
|
+
from datarobot_genai.drmcp.core.auth import get_access_token
|
|
27
|
+
|
|
28
|
+
logger = logging.getLogger(__name__)
|
|
29
|
+
|
|
30
|
+
GRAPH_API_BASE = "https://graph.microsoft.com/v1.0"
|
|
31
|
+
MAX_SEARCH_RESULTS = 250
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
async def get_microsoft_graph_access_token() -> str | ToolError:
|
|
35
|
+
"""
|
|
36
|
+
Get Microsoft Graph OAuth access token with error handling.
|
|
37
|
+
|
|
38
|
+
Returns
|
|
39
|
+
-------
|
|
40
|
+
Access token string on success, ToolError on failure
|
|
41
|
+
|
|
42
|
+
Example:
|
|
43
|
+
```python
|
|
44
|
+
token = await get_microsoft_graph_access_token()
|
|
45
|
+
if isinstance(token, ToolError):
|
|
46
|
+
# Handle error
|
|
47
|
+
return token
|
|
48
|
+
# Use token
|
|
49
|
+
```
|
|
50
|
+
"""
|
|
51
|
+
try:
|
|
52
|
+
access_token = await get_access_token("microsoft")
|
|
53
|
+
if not access_token:
|
|
54
|
+
logger.warning("Empty access token received")
|
|
55
|
+
return ToolError("Received empty access token. Please complete the OAuth flow.")
|
|
56
|
+
return access_token
|
|
57
|
+
except OAuthServiceClientErr as e:
|
|
58
|
+
logger.error(f"OAuth client error: {e}", exc_info=True)
|
|
59
|
+
return ToolError(
|
|
60
|
+
"Could not obtain access token for Microsoft. Make sure the OAuth "
|
|
61
|
+
"permission was granted for the application to act on your behalf."
|
|
62
|
+
)
|
|
63
|
+
except Exception as e:
|
|
64
|
+
error_msg = str(e)
|
|
65
|
+
logger.error(f"Unexpected error obtaining access token: {error_msg}", exc_info=True)
|
|
66
|
+
return ToolError("An unexpected error occurred while obtaining access token for Microsoft.")
|
|
67
|
+
|
|
68
|
+
|
|
69
|
+
class MicrosoftGraphError(Exception):
|
|
70
|
+
"""Exception for Microsoft Graph API errors."""
|
|
71
|
+
|
|
72
|
+
def __init__(self, message: str) -> None:
|
|
73
|
+
super().__init__(message)
|
|
74
|
+
|
|
75
|
+
|
|
76
|
+
class MicrosoftGraphItem(BaseModel):
|
|
77
|
+
"""Represents an item (file or folder) from Microsoft Graph (SharePoint/OneDrive)."""
|
|
78
|
+
|
|
79
|
+
id: str
|
|
80
|
+
name: str
|
|
81
|
+
web_url: str | None = Field(None, alias="webUrl")
|
|
82
|
+
size: int | None = None
|
|
83
|
+
created_datetime: str | None = Field(None, alias="createdDateTime")
|
|
84
|
+
last_modified_datetime: str | None = Field(None, alias="lastModifiedDateTime")
|
|
85
|
+
is_folder: bool = False
|
|
86
|
+
mime_type: str | None = Field(None, alias="mimeType")
|
|
87
|
+
drive_id: str | None = Field(None, alias="driveId")
|
|
88
|
+
parent_folder_id: str | None = Field(None, alias="parentFolderId")
|
|
89
|
+
|
|
90
|
+
model_config = {"populate_by_name": True}
|
|
91
|
+
|
|
92
|
+
@classmethod
|
|
93
|
+
def from_api_response(cls, data: dict[str, Any]) -> "MicrosoftGraphItem":
|
|
94
|
+
"""Create a MicrosoftGraphItem from Microsoft Graph API response data."""
|
|
95
|
+
parent_ref = data.get("parentReference", {})
|
|
96
|
+
return cls(
|
|
97
|
+
id=data.get("id", ""),
|
|
98
|
+
name=data.get("name", "Unknown"),
|
|
99
|
+
web_url=data.get("webUrl"),
|
|
100
|
+
size=data.get("size"),
|
|
101
|
+
created_datetime=data.get("createdDateTime"),
|
|
102
|
+
last_modified_datetime=data.get("lastModifiedDateTime"),
|
|
103
|
+
is_folder="folder" in data,
|
|
104
|
+
mime_type=data.get("file", {}).get("mimeType") if "file" in data else None,
|
|
105
|
+
drive_id=parent_ref.get("driveId"),
|
|
106
|
+
parent_folder_id=parent_ref.get("id"),
|
|
107
|
+
)
|
|
108
|
+
|
|
109
|
+
|
|
110
|
+
class MicrosoftGraphClient:
|
|
111
|
+
"""Client for interacting with Microsoft Graph API to search SharePoint and OneDrive content."""
|
|
112
|
+
|
|
113
|
+
def __init__(self, access_token: str, site_url: str | None = None):
|
|
114
|
+
"""
|
|
115
|
+
Initialize Microsoft Graph client with access token.
|
|
116
|
+
|
|
117
|
+
Args:
|
|
118
|
+
access_token: OAuth access token for Microsoft Graph API
|
|
119
|
+
site_url: Optional SharePoint site URL (e.g., https://tenant.sharepoint.com/sites/sitename)
|
|
120
|
+
If not provided, searches across all accessible sites and OneDrive
|
|
121
|
+
"""
|
|
122
|
+
self.access_token = access_token
|
|
123
|
+
self.site_url = site_url
|
|
124
|
+
self._client = httpx.AsyncClient(
|
|
125
|
+
headers={
|
|
126
|
+
"Authorization": f"Bearer {access_token}",
|
|
127
|
+
"Content-Type": "application/json",
|
|
128
|
+
},
|
|
129
|
+
timeout=30.0,
|
|
130
|
+
)
|
|
131
|
+
self._site_id: str | None = None
|
|
132
|
+
|
|
133
|
+
async def _get_site_id(self) -> str:
|
|
134
|
+
"""Get the SharePoint site ID from the site URL or return root site ID."""
|
|
135
|
+
if self._site_id:
|
|
136
|
+
return self._site_id
|
|
137
|
+
|
|
138
|
+
# If no site_url provided, use root site
|
|
139
|
+
if not self.site_url:
|
|
140
|
+
# Get root site ID
|
|
141
|
+
graph_url = f"{GRAPH_API_BASE}/sites/root"
|
|
142
|
+
try:
|
|
143
|
+
response = await self._client.get(graph_url)
|
|
144
|
+
response.raise_for_status()
|
|
145
|
+
data = response.json()
|
|
146
|
+
self._site_id = data.get("id", "")
|
|
147
|
+
return self._site_id
|
|
148
|
+
except httpx.HTTPStatusError as e:
|
|
149
|
+
raise self._handle_http_error(e, "Failed to get root site ID") from e
|
|
150
|
+
|
|
151
|
+
# Extract site path from URL
|
|
152
|
+
# Format: https://{tenant}.sharepoint.com/sites/{site-name}
|
|
153
|
+
# or: https://{tenant}.sharepoint.com/sites/{site-name}/...
|
|
154
|
+
url_parts = self.site_url.replace("https://", "").split("/")
|
|
155
|
+
if len(url_parts) < 3:
|
|
156
|
+
raise MicrosoftGraphError(f"Invalid SharePoint site URL: {self.site_url}")
|
|
157
|
+
|
|
158
|
+
hostname = url_parts[0] # tenant.sharepoint.com
|
|
159
|
+
site_path = "/".join(url_parts[1:]) # sites/site-name/...
|
|
160
|
+
|
|
161
|
+
# Use Microsoft Graph API to get site ID
|
|
162
|
+
graph_url = f"{GRAPH_API_BASE}/sites/{hostname}:/{site_path}"
|
|
163
|
+
try:
|
|
164
|
+
response = await self._client.get(graph_url)
|
|
165
|
+
response.raise_for_status()
|
|
166
|
+
data = response.json()
|
|
167
|
+
self._site_id = data.get("id", "")
|
|
168
|
+
return self._site_id
|
|
169
|
+
except httpx.HTTPStatusError as e:
|
|
170
|
+
raise self._handle_http_error(
|
|
171
|
+
e, f"Failed to get site ID from URL: {self.site_url}"
|
|
172
|
+
) from e
|
|
173
|
+
|
|
174
|
+
def _handle_http_error(
|
|
175
|
+
self, error: httpx.HTTPStatusError, base_message: str
|
|
176
|
+
) -> MicrosoftGraphError:
|
|
177
|
+
"""Handle HTTP errors and return appropriate MicrosoftGraphError with user-friendly messages.""" # noqa: E501
|
|
178
|
+
error_msg = base_message
|
|
179
|
+
|
|
180
|
+
if error.response.status_code == 403:
|
|
181
|
+
error_msg += (
|
|
182
|
+
": Insufficient permissions. Requires Sites.Read.All or Sites.Search.All "
|
|
183
|
+
"permission."
|
|
184
|
+
)
|
|
185
|
+
elif error.response.status_code == 400:
|
|
186
|
+
try:
|
|
187
|
+
error_data = error.response.json()
|
|
188
|
+
api_message = error_data.get("error", {}).get("message", "Invalid request")
|
|
189
|
+
error_msg += f": {api_message}"
|
|
190
|
+
except Exception:
|
|
191
|
+
error_msg += ": Invalid request parameters."
|
|
192
|
+
else:
|
|
193
|
+
error_msg += f": HTTP {error.response.status_code}"
|
|
194
|
+
|
|
195
|
+
return MicrosoftGraphError(error_msg)
|
|
196
|
+
|
|
197
|
+
async def search_content(
|
|
198
|
+
self,
|
|
199
|
+
search_query: str,
|
|
200
|
+
site_id: str | None = None,
|
|
201
|
+
from_offset: int = 0,
|
|
202
|
+
size: int = 250,
|
|
203
|
+
entity_types: list[str] | None = None,
|
|
204
|
+
filters: list[str] | None = None,
|
|
205
|
+
include_hidden_content: bool = False,
|
|
206
|
+
region: str | None = None,
|
|
207
|
+
) -> list[MicrosoftGraphItem]:
|
|
208
|
+
"""
|
|
209
|
+
Search for content using Microsoft Graph API search.
|
|
210
|
+
|
|
211
|
+
This tool utilizes Microsoft Graph's search engine to locate items across
|
|
212
|
+
SharePoint sites, OneDrive, and other Microsoft 365 services. When a site
|
|
213
|
+
is specified, it searches within that site. Otherwise, it searches across
|
|
214
|
+
all accessible SharePoint sites and OneDrive.
|
|
215
|
+
|
|
216
|
+
Args:
|
|
217
|
+
search_query: The search string to find files, folders, or list items
|
|
218
|
+
site_id: Optional site ID to scope the search. If not provided and site_url
|
|
219
|
+
is set, will use that site. If neither is provided, searches across
|
|
220
|
+
all accessible sites.
|
|
221
|
+
from_offset: The zero-based index of the first result to return (default: 0).
|
|
222
|
+
Use this for pagination - increment by the size value to get the next page.
|
|
223
|
+
size: Maximum number of results to return in this request (default: 250, max: 250).
|
|
224
|
+
The LLM should control pagination by making multiple calls with different
|
|
225
|
+
'from' values (e.g., from=0 size=250, then from=250 size=250, etc.).
|
|
226
|
+
entity_types: Optional list of entity types to search. Valid values:
|
|
227
|
+
"driveItem", "listItem", "site", "list", "drive".
|
|
228
|
+
Default: ["driveItem", "listItem"]
|
|
229
|
+
filters: Optional list of filter expressions (KQL syntax) to refine search results
|
|
230
|
+
include_hidden_content: Whether to include hidden content in search results.
|
|
231
|
+
Only works with delegated permissions, not application
|
|
232
|
+
permissions.
|
|
233
|
+
region: Optional region code for application permissions (e.g., "NAM", "EUR", "APC")
|
|
234
|
+
|
|
235
|
+
Returns
|
|
236
|
+
-------
|
|
237
|
+
List of MicrosoftGraphItem objects matching the search query
|
|
238
|
+
|
|
239
|
+
Raises
|
|
240
|
+
------
|
|
241
|
+
MicrosoftGraphError: If the search fails
|
|
242
|
+
httpx.HTTPStatusError: If the API request fails
|
|
243
|
+
"""
|
|
244
|
+
if not search_query:
|
|
245
|
+
raise MicrosoftGraphError("Search query cannot be empty")
|
|
246
|
+
|
|
247
|
+
# Validate and limit size parameter
|
|
248
|
+
size = min(max(1, size), MAX_SEARCH_RESULTS) # Between 1 and 250
|
|
249
|
+
from_offset = max(0, from_offset) # Must be non-negative
|
|
250
|
+
|
|
251
|
+
# Determine which site to search
|
|
252
|
+
# If site_id is provided, use it directly; otherwise resolve from site_url if set
|
|
253
|
+
if site_id:
|
|
254
|
+
target_site_id = site_id
|
|
255
|
+
elif self.site_url:
|
|
256
|
+
target_site_id = await self._get_site_id()
|
|
257
|
+
else:
|
|
258
|
+
target_site_id = None
|
|
259
|
+
|
|
260
|
+
# Use unified Microsoft Search API for both site-specific and organization-wide search
|
|
261
|
+
# Reference: https://learn.microsoft.com/en-us/graph/api/search-query
|
|
262
|
+
graph_url = f"{GRAPH_API_BASE}/search/query"
|
|
263
|
+
|
|
264
|
+
# Default entity types: driveItem and listItem
|
|
265
|
+
if entity_types is None:
|
|
266
|
+
entity_types = ["driveItem", "listItem"]
|
|
267
|
+
|
|
268
|
+
# Validate entity types
|
|
269
|
+
valid_entity_types = ["driveItem", "listItem", "site", "list", "drive"]
|
|
270
|
+
entity_types = [et for et in entity_types if et in valid_entity_types]
|
|
271
|
+
if not entity_types:
|
|
272
|
+
entity_types = ["driveItem", "listItem"] # Fallback to default
|
|
273
|
+
|
|
274
|
+
# Build search request payload
|
|
275
|
+
# Reference: https://learn.microsoft.com/en-us/graph/search-concept-files
|
|
276
|
+
query_parts = []
|
|
277
|
+
|
|
278
|
+
# If searching within a specific site, add scoping using KQL syntax first
|
|
279
|
+
if target_site_id:
|
|
280
|
+
# Get site details to construct proper scoping query
|
|
281
|
+
try:
|
|
282
|
+
site_info_url = f"{GRAPH_API_BASE}/sites/{target_site_id}"
|
|
283
|
+
site_response = await self._client.get(site_info_url)
|
|
284
|
+
site_response.raise_for_status()
|
|
285
|
+
site_data = site_response.json()
|
|
286
|
+
site_web_url = site_data.get("webUrl", "")
|
|
287
|
+
|
|
288
|
+
# Use KQL to scope search to the specific site
|
|
289
|
+
# Format: path:"{site-url}"
|
|
290
|
+
if site_web_url:
|
|
291
|
+
query_parts.append(f'path:"{site_web_url}"')
|
|
292
|
+
except httpx.HTTPStatusError as e:
|
|
293
|
+
raise self._handle_http_error(e, "Failed to get site details for scoping") from e
|
|
294
|
+
except Exception as e:
|
|
295
|
+
logger.warning(
|
|
296
|
+
f"Could not get site details for scoping, using un-scoped search: {e}"
|
|
297
|
+
)
|
|
298
|
+
# Fall back to un-scoped search if site details can't be retrieved
|
|
299
|
+
|
|
300
|
+
# Add the main search query
|
|
301
|
+
query_parts.append(search_query)
|
|
302
|
+
|
|
303
|
+
# Add filters if provided (using AND operator for proper KQL syntax)
|
|
304
|
+
if filters:
|
|
305
|
+
# Join filters with AND operator for proper KQL syntax
|
|
306
|
+
filter_string = " AND ".join(filters)
|
|
307
|
+
query_parts.append(filter_string)
|
|
308
|
+
|
|
309
|
+
# Combine all query parts with spaces
|
|
310
|
+
query_string = " ".join(query_parts)
|
|
311
|
+
|
|
312
|
+
# Build request payload with from and size parameters
|
|
313
|
+
request_payload = {
|
|
314
|
+
"entityTypes": entity_types,
|
|
315
|
+
"query": {
|
|
316
|
+
"queryString": query_string,
|
|
317
|
+
},
|
|
318
|
+
"from": from_offset,
|
|
319
|
+
"size": size,
|
|
320
|
+
}
|
|
321
|
+
|
|
322
|
+
# Add includeHiddenContent (only works with delegated permissions)
|
|
323
|
+
if include_hidden_content:
|
|
324
|
+
request_payload["includeHiddenContent"] = True
|
|
325
|
+
|
|
326
|
+
# Add region for application permissions
|
|
327
|
+
if region:
|
|
328
|
+
request_payload["region"] = region
|
|
329
|
+
|
|
330
|
+
payload = {"requests": [request_payload]}
|
|
331
|
+
|
|
332
|
+
try:
|
|
333
|
+
response = await self._client.post(graph_url, json=payload)
|
|
334
|
+
response.raise_for_status()
|
|
335
|
+
data = response.json()
|
|
336
|
+
except httpx.HTTPStatusError as e:
|
|
337
|
+
raise self._handle_http_error(e, "Failed to search SharePoint content") from e
|
|
338
|
+
|
|
339
|
+
# Parse the Microsoft Search API response format
|
|
340
|
+
# Reference: https://learn.microsoft.com/en-us/graph/search-concept-files
|
|
341
|
+
results = []
|
|
342
|
+
for request_result in data.get("value", []):
|
|
343
|
+
hits_containers = request_result.get("hitsContainers", [])
|
|
344
|
+
for container in hits_containers:
|
|
345
|
+
hits = container.get("hits", [])
|
|
346
|
+
for hit in hits:
|
|
347
|
+
resource = hit.get("resource", {})
|
|
348
|
+
if not resource:
|
|
349
|
+
continue
|
|
350
|
+
|
|
351
|
+
odata_type = resource.get("@odata.type", "")
|
|
352
|
+
transformed_resource = self._transform_search_resource(resource, odata_type)
|
|
353
|
+
# transformed_resource always returns a dict, so we can process it directly
|
|
354
|
+
results.append(MicrosoftGraphItem.from_api_response(transformed_resource))
|
|
355
|
+
|
|
356
|
+
return results
|
|
357
|
+
|
|
358
|
+
def _transform_search_resource(
|
|
359
|
+
self, resource: dict[str, Any], odata_type: str
|
|
360
|
+
) -> dict[str, Any]:
|
|
361
|
+
"""Transform a search API resource to MicrosoftGraphItem-compatible format."""
|
|
362
|
+
# Preserve original values from resource if they exist, otherwise use defaults
|
|
363
|
+
# This ensures we don't lose data that might be present in the original response
|
|
364
|
+
base_resource = {
|
|
365
|
+
"id": resource.get("id", ""),
|
|
366
|
+
"webUrl": resource.get("webUrl"),
|
|
367
|
+
"createdDateTime": resource.get("createdDateTime"),
|
|
368
|
+
"lastModifiedDateTime": resource.get("lastModifiedDateTime"),
|
|
369
|
+
"size": resource.get("size"),
|
|
370
|
+
"folder": resource.get("folder", {}),
|
|
371
|
+
"file": resource.get("file", {}),
|
|
372
|
+
}
|
|
373
|
+
|
|
374
|
+
parent_ref = resource.get("parentReference", {})
|
|
375
|
+
|
|
376
|
+
if odata_type == "#microsoft.graph.listItem":
|
|
377
|
+
fields = resource.get("fields", {})
|
|
378
|
+
base_resource.update(
|
|
379
|
+
{
|
|
380
|
+
"name": fields.get("Title") or resource.get("name", "Unknown"),
|
|
381
|
+
"parentReference": {
|
|
382
|
+
"driveId": parent_ref.get("driveId"),
|
|
383
|
+
"id": parent_ref.get("id"),
|
|
384
|
+
},
|
|
385
|
+
}
|
|
386
|
+
)
|
|
387
|
+
elif odata_type == "#microsoft.graph.site":
|
|
388
|
+
base_resource.update(
|
|
389
|
+
{
|
|
390
|
+
"name": resource.get("displayName") or resource.get("name", "Unknown"),
|
|
391
|
+
"parentReference": {},
|
|
392
|
+
}
|
|
393
|
+
)
|
|
394
|
+
elif odata_type == "#microsoft.graph.list":
|
|
395
|
+
base_resource.update(
|
|
396
|
+
{
|
|
397
|
+
"name": resource.get("displayName") or resource.get("name", "Unknown"),
|
|
398
|
+
"parentReference": {
|
|
399
|
+
"siteId": parent_ref.get("siteId"),
|
|
400
|
+
},
|
|
401
|
+
}
|
|
402
|
+
)
|
|
403
|
+
elif odata_type == "#microsoft.graph.drive":
|
|
404
|
+
base_resource.update(
|
|
405
|
+
{
|
|
406
|
+
"name": resource.get("name", "Unknown"),
|
|
407
|
+
"parentReference": {
|
|
408
|
+
"siteId": parent_ref.get("siteId"),
|
|
409
|
+
},
|
|
410
|
+
}
|
|
411
|
+
)
|
|
412
|
+
else:
|
|
413
|
+
# Standard driveItem - use resource as-is
|
|
414
|
+
return resource
|
|
415
|
+
|
|
416
|
+
return base_resource
|
|
417
|
+
|
|
418
|
+
    async def __aenter__(self) -> "MicrosoftGraphClient":
        """Async context manager entry.

        Returns the client itself so callers can write
        ``async with MicrosoftGraphClient(...) as client:``.
        """
        return self
|
|
421
|
+
|
|
422
|
+
    async def __aexit__(
        self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: Any
    ) -> None:
        """Async context manager exit.

        Closes the underlying httpx client. Returns None, so any exception
        raised inside the ``async with`` block is propagated, not suppressed.
        """
        await self._client.aclose()
|
|
427
|
+
|
|
428
|
+
|
|
429
|
+
def validate_site_url(site_url: str) -> str | None:
|
|
430
|
+
"""Validate SharePoint site URL and return user-friendly error message if invalid.
|
|
431
|
+
|
|
432
|
+
Args:
|
|
433
|
+
site_url: The SharePoint site URL to validate
|
|
434
|
+
|
|
435
|
+
Returns
|
|
436
|
+
-------
|
|
437
|
+
None if valid, or a user-friendly error message if invalid
|
|
438
|
+
"""
|
|
439
|
+
if not site_url:
|
|
440
|
+
return (
|
|
441
|
+
"SharePoint site URL is required. "
|
|
442
|
+
"Please provide a valid SharePoint site URL (e.g., https://yourtenant.sharepoint.com/sites/yoursite)."
|
|
443
|
+
)
|
|
444
|
+
|
|
445
|
+
site_url = site_url.strip()
|
|
446
|
+
|
|
447
|
+
if not site_url.startswith("https://"):
|
|
448
|
+
return (
|
|
449
|
+
f"Invalid SharePoint site URL: '{site_url}'. "
|
|
450
|
+
"The URL must start with 'https://'. "
|
|
451
|
+
"Example: https://yourtenant.sharepoint.com/sites/yoursite"
|
|
452
|
+
)
|
|
453
|
+
|
|
454
|
+
if "sharepoint.com" not in site_url.lower():
|
|
455
|
+
return (
|
|
456
|
+
f"Invalid SharePoint site URL: '{site_url}'. "
|
|
457
|
+
"The URL must be a SharePoint site URL containing 'sharepoint.com'. "
|
|
458
|
+
"Example: https://yourtenant.sharepoint.com/sites/yoursite"
|
|
459
|
+
)
|
|
460
|
+
|
|
461
|
+
# Check basic URL structure
|
|
462
|
+
url_parts = site_url.replace("https://", "").split("/")
|
|
463
|
+
if len(url_parts) < 1 or not url_parts[0]:
|
|
464
|
+
return (
|
|
465
|
+
f"Invalid SharePoint site URL format: '{site_url}'. "
|
|
466
|
+
"The URL must include a domain name. "
|
|
467
|
+
"Example: https://yourtenant.sharepoint.com/sites/yoursite"
|
|
468
|
+
)
|
|
469
|
+
|
|
470
|
+
# Check if it looks like a valid SharePoint site URL
|
|
471
|
+
domain = url_parts[0]
|
|
472
|
+
if not domain.endswith("sharepoint.com"):
|
|
473
|
+
return (
|
|
474
|
+
f"Invalid SharePoint site URL: '{site_url}'. "
|
|
475
|
+
"The domain must end with 'sharepoint.com'. "
|
|
476
|
+
"Example: https://yourtenant.sharepoint.com/sites/yoursite"
|
|
477
|
+
)
|
|
478
|
+
|
|
479
|
+
return None
|
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
# Copyright 2026 DataRobot, Inc.
|
|
2
|
+
#
|
|
3
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
4
|
+
# you may not use this file except in compliance with the License.
|
|
5
|
+
# You may obtain a copy of the License at
|
|
6
|
+
#
|
|
7
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
8
|
+
#
|
|
9
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
10
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
11
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
12
|
+
# See the License for the specific language governing permissions and
|
|
13
|
+
# limitations under the License.
|
|
@@ -0,0 +1,198 @@
|
|
|
1
|
+
# Copyright 2026 DataRobot, Inc.
|
|
2
|
+
#
|
|
3
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
4
|
+
# you may not use this file except in compliance with the License.
|
|
5
|
+
# You may obtain a copy of the License at
|
|
6
|
+
#
|
|
7
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
8
|
+
#
|
|
9
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
10
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
11
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
12
|
+
# See the License for the specific language governing permissions and
|
|
13
|
+
# limitations under the License.
|
|
14
|
+
|
|
15
|
+
"""Microsoft Graph MCP tools for searching SharePoint and OneDrive content."""
|
|
16
|
+
|
|
17
|
+
import logging
|
|
18
|
+
from typing import Annotated
|
|
19
|
+
|
|
20
|
+
from fastmcp.exceptions import ToolError
|
|
21
|
+
from fastmcp.tools.tool import ToolResult
|
|
22
|
+
|
|
23
|
+
from datarobot_genai.drmcp.core.mcp_instance import dr_mcp_tool
|
|
24
|
+
from datarobot_genai.drmcp.tools.clients.microsoft_graph import MicrosoftGraphClient
|
|
25
|
+
from datarobot_genai.drmcp.tools.clients.microsoft_graph import MicrosoftGraphError
|
|
26
|
+
from datarobot_genai.drmcp.tools.clients.microsoft_graph import get_microsoft_graph_access_token
|
|
27
|
+
from datarobot_genai.drmcp.tools.clients.microsoft_graph import validate_site_url
|
|
28
|
+
|
|
29
|
+
logger = logging.getLogger(__name__)
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
@dr_mcp_tool(
    tags={
        "microsoft",
        "graph api",
        "sharepoint",
        "drive",
        "list",
        "search",
        "files",
        "find",
        "contents",
    }
)
async def microsoft_graph_search_content(
    *,
    search_query: Annotated[str, "The search string to find files, folders, or list items."],
    site_url: Annotated[
        str | None,
        "Optional SharePoint site URL to scope the search "
        "(e.g., https://tenant.sharepoint.com/sites/sitename). "
        "If not provided, searches across all accessible sites.",
    ] = None,
    site_id: Annotated[
        str | None,
        "Optional ID of the site to scope the search. If provided, takes precedence over site_url.",
    ] = None,
    from_offset: Annotated[
        int,
        "The zero-based index of the first result to return. Use this for pagination. "
        "Default: 0 (start from the beginning). To get the next page, increment by the size "
        "value (e.g., first page: from=0 size=250, second page: from=250 size=250, "
        "third page: from=500 size=250).",
    ] = 0,
    size: Annotated[
        int,
        "Maximum number of results to return in this request. Default is 250, max is 250. "
        "The LLM should control pagination by making multiple calls with different 'from' values.",
    ] = 250,
    entity_types: Annotated[
        list[str] | None,
        "Optional list of entity types to search. Valid values: 'driveItem', 'listItem', "
        "'site', 'list', 'drive'. Default: ['driveItem', 'listItem']. "
        "Multiple types can be specified.",
    ] = None,
    filters: Annotated[
        list[str] | None,
        "Optional list of KQL filter expressions to refine search results "
        "(e.g., ['fileType:docx', 'size>1000']).",
    ] = None,
    include_hidden_content: Annotated[
        bool,
        "Whether to include hidden content in search results. Only works with delegated "
        "permissions, not application permissions. Default: False.",
    ] = False,
    region: Annotated[
        str | None,
        "Optional region code for application permissions (e.g., 'NAM', 'EUR', 'APC'). "
        "Required when using application permissions to search SharePoint content in "
        "specific regions.",
    ] = None,
) -> ToolResult | ToolError:
    """
    Search for SharePoint and OneDrive content using Microsoft Graph Search API.

    Search Scope:
    - When site_url or site_id is provided: searches within the specified SharePoint site
    - When neither is provided: searches across all accessible SharePoint sites and OneDrive

    Supported Entity Types:
    - driveItem: Files and folders in document libraries and OneDrive
    - listItem: Items in SharePoint lists
    - site: SharePoint sites
    - list: SharePoint lists
    - drive: Document libraries/drives

    Filtering:
    - Filters use KQL (Keyword Query Language) syntax
    - Multiple filters are combined with AND operators
    - Examples: ['fileType:docx', 'size>1000', 'lastModifiedTime>2024-01-01']
    - Filters are applied in addition to the search query

    Pagination:
    - Controlled via from_offset (zero-based index) and size parameters
    - Maximum size per request: 250 results
    - To paginate: increment from_offset by size value for each subsequent page
    - Example pagination sequence:
      * Page 1: from_offset=0, size=250 (returns results 0-249)
      * Page 2: from_offset=250, size=250 (returns results 250-499)
      * Page 3: from_offset=500, size=250 (returns results 500-749)

    API Reference:
    - Endpoint: POST /search/query
    - Documentation: https://learn.microsoft.com/en-us/graph/api/search-query
    - Search concepts: https://learn.microsoft.com/en-us/graph/search-concept-files

    Permissions:
    - Requires Sites.Read.All or Sites.Search.All permission
    - include_hidden_content only works with delegated permissions
    - region parameter is required for application permissions in multi-region environments
    """
    # An empty query would match nothing meaningful; reject it up front
    # before doing any network work.
    if not search_query:
        raise ToolError("Argument validation error: 'search_query' cannot be empty.")

    # Validate site_url if provided (format/domain checks only; existence of
    # the site is verified later by the Graph API call itself).
    if site_url:
        validation_error = validate_site_url(site_url)
        if validation_error:
            raise ToolError(validation_error)

    # The token helper signals auth failure by RETURNING a ToolError rather
    # than raising one, so check the result explicitly.
    access_token = await get_microsoft_graph_access_token()
    if isinstance(access_token, ToolError):
        raise access_token

    try:
        # Context manager ensures the underlying HTTP client is closed even
        # if the search raises.
        async with MicrosoftGraphClient(access_token=access_token, site_url=site_url) as client:
            items = await client.search_content(
                search_query=search_query,
                site_id=site_id,
                from_offset=from_offset,
                size=size,
                entity_types=entity_types,
                filters=filters,
                include_hidden_content=include_hidden_content,
                region=region,
            )
    except MicrosoftGraphError as e:
        # Known client-side errors already carry a user-friendly message.
        logger.error(f"Microsoft Graph error searching content: {e}")
        raise ToolError(str(e))
    except Exception as e:
        # Anything else is unexpected: log with traceback, surface a generic
        # but informative ToolError to the caller.
        logger.error(f"Unexpected error searching Microsoft Graph content: {e}", exc_info=True)
        raise ToolError(
            f"An unexpected error occurred while searching Microsoft Graph content: {str(e)}"
        )

    # Flatten the MicrosoftGraphItem objects into plain dicts for the
    # structured tool result.
    results = []
    for item in items:
        result_dict = {
            "id": item.id,  # Unique ID of the file, folder, or list item
            "name": item.name,
            "webUrl": item.web_url,
            "size": item.size,
            "createdDateTime": item.created_datetime,
            "lastModifiedDateTime": item.last_modified_datetime,
            "isFolder": item.is_folder,
            "mimeType": item.mime_type,
            # Document library/drive ID (driveId in Microsoft Graph API)
            "documentLibraryId": item.drive_id,
            "parentFolderId": item.parent_folder_id,  # Parent folder ID
        }
        results.append(result_dict)

    n = len(results)
    # Echo the paging parameters back so the LLM can compute the next page.
    return ToolResult(
        content=(
            f"Successfully searched Microsoft Graph and retrieved {n} result(s) for "
            f"'{search_query}' (from={from_offset}, size={size})."
        ),
        structured_content={
            "query": search_query,
            "siteUrl": site_url,
            "siteId": site_id,
            "from": from_offset,
            "size": size,
            "results": results,
            "count": n,
        },
    )
|
|
@@ -12,6 +12,7 @@
|
|
|
12
12
|
# See the License for the specific language governing permissions and
|
|
13
13
|
# limitations under the License.
|
|
14
14
|
|
|
15
|
+
import json
|
|
15
16
|
import logging
|
|
16
17
|
import os
|
|
17
18
|
from typing import Annotated
|
|
@@ -28,6 +29,7 @@ logger = logging.getLogger(__name__)
|
|
|
28
29
|
|
|
29
30
|
@dr_mcp_tool(tags={"predictive", "data", "write", "upload", "catalog"})
|
|
30
31
|
async def upload_dataset_to_ai_catalog(
|
|
32
|
+
*,
|
|
31
33
|
file_path: Annotated[str, "The path to the dataset file to upload."] | None = None,
|
|
32
34
|
file_url: Annotated[str, "The URL to the dataset file to upload."] | None = None,
|
|
33
35
|
) -> ToolError | ToolResult:
|
|
@@ -80,11 +82,17 @@ async def list_ai_catalog_items() -> ToolResult:
|
|
|
80
82
|
structured_content={"datasets": []},
|
|
81
83
|
)
|
|
82
84
|
|
|
85
|
+
datasets_dict = {ds.id: ds.name for ds in datasets}
|
|
86
|
+
datasets_count = len(datasets)
|
|
87
|
+
|
|
83
88
|
return ToolResult(
|
|
84
|
-
content=
|
|
89
|
+
content=(
|
|
90
|
+
f"Found {datasets_count} AI Catalog items, here are the details:\n"
|
|
91
|
+
f"{json.dumps(datasets_dict, indent=2)}"
|
|
92
|
+
),
|
|
85
93
|
structured_content={
|
|
86
|
-
"datasets":
|
|
87
|
-
"count":
|
|
94
|
+
"datasets": datasets_dict,
|
|
95
|
+
"count": datasets_count,
|
|
88
96
|
},
|
|
89
97
|
)
|
|
90
98
|
|
|
@@ -617,6 +617,7 @@ async def get_model_feature_impact(
|
|
|
617
617
|
|
|
618
618
|
@dr_mcp_tool(tags={"predictive", "training", "read", "model", "evaluation"})
|
|
619
619
|
async def get_model_lift_chart(
|
|
620
|
+
*,
|
|
620
621
|
project_id: Annotated[str, "The ID of the DataRobot project"] | None = None,
|
|
621
622
|
model_id: Annotated[str, "The ID of the model to analyze"] | None = None,
|
|
622
623
|
source: Annotated[
|
|
@@ -27,7 +27,7 @@ datarobot_genai/drmcp/server.py,sha256=KE4kjS5f9bfdYftG14HBHrfvxDfCD4pwCXePfvl1O
|
|
|
27
27
|
datarobot_genai/drmcp/core/__init__.py,sha256=y4yapzp3KnFMzSR6HlNDS4uSuyNT7I1iPBvaCLsS0sU,577
|
|
28
28
|
datarobot_genai/drmcp/core/auth.py,sha256=E-5wrGbBFEBlD5377g6Exddrc7HsazamwX8tWr2RLXY,5815
|
|
29
29
|
datarobot_genai/drmcp/core/clients.py,sha256=y-yG8617LbmiZ_L7FWfMrk4WjIekyr76u_Q80aLqGpI,5524
|
|
30
|
-
datarobot_genai/drmcp/core/config.py,sha256=
|
|
30
|
+
datarobot_genai/drmcp/core/config.py,sha256=SWLhVKoqI4vmA-04TFKpKm1_G2yEMEN1e_8cv8d_XRM,13774
|
|
31
31
|
datarobot_genai/drmcp/core/config_utils.py,sha256=U-aieWw7MyP03cGDFIp97JH99ZUfr3vD9uuTzBzxn7w,6428
|
|
32
32
|
datarobot_genai/drmcp/core/constants.py,sha256=lUwoW_PTrbaBGqRJifKqCn3EoFacoEgdO-CpoFVrUoU,739
|
|
33
33
|
datarobot_genai/drmcp/core/credentials.py,sha256=PYEUDNMVw1BoMzZKLkPVTypNkVevEPtmk3scKnE-zYg,6706
|
|
@@ -40,7 +40,7 @@ datarobot_genai/drmcp/core/routes.py,sha256=dqE2M0UzAyyN9vQjlyTjYW4rpju3LT039po5
|
|
|
40
40
|
datarobot_genai/drmcp/core/routes_utils.py,sha256=vSseXWlplMSnRgoJgtP_rHxWSAVYcx_tpTv4lyTpQoc,944
|
|
41
41
|
datarobot_genai/drmcp/core/server_life_cycle.py,sha256=WKGJWGxalvqxupzJ2y67Kklc_9PgpZT0uyjlv_sr5wc,3419
|
|
42
42
|
datarobot_genai/drmcp/core/telemetry.py,sha256=NEkSTC1w6uQgtukLHI-sWvR4EMgInysgATcvfQ5CplM,15378
|
|
43
|
-
datarobot_genai/drmcp/core/tool_config.py,sha256=
|
|
43
|
+
datarobot_genai/drmcp/core/tool_config.py,sha256=izUdM6dN3GRBzSBs-OagggM2dX5PGBnDbVv4N5bfWFI,3668
|
|
44
44
|
datarobot_genai/drmcp/core/tool_filter.py,sha256=yKQlEtzyIeXGxZJkHbK36QI19vmgQkvqmfx5cTo2pp4,3156
|
|
45
45
|
datarobot_genai/drmcp/core/utils.py,sha256=EvfpqKZ3tECMoxpIQ_tA_3rOgy6KJEYKC0lWZo_Daag,4517
|
|
46
46
|
datarobot_genai/drmcp/core/dynamic_prompts/__init__.py,sha256=y4yapzp3KnFMzSR6HlNDS4uSuyNT7I1iPBvaCLsS0sU,577
|
|
@@ -72,7 +72,7 @@ datarobot_genai/drmcp/test_utils/mcp_utils_ete.py,sha256=46rH0fYYmUj7ygf968iRbdS
|
|
|
72
72
|
datarobot_genai/drmcp/test_utils/mcp_utils_integration.py,sha256=sHA_BWtpgIAFp9IXiNkUeBartBMjLAauqkV9bYtCr-g,3874
|
|
73
73
|
datarobot_genai/drmcp/test_utils/openai_llm_mcp_client.py,sha256=YgyqHK09MB-PBaqT34heqvmvYYFtLpzzSJt7xuTJmDg,11224
|
|
74
74
|
datarobot_genai/drmcp/test_utils/test_interactive.py,sha256=guXvR8q2H6VUdmvIjEJcElQJCC6lQ-oTrzbD2EkHeCs,8025
|
|
75
|
-
datarobot_genai/drmcp/test_utils/tool_base_ete.py,sha256=
|
|
75
|
+
datarobot_genai/drmcp/test_utils/tool_base_ete.py,sha256=3yMfOsz3LdHYywuE5BhdJDpTUowx37HsFSsMdBTxA80,9337
|
|
76
76
|
datarobot_genai/drmcp/test_utils/utils.py,sha256=esGKFv8aO31-Qg3owayeWp32BYe1CdYOEutjjdbweCw,3048
|
|
77
77
|
datarobot_genai/drmcp/tools/__init__.py,sha256=0kq9vMkF7EBsS6lkEdiLibmUrghTQqosHbZ5k-V9a5g,578
|
|
78
78
|
datarobot_genai/drmcp/tools/clients/__init__.py,sha256=0kq9vMkF7EBsS6lkEdiLibmUrghTQqosHbZ5k-V9a5g,578
|
|
@@ -80,6 +80,7 @@ datarobot_genai/drmcp/tools/clients/atlassian.py,sha256=__M_uz7FrcbKCYRzeMn24DCE
|
|
|
80
80
|
datarobot_genai/drmcp/tools/clients/confluence.py,sha256=h_G0By_kDnJeWDT_d-IREsaZ5-0xB5GoLXOqblYP5MA,20706
|
|
81
81
|
datarobot_genai/drmcp/tools/clients/gdrive.py,sha256=8GztWTdpJ7Ir3NIvIoOHPzDscoR1Ui7Ct2IiKmuUzIc,26012
|
|
82
82
|
datarobot_genai/drmcp/tools/clients/jira.py,sha256=Rm91JAyrNIqxu66-9rU1YqoRXVnWbEy-Ahvy6f6HlVg,9823
|
|
83
|
+
datarobot_genai/drmcp/tools/clients/microsoft_graph.py,sha256=PASGThDPE8zkBZqach8lurJL1y47DWUPLwvf9N6uLGM,19234
|
|
83
84
|
datarobot_genai/drmcp/tools/clients/s3.py,sha256=GmwzvurFdNfvxOooA8g5S4osRysHYU0S9ypg_177Glg,953
|
|
84
85
|
datarobot_genai/drmcp/tools/confluence/__init__.py,sha256=0kq9vMkF7EBsS6lkEdiLibmUrghTQqosHbZ5k-V9a5g,578
|
|
85
86
|
datarobot_genai/drmcp/tools/confluence/tools.py,sha256=_-ws65WLK8KZP_mKkf4yJ7ZunR8qdyoiMwHQX47MSMw,12362
|
|
@@ -87,15 +88,17 @@ datarobot_genai/drmcp/tools/gdrive/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQe
|
|
|
87
88
|
datarobot_genai/drmcp/tools/gdrive/tools.py,sha256=G8LlnGEINZqV83Q-b3ZliWkDjouhbozDam3w6GfA7s0,10711
|
|
88
89
|
datarobot_genai/drmcp/tools/jira/__init__.py,sha256=0kq9vMkF7EBsS6lkEdiLibmUrghTQqosHbZ5k-V9a5g,578
|
|
89
90
|
datarobot_genai/drmcp/tools/jira/tools.py,sha256=dfkqTU2HH-7n44hX80ODFacKq0p0LOchFcZtIIKFNMM,9687
|
|
91
|
+
datarobot_genai/drmcp/tools/microsoft_graph/__init__.py,sha256=CuOaMt1AJo7cHx_GuhO3s_aqxZas_wlDsoBorBsvbeU,577
|
|
92
|
+
datarobot_genai/drmcp/tools/microsoft_graph/tools.py,sha256=zJ-UA1TMhPOYcExvgWv0YBjDsSIDPA-U1SEbBrVfAc8,7744
|
|
90
93
|
datarobot_genai/drmcp/tools/predictive/__init__.py,sha256=WuOHlNNEpEmcF7gVnhckruJRKU2qtmJLE3E7zoCGLDo,1030
|
|
91
|
-
datarobot_genai/drmcp/tools/predictive/data.py,sha256=
|
|
94
|
+
datarobot_genai/drmcp/tools/predictive/data.py,sha256=sSFAmO6x0DSuolw8urhMaOj5PwfUH29oc2mEOZI3YU4,4631
|
|
92
95
|
datarobot_genai/drmcp/tools/predictive/deployment.py,sha256=lm02Ayuo11L1hP41fgi3QpR1Eyty-Wc16rM0c8SgliM,3277
|
|
93
96
|
datarobot_genai/drmcp/tools/predictive/deployment_info.py,sha256=BGEF_dmbxOBJR0n1Tt9TO2-iNTQSBTr-oQUyaxLZ0ZI,15297
|
|
94
97
|
datarobot_genai/drmcp/tools/predictive/model.py,sha256=Yih5-KedJ-1yupPLXCJsCXOdyWWi9pRvgapXDlgXWJA,4891
|
|
95
98
|
datarobot_genai/drmcp/tools/predictive/predict.py,sha256=Qoob2_t2crfWtyPzkXMRz2ITZumnczU6Dq4C7q9RBMI,9370
|
|
96
99
|
datarobot_genai/drmcp/tools/predictive/predict_realtime.py,sha256=urq6rPyZFsAP-bPyclSNzrkvb6FTamdlFau8q0IWWJ0,13472
|
|
97
100
|
datarobot_genai/drmcp/tools/predictive/project.py,sha256=xC52UdYvuFeNZC7Y5MfXcvzTL70WwAacQXESr6rqN6s,3255
|
|
98
|
-
datarobot_genai/drmcp/tools/predictive/training.py,sha256=
|
|
101
|
+
datarobot_genai/drmcp/tools/predictive/training.py,sha256=LzMxbBT8wxKYDrRlVElfmTUrzpmGvwrR-mTGf6YUnIA,23998
|
|
99
102
|
datarobot_genai/langgraph/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
100
103
|
datarobot_genai/langgraph/agent.py,sha256=DRnywmS9KDywyChtuIZZwNKbJs8BpC259EG_kxYbiQ8,15828
|
|
101
104
|
datarobot_genai/langgraph/mcp.py,sha256=iA2_j46mZAaNaL7ntXT-LW6C-NMJkzr3VfKDDfe7mh8,2851
|
|
@@ -110,9 +113,9 @@ datarobot_genai/nat/datarobot_llm_clients.py,sha256=Yu208Ed_p_4P3HdpuM7fYnKcXtim
|
|
|
110
113
|
datarobot_genai/nat/datarobot_llm_providers.py,sha256=aDoQcTeGI-odqydPXEX9OGGNFbzAtpqzTvHHEkmJuEQ,4963
|
|
111
114
|
datarobot_genai/nat/datarobot_mcp_client.py,sha256=35FzilxNp4VqwBYI0NsOc91-xZm1C-AzWqrOdDy962A,9612
|
|
112
115
|
datarobot_genai/nat/helpers.py,sha256=Q7E3ADZdtFfS8E6OQPyw2wgA6laQ58N3bhLj5CBWwJs,3265
|
|
113
|
-
datarobot_genai-0.2.
|
|
114
|
-
datarobot_genai-0.2.
|
|
115
|
-
datarobot_genai-0.2.
|
|
116
|
-
datarobot_genai-0.2.
|
|
117
|
-
datarobot_genai-0.2.
|
|
118
|
-
datarobot_genai-0.2.
|
|
116
|
+
datarobot_genai-0.2.28.dist-info/METADATA,sha256=4BXiWsOeKBshX05-9fayxuNDKjA6ova0H2CbrQILvCQ,6301
|
|
117
|
+
datarobot_genai-0.2.28.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
|
|
118
|
+
datarobot_genai-0.2.28.dist-info/entry_points.txt,sha256=jEW3WxDZ8XIK9-ISmTyt5DbmBb047rFlzQuhY09rGrM,284
|
|
119
|
+
datarobot_genai-0.2.28.dist-info/licenses/AUTHORS,sha256=isJGUXdjq1U7XZ_B_9AH8Qf0u4eX0XyQifJZ_Sxm4sA,80
|
|
120
|
+
datarobot_genai-0.2.28.dist-info/licenses/LICENSE,sha256=U2_VkLIktQoa60Nf6Tbt7E4RMlfhFSjWjcJJfVC-YCE,11341
|
|
121
|
+
datarobot_genai-0.2.28.dist-info/RECORD,,
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|