datarobot-genai 0.2.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- datarobot_genai/__init__.py +19 -0
- datarobot_genai/core/__init__.py +0 -0
- datarobot_genai/core/agents/__init__.py +43 -0
- datarobot_genai/core/agents/base.py +195 -0
- datarobot_genai/core/chat/__init__.py +19 -0
- datarobot_genai/core/chat/auth.py +146 -0
- datarobot_genai/core/chat/client.py +178 -0
- datarobot_genai/core/chat/responses.py +297 -0
- datarobot_genai/core/cli/__init__.py +18 -0
- datarobot_genai/core/cli/agent_environment.py +47 -0
- datarobot_genai/core/cli/agent_kernel.py +211 -0
- datarobot_genai/core/custom_model.py +141 -0
- datarobot_genai/core/mcp/__init__.py +0 -0
- datarobot_genai/core/mcp/common.py +218 -0
- datarobot_genai/core/telemetry_agent.py +126 -0
- datarobot_genai/core/utils/__init__.py +3 -0
- datarobot_genai/core/utils/auth.py +234 -0
- datarobot_genai/core/utils/urls.py +64 -0
- datarobot_genai/crewai/__init__.py +24 -0
- datarobot_genai/crewai/agent.py +42 -0
- datarobot_genai/crewai/base.py +159 -0
- datarobot_genai/crewai/events.py +117 -0
- datarobot_genai/crewai/mcp.py +59 -0
- datarobot_genai/drmcp/__init__.py +78 -0
- datarobot_genai/drmcp/core/__init__.py +13 -0
- datarobot_genai/drmcp/core/auth.py +165 -0
- datarobot_genai/drmcp/core/clients.py +180 -0
- datarobot_genai/drmcp/core/config.py +250 -0
- datarobot_genai/drmcp/core/config_utils.py +174 -0
- datarobot_genai/drmcp/core/constants.py +18 -0
- datarobot_genai/drmcp/core/credentials.py +190 -0
- datarobot_genai/drmcp/core/dr_mcp_server.py +316 -0
- datarobot_genai/drmcp/core/dr_mcp_server_logo.py +136 -0
- datarobot_genai/drmcp/core/dynamic_prompts/__init__.py +13 -0
- datarobot_genai/drmcp/core/dynamic_prompts/controllers.py +130 -0
- datarobot_genai/drmcp/core/dynamic_prompts/dr_lib.py +128 -0
- datarobot_genai/drmcp/core/dynamic_prompts/register.py +206 -0
- datarobot_genai/drmcp/core/dynamic_prompts/utils.py +33 -0
- datarobot_genai/drmcp/core/dynamic_tools/__init__.py +14 -0
- datarobot_genai/drmcp/core/dynamic_tools/deployment/__init__.py +0 -0
- datarobot_genai/drmcp/core/dynamic_tools/deployment/adapters/__init__.py +14 -0
- datarobot_genai/drmcp/core/dynamic_tools/deployment/adapters/base.py +72 -0
- datarobot_genai/drmcp/core/dynamic_tools/deployment/adapters/default.py +82 -0
- datarobot_genai/drmcp/core/dynamic_tools/deployment/adapters/drum.py +238 -0
- datarobot_genai/drmcp/core/dynamic_tools/deployment/config.py +228 -0
- datarobot_genai/drmcp/core/dynamic_tools/deployment/controllers.py +63 -0
- datarobot_genai/drmcp/core/dynamic_tools/deployment/metadata.py +162 -0
- datarobot_genai/drmcp/core/dynamic_tools/deployment/register.py +87 -0
- datarobot_genai/drmcp/core/dynamic_tools/deployment/schemas/drum_agentic_fallback_schema.json +36 -0
- datarobot_genai/drmcp/core/dynamic_tools/deployment/schemas/drum_prediction_fallback_schema.json +10 -0
- datarobot_genai/drmcp/core/dynamic_tools/register.py +254 -0
- datarobot_genai/drmcp/core/dynamic_tools/schema.py +532 -0
- datarobot_genai/drmcp/core/exceptions.py +25 -0
- datarobot_genai/drmcp/core/logging.py +98 -0
- datarobot_genai/drmcp/core/mcp_instance.py +542 -0
- datarobot_genai/drmcp/core/mcp_server_tools.py +129 -0
- datarobot_genai/drmcp/core/memory_management/__init__.py +13 -0
- datarobot_genai/drmcp/core/memory_management/manager.py +820 -0
- datarobot_genai/drmcp/core/memory_management/memory_tools.py +201 -0
- datarobot_genai/drmcp/core/routes.py +436 -0
- datarobot_genai/drmcp/core/routes_utils.py +30 -0
- datarobot_genai/drmcp/core/server_life_cycle.py +107 -0
- datarobot_genai/drmcp/core/telemetry.py +424 -0
- datarobot_genai/drmcp/core/tool_filter.py +108 -0
- datarobot_genai/drmcp/core/utils.py +131 -0
- datarobot_genai/drmcp/server.py +19 -0
- datarobot_genai/drmcp/test_utils/__init__.py +13 -0
- datarobot_genai/drmcp/test_utils/integration_mcp_server.py +102 -0
- datarobot_genai/drmcp/test_utils/mcp_utils_ete.py +96 -0
- datarobot_genai/drmcp/test_utils/mcp_utils_integration.py +94 -0
- datarobot_genai/drmcp/test_utils/openai_llm_mcp_client.py +234 -0
- datarobot_genai/drmcp/test_utils/tool_base_ete.py +151 -0
- datarobot_genai/drmcp/test_utils/utils.py +91 -0
- datarobot_genai/drmcp/tools/__init__.py +14 -0
- datarobot_genai/drmcp/tools/predictive/__init__.py +27 -0
- datarobot_genai/drmcp/tools/predictive/data.py +97 -0
- datarobot_genai/drmcp/tools/predictive/deployment.py +91 -0
- datarobot_genai/drmcp/tools/predictive/deployment_info.py +392 -0
- datarobot_genai/drmcp/tools/predictive/model.py +148 -0
- datarobot_genai/drmcp/tools/predictive/predict.py +254 -0
- datarobot_genai/drmcp/tools/predictive/predict_realtime.py +307 -0
- datarobot_genai/drmcp/tools/predictive/project.py +72 -0
- datarobot_genai/drmcp/tools/predictive/training.py +651 -0
- datarobot_genai/langgraph/__init__.py +0 -0
- datarobot_genai/langgraph/agent.py +341 -0
- datarobot_genai/langgraph/mcp.py +73 -0
- datarobot_genai/llama_index/__init__.py +16 -0
- datarobot_genai/llama_index/agent.py +50 -0
- datarobot_genai/llama_index/base.py +299 -0
- datarobot_genai/llama_index/mcp.py +79 -0
- datarobot_genai/nat/__init__.py +0 -0
- datarobot_genai/nat/agent.py +258 -0
- datarobot_genai/nat/datarobot_llm_clients.py +249 -0
- datarobot_genai/nat/datarobot_llm_providers.py +130 -0
- datarobot_genai/py.typed +0 -0
- datarobot_genai-0.2.0.dist-info/METADATA +139 -0
- datarobot_genai-0.2.0.dist-info/RECORD +101 -0
- datarobot_genai-0.2.0.dist-info/WHEEL +4 -0
- datarobot_genai-0.2.0.dist-info/entry_points.txt +3 -0
- datarobot_genai-0.2.0.dist-info/licenses/AUTHORS +2 -0
- datarobot_genai-0.2.0.dist-info/licenses/LICENSE +201 -0
|
@@ -0,0 +1,532 @@
|
|
|
1
|
+
# Copyright 2025 DataRobot, Inc.
|
|
2
|
+
#
|
|
3
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
4
|
+
# you may not use this file except in compliance with the License.
|
|
5
|
+
# You may obtain a copy of the License at
|
|
6
|
+
#
|
|
7
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
8
|
+
#
|
|
9
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
10
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
11
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
12
|
+
# See the License for the specific language governing permissions and
|
|
13
|
+
# limitations under the License.
|
|
14
|
+
|
|
15
|
+
from dataclasses import dataclass
|
|
16
|
+
from typing import Any
|
|
17
|
+
from typing import Union
|
|
18
|
+
|
|
19
|
+
from pydantic import BaseModel
|
|
20
|
+
from pydantic import Field
|
|
21
|
+
from pydantic import create_model
|
|
22
|
+
from pydantic.fields import FieldInfo
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
class SchemaValidationError(Exception):
    """Raised when an input/tool schema violates depth or complexity constraints."""
|
29
|
+
|
|
30
|
+
|
|
31
|
+
@dataclass(frozen=True)
class PropertyConfig:
    """Configuration for top-level input schema property types.

    Each instance describes how one top-level key of a tool's input schema
    (e.g. ``path_params``, ``json``) is converted into a Pydantic model and
    which shapes are permitted for it.
    """

    # Name used for the generated Pydantic sub-model (e.g. "PathParams").
    model_name: str
    # Whether nested objects/arrays are allowed inside this property's schema.
    allow_nested: bool
    # Primitive JSON types accepted at the top level of this property;
    # an empty set means only object schemas are accepted.
    allowed_primitive_types: set[str]
    # Fallback description used when the schema provides none.
    default_description: str
|
|
40
|
+
|
|
41
|
+
# Type mapping from JSON Schema to Python types.
# Unknown type strings fall back to Any in json_schema_to_python_type.
_JSON_TYPE_MAPPING: dict[str, type[Any]] = {
    "string": str,
    "integer": int,
    "number": float,
    "boolean": bool,
    "array": list,
    "object": dict,
    "null": type(None),
}

# Property configurations for input schema validation.
# These four keys are the only top-level properties accepted by
# create_input_schema_pydantic_model; they mirror the HTTP request structure.
_PROPERTY_CONFIGS: dict[str, PropertyConfig] = {
    # URL path placeholders — must be a flat object of values.
    "path_params": PropertyConfig(
        model_name="PathParams",
        allow_nested=False,
        allowed_primitive_types=set(),
        default_description="Path params to substitute in endpoint.",
    ),
    # Query string parameters — must be a flat object of values.
    "query_params": PropertyConfig(
        model_name="QueryParams",
        allow_nested=False,
        allowed_primitive_types=set(),
        default_description="Query parameters (?key=value).",
    ),
    # Request body: may be a nested object, or a raw "string" body.
    "data": PropertyConfig(
        model_name="Data",
        allow_nested=True,
        allowed_primitive_types={"string"},
        default_description="Form or raw body data for POST requests.",
    ),
    # JSON request body: nested objects allowed, no top-level primitives.
    "json": PropertyConfig(
        model_name="Json",
        allow_nested=True,
        allowed_primitive_types=set(),
        default_description="JSON body for POST requests.",
    ),
}
|
|
79
|
+
|
|
80
|
+
|
|
81
|
+
def json_schema_to_python_type(schema_type: str | list[str]) -> type[Any]:
    """Convert JSON schema type to Python type annotation.

    Args:
        schema_type: JSON schema type string or list of type strings

    Returns
    -------
    Python type annotation corresponding to the schema type; unknown
    type strings map to ``Any``
    """
    if not isinstance(schema_type, list):
        return _JSON_TYPE_MAPPING.get(schema_type, Any)

    # A list of type strings becomes a Union of the mapped Python types.
    member_types = tuple(json_schema_to_python_type(t) for t in schema_type)
    return Union[member_types]  # type: ignore # noqa: UP007
|
|
96
|
+
|
|
97
|
+
|
|
98
|
+
class SchemaResolver:
|
|
99
|
+
"""Helper class to resolve JSON schema references and handle unions."""
|
|
100
|
+
|
|
101
|
+
def __init__(self, definitions: dict[str, Any] | None = None):
|
|
102
|
+
"""Initialize resolver with definitions.
|
|
103
|
+
|
|
104
|
+
Args:
|
|
105
|
+
definitions: Dictionary of reusable schema definitions ($defs)
|
|
106
|
+
"""
|
|
107
|
+
self.definitions = definitions or {}
|
|
108
|
+
|
|
109
|
+
def resolve_ref(self, ref_path: str) -> dict[str, Any]:
|
|
110
|
+
"""Resolve a JSON Schema $ref reference.
|
|
111
|
+
|
|
112
|
+
Args:
|
|
113
|
+
ref_path: The reference path (e.g., '#/$defs/ModelName')
|
|
114
|
+
|
|
115
|
+
Returns
|
|
116
|
+
-------
|
|
117
|
+
The resolved schema definition
|
|
118
|
+
|
|
119
|
+
Raises
|
|
120
|
+
------
|
|
121
|
+
SchemaValidationError: If the reference cannot be resolved
|
|
122
|
+
"""
|
|
123
|
+
if not ref_path.startswith("#/$defs/"):
|
|
124
|
+
raise SchemaValidationError(
|
|
125
|
+
f"Unsupported reference format: '{ref_path}'. "
|
|
126
|
+
f"Only '#/$defs/...' references are supported."
|
|
127
|
+
)
|
|
128
|
+
|
|
129
|
+
def_name = ref_path.split("/")[-1]
|
|
130
|
+
if def_name not in self.definitions:
|
|
131
|
+
raise SchemaValidationError(f"Reference '{ref_path}' not found in definitions")
|
|
132
|
+
|
|
133
|
+
return self.definitions[def_name] # type: ignore[no-any-return]
|
|
134
|
+
|
|
135
|
+
def resolve_schema(self, schema: dict[str, Any]) -> dict[str, Any]:
|
|
136
|
+
"""Resolve a schema, following $ref if present.
|
|
137
|
+
|
|
138
|
+
Args:
|
|
139
|
+
schema: Schema that may contain a $ref
|
|
140
|
+
|
|
141
|
+
Returns
|
|
142
|
+
-------
|
|
143
|
+
Resolved schema
|
|
144
|
+
"""
|
|
145
|
+
if "$ref" in schema:
|
|
146
|
+
return self.resolve_ref(schema["$ref"])
|
|
147
|
+
return schema
|
|
148
|
+
|
|
149
|
+
def resolve_optional_union(self, schema: dict[str, Any]) -> dict[str, Any]:
|
|
150
|
+
"""Resolve anyOf/oneOf unions for optional fields (Type | None).
|
|
151
|
+
|
|
152
|
+
Args:
|
|
153
|
+
schema: Schema that may contain anyOf/oneOf
|
|
154
|
+
|
|
155
|
+
Returns
|
|
156
|
+
-------
|
|
157
|
+
Resolved schema for the non-null variant, or original schema
|
|
158
|
+
if it's a complex union
|
|
159
|
+
|
|
160
|
+
Raises
|
|
161
|
+
------
|
|
162
|
+
SchemaValidationError: If union is too complex
|
|
163
|
+
"""
|
|
164
|
+
if "anyOf" not in schema and "oneOf" not in schema:
|
|
165
|
+
return schema
|
|
166
|
+
|
|
167
|
+
variants = schema.get("anyOf") or schema.get("oneOf", [])
|
|
168
|
+
non_null_variants = [v for v in variants if v.get("type") != "null"]
|
|
169
|
+
|
|
170
|
+
# If there's exactly one non-null variant, it's a simple optional type (Type | None)
|
|
171
|
+
if len(non_null_variants) == 1:
|
|
172
|
+
return self.resolve_schema(non_null_variants[0])
|
|
173
|
+
|
|
174
|
+
# If there are multiple non-null variants, it's a complex union
|
|
175
|
+
# Return the schema as-is to be handled as Any type
|
|
176
|
+
return schema
|
|
177
|
+
|
|
178
|
+
|
|
179
|
+
def _is_optional_field(field_spec: dict[str, Any]) -> bool:
|
|
180
|
+
"""Check if a field is optional (anyOf/oneOf containing null type).
|
|
181
|
+
|
|
182
|
+
Args:
|
|
183
|
+
field_spec: Field specification to check
|
|
184
|
+
|
|
185
|
+
Returns
|
|
186
|
+
-------
|
|
187
|
+
True if field contains anyOf/oneOf with null type
|
|
188
|
+
"""
|
|
189
|
+
if "anyOf" in field_spec or "oneOf" in field_spec:
|
|
190
|
+
variants = field_spec.get("anyOf") or field_spec.get("oneOf", [])
|
|
191
|
+
return any(v.get("type") == "null" for v in variants)
|
|
192
|
+
return False
|
|
193
|
+
|
|
194
|
+
|
|
195
|
+
class FieldTypeResolver:
    """Resolves Python types for JSON schema fields.

    NOTE: mutually recursive with ``create_schema_model`` — object-typed
    fields are turned into nested Pydantic models via that function.
    """

    def __init__(self, model_name: str, allow_nested: bool, resolver: SchemaResolver):
        """Initialize field type resolver.

        Args:
            model_name: Name of the parent model
            allow_nested: Whether nested objects/arrays are allowed
            resolver: Schema resolver for handling references
        """
        self.model_name = model_name
        self.allow_nested = allow_nested
        self.resolver = resolver

    def _validate_nested_allowed(self, field_name: str, structure_type: str) -> None:
        """Validate that nested structures are allowed.

        Args:
            field_name: Name of the field
            structure_type: Type of structure (e.g., "nested object", "array of objects")

        Raises
        ------
        SchemaValidationError: If nested structures not allowed
        """
        if not self.allow_nested:
            raise SchemaValidationError(
                f"The model '{self.model_name}' supports only flat structures. "
                f"Field '{field_name}' is a {structure_type}, which is not supported. "
                f"Please flatten the schema."
            )

    def _resolve_object_type(self, field_name: str, field_spec: dict[str, Any]) -> type[BaseModel]:
        """Resolve type for object fields.

        Args:
            field_name: Name of the field
            field_spec: Field specification

        Returns
        -------
        Pydantic model type
        """
        self._validate_nested_allowed(field_name, "nested object")
        # Nested model name is parent name + capitalized field name.
        return create_schema_model(
            name=f"{self.model_name}{field_name.capitalize()}",
            schema=field_spec,
            allow_nested=self.allow_nested,
            definitions=self.resolver.definitions,
        )

    def _resolve_array_type(self, field_name: str, field_spec: dict[str, Any]) -> type[Any]:
        """Resolve type for array fields.

        Args:
            field_name: Name of the field
            field_spec: Field specification

        Returns
        -------
        List type annotation
        """
        items_spec = field_spec.get("items", {})

        # Handle complex union types in array items: degrade to a plain list.
        if "anyOf" in items_spec or "oneOf" in items_spec:
            return list

        items_spec = self.resolver.resolve_schema(items_spec)
        items_type = items_spec.get("type") if isinstance(items_spec, dict) else None

        if items_type == "object":
            # Arrays of objects require nesting; item model gets an "Item" suffix.
            self._validate_nested_allowed(field_name, "array of objects")
            item_model = create_schema_model(
                name=f"{self.model_name}{field_name.capitalize()}Item",
                schema=items_spec,
                allow_nested=self.allow_nested,
                definitions=self.resolver.definitions,
            )
            return list[item_model]  # type: ignore[valid-type]

        # Arrays of primitives (or unspecified items) map to a plain list.
        return json_schema_to_python_type("array")

    def _resolve_union_type(self, field_name: str, schema_type: list[str]) -> type[Any]:
        """Resolve type for union fields.

        Args:
            field_name: Name of the field
            schema_type: List of type strings

        Returns
        -------
        Union type annotation

        Raises
        ------
        SchemaValidationError: If union contains complex types
        """
        if any(t in ("object", "array") for t in schema_type):
            raise SchemaValidationError(
                f"Field '{field_name}' contains complex types in a union. "
                "Complex types in unions are not supported. "
                "Use unions of primitive types only (e.g., ['string', 'null'])."
            )
        return json_schema_to_python_type(schema_type)

    def resolve(self, field_name: str, field_spec: dict[str, Any]) -> type[Any]:
        """Resolve the Python type for a JSON schema field.

        Args:
            field_name: Name of the field
            field_spec: JSON schema specification for the field

        Returns
        -------
        Python type annotation for the field
        """
        # Follow a $ref first so the dispatch below sees the concrete schema.
        field_spec = self.resolver.resolve_schema(field_spec)
        schema_type = field_spec.get("type")

        if schema_type == "object":
            return self._resolve_object_type(field_name, field_spec)

        if schema_type == "array":
            return self._resolve_array_type(field_name, field_spec)

        if isinstance(schema_type, list):
            return self._resolve_union_type(field_name, schema_type)

        # Missing "type" falls back to Any.
        return json_schema_to_python_type(schema_type) if schema_type else Any
|
|
326
|
+
|
|
327
|
+
|
|
328
|
+
def create_schema_model(
    name: str,
    schema: dict[str, Any],
    allow_nested: bool,
    definitions: dict[str, Any] | None = None,
) -> type[BaseModel]:
    """Create a Pydantic model from a JSON schema, supporting nested objects.

    Args:
        name: Name for the generated model
        schema: JSON schema defining the model structure
        allow_nested: Whether to allow nested objects and arrays in
            the schema. If False, raises error on complex types.
        definitions: Dictionary of reusable schema definitions ($defs)

    Returns
    -------
    A Pydantic BaseModel class
    """
    if not schema or not schema.get("properties"):
        return create_model(name)

    # Extract definitions if present at this level
    definitions = definitions or schema.get("$defs", {})

    resolver = SchemaResolver(definitions)
    field_resolver = FieldTypeResolver(name, allow_nested, resolver)

    properties = schema.get("properties", {})
    required_fields = set(schema.get("required", []))

    fields: dict[str, Any] = {}
    for field_name, field_spec in properties.items():
        # Check if field is optional (has anyOf/oneOf with null)
        is_optional = _is_optional_field(field_spec)
        is_required = field_name in required_fields

        # Resolve anyOf/oneOf unions to get the actual field spec
        resolved_spec = resolver.resolve_optional_union(field_spec) if is_optional else field_spec

        # Resolve the field type from the resolved spec
        field_type = field_resolver.resolve(field_name, resolved_spec)

        # Determine the default value. Use a presence check rather than
        # `a.get("default") or b.get("default")` so that falsy-but-valid
        # defaults declared in the schema (0, False, "", []) are preserved
        # instead of being silently replaced with None.
        if is_required:
            default = ...  # Ellipsis marks the field as required in Pydantic
        elif "default" in resolved_spec:
            default = resolved_spec["default"]
        else:
            default = field_spec.get("default")

        description = resolved_spec.get("description") or field_spec.get("description")

        # Wrap in Optional if field has anyOf/oneOf with null
        if is_optional and not is_required:
            field_type = field_type | None  # type: ignore[assignment]

        fields[field_name] = (field_type, Field(default, description=description))

    return create_model(name, **fields)
|
|
384
|
+
|
|
385
|
+
|
|
386
|
+
class InputSchemaPropertyHandler:
    """Handles processing of input schema properties with validation.

    One handler instance is created per top-level property (path_params,
    query_params, data, json), parameterized by its ``PropertyConfig``.
    """

    def __init__(self, config: PropertyConfig, resolver: SchemaResolver):
        """Initialize property handler.

        Args:
            config: Configuration for this property type
            resolver: Schema resolver for handling references
        """
        self.config = config
        self.resolver = resolver

    def _validate_primitive_type(self, property_name: str, schema_type: str) -> None:
        """Validate that a primitive type is allowed for this property.

        Args:
            property_name: Name of the property
            schema_type: The schema type to validate

        Raises
        ------
        SchemaValidationError: If the primitive type is not allowed
        """
        if schema_type not in self.config.allowed_primitive_types:
            allowed_types_str = (
                ", ".join(sorted(self.config.allowed_primitive_types))
                if self.config.allowed_primitive_types
                else "none"
            )
            raise SchemaValidationError(
                f"Property '{property_name}' does not support primitive type '{schema_type}'. "
                f"Allowed primitive types: {allowed_types_str}. "
                f"Use an object schema with properties instead."
            )

    def process_property(
        self, property_name: str, property_schema: dict[str, Any]
    ) -> tuple[type[Any], FieldInfo]:
        """Process a property schema and return its field definition.

        Args:
            property_name: Name of the property
            property_schema: Schema definition for the property

        Returns
        -------
        Tuple of (type, FieldInfo) for the property; the type is always
        optional (``X | None``) with a default of None
        """
        # Resolve unions and references
        property_schema = self.resolver.resolve_optional_union(property_schema)
        property_schema = self.resolver.resolve_schema(property_schema)

        description = property_schema.get("description", self.config.default_description)
        schema_type = property_schema.get("type")

        # Handle primitive types
        if schema_type and schema_type != "object":
            self._validate_primitive_type(property_name, schema_type)
            python_type = json_schema_to_python_type(schema_type)
            return python_type | None, Field(None, description=description)  # type: ignore[return-value]

        # Handle object types: a property-local $defs overrides the
        # resolver's definitions for the generated sub-model.
        property_definitions = property_schema.get("$defs", self.resolver.definitions)
        model = create_schema_model(
            self.config.model_name,
            property_schema,
            self.config.allow_nested,
            property_definitions,
        )
        return model | None, Field(None, description=description)  # type: ignore[return-value]
|
|
457
|
+
|
|
458
|
+
|
|
459
|
+
def _validate_input_schema_properties(
|
|
460
|
+
properties: dict[str, Any], expected: set[str], allow_empty: bool
|
|
461
|
+
) -> None:
|
|
462
|
+
"""Validate input schema properties.
|
|
463
|
+
|
|
464
|
+
Args:
|
|
465
|
+
properties: Properties from the input schema
|
|
466
|
+
expected: Expected property names
|
|
467
|
+
allow_empty: Whether empty schemas are allowed
|
|
468
|
+
|
|
469
|
+
Raises
|
|
470
|
+
------
|
|
471
|
+
SchemaValidationError: If validation fails
|
|
472
|
+
"""
|
|
473
|
+
if properties:
|
|
474
|
+
unexpected = set(properties.keys()) - expected
|
|
475
|
+
if unexpected:
|
|
476
|
+
raise SchemaValidationError(
|
|
477
|
+
f"Input schema contains unsupported top-level properties: {unexpected}. "
|
|
478
|
+
f"Please note that top-level properties organize parameters within groups "
|
|
479
|
+
f"corresponding to the HTTP request structure: {expected}. "
|
|
480
|
+
f"Please define parameters within one of these top-level keys."
|
|
481
|
+
)
|
|
482
|
+
elif not allow_empty:
|
|
483
|
+
raise SchemaValidationError(
|
|
484
|
+
f"Input schema must define 'properties' with at least one of: {expected}. "
|
|
485
|
+
f"Empty schemas are disabled by default. "
|
|
486
|
+
f"To enable registration of tools with no input parameters "
|
|
487
|
+
f"(e.g., static endpoints), set environment variable "
|
|
488
|
+
f"MCP_SERVER_TOOL_REGISTRATION_ALLOW_EMPTY_SCHEMA='true'."
|
|
489
|
+
)
|
|
490
|
+
|
|
491
|
+
|
|
492
|
+
def create_input_schema_pydantic_model(
    input_schema: dict[str, Any],
    model_name: str = "InputSchema",
    allow_empty: bool = False,
) -> type[BaseModel]:
    """Create a properly typed ExternalToolRegistrationConfig with validated sub-schemas.

    Args:
        input_schema: JSON schema for input parameters
        model_name: Name for the generated Pydantic model
        allow_empty: Whether to allow empty schema (no properties)

    Returns
    -------
    A Pydantic BaseModel class with properly typed fields

    Raises
    ------
    SchemaValidationError: If schema validation fails
    """
    properties = input_schema.get("properties", {})
    expected_properties = set(_PROPERTY_CONFIGS.keys())
    definitions = input_schema.get("$defs", {})

    # Reject unknown top-level keys (and empty schemas unless allowed).
    _validate_input_schema_properties(properties, expected_properties, allow_empty)

    if not properties:
        return create_model(model_name)

    # Build one typed field per configured top-level property that is present.
    resolver = SchemaResolver(definitions)
    fields: dict[str, Any] = {}

    for property_name, config in _PROPERTY_CONFIGS.items():
        property_schema = properties.get(property_name)
        if not property_schema:
            continue
        handler = InputSchemaPropertyHandler(config, resolver)
        field_type, field = handler.process_property(property_name, property_schema)
        fields[property_name] = (field_type, field)

    return create_model(model_name, **fields)
|
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
# Copyright 2025 DataRobot, Inc.
|
|
2
|
+
#
|
|
3
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
4
|
+
# you may not use this file except in compliance with the License.
|
|
5
|
+
# You may obtain a copy of the License at
|
|
6
|
+
#
|
|
7
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
8
|
+
#
|
|
9
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
10
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
11
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
12
|
+
# See the License for the specific language governing permissions and
|
|
13
|
+
# limitations under the License.
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
# Exception hierarchy for the drmcp package. All three are independent
# Exception subclasses (not rooted at MCPError).
class DynamicToolRegistrationError(Exception):
    """Exception raised for errors in the dynamic tool registration process."""


class DynamicPromptRegistrationError(Exception):
    """Exception raised for errors in the dynamic prompt registration process."""


class MCPError(Exception):
    """Base class for MCP errors.

    NOTE(review): also used by the ``log_execution`` decorator in
    ``logging.py`` to wrap any exception escaping a decorated coroutine.
    """
|
|
@@ -0,0 +1,98 @@
|
|
|
1
|
+
# Copyright 2025 DataRobot, Inc.
|
|
2
|
+
#
|
|
3
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
4
|
+
# you may not use this file except in compliance with the License.
|
|
5
|
+
# You may obtain a copy of the License at
|
|
6
|
+
#
|
|
7
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
8
|
+
#
|
|
9
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
10
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
11
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
12
|
+
# See the License for the specific language governing permissions and
|
|
13
|
+
# limitations under the License.
|
|
14
|
+
|
|
15
|
+
import functools
|
|
16
|
+
import logging
|
|
17
|
+
import re
|
|
18
|
+
import traceback
|
|
19
|
+
from collections.abc import Callable
|
|
20
|
+
from typing import Any
|
|
21
|
+
from typing import TypeVar
|
|
22
|
+
|
|
23
|
+
from .exceptions import MCPError
|
|
24
|
+
|
|
25
|
+
# Secret patterns to redact from logs.
# NOTE(review): the first pattern matches ANY alphanumeric run of 20+
# characters, so long non-secret identifiers (hashes, object IDs) are
# redacted too — presumably an intentional conservative choice; confirm.
SECRET_PATTERNS = [
    r"([a-zA-Z0-9]{20,})",  # Long alphanumeric strings (potential tokens)
    r"(sk-[a-zA-Z0-9]{48})",  # OpenAI-style keys
    r"(AKIA[0-9A-Z]{16})",  # AWS Access Key pattern
]
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
class SecretRedactingFormatter(logging.Formatter):
    """Logging formatter that scrubs likely secrets from rendered records."""

    def format(self, record: logging.LogRecord) -> str:
        """Render the record normally, then redact anything secret-looking."""
        rendered = super().format(record)
        return self._redact_secrets(rendered)

    def _redact_secrets(self, message: str) -> str:
        """Replace every SECRET_PATTERNS match in *message* with a placeholder."""
        redacted = message
        for secret_pattern in SECRET_PATTERNS:
            redacted = re.sub(secret_pattern, "[REDACTED]", redacted)
        return redacted
|
|
45
|
+
|
|
46
|
+
|
|
47
|
+
class MCPLogging:
    """MCP Logging class."""

    def __init__(self, level: str = "INFO") -> None:
        """Initialize the MCP logging."""
        self._level = level
        self._setup_logging()

    def _setup_logging(self) -> None:
        """Configure logging with secret redaction and set log level."""
        root = logging.root

        # Drop any previously installed handlers so ours is the only one.
        root.handlers.clear()

        # Install a console handler whose formatter redacts secrets.
        console = logging.StreamHandler()
        console.setFormatter(
            SecretRedactingFormatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
        )
        root.addHandler(console)
        root.setLevel(self._level)
|
|
67
|
+
|
|
68
|
+
|
|
69
|
+
# Type variable for generic function type
|
|
70
|
+
F = TypeVar("F", bound=Callable[..., Any])
|
|
71
|
+
|
|
72
|
+
|
|
73
|
+
def _log_error(logger: logging.Logger, func_name: str, error: Exception, **kwargs: Any) -> str:
|
|
74
|
+
"""Log errors in a consistent format."""
|
|
75
|
+
error_msg = f"{type(error).__name__}: {str(error)}"
|
|
76
|
+
logger.error(f"Error in {func_name}: {error_msg}")
|
|
77
|
+
logger.debug(f"Full traceback: {traceback.format_exc()}")
|
|
78
|
+
logger.debug(f"Function arguments: {kwargs}")
|
|
79
|
+
return f"Error in {func_name}: {error_msg}"
|
|
80
|
+
|
|
81
|
+
|
|
82
|
+
def log_execution(func: F) -> F:
    """Decorate an async function with entry/exit logging and error wrapping.

    Logs start/completion at INFO and arguments at DEBUG. Any exception is
    logged via ``_log_error`` and re-raised as ``MCPError``.

    Args:
        func: The async callable to wrap.

    Returns
    -------
    The wrapped async callable (same signature as *func*).
    """
    logger = logging.getLogger(func.__module__)

    @functools.wraps(func)
    async def wrapper(*args: Any, **kwargs: Any) -> Any:
        try:
            logger.info(f"Starting {func.__name__}")
            logger.debug(f"Arguments: {args}, {kwargs}")
            result = await func(*args, **kwargs)
            logger.info(f"Completed {func.__name__}")
            return result
        except Exception as e:
            error_msg = _log_error(logger, func.__name__, e, args=args, kwargs=kwargs)
            # Chain the original exception (`from e`) so the root cause is
            # preserved on MCPError.__cause__ instead of being lost.
            raise MCPError(error_msg) from e

    return wrapper  # type: ignore[return-value]
|