agentrun-sdk 0.1.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of agentrun-sdk might be problematic. Click here for more details.

Files changed (115) hide show
  1. agentrun_operation_sdk/cli/__init__.py +1 -0
  2. agentrun_operation_sdk/cli/cli.py +19 -0
  3. agentrun_operation_sdk/cli/common.py +21 -0
  4. agentrun_operation_sdk/cli/runtime/__init__.py +1 -0
  5. agentrun_operation_sdk/cli/runtime/commands.py +203 -0
  6. agentrun_operation_sdk/client/client.py +75 -0
  7. agentrun_operation_sdk/operations/runtime/__init__.py +8 -0
  8. agentrun_operation_sdk/operations/runtime/configure.py +101 -0
  9. agentrun_operation_sdk/operations/runtime/launch.py +82 -0
  10. agentrun_operation_sdk/operations/runtime/models.py +31 -0
  11. agentrun_operation_sdk/services/runtime.py +152 -0
  12. agentrun_operation_sdk/utils/logging_config.py +72 -0
  13. agentrun_operation_sdk/utils/runtime/config.py +94 -0
  14. agentrun_operation_sdk/utils/runtime/container.py +280 -0
  15. agentrun_operation_sdk/utils/runtime/entrypoint.py +203 -0
  16. agentrun_operation_sdk/utils/runtime/schema.py +56 -0
  17. agentrun_sdk/__init__.py +7 -0
  18. agentrun_sdk/agent/__init__.py +25 -0
  19. agentrun_sdk/agent/agent.py +696 -0
  20. agentrun_sdk/agent/agent_result.py +46 -0
  21. agentrun_sdk/agent/conversation_manager/__init__.py +26 -0
  22. agentrun_sdk/agent/conversation_manager/conversation_manager.py +88 -0
  23. agentrun_sdk/agent/conversation_manager/null_conversation_manager.py +46 -0
  24. agentrun_sdk/agent/conversation_manager/sliding_window_conversation_manager.py +179 -0
  25. agentrun_sdk/agent/conversation_manager/summarizing_conversation_manager.py +252 -0
  26. agentrun_sdk/agent/state.py +97 -0
  27. agentrun_sdk/event_loop/__init__.py +9 -0
  28. agentrun_sdk/event_loop/event_loop.py +499 -0
  29. agentrun_sdk/event_loop/streaming.py +319 -0
  30. agentrun_sdk/experimental/__init__.py +4 -0
  31. agentrun_sdk/experimental/hooks/__init__.py +15 -0
  32. agentrun_sdk/experimental/hooks/events.py +123 -0
  33. agentrun_sdk/handlers/__init__.py +10 -0
  34. agentrun_sdk/handlers/callback_handler.py +70 -0
  35. agentrun_sdk/hooks/__init__.py +49 -0
  36. agentrun_sdk/hooks/events.py +80 -0
  37. agentrun_sdk/hooks/registry.py +247 -0
  38. agentrun_sdk/models/__init__.py +10 -0
  39. agentrun_sdk/models/anthropic.py +432 -0
  40. agentrun_sdk/models/bedrock.py +649 -0
  41. agentrun_sdk/models/litellm.py +225 -0
  42. agentrun_sdk/models/llamaapi.py +438 -0
  43. agentrun_sdk/models/mistral.py +539 -0
  44. agentrun_sdk/models/model.py +95 -0
  45. agentrun_sdk/models/ollama.py +357 -0
  46. agentrun_sdk/models/openai.py +436 -0
  47. agentrun_sdk/models/sagemaker.py +598 -0
  48. agentrun_sdk/models/writer.py +449 -0
  49. agentrun_sdk/multiagent/__init__.py +22 -0
  50. agentrun_sdk/multiagent/a2a/__init__.py +15 -0
  51. agentrun_sdk/multiagent/a2a/executor.py +148 -0
  52. agentrun_sdk/multiagent/a2a/server.py +252 -0
  53. agentrun_sdk/multiagent/base.py +92 -0
  54. agentrun_sdk/multiagent/graph.py +555 -0
  55. agentrun_sdk/multiagent/swarm.py +656 -0
  56. agentrun_sdk/py.typed +1 -0
  57. agentrun_sdk/session/__init__.py +18 -0
  58. agentrun_sdk/session/file_session_manager.py +216 -0
  59. agentrun_sdk/session/repository_session_manager.py +152 -0
  60. agentrun_sdk/session/s3_session_manager.py +272 -0
  61. agentrun_sdk/session/session_manager.py +73 -0
  62. agentrun_sdk/session/session_repository.py +51 -0
  63. agentrun_sdk/telemetry/__init__.py +21 -0
  64. agentrun_sdk/telemetry/config.py +194 -0
  65. agentrun_sdk/telemetry/metrics.py +476 -0
  66. agentrun_sdk/telemetry/metrics_constants.py +15 -0
  67. agentrun_sdk/telemetry/tracer.py +563 -0
  68. agentrun_sdk/tools/__init__.py +17 -0
  69. agentrun_sdk/tools/decorator.py +569 -0
  70. agentrun_sdk/tools/executor.py +137 -0
  71. agentrun_sdk/tools/loader.py +152 -0
  72. agentrun_sdk/tools/mcp/__init__.py +13 -0
  73. agentrun_sdk/tools/mcp/mcp_agent_tool.py +99 -0
  74. agentrun_sdk/tools/mcp/mcp_client.py +423 -0
  75. agentrun_sdk/tools/mcp/mcp_instrumentation.py +322 -0
  76. agentrun_sdk/tools/mcp/mcp_types.py +63 -0
  77. agentrun_sdk/tools/registry.py +607 -0
  78. agentrun_sdk/tools/structured_output.py +421 -0
  79. agentrun_sdk/tools/tools.py +217 -0
  80. agentrun_sdk/tools/watcher.py +136 -0
  81. agentrun_sdk/types/__init__.py +5 -0
  82. agentrun_sdk/types/collections.py +23 -0
  83. agentrun_sdk/types/content.py +188 -0
  84. agentrun_sdk/types/event_loop.py +48 -0
  85. agentrun_sdk/types/exceptions.py +81 -0
  86. agentrun_sdk/types/guardrails.py +254 -0
  87. agentrun_sdk/types/media.py +89 -0
  88. agentrun_sdk/types/session.py +152 -0
  89. agentrun_sdk/types/streaming.py +201 -0
  90. agentrun_sdk/types/tools.py +258 -0
  91. agentrun_sdk/types/traces.py +5 -0
  92. agentrun_sdk-0.1.2.dist-info/METADATA +51 -0
  93. agentrun_sdk-0.1.2.dist-info/RECORD +115 -0
  94. agentrun_sdk-0.1.2.dist-info/WHEEL +5 -0
  95. agentrun_sdk-0.1.2.dist-info/entry_points.txt +2 -0
  96. agentrun_sdk-0.1.2.dist-info/top_level.txt +3 -0
  97. agentrun_wrapper/__init__.py +11 -0
  98. agentrun_wrapper/_utils/__init__.py +6 -0
  99. agentrun_wrapper/_utils/endpoints.py +16 -0
  100. agentrun_wrapper/identity/__init__.py +5 -0
  101. agentrun_wrapper/identity/auth.py +211 -0
  102. agentrun_wrapper/memory/__init__.py +6 -0
  103. agentrun_wrapper/memory/client.py +1697 -0
  104. agentrun_wrapper/memory/constants.py +103 -0
  105. agentrun_wrapper/memory/controlplane.py +626 -0
  106. agentrun_wrapper/py.typed +1 -0
  107. agentrun_wrapper/runtime/__init__.py +13 -0
  108. agentrun_wrapper/runtime/app.py +473 -0
  109. agentrun_wrapper/runtime/context.py +34 -0
  110. agentrun_wrapper/runtime/models.py +25 -0
  111. agentrun_wrapper/services/__init__.py +1 -0
  112. agentrun_wrapper/services/identity.py +192 -0
  113. agentrun_wrapper/tools/__init__.py +6 -0
  114. agentrun_wrapper/tools/browser_client.py +325 -0
  115. agentrun_wrapper/tools/code_interpreter_client.py +186 -0
@@ -0,0 +1,421 @@
1
+ """Tools for converting Pydantic models to Bedrock tools."""
2
+
3
+ from typing import Any, Dict, Optional, Type, Union
4
+
5
+ from pydantic import BaseModel
6
+
7
+ from ..types.tools import ToolSpec
8
+
9
+
10
def _flatten_schema(schema: Dict[str, Any]) -> Dict[str, Any]:
    """Flattens a JSON schema by removing $defs and resolving $ref references.

    Handles required vs optional fields properly: a property only remains in
    the output ``required`` list if processing did not make it nullable.

    Args:
        schema: The JSON schema to flatten

    Returns:
        Flattened JSON schema

    Raises:
        ValueError: If no required properties remain after processing, which
            is treated as an unsupported circular reference.
    """
    # Extract required fields list
    required_fields = schema.get("required", [])

    # Initialize the flattened schema with basic properties
    flattened = {
        "type": schema.get("type", "object"),
        "properties": {},
    }

    # Add title if present
    if "title" in schema:
        flattened["title"] = schema["title"]

    # Add description from schema if present, or use model docstring
    if "description" in schema and schema["description"]:
        flattened["description"] = schema["description"]

    # Process properties
    required_props: list[str] = []
    if "properties" in schema:
        for prop_name, prop_value in schema["properties"].items():
            is_required = prop_name in required_fields

            # If the property already has nested properties (expanded), preserve them
            if "properties" in prop_value:
                # This is an expanded nested schema, preserve its structure
                processed_prop = {
                    "type": prop_value.get("type", "object"),
                    "description": prop_value.get("description", ""),
                    "properties": {},
                }

                # Process each nested property. Use a separate flag here so the
                # outer property's own required-ness (`is_required`) is not
                # clobbered by the nested loop — it is consulted again below.
                for nested_prop_name, nested_prop_value in prop_value["properties"].items():
                    nested_required = "required" in prop_value and nested_prop_name in prop_value["required"]
                    sub_property = _process_property(nested_prop_value, schema.get("$defs", {}), nested_required)
                    processed_prop["properties"][nested_prop_name] = sub_property

                # Copy required fields if present
                if "required" in prop_value:
                    processed_prop["required"] = prop_value["required"]
            else:
                # Process as normal
                processed_prop = _process_property(prop_value, schema.get("$defs", {}), is_required)

            flattened["properties"][prop_name] = processed_prop

            # Track which properties are actually required after processing
            if is_required and "null" not in str(processed_prop.get("type", "")):
                required_props.append(prop_name)

    # Add required fields if any (only those that are truly required after processing).
    # An empty list is treated as a circular reference, which is not supported.
    if len(required_props) > 0:
        flattened["required"] = required_props
    else:
        raise ValueError("Circular reference detected and not supported")

    return flattened
83
+
84
+
85
def _process_property(
    prop: Dict[str, Any],
    defs: Dict[str, Any],
    is_required: bool = False,
    fully_expand: bool = True,
) -> Dict[str, Any]:
    """Process a property in a schema, resolving any references.

    ``anyOf`` unions containing a ``"null"`` member (i.e. ``Optional[...]``)
    are collapsed into a single schema whose ``type`` includes ``"null"``;
    direct ``$ref`` entries are replaced by their resolved definitions; and
    non-required scalar fields get ``"null"`` appended to their type.

    Args:
        prop: The property to process
        defs: The definitions dictionary for resolving references
        is_required: Whether this property is required
        fully_expand: Whether to fully expand nested properties

    Returns:
        Processed property

    Raises:
        ValueError: If a ``$ref`` points at a definition not present in ``defs``.
    """
    result = {}
    is_nullable = False

    # Handle anyOf for optional fields (like Optional[Type])
    if "anyOf" in prop:
        # Check if this is an Optional[...] case (one null, one type)
        null_type = False
        non_null_type = None

        for option in prop["anyOf"]:
            if option.get("type") == "null":
                null_type = True
                is_nullable = True
            elif "$ref" in option:
                ref_path = option["$ref"].split("/")[-1]
                if ref_path in defs:
                    non_null_type = _process_schema_object(defs[ref_path], defs, fully_expand)
                else:
                    # Unknown reference target: fail loudly rather than emit a broken schema
                    raise ValueError(f"Missing reference: {ref_path}")
            else:
                # NOTE: if several non-null options exist, the last one wins
                non_null_type = option

        if null_type and non_null_type:
            # For Optional fields, we mark as nullable but copy all properties from the non-null option
            result = non_null_type.copy() if isinstance(non_null_type, dict) else {}

            # For type, ensure it includes "null"
            if "type" in result and isinstance(result["type"], str):
                result["type"] = [result["type"], "null"]
            elif "type" in result and isinstance(result["type"], list) and "null" not in result["type"]:
                result["type"].append("null")
            elif "type" not in result:
                # Default to object type if not specified
                result["type"] = ["object", "null"]

            # Copy description if available in the property
            if "description" in prop:
                result["description"] = prop["description"]

            # Need to process item refs as well (#337)
            if "items" in result:
                result["items"] = _process_property(result["items"], defs)

            return result

    # Handle direct references
    elif "$ref" in prop:
        # Resolve reference
        ref_path = prop["$ref"].split("/")[-1]
        if ref_path in defs:
            ref_dict = defs[ref_path]
            # Process the referenced object to get a complete schema
            result = _process_schema_object(ref_dict, defs, fully_expand)
        else:
            # Unknown reference target: fail loudly rather than emit a broken schema
            raise ValueError(f"Missing reference: {ref_path}")

    # For regular fields, copy all properties (also reached after a resolved
    # $ref, so sibling keys of the $ref are merged over the resolved schema)
    for key, value in prop.items():
        if key not in ["$ref", "anyOf"]:
            if isinstance(value, dict):
                result[key] = _process_nested_dict(value, defs)
            elif key == "type" and not is_required and not is_nullable:
                # For non-required fields, ensure type is a list with "null"
                if isinstance(value, str):
                    result[key] = [value, "null"]
                elif isinstance(value, list) and "null" not in value:
                    result[key] = value + ["null"]
                else:
                    result[key] = value
            else:
                result[key] = value

    return result
177
+
178
+
179
def _process_schema_object(
    schema_obj: Dict[str, Any], defs: Dict[str, Any], fully_expand: bool = True
) -> Dict[str, Any]:
    """Resolve a schema object (typically from $defs) into a self-contained schema.

    Args:
        schema_obj: The schema object to process
        defs: The definitions dictionary for resolving references
        fully_expand: Whether to fully expand nested properties

    Returns:
        Processed schema object with all properties resolved
    """
    # Carry over every attribute except the ones rebuilt below.
    skipped = ("properties", "required", "$defs")
    result: Dict[str, Any] = {key: value for key, value in schema_obj.items() if key not in skipped}

    if "properties" in schema_obj:
        result["properties"] = {}
        declared_required = schema_obj.get("required", [])
        still_required: list = []

        for name, definition in schema_obj["properties"].items():
            required_here = name in declared_required
            resolved = _process_property(definition, defs, required_here, fully_expand)
            result["properties"][name] = resolved

            # A property stays required only if processing did not make it nullable.
            if required_here and "null" not in str(resolved.get("type", "")):
                still_required.append(name)

        if still_required:
            result["required"] = still_required

    return result
222
+
223
+
224
+ def _process_nested_dict(d: Dict[str, Any], defs: Dict[str, Any]) -> Dict[str, Any]:
225
+ """Recursively processes nested dictionaries and resolves $ref references.
226
+
227
+ Args:
228
+ d: The dictionary to process
229
+ defs: The definitions dictionary for resolving references
230
+
231
+ Returns:
232
+ Processed dictionary
233
+ """
234
+ result: Dict[str, Any] = {}
235
+
236
+ # Handle direct reference
237
+ if "$ref" in d:
238
+ ref_path = d["$ref"].split("/")[-1]
239
+ if ref_path in defs:
240
+ ref_dict = defs[ref_path]
241
+ # Recursively process the referenced object
242
+ return _process_schema_object(ref_dict, defs)
243
+ else:
244
+ # Handle missing reference path gracefully
245
+ raise ValueError(f"Missing reference: {ref_path}")
246
+
247
+ # Process each key-value pair
248
+ for key, value in d.items():
249
+ if key == "$ref":
250
+ # Already handled above
251
+ continue
252
+ elif isinstance(value, dict):
253
+ result[key] = _process_nested_dict(value, defs)
254
+ elif isinstance(value, list):
255
+ # Process lists (like for enum values)
256
+ result[key] = [_process_nested_dict(item, defs) if isinstance(item, dict) else item for item in value]
257
+ else:
258
+ result[key] = value
259
+
260
+ return result
261
+
262
+
263
def convert_pydantic_to_tool_spec(
    model: Type[BaseModel],
    description: Optional[str] = None,
) -> ToolSpec:
    """Converts a Pydantic model to a tool description for the Amazon Bedrock Converse API.

    Handles optional vs. required fields, resolves $refs, and uses docstrings.

    Args:
        model: The Pydantic model class to convert
        description: Optional description of the tool's purpose

    Returns:
        ToolSpec: Dict containing the Bedrock tool specification
    """
    input_schema = model.model_json_schema()

    # Prefer the caller-supplied description; fall back to the model docstring.
    tool_description = description
    if not tool_description and model.__doc__:
        tool_description = model.__doc__.strip()

    # Pull docstrings from referenced models into $defs, then expand nested
    # model properties in place, so flattening sees complete descriptions.
    _process_referenced_models(input_schema, model)
    _expand_nested_properties(input_schema, model)

    # Construct the tool specification from the flattened schema.
    return ToolSpec(
        name=model.__name__,
        description=tool_description or f"{model.__name__} structured output tool",
        inputSchema={"json": _flatten_schema(input_schema)},
    )
306
+
307
+
308
def _expand_nested_properties(schema: Dict[str, Any], model: Type[BaseModel]) -> None:
    """Expand the properties of nested models in the schema to include their full structure.

    This updates the schema in place.

    Args:
        schema: The JSON schema to process
        model: The Pydantic model class
    """
    # Local imports keep this fix self-contained within the block.
    import types
    from typing import get_args, get_origin

    # First, process the properties at this level
    if "properties" not in schema:
        return

    # Iterate over a snapshot so the dict can be mutated while looping.
    for prop_name, prop_info in list(schema["properties"].items()):
        field = model.model_fields.get(prop_name)
        if not field:
            continue

        field_type = field.annotation

        # Unwrap Optional[...] — handle both typing.Union (Optional[X]) and
        # PEP 604 unions (X | None), which have no __origin__ attribute.
        is_optional = False
        union_origins = (Union, getattr(types, "UnionType", None))
        if field_type is not None and get_origin(field_type) in union_origins:
            for arg in get_args(field_type):
                if arg is type(None):
                    is_optional = True
                elif isinstance(arg, type) and issubclass(arg, BaseModel):
                    field_type = arg

        # If this is a BaseModel field, expand its properties with full details
        if isinstance(field_type, type) and issubclass(field_type, BaseModel):
            # Get the nested model's schema with all its properties
            nested_model_schema = field_type.model_json_schema()

            # Create a properly expanded nested object
            expanded_object = {
                "type": ["object", "null"] if is_optional else "object",
                "description": prop_info.get("description", field.description or f"The {prop_name}"),
                "properties": {},
            }

            # Copy all properties from the nested schema
            if "properties" in nested_model_schema:
                expanded_object["properties"] = nested_model_schema["properties"]

            # Copy required fields
            if "required" in nested_model_schema:
                expanded_object["required"] = nested_model_schema["required"]

            # Replace the original property with this expanded version
            schema["properties"][prop_name] = expanded_object
366
+
367
+
368
def _process_referenced_models(schema: Dict[str, Any], model: Type[BaseModel]) -> None:
    """Process referenced models to ensure their docstrings are included.

    This updates the schema in place.

    Args:
        schema: The JSON schema to process
        model: The Pydantic model class
    """
    # Local imports keep this fix self-contained within the block.
    import types
    from typing import get_args, get_origin

    # Only models with references produce a $defs section.
    if "$defs" not in schema:
        return

    # Look through model fields to find referenced models
    for _, field in model.model_fields.items():
        field_type = field.annotation

        # Unwrap Optional[...] by taking the first non-None member — handle both
        # typing.Union (Optional[X]) and PEP 604 unions (X | None), which have
        # no __origin__ attribute.
        union_origins = (Union, getattr(types, "UnionType", None))
        if field_type is not None and get_origin(field_type) in union_origins:
            for arg in get_args(field_type):
                if arg is not type(None):
                    field_type = arg
                    break

        # Check if this is a BaseModel subclass
        if isinstance(field_type, type) and issubclass(field_type, BaseModel):
            # Update $defs with this model's information
            ref_name = field_type.__name__
            ref_def = schema.get("$defs", {}).get(ref_name)
            if ref_def is None:
                continue

            # Add docstring as description if available
            if field_type.__doc__ and not ref_def.get("description"):
                ref_def["description"] = field_type.__doc__.strip()

            # Recursively process properties in the referenced model
            _process_properties(ref_def, field_type)
406
+
407
+
408
+ def _process_properties(schema_def: Dict[str, Any], model: Type[BaseModel]) -> None:
409
+ """Process properties in a schema definition to add descriptions from field metadata.
410
+
411
+ Args:
412
+ schema_def: The schema definition to update
413
+ model: The model class that defines the schema
414
+ """
415
+ if "properties" in schema_def:
416
+ for prop_name, prop_info in schema_def["properties"].items():
417
+ field = model.model_fields.get(prop_name)
418
+
419
+ # Add field description if available and not already set
420
+ if field and field.description and not prop_info.get("description"):
421
+ prop_info["description"] = field.description
@@ -0,0 +1,217 @@
1
+ """Core tool implementations.
2
+
3
+ This module provides the base classes for all tool implementations in the SDK, including function-based tools and
4
+ Python module-based tools, as well as utilities for validating tool uses and normalizing tool schemas.
5
+ """
6
+
7
+ import asyncio
8
+ import inspect
9
+ import logging
10
+ import re
11
+ from typing import Any
12
+
13
+ from typing_extensions import override
14
+
15
+ from ..types.tools import AgentTool, ToolFunc, ToolGenerator, ToolSpec, ToolUse
16
+
17
+ logger = logging.getLogger(__name__)
18
+
19
+
20
class InvalidToolUseNameException(Exception):
    """Raised when a tool use carries a name that fails validation."""
24
+
25
+
26
def validate_tool_use(tool: ToolUse) -> None:
    """Run all validations against a tool use request.

    Args:
        tool: The tool use to validate.
    """
    # Name validation is currently the only check performed.
    validate_tool_use_name(tool)
33
+
34
+
35
def validate_tool_use_name(tool: ToolUse) -> None:
    """Validate the name of a tool use.

    Args:
        tool: The tool use to validate.

    Raises:
        InvalidToolUseNameException: If the tool name is invalid.
    """
    # Typing note: callers may pass a plain dict[str, Any] rather than a ToolUse.
    if "name" not in tool:
        message = "tool name missing"  # type: ignore[unreachable]
        logger.warning(message)
        raise InvalidToolUseNameException(message)

    name = tool["name"]
    max_length = 64

    # Names must be non-empty and limited to alphanumerics, underscore, hyphen.
    if not re.match(r"^[a-zA-Z0-9_\-]{1,}$", name):
        message = f"tool_name=<{name}> | invalid tool name pattern"
        logger.warning(message)
        raise InvalidToolUseNameException(message)

    if len(name) > max_length:
        message = f"tool_name=<{name}>, tool_name_max_length=<{max_length}> | invalid tool name length"
        logger.warning(message)
        raise InvalidToolUseNameException(message)
65
+
66
+
67
+ def _normalize_property(prop_name: str, prop_def: Any) -> dict[str, Any]:
68
+ """Normalize a single property definition.
69
+
70
+ Args:
71
+ prop_name: The name of the property.
72
+ prop_def: The property definition to normalize.
73
+
74
+ Returns:
75
+ The normalized property definition.
76
+ """
77
+ if not isinstance(prop_def, dict):
78
+ return {"type": "string", "description": f"Property {prop_name}"}
79
+
80
+ if prop_def.get("type") == "object" and "properties" in prop_def:
81
+ return normalize_schema(prop_def) # Recursive call
82
+
83
+ # Copy existing property, ensuring defaults
84
+ normalized_prop = prop_def.copy()
85
+
86
+ # It is expected that type and description are already included in referenced $def.
87
+ if "$ref" in normalized_prop:
88
+ return normalized_prop
89
+
90
+ normalized_prop.setdefault("type", "string")
91
+ normalized_prop.setdefault("description", f"Property {prop_name}")
92
+ return normalized_prop
93
+
94
+
95
def normalize_schema(schema: dict[str, Any]) -> dict[str, Any]:
    """Normalize a JSON schema to match expectations.

    Recursively processes nested objects to preserve the complete schema
    structure, copying first and then normalizing so no original schema
    properties are lost.

    Args:
        schema: The schema to normalize.

    Returns:
        The normalized schema.
    """
    # Shallow-copy to keep every existing top-level property.
    normalized = schema.copy()

    # Guarantee the essential top-level shape.
    normalized.setdefault("type", "object")
    normalized.setdefault("properties", {})
    normalized.setdefault("required", [])

    # Normalize each property in place, recursing into nested objects.
    props = normalized["properties"]
    for name, definition in props.items():
        props[name] = _normalize_property(name, definition)

    return normalized
122
+
123
+
124
def normalize_tool_spec(tool_spec: ToolSpec) -> ToolSpec:
    """Normalize a complete tool specification by transforming its inputSchema.

    Args:
        tool_spec: The tool specification to normalize.

    Returns:
        The normalized tool specification. The caller's spec is left unmodified.
    """
    normalized = tool_spec.copy()

    # Handle inputSchema
    input_schema = normalized.get("inputSchema")
    if isinstance(input_schema, dict):
        if "json" in input_schema:
            # Already in the wrapped format: rebuild the wrapper dict rather than
            # assigning into it, because tool_spec.copy() is shallow and the
            # inner dict is shared with the caller's tool_spec — mutating it
            # would silently alter the caller's data.
            normalized["inputSchema"] = {**input_schema, "json": normalize_schema(input_schema["json"])}
        else:
            # Convert direct schema to proper format
            normalized["inputSchema"] = {"json": normalize_schema(input_schema)}

    return normalized
146
+
147
+
148
class PythonAgentTool(AgentTool):
    """Tool implementation for Python-based tools.

    Wraps a plain Python callable (sync or async) so it can be executed
    through the SDK's tool interface.
    """

    _tool_name: str
    _tool_spec: ToolSpec
    _tool_func: ToolFunc

    def __init__(self, tool_name: str, tool_spec: ToolSpec, tool_func: ToolFunc) -> None:
        """Initialize a Python-based tool.

        Args:
            tool_name: Unique identifier for the tool.
            tool_spec: Tool specification defining parameters and behavior.
            tool_func: Python function to execute when the tool is invoked.
        """
        super().__init__()
        self._tool_name = tool_name
        self._tool_spec = tool_spec
        self._tool_func = tool_func

    @property
    def tool_name(self) -> str:
        """The unique name of this tool.

        Returns:
            The name of the tool.
        """
        return self._tool_name

    @property
    def tool_spec(self) -> ToolSpec:
        """The specification describing this tool's parameters and behavior.

        Returns:
            The tool specification.
        """
        return self._tool_spec

    @property
    def tool_type(self) -> str:
        """The implementation-type identifier for this tool.

        Returns:
            "python".
        """
        return "python"

    @override
    async def stream(self, tool_use: ToolUse, invocation_state: dict[str, Any], **kwargs: Any) -> ToolGenerator:
        """Run the wrapped Python function for the given tool use request.

        Args:
            tool_use: The tool use request.
            invocation_state: Context for the tool invocation, including agent state.
            **kwargs: Additional keyword arguments for future extensibility.

        Yields:
            Tool events with the last being the tool result.
        """
        func = self._tool_func
        if inspect.iscoroutinefunction(func):
            # Async tools run directly on the event loop.
            yield await func(tool_use, **invocation_state)
        else:
            # Sync tools are offloaded to a worker thread so they don't block the loop.
            yield await asyncio.to_thread(func, tool_use, **invocation_state)