lm-deluge 0.0.80__py3-none-any.whl → 0.0.82__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (36)
  1. lm_deluge/__init__.py +1 -2
  2. lm_deluge/api_requests/anthropic.py +2 -1
  3. lm_deluge/api_requests/base.py +13 -0
  4. lm_deluge/api_requests/gemini.py +1 -1
  5. lm_deluge/api_requests/openai.py +3 -2
  6. lm_deluge/client.py +16 -11
  7. lm_deluge/llm_tools/__init__.py +12 -5
  8. lm_deluge/pipelines/__init__.py +11 -0
  9. lm_deluge/{llm_tools → pipelines}/score.py +2 -2
  10. lm_deluge/{llm_tools → pipelines}/translate.py +5 -3
  11. lm_deluge/prompt.py +105 -0
  12. lm_deluge/request_context.py +2 -2
  13. lm_deluge/{tool.py → tool/__init__.py} +531 -314
  14. lm_deluge/tool/prefab/__init__.py +29 -0
  15. lm_deluge/tool/prefab/batch_tool.py +156 -0
  16. lm_deluge/{llm_tools → tool/prefab}/filesystem.py +1 -1
  17. lm_deluge/tool/prefab/memory.py +190 -0
  18. lm_deluge/tool/prefab/otc/__init__.py +165 -0
  19. lm_deluge/tool/prefab/otc/executor.py +281 -0
  20. lm_deluge/tool/prefab/otc/parse.py +188 -0
  21. lm_deluge/{llm_tools → tool/prefab}/sandbox.py +251 -61
  22. lm_deluge/{llm_tools → tool/prefab}/todos.py +1 -1
  23. lm_deluge/tool/prefab/tool_search.py +169 -0
  24. lm_deluge/tracker.py +16 -13
  25. {lm_deluge-0.0.80.dist-info → lm_deluge-0.0.82.dist-info}/METADATA +2 -3
  26. {lm_deluge-0.0.80.dist-info → lm_deluge-0.0.82.dist-info}/RECORD +34 -28
  27. lm_deluge/presets/cerebras.py +0 -17
  28. lm_deluge/presets/meta.py +0 -13
  29. /lm_deluge/{llm_tools → pipelines}/classify.py +0 -0
  30. /lm_deluge/{llm_tools → pipelines}/extract.py +0 -0
  31. /lm_deluge/{llm_tools → pipelines}/locate.py +0 -0
  32. /lm_deluge/{llm_tools → pipelines}/ocr.py +0 -0
  33. /lm_deluge/{llm_tools → tool/prefab}/subagents.py +0 -0
  34. {lm_deluge-0.0.80.dist-info → lm_deluge-0.0.82.dist-info}/WHEEL +0 -0
  35. {lm_deluge-0.0.80.dist-info → lm_deluge-0.0.82.dist-info}/licenses/LICENSE +0 -0
  36. {lm_deluge-0.0.80.dist-info → lm_deluge-0.0.82.dist-info}/top_level.txt +0 -0
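Most of this release is a reorganization: the `llm_tools` helpers split between a new `lm_deluge.pipelines` package (classify, extract, locate, ocr, score, translate) and a new `lm_deluge.tool.prefab` package (filesystem, sandbox, todos, subagents, plus the new batch_tool, memory, otc, and tool_search modules), while `tool.py` becomes the `lm_deluge.tool` package. A hedged sketch of what the moves imply for imports, assuming the relocated modules keep their names (the new `pipelines/__init__.py` and `tool/prefab/__init__.py` may also re-export convenience names):

```python
# 0.0.80 paths (now gone):
#   from lm_deluge.llm_tools import translate, filesystem, sandbox

# 0.0.82 equivalents, inferred from the file moves listed above:
from lm_deluge.pipelines import classify, translate    # prompt-level workflows
from lm_deluge.tool.prefab import filesystem, sandbox  # ready-made agent tools
```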
@@ -1,12 +1,13 @@
 import asyncio
 import inspect
 from concurrent.futures import ThreadPoolExecutor
+from functools import lru_cache
 from typing import (
+    Annotated,
     Any,
     Callable,
     Coroutine,
     Literal,
-    Type,
     TypedDict,
     get_args,
     get_origin,
@@ -15,13 +16,334 @@ from typing import (
 
 from fastmcp import Client  # pip install fastmcp >= 2.0
 from mcp.types import Tool as MCPTool
-from pydantic import BaseModel, Field, field_validator
+from pydantic import BaseModel, Field, TypeAdapter, field_validator
 
 from lm_deluge.image import Image
 from lm_deluge.prompt import Text, ToolResultPart
 
 
-def _python_type_to_json_schema_enhanced(python_type: Any) -> dict[str, Any]:
+@lru_cache(maxsize=1000)
+def _get_cached_typeadapter(cls: type | Callable) -> TypeAdapter:
+    """
+    Cache TypeAdapters since they're expensive to create.
+    For functions, we also handle Annotated[T, "string"] -> Annotated[T, Field(description="string")].
+    """
+    if inspect.isfunction(cls) or inspect.ismethod(cls):
+        if hasattr(cls, "__annotations__") and cls.__annotations__:
+            try:
+                resolved_hints = get_type_hints(cls, include_extras=True)
+            except Exception:
+                resolved_hints = cls.__annotations__
+
+            # Convert Annotated[T, "string"] to Annotated[T, Field(description="string")]
+            processed_hints = {}
+            for name, annotation in resolved_hints.items():
+                if (
+                    get_origin(annotation) is Annotated
+                    and len(get_args(annotation)) == 2
+                    and isinstance(get_args(annotation)[1], str)
+                ):
+                    base_type, description = get_args(annotation)
+                    processed_hints[name] = Annotated[
+                        base_type, Field(description=description)
+                    ]
+                else:
+                    processed_hints[name] = annotation
+
+            # Create new function with processed annotations if changed
+            if processed_hints != cls.__annotations__:
+                import types
+
+                if inspect.ismethod(cls):
+                    actual_func = cls.__func__
+                    code = actual_func.__code__
+                    globals_dict = actual_func.__globals__
+                    name = actual_func.__name__
+                    defaults = actual_func.__defaults__
+                    kwdefaults = actual_func.__kwdefaults__
+                    closure = actual_func.__closure__
+                else:
+                    code = cls.__code__
+                    globals_dict = cls.__globals__
+                    name = cls.__name__
+                    defaults = cls.__defaults__
+                    kwdefaults = cls.__kwdefaults__
+                    closure = cls.__closure__
+
+                new_func = types.FunctionType(
+                    code,
+                    globals_dict,
+                    name,
+                    defaults,
+                    closure,
+                )
+                if kwdefaults is not None:
+                    new_func.__kwdefaults__ = kwdefaults
+                new_func.__dict__.update(cls.__dict__)
+                new_func.__module__ = cls.__module__
+                new_func.__qualname__ = getattr(cls, "__qualname__", cls.__name__)
+                new_func.__annotations__ = processed_hints
+
+                if inspect.ismethod(cls):
+                    new_method = types.MethodType(new_func, cls.__self__)
+                    return TypeAdapter(new_method)
+                else:
+                    return TypeAdapter(new_func)
+
+    return TypeAdapter(cls)
+
+
+def _clean_schema(
+    schema: dict[str, Any],
+    *,
+    prune_titles: bool = True,
+    prune_additional_properties: bool = True,
+) -> dict[str, Any]:
+    """
+    Clean up a JSON schema by removing titles and additionalProperties: false.
+    This is applied recursively to all nested schemas.
+    """
+
+    def _traverse(node: Any) -> Any:
+        if isinstance(node, dict):
+            new_node = {}
+            for key, value in node.items():
+                # Skip titles if pruning
+                if prune_titles and key == "title":
+                    continue
+                # Skip additionalProperties: false if pruning
+                if (
+                    prune_additional_properties
+                    and key == "additionalProperties"
+                    and value is False
+                ):
+                    continue
+                new_node[key] = _traverse(value)
+            return new_node
+        elif isinstance(node, list):
+            return [_traverse(item) for item in node]
+        else:
+            return node
+
+    return _traverse(schema)
+
+
+def _get_type_hint_string(type_annotation: Any) -> str:
+    """
+    Get a readable string representation of a type annotation.
+    Handles generic types, unions, etc.
+    """
+    import re
+
+    # Handle None type
+    if type_annotation is type(None):
+        return "None"
+
+    # For generic types, get_origin and get_args give us the components
+    origin = get_origin(type_annotation)
+    args = get_args(type_annotation)
+
+    if origin is not None and args:
+        # Get the origin name (list, dict, etc.)
+        if hasattr(origin, "__name__"):
+            origin_name = origin.__name__
+        else:
+            origin_name = str(origin).replace("typing.", "")
+
+        # Recursively get arg strings
+        arg_strs = [_get_type_hint_string(arg) for arg in args]
+
+        # Handle Union types (including | syntax)
+        if origin_name in ("Union", "UnionType"):
+            return " | ".join(arg_strs)
+
+        return f"{origin_name}[{', '.join(arg_strs)}]"
+
+    # Try to get __name__ for simple types (int, str, custom classes)
+    if hasattr(type_annotation, "__name__"):
+        return type_annotation.__name__
+
+    # For anything else, use string representation and clean it up
+    type_str = str(type_annotation)
+
+    # Remove module prefixes like '__main__.', 'mymodule.', etc.
+    type_str = re.sub(r"\b\w+\.", "", type_str)
+    # Remove 'typing.' prefix (in case it's still there)
+    type_str = type_str.replace("typing.", "")
+    # Remove 'typing_extensions.' prefix
+    type_str = type_str.replace("typing_extensions.", "")
+
+    return type_str
+
+
+def _format_output_schema_for_description(
+    return_type: Any,
+    output_schema: dict[str, Any] | None,
+) -> str | None:
+    """
+    Format output schema information for inclusion in tool description.
+
+    Returns a string like:
+    "Returns: list[SearchResult]
+
+    SearchResult: {"properties": {...}, "type": "object"}"
+
+    Or None if there's no meaningful output schema to show.
+    """
+    import json
+
+    if return_type is None or return_type is inspect.Parameter.empty:
+        return None
+
+    # Get the type hint string
+    type_hint = _get_type_hint_string(return_type)
+
+    # Start with the return type
+    parts = [f"Returns: {type_hint}"]
+
+    # If there are $defs, include them
+    if output_schema and "$defs" in output_schema:
+        defs = output_schema["$defs"]
+        for def_name, def_schema in defs.items():
+            # Format the schema compactly (single line)
+            schema_str = json.dumps(def_schema, separators=(",", ":"))
+            parts.append(f"{def_name}: {schema_str}")
+
+    return "\n\n".join(parts)
+
+
+def _is_typeddict(cls: Any) -> bool:
+    """Check if a class is a TypedDict."""
+    return (
+        isinstance(cls, type)
+        and hasattr(cls, "__annotations__")
+        and hasattr(cls, "__total__")
+    )
+
+
+def _normalize_parameters(
+    params: Any,
+) -> tuple[dict[str, Any], list[str], dict[str, Any] | None]:
+    """
+    Normalize various parameter input formats to JSON schema components.
+
+    Accepts:
+    - None -> empty schema
+    - dict with "type" keys (already JSON schema) -> pass through
+    - dict mapping names to Python types {name: str, age: int}
+    - dict mapping names to (type, extras) tuples {name: (str, {"description": "..."})}
+    - Pydantic BaseModel class
+    - TypedDict class
+
+    Returns:
+        (properties, required, definitions)
+    """
+
+    def _schema_from_type(annotation: Any) -> dict[str, Any]:
+        """
+        Prefer TypeAdapter-based schemas (handles Union/Optional, Annotated, etc).
+        Fall back to the legacy mapper if TypeAdapter cannot handle the type.
+        """
+        try:
+            ta = TypeAdapter(annotation)
+            return _clean_schema(ta.json_schema())
+        except Exception:
+            return _python_type_to_json_schema(annotation)
+
+    if params is None:
+        return {}, [], None
+
+    # Pydantic model
+    if isinstance(params, type) and issubclass(params, BaseModel):
+        schema = params.model_json_schema()
+        schema = _clean_schema(schema)
+        properties = schema.get("properties", {})
+        required = schema.get("required", [])
+        definitions = schema.get("$defs")
+        return properties, required, definitions
+
+    # TypedDict
+    if _is_typeddict(params):
+        try:
+            ta = TypeAdapter(params)
+            schema = _clean_schema(ta.json_schema())
+            properties = schema.get("properties", {})
+            required = schema.get("required", [])
+            definitions = schema.get("$defs")
+            return properties, required, definitions
+        except Exception:
+            # Fall back to manual extraction
+            hints = get_type_hints(params)
+            properties = {}
+            required = []
+            for field_name, field_type in hints.items():
+                properties[field_name] = _python_type_to_json_schema(field_type)
+                required.append(field_name)
+            return properties, required, None
+
+    # Must be a dict at this point
+    if not isinstance(params, dict):
+        raise TypeError(
+            f"parameters must be a dict, Pydantic model, or TypedDict, "
+            f"got {type(params).__name__}"
+        )
+
+    # Check if it's already a JSON schema (has "type" keys in values)
+    # vs a simple {name: type} mapping
+    if params and all(
+        isinstance(v, dict) and "type" in v for v in params.values() if v is not None
+    ):
+        # Already JSON schema format - extract required from presence of "optional" key
+        required = [
+            name for name, schema in params.items() if not schema.get("optional", False)
+        ]
+        # Remove "optional" keys as they're not valid JSON schema
+        cleaned = {}
+        for name, schema in params.items():
+            cleaned[name] = {k: v for k, v in schema.items() if k != "optional"}
+        return cleaned, required, None
+
+    # Simple {name: type} or {name: (type, extras)} mapping
+    properties = {}
+    required = []
+
+    for param_name, param_spec in params.items():
+        # Tuple of (type, extras)
+        if isinstance(param_spec, tuple) and len(param_spec) == 2:
+            param_type, extras = param_spec
+            if isinstance(extras, dict):
+                schema = _schema_from_type(param_type)
+                schema.update(extras)
+                # Remove "optional" key as it's not valid JSON schema
+                is_optional = schema.pop("optional", False)
+                properties[param_name] = schema
+                if not is_optional:
+                    required.append(param_name)
+                continue
+
+        # Python type (int, str, list[str], etc.)
+        if isinstance(param_spec, type) or get_origin(param_spec) is not None:
+            properties[param_name] = _schema_from_type(param_spec)
+            required.append(param_name)
+            continue
+
+        # Already a JSON schema dict
+        if isinstance(param_spec, dict):
+            schema = param_spec.copy()
+            is_optional = schema.pop("optional", False)
+            properties[param_name] = schema
+            if not is_optional:
+                required.append(param_name)
+            continue
+
+        # Unknown - try to convert
+        properties[param_name] = _schema_from_type(param_spec)
+        required.append(param_name)
+
+    return properties, required, None
+
+
+def _python_type_to_json_schema(python_type: Any) -> dict[str, Any]:
     """
     Convert Python type annotations to JSON Schema.
     Handles: primitives, Optional, Literal, list[T], dict[str, T], Union.
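Taken together, these new helpers replace the old hand-rolled `_python_type_to_json_schema_enhanced` mapper with Pydantic's `TypeAdapter`, keeping the legacy mapper only as a fallback. A minimal sketch of the `Annotated[T, "string"]` shorthand that `_get_cached_typeadapter` rewrites into `Field(description=...)`; the function and exact schema shape are illustrative, assuming Pydantic v2 output:

```python
from typing import Annotated
from lm_deluge.tool import Tool

def weather(
    city: Annotated[str, "City name, e.g. 'Paris'"],  # bare string becomes a description
    units: str = "metric",
) -> dict:
    """Look up current weather."""
    ...

tool = Tool.from_function(weather)
# After _clean_schema strips Pydantic's "title" noise, parameters should look
# roughly like:
#   {"city": {"type": "string", "description": "City name, e.g. 'Paris'"},
#    "units": {"type": "string", "default": "metric"}}
```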
@@ -42,7 +364,7 @@ def _python_type_to_json_schema_enhanced(python_type: Any) -> dict[str, Any]:
     # Handle list[T]
     if origin is list:
         if args:
-            items_schema = _python_type_to_json_schema_enhanced(args[0])
+            items_schema = _python_type_to_json_schema(args[0])
             return {"type": "array", "items": items_schema}
         return {"type": "array"}
 
@@ -50,7 +372,7 @@ def _python_type_to_json_schema_enhanced(python_type: Any) -> dict[str, Any]:
     if origin is dict:
         if len(args) >= 2:
             # For dict[str, T], we can set additionalProperties
-            value_schema = _python_type_to_json_schema_enhanced(args[1])
+            value_schema = _python_type_to_json_schema(args[1])
             return {"type": "object", "additionalProperties": value_schema}
         return {"type": "object"}
 
@@ -72,145 +394,6 @@ def _python_type_to_json_schema_enhanced(python_type: Any) -> dict[str, Any]:
     return {"type": "string"}
 
 
-class ToolParams:
-    """
-    Helper class for constructing tool parameters more easily.
-
-    Usage:
-        # Simple constructor with Python types
-        params = ToolParams({"city": str, "age": int})
-
-        # With extras (description, enum, etc)
-        params = ToolParams({
-            "operation": (str, {"enum": ["add", "sub"], "description": "Math operation"}),
-            "value": (int, {"description": "The value"})
-        })
-
-        # From Pydantic model
-        params = ToolParams.from_pydantic(MyModel)
-
-        # From TypedDict
-        params = ToolParams.from_typed_dict(MyTypedDict)
-
-        # From existing JSON Schema
-        params = ToolParams.from_json_schema(schema_dict, required=["field1"])
-    """
-
-    def __init__(self, spec: dict[str, Any]):
-        """
-        Create ToolParams from a dict mapping parameter names to types or (type, extras) tuples.
-
-        Args:
-            spec: Dict where values can be:
-                - A Python type (str, int, list[str], etc.)
-                - A tuple of (type, extras_dict) for additional JSON Schema properties
-                - An already-formed JSON Schema dict (passed through as-is)
-        """
-        self.parameters: dict[str, Any] = {}
-        self.required: list[str] = []
-
-        for param_name, param_spec in spec.items():
-            # If it's a tuple, extract (type, extras)
-            if isinstance(param_spec, tuple):
-                param_type, extras = param_spec
-                schema = _python_type_to_json_schema_enhanced(param_type)
-                schema.update(extras)
-                self.parameters[param_name] = schema
-                # Mark as required unless explicitly marked as optional
-                if extras.get("optional") is not True:
-                    self.required.append(param_name)
-            # If it's already a dict with "type" key, use as-is
-            elif isinstance(param_spec, dict) and "type" in param_spec:
-                self.parameters[param_name] = param_spec
-                # Assume required unless marked optional
-                if param_spec.get("optional") is not True:
-                    self.required.append(param_name)
-            # Otherwise treat as a Python type
-            else:
-                self.parameters[param_name] = _python_type_to_json_schema_enhanced(
-                    param_spec
-                )
-                self.required.append(param_name)
-
-    @classmethod
-    def from_pydantic(cls, model: Type[BaseModel]) -> "ToolParams":
-        """
-        Create ToolParams from a Pydantic model.
-
-        Args:
-            model: A Pydantic BaseModel class
-        """
-        # Get the JSON schema from Pydantic
-        schema = model.model_json_schema()
-        properties = schema.get("properties", {})
-        required = schema.get("required", [])
-
-        return cls.from_json_schema(properties, required)
-
-    @classmethod
-    def from_typed_dict(cls, typed_dict: Type) -> "ToolParams":
-        """
-        Create ToolParams from a TypedDict.
-
-        Args:
-            typed_dict: A TypedDict class
-        """
-        hints = get_type_hints(typed_dict)
-
-        # TypedDict doesn't have a built-in way to mark optional fields,
-        # but we can check for Optional in the type hints
-        params = {}
-        required = []
-
-        for field_name, field_type in hints.items():
-            # Check if it's Optional (Union with None)
-            origin = get_origin(field_type)
-            # args = get_args(field_type)
-
-            is_optional = False
-            actual_type = field_type
-
-            # Check for Union types (including Optional[T] which is Union[T, None])
-            if origin is type(None):
-                is_optional = True
-                actual_type = type(None)
-
-            # For now, treat all TypedDict fields as required unless they're explicitly Optional
-            schema = _python_type_to_json_schema_enhanced(actual_type)
-            params[field_name] = schema
-
-            if not is_optional:
-                required.append(field_name)
-
-        instance = cls.__new__(cls)
-        instance.parameters = params
-        instance.required = required
-        return instance
-
-    @classmethod
-    def from_json_schema(
-        cls, properties: dict[str, Any], required: list[str] | None = None
-    ) -> "ToolParams":
-        """
-        Create ToolParams from an existing JSON Schema properties dict.
-
-        Args:
-            properties: The "properties" section of a JSON Schema
-            required: List of required field names
-        """
-        instance = cls.__new__(cls)
-        instance.parameters = properties
-        instance.required = required or []
-        return instance
-
-    def to_dict(self) -> dict[str, Any]:
-        """
-        Convert to a dict with 'parameters' and 'required' keys.
-        Useful for unpacking into Tool constructor.
-        """
-        return {"parameters": self.parameters, "required": self.required}
-
-
 async def _load_all_mcp_tools(client: Client) -> list["Tool"]:
     metas: list[MCPTool] = await client.list_tools()
 
@@ -259,13 +442,44 @@ async def _load_all_mcp_tools(client: Client) -> list["Tool"]:
 class Tool(BaseModel):
     """
     Provider‑agnostic tool definition with no extra nesting.
+
+    The `parameters` argument accepts multiple formats:
+    - dict with JSON schema: {"query": {"type": "string"}}
+    - dict with Python types: {"query": str, "limit": int}
+    - dict with (type, extras) tuples: {"query": (str, {"description": "..."})}
+    - Pydantic BaseModel class
+    - TypedDict class
+
+    Examples:
+        # From JSON schema (traditional)
+        Tool(name="search", parameters={"query": {"type": "string"}}, ...)
+
+        # From Python types (simple)
+        Tool(name="search", parameters={"query": str, "limit": int}, ...)
+
+        # From Pydantic model
+        class SearchParams(BaseModel):
+            query: str
+            limit: int = 10
+        Tool(name="search", parameters=SearchParams, ...)
+
+        # From TypedDict
+        class SearchParams(TypedDict):
+            query: str
+            limit: NotRequired[int]
+        Tool(name="search", parameters=SearchParams, ...)
+
+        # From function (recommended for most cases)
+        Tool.from_function(my_search_function)
     """
 
+    model_config = {"arbitrary_types_allowed": True}
+
     name: str
     description: str | None = None
     parameters: dict[str, Any] | None = None
     required: list[str] = Field(default_factory=list)
-    additionalProperties: bool | None = None  # only
+    additionalProperties: bool | None = None
     # if desired, can provide a callable to run the tool
     run: Callable | None = None
     # for built-in tools that don't require schema
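The docstring above is backed by `__init__` normalization (the following hunk). A quick sketch of what `_normalize_parameters` produces for the Python-type and tuple forms; exact keys assume Pydantic v2's `TypeAdapter.json_schema()`:

```python
from lm_deluge.tool import Tool

tool = Tool(
    name="search",
    description="Search the index.",
    parameters={
        "query": str,  # plain Python type -> required
        "limit": (int, {"description": "Max results", "optional": True}),
    },
)
# Expected (roughly):
#   tool.parameters == {"query": {"type": "string"},
#                       "limit": {"type": "integer", "description": "Max results"}}
#   tool.required == ["query"]  # "optional": True keeps "limit" out of required
```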
@@ -274,6 +488,25 @@ class Tool(BaseModel):
     built_in_args: dict[str, Any] = Field(default_factory=dict)
     # JSON Schema definitions (for $ref support)
     definitions: dict[str, Any] | None = None
+    # Output schema (extracted from return type annotation)
+    output_schema: dict[str, Any] | None = None
+    # TypeAdapter for output validation (not serialized, stored as private attr)
+    _output_type_adapter: TypeAdapter | None = None
+
+    def __init__(self, **data):
+        # Normalize parameters before passing to Pydantic
+        raw_params = data.get("parameters")
+        if raw_params is not None:
+            properties, required_fields, definitions = _normalize_parameters(raw_params)
+            data["parameters"] = properties
+            # Only set required if not explicitly provided (check for key presence, not truthiness)
+            if "required" not in data:
+                data["required"] = required_fields
+            # Only set definitions if not explicitly provided and we have new ones
+            if definitions and "definitions" not in data:
+                data["definitions"] = definitions
+
+        super().__init__(**data)
 
     @field_validator("name")
     @classmethod
@@ -285,28 +518,41 @@ class Tool(BaseModel):
             )
         return v
 
-    @field_validator("parameters", mode="before")
-    @classmethod
-    def validate_parameters(cls, v: Any) -> dict[str, Any] | None:
-        """Accept ToolParams objects and convert to dict for backwards compatibility."""
-        if isinstance(v, ToolParams):
-            return v.parameters
-        return v
-
-    def model_post_init(self, __context: Any) -> None:
-        """
-        After validation, if parameters came from ToolParams, also update required list.
-        This is called by Pydantic after __init__.
-        """
-        # This is a bit tricky - we need to capture the required list from ToolParams
-        # Since Pydantic has already converted it in the validator, we can't access it here
-        # Instead, we'll handle this differently in the convenience constructors
-        pass
-
     def _is_async(self) -> bool:
         return inspect.iscoroutinefunction(self.run)
 
-    def call(self, **kwargs) -> str | list[ToolResultPart]:
+    def _validate_output(self, result: Any) -> Any:
+        """Validate output against output_schema if TypeAdapter is available."""
+        if self._output_type_adapter is None:
+            raise ValueError(
+                "Cannot validate output: no output type adapter available. "
+                "Make sure the tool was created with from_function() and has a return type annotation."
+            )
+        # This will raise ValidationError if result doesn't match the schema
+        return self._output_type_adapter.validate_python(result)
+
+    def call(
+        self, *, validate_output: bool = False, **kwargs
+    ) -> str | list[ToolResultPart]:
+        """
+        Call the tool with the given arguments.
+
+        Args:
+            validate_output: If True, validate the return value against the
+                output schema. Raises ValidationError if validation fails.
+                Requires the tool to have been created with from_function()
+                and have a return type annotation.
+            **kwargs: Arguments to pass to the tool function.
+
+        Returns:
+            The result of the tool function.
+
+        Raises:
+            ValueError: If no run function is provided or validation is requested
+                but no output type adapter is available.
+            pydantic.ValidationError: If validate_output=True and the result
+                doesn't match the output schema.
+        """
         if self.run is None:
             raise ValueError("No run function provided")
 
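A short sketch of the new opt-in output validation added to `call()`/`acall()`; the example function is hypothetical, and the `ValidationError` comes from Pydantic:

```python
from pydantic import ValidationError
from lm_deluge.tool import Tool

def count_words(text: str) -> int:
    return "oops"  # deliberately wrong return type

tool = Tool.from_function(count_words)
tool.call(text="hello world")  # returns "oops"; no check by default
try:
    tool.call(text="hello world", validate_output=True)
except ValidationError as e:
    print(e)  # "oops" fails to validate as int
```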
@@ -317,32 +563,96 @@ class Tool(BaseModel):
                 assert loop
             except RuntimeError:
                 # no loop → safe to block
-                return asyncio.run(coro)
+                result = asyncio.run(coro)
             else:
                 # Loop is running → execute coroutine in a worker thread
                 def _runner():
                     return asyncio.run(coro)
 
                 with ThreadPoolExecutor(max_workers=1) as executor:
-                    return executor.submit(_runner).result()
+                    result = executor.submit(_runner).result()
         else:
             # plain function
-            return self.run(**kwargs)
+            result = self.run(**kwargs)
+
+        if validate_output:
+            self._validate_output(result)
 
-    async def acall(self, **kwargs) -> str | list[ToolResultPart]:
+        return result
+
+    async def acall(
+        self, *, validate_output: bool = False, **kwargs
+    ) -> str | list[ToolResultPart]:
+        """
+        Async version of call().
+
+        Args:
+            validate_output: If True, validate the return value against the
+                output schema. Raises ValidationError if validation fails.
+            **kwargs: Arguments to pass to the tool function.
+
+        Returns:
+            The result of the tool function.
+        """
         if self.run is None:
             raise ValueError("No run function provided")
 
         if self._is_async():
-            return await self.run(**kwargs)  # type: ignore[func-returns-value]
+            result = await self.run(**kwargs)  # type: ignore[func-returns-value]
         else:
             loop = asyncio.get_running_loop()
             assert self.run is not None, "can't run None"
-            return await loop.run_in_executor(None, lambda: self.run(**kwargs))  # type: ignore
+            result = await loop.run_in_executor(None, lambda: self.run(**kwargs))  # type: ignore
+
+        if validate_output:
+            self._validate_output(result)
+
+        return result
 
     @classmethod
-    def from_function(cls, func: Callable) -> "Tool":
-        """Create a Tool from a function using introspection."""
+    def from_function(
+        cls,
+        func: Callable,
+        *,
+        include_output_schema_in_description: bool = False,
+    ) -> "Tool":
+        """
+        Create a Tool from a function using introspection.
+
+        Uses Pydantic's TypeAdapter for robust schema generation, supporting:
+        - All Python types (primitives, generics, unions, Literal, etc.)
+        - Pydantic models and TypedDict as parameter types
+        - Annotated[T, Field(description="...")] for parameter descriptions
+        - Annotated[T, "description"] shorthand for descriptions
+        - Complex nested types with proper $defs/$ref handling
+        - Output schema extraction from return type annotation
+
+        Args:
+            func: The function to create a tool from.
+            include_output_schema_in_description: If True, append the return type
+                and any complex type definitions to the tool description. This can
+                help the model understand what the tool returns. Default is False.
+
+        Example:
+            def search(
+                query: Annotated[str, Field(description="Search query")],
+                limit: int = 10,
+                filters: dict[str, str] | None = None,
+            ) -> list[dict]:
+                '''Search the database.'''
+                ...
+
+            tool = Tool.from_function(search)
+            # tool.output_schema contains schema for list[dict]
+            # tool.call(query="test", validate_output=True) validates return value
+
+            # With output schema in description:
+            tool = Tool.from_function(search, include_output_schema_in_description=True)
+            # Description becomes:
+            # "Search the database.
+            #
+            # Returns: list[dict]"
+        """
         # Get function name
         name = func.__name__
 
@@ -350,38 +660,62 @@ class Tool(BaseModel):
         description = func.__doc__ or f"Call the {name} function"
         description = description.strip()
 
-        # Get function signature and type hints
-        sig = inspect.signature(func)
-        type_hints = get_type_hints(func)
-
-        # Build parameters and required list
-        parameters = {}
-        required = []
-
-        for param_name, param in sig.parameters.items():
-            # Skip *args and **kwargs
-            if param.kind in (param.VAR_POSITIONAL, param.VAR_KEYWORD):
-                continue
+        # Use TypeAdapter for robust schema generation
+        type_adapter = _get_cached_typeadapter(func)
+        schema = type_adapter.json_schema()
 
-            # Get type hint
-            param_type = type_hints.get(param_name, str)
+        # Clean up the schema (remove titles, additionalProperties: false)
+        schema = _clean_schema(schema)
 
-            # Convert Python types to JSON Schema types
-            json_type = _python_type_to_json_schema_enhanced(param_type)
+        # Extract parameters and required from the schema
+        parameters = schema.get("properties", {})
+        required = schema.get("required", [])
+        definitions = schema.get("$defs")
 
-            parameters[param_name] = json_type
+        # Extract output schema from return type annotation
+        output_schema = None
+        output_type_adapter = None
+        sig = inspect.signature(func)
+        return_type = sig.return_annotation
 
-            # Add to required if no default value
-            if param.default is param.empty:
-                required.append(param_name)
+        if return_type is not inspect.Parameter.empty:
+            try:
+                # Resolve string annotations if needed
+                if isinstance(return_type, str):
+                    hints = get_type_hints(func)
+                    return_type = hints.get("return", return_type)
+
+                # Create TypeAdapter for output validation
+                output_type_adapter = TypeAdapter(return_type)
+                output_schema = _clean_schema(output_type_adapter.json_schema())
+            except Exception:
+                # If we can't create a schema for the return type, that's fine
+                # (e.g., for non-serializable types like custom classes)
+                pass
+
+        # Optionally append output schema info to description
+        if (
+            include_output_schema_in_description
+            and return_type is not inspect.Parameter.empty
+        ):
+            output_info = _format_output_schema_for_description(
+                return_type, output_schema
+            )
+            if output_info:
+                description = f"{description}\n\n{output_info}"
 
-        return cls(
+        tool = cls(
             name=name,
             description=description,
             parameters=parameters,
             required=required,
+            definitions=definitions,
+            output_schema=output_schema,
             run=func,
         )
+        # Store the TypeAdapter for runtime validation (not serialized)
+        tool._output_type_adapter = output_type_adapter
+        return tool
 
     @classmethod
     async def from_mcp_config(
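The reworked `from_function` now also captures the return annotation. A small sketch of the output-schema side (schema shape assumes Pydantic v2; `SearchResult` is illustrative):

```python
from pydantic import BaseModel
from lm_deluge.tool import Tool

class SearchResult(BaseModel):
    url: str
    score: float

def search(query: str) -> list[SearchResult]:
    """Search the index."""
    ...

tool = Tool.from_function(search, include_output_schema_in_description=True)
# tool.output_schema should be roughly:
#   {"type": "array", "items": {"$ref": "#/$defs/SearchResult"}, "$defs": {...}}
# and the description gains "Returns: list[SearchResult]" plus the compact
# SearchResult definition via _format_output_schema_for_description.
```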
@@ -433,119 +767,6 @@ class Tool(BaseModel):
                 return t
         raise ValueError(f"Tool '{tool_name}' not found on that server")
 
-    @classmethod
-    def from_params(
-        cls,
-        name: str,
-        params: ToolParams,
-        *,
-        description: str | None = None,
-        run: Callable | None = None,
-        **kwargs,
-    ) -> "Tool":
-        """
-        Create a Tool from a ToolParams object.
-
-        Args:
-            name: Tool name
-            params: ToolParams object defining the parameter schema
-            description: Optional description
-            run: Optional callable to execute the tool
-            **kwargs: Additional Tool arguments
-
-        Example:
-            params = ToolParams({"city": str, "age": int})
-            tool = Tool.from_params("get_user", params, run=my_function)
-        """
-        return cls(
-            name=name,
-            description=description,
-            parameters=params.parameters,
-            required=params.required,
-            run=run,
-            **kwargs,
-        )
-
-    @classmethod
-    def from_pydantic(
-        cls,
-        name: str,
-        model: Type[BaseModel],
-        *,
-        description: str | None = None,
-        run: Callable | None = None,
-        **kwargs,
-    ) -> "Tool":
-        """
-        Create a Tool from a Pydantic model.
-
-        Args:
-            name: Tool name
-            model: Pydantic BaseModel class
-            description: Optional description (defaults to model docstring)
-            run: Optional callable to execute the tool
-            **kwargs: Additional Tool arguments
-
-        Example:
-            class UserQuery(BaseModel):
-                city: str
-                age: int
-
-            tool = Tool.from_pydantic("get_user", UserQuery, run=my_function)
-        """
-        params = ToolParams.from_pydantic(model)
-
-        # Use model docstring as default description if not provided
-        if description is None and model.__doc__:
-            description = model.__doc__.strip()
-
-        return cls(
-            name=name,
-            description=description,
-            parameters=params.parameters,
-            required=params.required,
-            run=run,
-            **kwargs,
-        )
-
-    @classmethod
-    def from_typed_dict(
-        cls,
-        name: str,
-        typed_dict: Type,
-        *,
-        description: str | None = None,
-        run: Callable | None = None,
-        **kwargs,
-    ) -> "Tool":
-        """
-        Create a Tool from a TypedDict.
-
-        Args:
-            name: Tool name
-            typed_dict: TypedDict class
-            description: Optional description
-            run: Optional callable to execute the tool
-            **kwargs: Additional Tool arguments
-
-        Example:
-            class UserQuery(TypedDict):
-                city: str
-                age: int
-
-            tool = Tool.from_typed_dict("get_user", UserQuery, run=my_function)
-        """
-        params = ToolParams.from_typed_dict(typed_dict)
-
-        return cls(
-            name=name,
-            description=description,
-            parameters=params.parameters,
-            required=params.required,
-            run=run,
-            **kwargs,
-        )
-
     @staticmethod
     def _tool_from_meta(meta: dict[str, Any], runner) -> "Tool":
         props = meta["inputSchema"].get("properties", {})
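With `ToolParams` and these classmethods gone, their use cases fold into the `Tool` constructor. A hedged before/after sketch of the migration:

```python
from pydantic import BaseModel
from lm_deluge.tool import Tool

class UserQuery(BaseModel):
    city: str
    age: int

def my_function(city: str, age: int) -> str:
    return f"{city}:{age}"

# 0.0.80:
#   tool = Tool.from_pydantic("get_user", UserQuery, run=my_function)
# 0.0.82: pass the model (or a TypedDict) straight to `parameters`:
tool = Tool(name="get_user", parameters=UserQuery, run=my_function)
```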
@@ -560,14 +781,6 @@ class Tool(BaseModel):
             run=runner,
         )
 
-    @staticmethod
-    def _python_type_to_json_schema(python_type) -> dict[str, Any]:
-        """
-        Convert Python type to JSON Schema type definition.
-        Now delegates to enhanced version for better type support.
-        """
-        return _python_type_to_json_schema_enhanced(python_type)
-
     def _is_strict_mode_compatible(self) -> bool:
         """
         Check if this tool's schema is compatible with OpenAI strict mode.
@@ -892,3 +1105,7 @@ class MCPServer(BaseModel):
         tools: list[Tool] = await Tool.from_mcp(self.name, url=self.url)
         self._tools = tools
         return tools
+
+
+# Note: prefab submodule is available via lm_deluge.tool.prefab
+# but not auto-imported here to avoid circular imports