xpander-sdk 1.60.4__py3-none-any.whl → 2.0.155__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (90)
  1. xpander_sdk/__init__.py +76 -7793
  2. xpander_sdk/consts/__init__.py +0 -0
  3. xpander_sdk/consts/api_routes.py +63 -0
  4. xpander_sdk/core/__init__.py +0 -0
  5. xpander_sdk/core/module_base.py +164 -0
  6. xpander_sdk/core/state.py +10 -0
  7. xpander_sdk/core/xpander_api_client.py +119 -0
  8. xpander_sdk/exceptions/__init__.py +0 -0
  9. xpander_sdk/exceptions/module_exception.py +45 -0
  10. xpander_sdk/models/__init__.py +0 -0
  11. xpander_sdk/models/activity.py +65 -0
  12. xpander_sdk/models/configuration.py +92 -0
  13. xpander_sdk/models/events.py +70 -0
  14. xpander_sdk/models/frameworks.py +64 -0
  15. xpander_sdk/models/shared.py +102 -0
  16. xpander_sdk/models/user.py +21 -0
  17. xpander_sdk/modules/__init__.py +0 -0
  18. xpander_sdk/modules/agents/__init__.py +0 -0
  19. xpander_sdk/modules/agents/agents_module.py +164 -0
  20. xpander_sdk/modules/agents/models/__init__.py +0 -0
  21. xpander_sdk/modules/agents/models/agent.py +477 -0
  22. xpander_sdk/modules/agents/models/agent_list.py +107 -0
  23. xpander_sdk/modules/agents/models/knowledge_bases.py +33 -0
  24. xpander_sdk/modules/agents/sub_modules/__init__.py +0 -0
  25. xpander_sdk/modules/agents/sub_modules/agent.py +953 -0
  26. xpander_sdk/modules/agents/utils/__init__.py +0 -0
  27. xpander_sdk/modules/agents/utils/generic.py +2 -0
  28. xpander_sdk/modules/backend/__init__.py +0 -0
  29. xpander_sdk/modules/backend/backend_module.py +425 -0
  30. xpander_sdk/modules/backend/frameworks/__init__.py +0 -0
  31. xpander_sdk/modules/backend/frameworks/agno.py +627 -0
  32. xpander_sdk/modules/backend/frameworks/dispatch.py +36 -0
  33. xpander_sdk/modules/backend/utils/__init__.py +0 -0
  34. xpander_sdk/modules/backend/utils/mcp_oauth.py +95 -0
  35. xpander_sdk/modules/events/__init__.py +0 -0
  36. xpander_sdk/modules/events/decorators/__init__.py +0 -0
  37. xpander_sdk/modules/events/decorators/on_boot.py +94 -0
  38. xpander_sdk/modules/events/decorators/on_shutdown.py +94 -0
  39. xpander_sdk/modules/events/decorators/on_task.py +203 -0
  40. xpander_sdk/modules/events/events_module.py +629 -0
  41. xpander_sdk/modules/events/models/__init__.py +0 -0
  42. xpander_sdk/modules/events/models/deployments.py +25 -0
  43. xpander_sdk/modules/events/models/events.py +57 -0
  44. xpander_sdk/modules/events/utils/__init__.py +0 -0
  45. xpander_sdk/modules/events/utils/generic.py +56 -0
  46. xpander_sdk/modules/events/utils/git_init.py +32 -0
  47. xpander_sdk/modules/knowledge_bases/__init__.py +0 -0
  48. xpander_sdk/modules/knowledge_bases/knowledge_bases_module.py +217 -0
  49. xpander_sdk/modules/knowledge_bases/models/__init__.py +0 -0
  50. xpander_sdk/modules/knowledge_bases/models/knowledge_bases.py +11 -0
  51. xpander_sdk/modules/knowledge_bases/sub_modules/__init__.py +0 -0
  52. xpander_sdk/modules/knowledge_bases/sub_modules/knowledge_base.py +107 -0
  53. xpander_sdk/modules/knowledge_bases/sub_modules/knowledge_base_document_item.py +40 -0
  54. xpander_sdk/modules/knowledge_bases/utils/__init__.py +0 -0
  55. xpander_sdk/modules/tasks/__init__.py +0 -0
  56. xpander_sdk/modules/tasks/models/__init__.py +0 -0
  57. xpander_sdk/modules/tasks/models/task.py +153 -0
  58. xpander_sdk/modules/tasks/models/tasks_list.py +107 -0
  59. xpander_sdk/modules/tasks/sub_modules/__init__.py +0 -0
  60. xpander_sdk/modules/tasks/sub_modules/task.py +887 -0
  61. xpander_sdk/modules/tasks/tasks_module.py +492 -0
  62. xpander_sdk/modules/tasks/utils/__init__.py +0 -0
  63. xpander_sdk/modules/tasks/utils/files.py +114 -0
  64. xpander_sdk/modules/tools_repository/__init__.py +0 -0
  65. xpander_sdk/modules/tools_repository/decorators/__init__.py +0 -0
  66. xpander_sdk/modules/tools_repository/decorators/register_tool.py +108 -0
  67. xpander_sdk/modules/tools_repository/models/__init__.py +0 -0
  68. xpander_sdk/modules/tools_repository/models/mcp.py +68 -0
  69. xpander_sdk/modules/tools_repository/models/tool_invocation_result.py +14 -0
  70. xpander_sdk/modules/tools_repository/sub_modules/__init__.py +0 -0
  71. xpander_sdk/modules/tools_repository/sub_modules/tool.py +578 -0
  72. xpander_sdk/modules/tools_repository/tools_repository_module.py +259 -0
  73. xpander_sdk/modules/tools_repository/utils/__init__.py +0 -0
  74. xpander_sdk/modules/tools_repository/utils/generic.py +57 -0
  75. xpander_sdk/modules/tools_repository/utils/local_tools.py +52 -0
  76. xpander_sdk/modules/tools_repository/utils/schemas.py +308 -0
  77. xpander_sdk/utils/__init__.py +0 -0
  78. xpander_sdk/utils/env.py +44 -0
  79. xpander_sdk/utils/event_loop.py +67 -0
  80. xpander_sdk/utils/tools.py +32 -0
  81. xpander_sdk-2.0.155.dist-info/METADATA +538 -0
  82. xpander_sdk-2.0.155.dist-info/RECORD +85 -0
  83. {xpander_sdk-1.60.4.dist-info → xpander_sdk-2.0.155.dist-info}/WHEEL +1 -1
  84. {xpander_sdk-1.60.4.dist-info → xpander_sdk-2.0.155.dist-info/licenses}/LICENSE +0 -1
  85. xpander_sdk/_jsii/__init__.py +0 -39
  86. xpander_sdk/_jsii/xpander-sdk@1.60.4.jsii.tgz +0 -0
  87. xpander_sdk/py.typed +0 -1
  88. xpander_sdk-1.60.4.dist-info/METADATA +0 -368
  89. xpander_sdk-1.60.4.dist-info/RECORD +0 -9
  90. {xpander_sdk-1.60.4.dist-info → xpander_sdk-2.0.155.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,259 @@
+ """
+ Tools Repository module for managing tools in the xpander.ai platform.
+
+ This module provides functionality to register, list, and manage tools within
+ the xpander.ai Backend-as-a-Service platform, supporting tool synchronization and
+ integration with AI agents.
+ """
+
+ from inspect import Parameter, Signature
+ from typing import Any, Callable, ClassVar, List, Optional, Type
+ from pydantic import BaseModel, computed_field
+ from xpander_sdk.consts.api_routes import APIRoute
+ from xpander_sdk.core.xpander_api_client import APIClient
+ from xpander_sdk.exceptions.module_exception import ModuleException
+ from xpander_sdk.models.configuration import Configuration
+ from xpander_sdk.models.shared import XPanderSharedModel
+ from xpander_sdk.modules.tools_repository.sub_modules.tool import Tool
+ from xpander_sdk.utils.event_loop import run_sync
+ import json
+
+ class ToolsRepository(XPanderSharedModel):
+     """
+     Repository for managing tools in xpander.ai.
+
+     This class provides methods for tool registration, discovery, and
+     management. It supports dealing with both local tools defined via decorators
+     and tools managed by the backend, ensuring integration consistency.
+
+     Attributes:
+         configuration (Optional[Configuration]): SDK configuration.
+         tools (List[Tool]): List of tools managed by the backend.
+         _local_tools (ClassVar[List[Tool]]): Registry of tools defined via decorators.
+
+     Methods:
+         register_tool: Register a local tool.
+         list: Return a list of all tools.
+         get_tool_by_id: Retrieve a tool by its ID.
+         should_sync_local_tools: Check if local tools need syncing.
+         get_local_tools_for_sync: Retrieve local tools that require syncing.
+         functions: Return normalized callable functions for each tool.
+
+     Example:
+         >>> repo = ToolsRepository()
+         >>> tools = repo.list
+         >>> specific_tool = repo.get_tool_by_id("tool-id")
+     """
+
+     configuration: Optional[Configuration] = Configuration()
+
+     # Mutable list that can be set/overwritten by backend
+     tools: List[Tool] = []
+
+     agent_graph: Optional[Any] = None
+     is_async: Optional[bool] = True
+
+     # Immutable registry for tools defined via decorator
+     _local_tools: ClassVar[List[Tool]] = []
+
+     @classmethod
+     def register_tool(cls, tool: Tool):
+         """
+         Register a new local tool.
+
+         Args:
+             tool (Tool): The tool to register.
+         """
+         cls._local_tools.append(tool)
+
+     @computed_field
+     @property
+     def list(self) -> List[Tool]:
+         """
+         Return a list of all available tools.
+
+         Merges both backend-managed tools and locally registered tools,
+         ensuring no duplicate IDs. Sets each tool's configuration for
+         further communication.
+
+         Returns:
+             List[Tool]: A list of all available tools.
+         """
+         # Merge _local_tools and _tools, ensuring no duplicates by id
+         all_tools = {tool.id: tool for tool in self.tools}
+         for local_tool in self._local_tools:
+             all_tools.setdefault(local_tool.id, local_tool)
+
+         tools: List[Tool] = list(all_tools.values())
+
+         for tool in tools:
+             tool.set_configuration(configuration=self.configuration)
+             if self.agent_graph:
+                 tool.set_schema_overrides(agent_graph=self.agent_graph)
+
+         return tools
+
+     def get_tool_by_id(self, tool_id: str):
+         """
+         Retrieve a tool by its unique identifier.
+
+         Args:
+             tool_id (str): The ID of the tool to retrieve.
+
+         Returns:
+             Tool: The tool corresponding to the given ID.
+         """
+         return next((tool for tool in self.list if tool.id == tool_id), None)
+
+     def get_tool_by_name(self, tool_name: str):
+         """
+         Retrieve a tool by its name.
+
+         Args:
+             tool_name (str): The name of the tool to retrieve.
+
+         Returns:
+             Tool: The tool corresponding to the given name.
+         """
+         return next((tool for tool in self.list if tool.name == tool_name), None)
+
+     def should_sync_local_tools(self):
+         """
+         Determine if local tools need to be synchronized with the backend.
+
+         Checks whether any local tool is marked for graph addition and
+         has not been synced yet.
+
+         Returns:
+             bool: True if any local tools need syncing, False otherwise.
+         """
+         return any(tool.is_local and tool.should_add_to_graph for tool in self.list)
+
+     def get_local_tools_for_sync(self):
+         """
+         Retrieve local tools that require synchronization with the backend.
+
+         Returns:
+             List[Tool]: List of local tools marked for graph addition that are not yet synced.
+         """
+         return [
+             tool
+             for tool in self.list
+             if tool.is_local and tool.should_add_to_graph and not tool.is_synced
+         ]
+
+     @computed_field
+     @property
+     def functions(self) -> List[Callable[..., Any]]:
+         """
+         Get a list of normalized callable functions for each registered tool.
+
+         Each function is designed to accept a single payload matching the
+         tool's expected schema, allowing for direct execution with
+         schema-validated data.
+
+         Returns:
+             List[Callable[..., Any]]: List of callable functions corresponding to tools.
+         """
+         fn_list = []
+
+         for tool in self.list:
+
+             # add json schema to the model doc
+             tool.schema.__doc__ = "Pay attention to the schema, dont miss. " + json.dumps(tool.schema.model_json_schema(mode="serialization"))
+
+             schema_cls: Type[BaseModel] = tool.schema
+
+             # Create closure to capture tool and schema_cls
+             def make_tool_function(tool_ref, schema_ref, is_async: bool = False):
+                 """
+                 Factory that builds a normalized tool function.
+                 - If is_async=True, returns an async function (awaitable).
+                 - If is_async=False, returns a sync function (blocking, calls run_sync).
+                 """
+
+                 async def _execute(payload_dict: dict) -> Any:
+                     return await tool_ref.ainvoke(
+                         agent_id=self.configuration.state.agent.id,
+                         agent_version=self.configuration.state.agent.version,
+                         payload=payload_dict,
+                         configuration=self.configuration,
+                         task_id=(
+                             self.configuration.state.task.id
+                             if self.configuration.state.task
+                             else None
+                         ),
+                     )
+
+                 if is_async:
+
+                     async def tool_function(payload: schema_ref) -> Any:
+                         """
+                         Normalized async tool function that accepts a single Pydantic model payload.
+                         """
+                         payload_dict = payload.model_dump(exclude_none=True)
+                         return await _execute(payload_dict)
+
+                 else:
+
+                     def tool_function(payload: schema_ref) -> Any:
+                         """
+                         Normalized sync tool function that accepts a single Pydantic model payload.
+                         """
+                         if isinstance(payload, dict):
+                             payload_dict = payload
+                         else:
+                             payload_dict = payload.model_dump(exclude_none=True)
+                         return run_sync(_execute(payload_dict))
+
+                 # --- Metadata ---
+                 tool_function.__name__ = tool_ref.id
+                 tool_function.__doc__ = tool_ref.description or tool_ref.name
+
+                 # --- Signature ---
+                 payload_param = Parameter(
+                     name="payload",
+                     kind=Parameter.POSITIONAL_OR_KEYWORD,
+                     annotation=schema_ref,
+                 )
+                 tool_function.__signature__ = Signature(
+                     [payload_param],
+                     return_annotation=Any,
+                 )
+
+                 # --- Annotations (for libraries that read __annotations__) ---
+                 ann = getattr(tool_function, "__annotations__", {})
+                 ann["payload"] = schema_ref
+                 ann["return"] = Any
+                 tool_function.__annotations__ = ann
+
+                 return tool_function
+
+             fn = make_tool_function(tool, schema_cls, self.is_async)
+             fn_list.append(fn)
+
+         return fn_list
+
+     async def aload_tool_by_id(self, tool_id: str):
+         try:
+             connector_id, operation_id = tool_id.split("_")
+             client = APIClient(configuration=self.configuration)
+             tool = await client.make_request(
+                 path=APIRoute.GetOrInvokeToolById.format(tool_id=tool_id)
+             )
+             self.tools = [
+                 Tool(
+                     configuration=self.configuration,
+                     **tool,
+                     method="POST",
+                     path="tool",
+                     is_standalone=True,
+                     connector_id=connector_id,
+                     operation_id=operation_id,
+                 )
+             ]
+         except Exception as e:
+             raise ModuleException(500, f"Failed to load tool by id - {str(e)}")
+
+     def load_tool_by_id(self, tool_id: str):
+         return run_sync(self.aload_tool_by_id(tool_id=tool_id))
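The ToolsRepository above merges backend-managed tools with decorator-registered local tools and exposes each one as a single-argument callable whose signature is rewritten to the tool's generated Pydantic schema. A minimal usage sketch follows; the import path is inferred from the file list (the +259 line count of tools_repository_module.py matches this hunk), and it assumes a default Configuration can be resolved from the environment.

import inspect
# Import path inferred from the file list above; treat it as an assumption.
from xpander_sdk.modules.tools_repository.tools_repository_module import ToolsRepository

# is_async=False asks the repository for sync wrappers that block via run_sync().
repo = ToolsRepository(is_async=False)

print(repo.should_sync_local_tools())  # False until a local tool is marked for graph addition

for fn in repo.functions:  # empty unless tools were loaded from the backend or registered locally
    sig = inspect.signature(fn)
    # Each wrapper exposes exactly one `payload` parameter annotated with the
    # tool's schema, so framework adapters can introspect it.
    print(fn.__name__, sig.parameters["payload"].annotation)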
File without changes
@@ -0,0 +1,57 @@
+ import re
+ from typing import Any, List
+
+
+ def deep_merge(a: dict, b: dict) -> dict:
+     result = a.copy()
+     for key, b_value in b.items():
+         if key in result:
+             a_value = result[key]
+             if isinstance(a_value, dict) and isinstance(b_value, dict):
+                 result[key] = deep_merge(a_value, b_value)
+             else:
+                 result[key] = b_value
+         else:
+             result[key] = b_value
+     return result
+
+
+ def json_type_to_python(json_type: str, prop_schema: dict = None):
+     # Handle anyOf schemas (union types)
+     if prop_schema and "anyOf" in prop_schema:
+         # Extract non-null types from anyOf
+         non_null_types = []
+         for any_of_item in prop_schema["anyOf"]:
+             item_type = any_of_item.get("type")
+             if item_type and item_type != "null":
+                 non_null_types.append(json_type_to_python(item_type, any_of_item))
+
+         # Return the first non-null type (this will be wrapped in Optional later)
+         if non_null_types:
+             return non_null_types[0]
+         else:
+             return Any
+
+     # Extend to support arrays of objects, etc.
+     if json_type == "array" and prop_schema:
+         items = prop_schema.get("items", {})
+         item_type = json_type_to_python(items.get("type"), items)
+         return List[item_type]
+
+     return {
+         "string": str,
+         "integer": int,
+         "number": float,
+         "boolean": bool,
+         "object": dict,
+         "array": list,
+         None: Any
+     }.get(json_type, Any)
+
+
+ def pascal_case(name: str) -> str:
+     """
+     Converts a string to PascalCase.
+     Example: 'my_tool-name' -> 'MyToolName'
+     """
+     return "".join(word.capitalize() for word in re.split(r"[\s_\-]+", name) if word)
@@ -0,0 +1,52 @@
+ import inspect
+ import asyncio
+ from inspect import Parameter
+ from pydantic import BaseModel
+ from typing import Any, get_type_hints
+
+
+ async def invoke_local_fn(fn, payload: Any):
+     sig = inspect.signature(fn)
+     params = sig.parameters
+     is_coroutine = inspect.iscoroutinefunction(fn)
+     type_hints = get_type_hints(fn)
+
+     args = []
+     kwargs = {}
+
+     def build_param_value(param_name, param_type):
+         if isinstance(payload, dict):
+             if issubclass(param_type, BaseModel):
+                 return param_type(**payload.get(param_name, {}))
+             elif param_name in payload:
+                 return payload[param_name]
+             else:
+                 raise TypeError(f"Missing required argument: {param_name}")
+         else:
+             if issubclass(param_type, BaseModel) and isinstance(payload, dict):
+                 return param_type(**payload)
+             return payload  # fallback for scalar values
+
+     # Handle no parameters
+     if not params:
+         return await fn() if is_coroutine else await asyncio.to_thread(fn)
+
+     # Match each parameter
+     for name, param in params.items():
+         expected_type = type_hints.get(name, Any)
+         if param.kind in [Parameter.POSITIONAL_ONLY, Parameter.POSITIONAL_OR_KEYWORD]:
+             val = build_param_value(name, expected_type)
+             args.append(val)
+         elif param.kind == Parameter.KEYWORD_ONLY:
+             val = build_param_value(name, expected_type)
+             kwargs[name] = val
+         elif param.kind == Parameter.VAR_POSITIONAL:
+             if isinstance(payload, (list, tuple)):
+                 args.extend(payload)
+         elif param.kind == Parameter.VAR_KEYWORD:
+             if isinstance(payload, dict):
+                 kwargs.update(payload)
+
+     if is_coroutine:
+         return await fn(*args, **kwargs)
+     return await asyncio.to_thread(fn, *args, **kwargs)
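invoke_local_fn adapts a single payload to an arbitrary local function's signature, hydrating Pydantic-typed parameters from nested dicts and off-loading sync functions to a worker thread. A small sketch, assuming the import path matches the file list above (utils/local_tools.py):

import asyncio
from pydantic import BaseModel
# Import path inferred from the file list above.
from xpander_sdk.modules.tools_repository.utils.local_tools import invoke_local_fn

class Greeting(BaseModel):
    name: str

def greet(greeting: Greeting, excited: bool = False) -> str:
    # Plain sync function: invoke_local_fn runs it via asyncio.to_thread().
    return f"Hello {greeting.name}" + ("!" if excited else "")

# Payload keys are matched to parameter names; the BaseModel-typed parameter
# is constructed from its corresponding nested dict.
result = asyncio.run(invoke_local_fn(greet, {"greeting": {"name": "Ada"}, "excited": True}))
print(result)  # Hello Ada!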
@@ -0,0 +1,308 @@
+ from typing import Optional, Type
+ from copy import deepcopy
+
+ from pydantic import BaseModel, ConfigDict, Field, create_model
+
+ from xpander_sdk.modules.tools_repository.utils.generic import json_type_to_python, pascal_case
+
+ from pydantic import BaseModel, create_model, ConfigDict
+ from typing import Optional, Type, Dict, Any
+
+ def build_model_from_schema(
+     model_name: str,
+     schema: dict,
+     with_defaults: Optional[bool] = False
+ ) -> Type[BaseModel]:
+     fields = {}
+     properties = schema.get("properties", {})
+     required = set(schema.get("required", []))
+
+     # CRITICAL FIX: Add default={} to empty parameter containers
+     # This allows LLMs to omit them when they have no actual content
+     for param_name in ("body_params", "query_params", "path_params", "headers"):
+         if param_name in properties:
+             param_schema = properties[param_name]
+             if (param_schema.get("type") == "object" and
+                 "properties" in param_schema and
+                 len(param_schema.get("properties", {})) == 0 and
+                 "default" not in param_schema):
+                 param_schema["default"] = {}
+
+     FIELD_SPECS = {
+         "body_params": (
+             Optional[Dict[str, Any]],
+             Field(
+                 default={},
+                 description="Request body parameters (default: empty object)."
+             )
+         ),
+         "query_params": (
+             Optional[Dict[str, Any]],
+             Field(
+                 default={},
+                 description="Request query parameters (default: empty object)."
+             )
+         ),
+         "path_params": (
+             Optional[Dict[str, Any]],
+             Field(
+                 default={},
+                 description="Request path parameters (default: empty object)."
+             )
+         ),
+     }
+
+     # If with_defaults is True and schema is empty, set all three params
+     if with_defaults and not properties:
+         fields = FIELD_SPECS.copy()
+     else:
+         for prop_name, prop_schema in properties.items():
+             # Skip invalid field names starting with "_"
+             if prop_name.startswith("_"):
+                 continue
+             prop_type = prop_schema.get("type")
+             description = prop_schema.get("description", None)
+             default = prop_schema.get("default", None)
+
+             # Nested object support
+             # CRITICAL: Check if this is an empty parameter container
+             is_empty_param_container = False
+             if prop_type == "object" and "properties" in prop_schema:
+                 nested_props = prop_schema.get("properties", {})
+                 nested_required = prop_schema.get("required", [])
+                 # Empty if no properties or all properties are optional/empty
+                 is_empty_param_container = len(nested_props) == 0 or \
+                     (len(nested_required) == 0 and all(
+                         p.get("type") == "object" and len(p.get("properties", {})) == 0
+                         for p in nested_props.values()
+                     ))
+
+                 # For empty parameter containers, use Dict instead of nested model
+                 if is_empty_param_container and prop_name in ("body_params", "query_params", "path_params", "headers"):
+                     base_type = Dict[str, Any]
+                 else:
+                     nested_model_name = f"{model_name}{pascal_case(prop_name)}"
+                     base_type = build_model_from_schema(nested_model_name, prop_schema)
+             else:
+                 # Pass the full property schema to handle anyOf correctly
+                 base_type = json_type_to_python(prop_type, prop_schema)
+
+             # Field annotation and Field() construction
+             # IMPORTANT: For fields marked as required in the JSON schema, don't wrap in Optional[]
+             # Even if they might be nullable, the type annotation determines Pydantic's required array
+             # EXCEPTION: Empty parameter containers should always be Optional with default={}
+             if is_empty_param_container:
+                 annotation = Optional[base_type]
+             else:
+                 annotation = base_type if prop_name in required else Optional[base_type]
+
+             field_args = {}
+
+             # Enhance description to clarify optional vs required status
+             enhanced_description = description or f"Parameter: {prop_name}"
+             if is_empty_param_container:
+                 enhanced_description = f"[OPTIONAL - empty container] {enhanced_description} (default: empty object)"
+             elif prop_name in required:
+                 if default is not None:
+                     enhanced_description = f"[REQUIRED with default] {enhanced_description} (default: {default})"
+                 else:
+                     enhanced_description = f"[REQUIRED] {enhanced_description}"
+             else:
+                 if default is not None:
+                     enhanced_description = f"[OPTIONAL with default] {enhanced_description} (default: {default})"
+                 else:
+                     enhanced_description = f"[OPTIONAL] {enhanced_description} - can be omitted or set to null"
+
+             field_args["description"] = enhanced_description
+
+             # Set default or ... (required)
+             # The key insight: Pydantic includes a field in the 'required' array of model_json_schema()
+             # if and only if the field has Field(...) (no default) AND is not Optional[] in type annotation
+             if is_empty_param_container:
+                 # Empty containers always get default={}
+                 field_info = Field(default={}, **field_args)
+             elif prop_name in required:
+                 if default is not None:
+                     # Has a default but still required in schema - use the default
+                     field_info = Field(default, **field_args)
+                 else:
+                     # No default and required - use ellipsis
+                     field_info = Field(..., **field_args)
+             else:
+                 # Optional fields - always provide a default to keep them out of 'required' array
+                 if default is not None:
+                     field_info = Field(default, **field_args)
+                 else:
+                     # Optional with no explicit default - use None
+                     field_info = Field(default=None, **field_args)
+
+             fields[prop_name] = (annotation, field_info)
+
+
+     # Ensure the presence of all three params if with_defaults is True
+     if with_defaults:
+         for key, (annotation, field_info) in FIELD_SPECS.items():
+             if key not in fields:
+                 fields[key] = (annotation, field_info)
+
+     # After building fields, relax body/query/path if present and not already optional with a default
+     # CRITICAL FIX: Empty parameter containers (query_params, path_params, body_params) that are marked
+     # as required but have no actual properties should default to {} so LLMs can omit them
+     for param in ("body_params", "query_params", "path_params"):
+         if param in fields:
+             ann, fld = fields[param]
+             # Check if this field is required (has Ellipsis as default) or has no useful default
+             has_no_default = (getattr(fld, 'default', ...) is ... or
+                               getattr(fld, 'default', None) is None) and \
+                              getattr(fld, 'default_factory', None) is None
+
+             # Always make param containers Optional with default={} to allow LLMs to omit empty ones
+             if has_no_default or ann is dict or ann is Dict[str, Any] or \
+                (hasattr(ann, '__origin__') and ann.__origin__ in (dict, Dict)):
+                 desc = getattr(fld, 'description', None) or f"Request {param.replace('_', ' ')} (default: empty object)."
+                 fields[param] = (
+                     Optional[Dict[str, Any]],
+                     Field(default={}, description=desc)
+                 )
+
+     model_config = ConfigDict(
+         strict=False,  # Allow flexibility with types to handle AI agent inputs better
+         extra="allow",
+         title=model_name,
+         description="IMPORTANT: Required fields must be provided. Optional fields can be omitted entirely or set to null. All parameters with defaults will use those defaults if not provided. Check the 'required' array in the schema to see which fields are mandatory."
+     )
+     return create_model(
+         model_name,
+         __config__=model_config,
+         **fields
+     )
+
+ def schema_enforcement_block_and_descriptions(target_schema: dict, reference_schema: dict) -> dict:
+     updated_schema = deepcopy(target_schema)
+
+     def update_properties(target_props: dict, ref_props: dict):
+         to_delete = []
+         for key, ref_value in ref_props.items():
+             if key not in target_props:
+                 continue
+
+             # Remove if isBlocked or permanentValue present
+             if ref_value.get("isBlocked") is True or "permanentValue" in ref_value:
+                 to_delete.append(key)
+                 continue
+
+             target_field = target_props[key]
+
+             # Override description if available
+             if "description" in ref_value:
+                 target_field["description"] = ref_value["description"]
+
+             # Recursively update nested objects
+             if (
+                 ref_value.get("type") == "object"
+                 and "properties" in ref_value
+                 and target_field.get("type") == "object"
+                 and "properties" in target_field
+             ):
+                 update_properties(target_field["properties"], ref_value["properties"])
+
+         # Remove blocked/permanent fields
+         for key in to_delete:
+             del target_props[key]
+
+     def walk(target: dict, ref: dict):
+         if not isinstance(target, dict) or not isinstance(ref, dict):
+             return
+
+         if target.get("type") == "object" and "properties" in target and "properties" in ref:
+             update_properties(target["properties"], ref["properties"])
+             for key in list(target["properties"]):
+                 walk(target["properties"][key], ref["properties"].get(key, {}))
+
+     walk(updated_schema, reference_schema)
+     return updated_schema
+
+ def apply_permanent_values_to_payload(schema: dict, payload: dict | list) -> dict | list:
+     payload = deepcopy(payload)
+
+     def apply(schema_node, payload_node):
+         if not isinstance(schema_node, dict):
+             return
+
+         schema_type = schema_node.get("type")
+
+         if schema_type == "object" and "properties" in schema_node:
+             if not isinstance(payload_node, dict):
+                 return  # skip if payload_node is not an object
+
+             for key, sub_schema in schema_node["properties"].items():
+                 # If permanentValue is present, enforce it
+                 if "permanentValue" in sub_schema:
+                     payload_node[key] = sub_schema["permanentValue"]
+
+                 # Recurse
+                 if sub_schema.get("type") == "object":
+                     payload_node.setdefault(key, {})
+                     apply(sub_schema, payload_node[key])
+                 elif sub_schema.get("type") == "array" and sub_schema.get("items", {}).get("type") == "object":
+                     payload_node.setdefault(key, [{}])  # if empty, create one
+                     if isinstance(payload_node[key], list):
+                         for item in payload_node[key]:
+                             apply(sub_schema["items"], item)
+
+         elif schema_type == "array" and schema_node.get("items", {}).get("type") == "object":
+             if isinstance(payload_node, list):
+                 for item in payload_node:
+                     apply(schema_node["items"], item)
+
+     apply(schema, payload)
+     return payload
+
+
+ def enforce_schema_on_response(schema: dict, response: dict | list) -> dict | list:
+     response = deepcopy(response)
+
+     def apply(schema_node, response_node):
+         if not isinstance(schema_node, dict):
+             return
+
+         schema_type = schema_node.get("type")
+
+         if schema_type == "object" and "properties" in schema_node:
+             if not isinstance(response_node, dict):
+                 return
+
+             for key in list(response_node.keys()):
+                 sub_schema = schema_node["properties"].get(key)
+
+                 # If key not in schema, ignore
+                 if not sub_schema:
+                     continue
+
+                 # Remove if blocked
+                 if sub_schema.get("isBlocked"):
+                     del response_node[key]
+                     continue
+
+                 # Set permanentValue if defined
+                 if "permanentValue" in sub_schema:
+                     response_node[key] = sub_schema["permanentValue"]
+
+                 # Recurse if it's a nested object
+                 if sub_schema.get("type") == "object" and isinstance(response_node.get(key), dict):
+                     apply(sub_schema, response_node[key])
+
+                 # Recurse if it's an array of objects
+                 elif sub_schema.get("type") == "array" and isinstance(response_node.get(key), list):
+                     item_schema = sub_schema.get("items")
+                     if item_schema and item_schema.get("type") == "object":
+                         for item in response_node[key]:
+                             apply(item_schema, item)
+
+         elif schema_type == "array" and schema_node.get("items", {}).get("type") == "object":
+             if isinstance(response_node, list):
+                 for item in response_node:
+                     apply(schema_node["items"], item)
+
+     apply(schema, response)
+     return response
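build_model_from_schema turns a tool's JSON Schema into a Pydantic model, relaxing the body/query/path parameter containers to optional fields with an empty-object default so an LLM can omit them. A hedged sketch of that behavior, with the import path inferred from the file list (utils/schemas.py) and a hypothetical schema:

# Import path inferred from the file list above; the schema is illustrative only.
from xpander_sdk.modules.tools_repository.utils.schemas import build_model_from_schema

schema = {
    "type": "object",
    "properties": {
        "query_params": {"type": "object", "properties": {}},  # empty container
        "body_params": {
            "type": "object",
            "properties": {"city": {"type": "string", "description": "City name"}},
            "required": ["city"],
        },
    },
    "required": ["body_params", "query_params"],
}

Model = build_model_from_schema("GetWeather", schema)

# query_params is relaxed to an optional dict defaulting to {}, while the populated
# body_params container becomes a required nested model.
payload = Model(body_params={"city": "Paris"})
print(payload.model_dump(exclude_none=True))
print(Model.model_json_schema()["required"])  # expected: only body_params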
File without changes