mcp-eregistrations-bpa 0.8.5 (py3-none-any.whl)

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of mcp-eregistrations-bpa has been flagged as possibly problematic.

Files changed (66)
  1. mcp_eregistrations_bpa/__init__.py +121 -0
  2. mcp_eregistrations_bpa/__main__.py +6 -0
  3. mcp_eregistrations_bpa/arazzo/__init__.py +21 -0
  4. mcp_eregistrations_bpa/arazzo/expression.py +379 -0
  5. mcp_eregistrations_bpa/audit/__init__.py +56 -0
  6. mcp_eregistrations_bpa/audit/context.py +66 -0
  7. mcp_eregistrations_bpa/audit/logger.py +236 -0
  8. mcp_eregistrations_bpa/audit/models.py +131 -0
  9. mcp_eregistrations_bpa/auth/__init__.py +64 -0
  10. mcp_eregistrations_bpa/auth/callback.py +391 -0
  11. mcp_eregistrations_bpa/auth/cas.py +409 -0
  12. mcp_eregistrations_bpa/auth/oidc.py +252 -0
  13. mcp_eregistrations_bpa/auth/permissions.py +162 -0
  14. mcp_eregistrations_bpa/auth/token_manager.py +348 -0
  15. mcp_eregistrations_bpa/bpa_client/__init__.py +84 -0
  16. mcp_eregistrations_bpa/bpa_client/client.py +740 -0
  17. mcp_eregistrations_bpa/bpa_client/endpoints.py +193 -0
  18. mcp_eregistrations_bpa/bpa_client/errors.py +276 -0
  19. mcp_eregistrations_bpa/bpa_client/models.py +203 -0
  20. mcp_eregistrations_bpa/config.py +349 -0
  21. mcp_eregistrations_bpa/db/__init__.py +21 -0
  22. mcp_eregistrations_bpa/db/connection.py +64 -0
  23. mcp_eregistrations_bpa/db/migrations.py +168 -0
  24. mcp_eregistrations_bpa/exceptions.py +39 -0
  25. mcp_eregistrations_bpa/py.typed +0 -0
  26. mcp_eregistrations_bpa/rollback/__init__.py +19 -0
  27. mcp_eregistrations_bpa/rollback/manager.py +616 -0
  28. mcp_eregistrations_bpa/server.py +152 -0
  29. mcp_eregistrations_bpa/tools/__init__.py +372 -0
  30. mcp_eregistrations_bpa/tools/actions.py +155 -0
  31. mcp_eregistrations_bpa/tools/analysis.py +352 -0
  32. mcp_eregistrations_bpa/tools/audit.py +399 -0
  33. mcp_eregistrations_bpa/tools/behaviours.py +1042 -0
  34. mcp_eregistrations_bpa/tools/bots.py +627 -0
  35. mcp_eregistrations_bpa/tools/classifications.py +575 -0
  36. mcp_eregistrations_bpa/tools/costs.py +765 -0
  37. mcp_eregistrations_bpa/tools/debug_strategies.py +351 -0
  38. mcp_eregistrations_bpa/tools/debugger.py +1230 -0
  39. mcp_eregistrations_bpa/tools/determinants.py +2235 -0
  40. mcp_eregistrations_bpa/tools/document_requirements.py +670 -0
  41. mcp_eregistrations_bpa/tools/export.py +899 -0
  42. mcp_eregistrations_bpa/tools/fields.py +162 -0
  43. mcp_eregistrations_bpa/tools/form_errors.py +36 -0
  44. mcp_eregistrations_bpa/tools/formio_helpers.py +971 -0
  45. mcp_eregistrations_bpa/tools/forms.py +1269 -0
  46. mcp_eregistrations_bpa/tools/jsonlogic_builder.py +466 -0
  47. mcp_eregistrations_bpa/tools/large_response.py +163 -0
  48. mcp_eregistrations_bpa/tools/messages.py +523 -0
  49. mcp_eregistrations_bpa/tools/notifications.py +241 -0
  50. mcp_eregistrations_bpa/tools/registration_institutions.py +680 -0
  51. mcp_eregistrations_bpa/tools/registrations.py +897 -0
  52. mcp_eregistrations_bpa/tools/role_status.py +447 -0
  53. mcp_eregistrations_bpa/tools/role_units.py +400 -0
  54. mcp_eregistrations_bpa/tools/roles.py +1236 -0
  55. mcp_eregistrations_bpa/tools/rollback.py +335 -0
  56. mcp_eregistrations_bpa/tools/services.py +674 -0
  57. mcp_eregistrations_bpa/tools/workflows.py +2487 -0
  58. mcp_eregistrations_bpa/tools/yaml_transformer.py +991 -0
  59. mcp_eregistrations_bpa/workflows/__init__.py +28 -0
  60. mcp_eregistrations_bpa/workflows/loader.py +440 -0
  61. mcp_eregistrations_bpa/workflows/models.py +336 -0
  62. mcp_eregistrations_bpa-0.8.5.dist-info/METADATA +965 -0
  63. mcp_eregistrations_bpa-0.8.5.dist-info/RECORD +66 -0
  64. mcp_eregistrations_bpa-0.8.5.dist-info/WHEEL +4 -0
  65. mcp_eregistrations_bpa-0.8.5.dist-info/entry_points.txt +2 -0
  66. mcp_eregistrations_bpa-0.8.5.dist-info/licenses/LICENSE +86 -0
mcp_eregistrations_bpa/tools/jsonlogic_builder.py
@@ -0,0 +1,466 @@
+ """JSONLogic Builder Helper for BPA determinant conditions.
+
+ This module provides internal helper functions for building complex JSONLogic
+ expressions from nested AND/OR condition trees. It is NOT an MCP tool - it is
+ an internal utility used by effect_create and other determinant-related tools.
+
+ The output format matches BPA's jsonDeterminants field which contains a
+ stringified JSON array of JSONLogic expressions.
+
+ Example output format:
+     [{"and": [{"==": [{"var": "data.fieldKey"}, true]}, {"or": [...]}]}]
+
+ JSONLogic operators supported:
+ - and: All conditions must be true
+ - or: At least one condition must be true
+ - ==: Equality comparison
+ - !=: Inequality comparison
+ - >, <, >=, <=: Numeric comparisons
+ - var: Variable reference (e.g., data.fieldKey)
+
+ Usage:
+     from mcp_eregistrations_bpa.tools.jsonlogic_builder import (
+         build_jsonlogic,
+         Condition,
+         ConditionGroup,
+     )
+
+     # Simple condition
+     condition = Condition(
+         field_key="applicantName",
+         operator="==",
+         value="John",
+     )
+     jsonlogic = build_jsonlogic(condition)
+
+     # Nested AND/OR conditions
+     group = ConditionGroup(
+         logic="and",
+         conditions=[
+             Condition(field_key="age", operator=">=", value=18),
+             ConditionGroup(
+                 logic="or",
+                 conditions=[
+                     Condition(field_key="status", operator="==", value="approved"),
+                     Condition(field_key="status", operator="==", value="pending"),
+                 ],
+             ),
+         ],
+     )
+     jsonlogic = build_jsonlogic(group)
+ """
+
+ from __future__ import annotations
+
+ import json
+ from dataclasses import dataclass, field
+ from typing import Any, Literal, Union
+
+ __all__ = [
+     "Condition",
+     "ConditionGroup",
+     "DeterminantRef",
+     "build_jsonlogic",
+     "build_jsonlogic_string",
+     "build_from_dict",
+     "build_jsonlogic_from_dict",
+     "parse_jsonlogic_string",
+     "validate_condition_tree",
+     "JSONLogicError",
+     "LogicOperator",
+     "ComparisonOperator",
+     "DEFAULT_MAX_DEPTH",
+ ]
+
+
+ class JSONLogicError(Exception):
+     """Error raised when JSONLogic building or validation fails."""
+
+
+ # Default maximum nesting depth for condition trees
+ DEFAULT_MAX_DEPTH = 10
+
+
+ # Type aliases for clarity
+ LogicOperator = Literal["and", "or"]
+ ComparisonOperator = Literal["==", "!=", ">", "<", ">=", "<="]
+
+
+ @dataclass
+ class Condition:
+     """A single condition comparing a field value.
+
+     Attributes:
+         field_key: The form field key (e.g., "applicantName").
+             Will be prefixed with "data." for var reference.
+         operator: Comparison operator (==, !=, >, <, >=, <=).
+         value: The value to compare against (string, number, bool, or None).
+         use_key_suffix: If True, appends ".key" to field_key for catalog fields.
+             Default False.
+     """
+
+     field_key: str
+     operator: ComparisonOperator
+     value: Any
+     use_key_suffix: bool = False
+
+     def __post_init__(self) -> None:
+         """Validate condition after initialization."""
+         if not self.field_key or not self.field_key.strip():
+             raise JSONLogicError("Condition field_key is required and cannot be empty")
+         valid_operators = ("==", "!=", ">", "<", ">=", "<=")
+         if self.operator not in valid_operators:
+             raise JSONLogicError(
+                 f"Invalid operator '{self.operator}'. "
+                 f"Must be one of: {', '.join(valid_operators)}"
+             )
+
+     def to_jsonlogic(self) -> dict[str, Any]:
+         """Convert condition to JSONLogic expression.
+
+         Returns:
+             JSONLogic dict like {"==": [{"var": "data.fieldKey"}, value]}
+         """
+         var_path = f"data.{self.field_key}"
+         if self.use_key_suffix:
+             var_path += ".key"
+
+         return {self.operator: [{"var": var_path}, self.value]}
+
+
+ @dataclass
+ class DeterminantRef:
+     """Reference to an existing determinant by ID.
+
+     Use this when you want to include an existing determinant
+     in a condition tree. The actual condition logic will be
+     resolved from the determinant.
+
+     Attributes:
+         determinant_id: UUID of the existing determinant.
+         field_key: The form field key the determinant targets.
+             Required to build the var reference.
+         operator: The comparison operator from the determinant.
+         value: The comparison value from the determinant.
+         use_key_suffix: If True, appends ".key" for catalog fields.
+     """
+
+     determinant_id: str
+     field_key: str
+     operator: ComparisonOperator
+     value: Any
+     use_key_suffix: bool = False
+
+     def __post_init__(self) -> None:
+         """Validate determinant reference after initialization."""
+         if not self.determinant_id or not self.determinant_id.strip():
+             raise JSONLogicError(
+                 "DeterminantRef determinant_id is required and cannot be empty"
+             )
+         if not self.field_key or not self.field_key.strip():
+             raise JSONLogicError(
+                 "DeterminantRef field_key is required to build condition"
+             )
+         valid_operators = ("==", "!=", ">", "<", ">=", "<=")
+         if self.operator not in valid_operators:
+             raise JSONLogicError(
+                 f"Invalid operator '{self.operator}' in DeterminantRef. "
+                 f"Must be one of: {', '.join(valid_operators)}"
+             )
+
+     def to_jsonlogic(self) -> dict[str, Any]:
+         """Convert determinant reference to JSONLogic expression.
+
+         Returns:
+             JSONLogic dict like {"==": [{"var": "data.fieldKey"}, value]}
+         """
+         var_path = f"data.{self.field_key}"
+         if self.use_key_suffix:
+             var_path += ".key"
+
+         return {self.operator: [{"var": var_path}, self.value]}
+
+
+ # Type for items in a condition group
+ ConditionItem = Union[Condition, DeterminantRef, "ConditionGroup"]
+
+
+ @dataclass
+ class ConditionGroup:
+     """A group of conditions combined with AND/OR logic.
+
+     Supports nesting for complex expressions like:
+         AND(condition1, OR(condition2, condition3))
+
+     Attributes:
+         logic: The combining logic ("and" or "or").
+         conditions: List of conditions, determinant refs, or nested groups.
+     """
+
+     logic: LogicOperator
+     conditions: list[ConditionItem] = field(default_factory=list)
+
+     def __post_init__(self) -> None:
+         """Validate condition group after initialization."""
+         if self.logic not in ("and", "or"):
+             raise JSONLogicError(
+                 f"Invalid logic operator '{self.logic}'. Must be 'and' or 'or'"
+             )
+         if not self.conditions:
+             raise JSONLogicError(
+                 f"ConditionGroup with '{self.logic}' logic "
+                 "requires at least one condition"
+             )
+
+     def to_jsonlogic(self) -> dict[str, Any]:
+         """Convert condition group to JSONLogic expression.
+
+         Returns:
+             JSONLogic dict like {"and": [expr1, expr2, ...]}
+         """
+         expressions = []
+         for condition in self.conditions:
+             expressions.append(condition.to_jsonlogic())
+
+         return {self.logic: expressions}
+
+
+ def validate_condition_tree(
+     condition: ConditionItem,
+     max_depth: int = DEFAULT_MAX_DEPTH,
+     _current_depth: int = 0,
+ ) -> list[str]:
+     """Validate a condition tree for correctness.
+
+     Checks:
+     - Maximum nesting depth (prevents infinite recursion)
+     - All conditions have required fields
+     - All operators are valid
+
+     Args:
+         condition: The root condition or group to validate.
+         max_depth: Maximum allowed nesting depth. Default 10.
+         _current_depth: Internal counter for recursion depth.
+
+     Returns:
+         List of validation error messages. Empty list if valid.
+     """
+     errors: list[str] = []
+
+     if _current_depth > max_depth:
+         errors.append(
+             f"Condition tree exceeds maximum depth of {max_depth}. "
+             "Simplify the condition structure."
+         )
+         return errors
+
+     if isinstance(condition, Condition | DeterminantRef):
+         # Individual conditions are validated in __post_init__
+         try:
+             condition.to_jsonlogic()  # Test serialization
+         except Exception as e:
+             errors.append(f"Invalid condition: {e}")
+
+     elif isinstance(condition, ConditionGroup):
+         # Note: empty conditions already validated in __post_init__,
+         # but we check here for programmatically constructed groups
+         for i, sub_condition in enumerate(condition.conditions):
+             sub_errors = validate_condition_tree(
+                 sub_condition, max_depth, _current_depth + 1
+             )
+             for error in sub_errors:
+                 errors.append(f"conditions[{i}]: {error}")
+
+     else:
+         errors.append(
+             f"Unknown condition type: {type(condition).__name__}. "
+             "Expected Condition, DeterminantRef, or ConditionGroup."
+         )
+
+     return errors
+
+
+ def build_jsonlogic(condition: ConditionItem) -> list[dict[str, Any]]:
+     """Build JSONLogic expression from a condition tree.
+
+     The output is a list containing the JSONLogic expression,
+     matching BPA's expected format.
+
+     Args:
+         condition: A Condition, DeterminantRef, or ConditionGroup.
+
+     Returns:
+         List containing the JSONLogic expression dict.
+         Example: [{"and": [{"==": [{"var": "data.field"}, "value"]}, ...]}]
+
+     Raises:
+         JSONLogicError: If the condition tree is invalid.
+     """
+     # Validate first
+     errors = validate_condition_tree(condition)
+     if errors:
+         raise JSONLogicError(f"Invalid condition tree: {'; '.join(errors)}")
+
+     return [condition.to_jsonlogic()]
+
+
+ def build_jsonlogic_string(condition: ConditionItem) -> str:
+     """Build JSONLogic expression and serialize to string.
+
+     This produces the exact format expected by BPA's jsonDeterminants field.
+
+     Args:
+         condition: A Condition, DeterminantRef, or ConditionGroup.
+
+     Returns:
+         Stringified JSON array of JSONLogic expressions.
+         Example: '[{"and":[{"==":[{"var":"data.field"},"value"]}]}]'
+
+     Raises:
+         JSONLogicError: If the condition tree is invalid.
+     """
+     jsonlogic = build_jsonlogic(condition)
+     return json.dumps(jsonlogic, separators=(",", ":"))
+
+
+ def parse_jsonlogic_string(jsonlogic_str: str) -> list[dict[str, Any]]:
+     """Parse a JSONLogic string back to a Python structure.
+
+     Useful for inspecting or modifying existing JSONLogic expressions.
+
+     Args:
+         jsonlogic_str: Stringified JSONLogic from BPA.
+
+     Returns:
+         Parsed list of JSONLogic expression dicts.
+
+     Raises:
+         JSONLogicError: If parsing fails.
+     """
+     if not jsonlogic_str or not jsonlogic_str.strip():
+         return []
+
+     try:
+         parsed = json.loads(jsonlogic_str)
+         if not isinstance(parsed, list):
+             raise JSONLogicError(
+                 f"JSONLogic must be a list of expressions. Got {type(parsed).__name__}."
+             )
+         return parsed
+     except json.JSONDecodeError as e:
+         raise JSONLogicError(
+             f"Failed to parse JSONLogic string: {e}. Ensure the string is valid JSON."
+         )
+
+
+ def build_from_dict(
+     condition_dict: dict[str, Any],
+     _current_depth: int = 0,
+ ) -> ConditionItem:
+     """Build a condition tree from a dictionary structure.
+
+     This allows building conditions from JSON/dict input,
+     useful for API integrations.
+
+     Args:
+         condition_dict: Dict with structure like:
+             {"and": [{"determinant_id": "..."}, {"or": [...]}]}
+             or
+             {"field_key": "...", "operator": "==", "value": "..."}
+         _current_depth: Internal counter for recursion depth (do not set manually).
+
+     Returns:
+         Condition, DeterminantRef, or ConditionGroup.
+
+     Raises:
+         JSONLogicError: If the dict structure is invalid or exceeds max depth.
+
+     Example:
+         condition_dict = {
+             "and": [
+                 {
+                     "determinant_id": "det-1",
+                     "field_key": "f1",
+                     "operator": "==",
+                     "value": True,
+                 },
+                 {
+                     "or": [
+                         {"field_key": "status", "operator": "==", "value": "approved"},
+                         {"field_key": "status", "operator": "==", "value": "pending"}
+                     ]
+                 }
+             ]
+         }
+         condition = build_from_dict(condition_dict)
+     """
+     # Prevent infinite recursion from malformed input
+     if _current_depth > DEFAULT_MAX_DEPTH:
+         raise JSONLogicError(
+             f"Condition dict exceeds maximum nesting depth of {DEFAULT_MAX_DEPTH}. "
+             "Simplify the condition structure."
+         )
+
+     if not isinstance(condition_dict, dict):
+         raise JSONLogicError(f"Expected dict, got {type(condition_dict).__name__}")
+
+     # Check for logic operators (and/or)
+     if "and" in condition_dict:
+         items = condition_dict["and"]
+         if not isinstance(items, list):
+             raise JSONLogicError("'and' value must be a list of conditions")
+         return ConditionGroup(
+             logic="and",
+             conditions=[build_from_dict(item, _current_depth + 1) for item in items],
+         )
+
+     if "or" in condition_dict:
+         items = condition_dict["or"]
+         if not isinstance(items, list):
+             raise JSONLogicError("'or' value must be a list of conditions")
+         return ConditionGroup(
+             logic="or",
+             conditions=[build_from_dict(item, _current_depth + 1) for item in items],
+         )
+
+     # Check for determinant reference
+     if "determinant_id" in condition_dict:
+         return DeterminantRef(
+             determinant_id=condition_dict["determinant_id"],
+             field_key=condition_dict.get("field_key", ""),
+             operator=condition_dict.get("operator", "=="),
+             value=condition_dict.get("value"),
+             use_key_suffix=condition_dict.get("use_key_suffix", False),
+         )
+
+     # Check for simple condition
+     if "field_key" in condition_dict:
+         return Condition(
+             field_key=condition_dict["field_key"],
+             operator=condition_dict.get("operator", "=="),
+             value=condition_dict.get("value"),
+             use_key_suffix=condition_dict.get("use_key_suffix", False),
+         )
+
+     raise JSONLogicError(
+         "Invalid condition dict. Must contain 'and', 'or', "
+         "'determinant_id', or 'field_key'."
+     )
+
+
+ def build_jsonlogic_from_dict(condition_dict: dict[str, Any]) -> str:
+     """Convenience function to build JSONLogic string from dict.
+
+     Combines build_from_dict and build_jsonlogic_string.
+
+     Args:
+         condition_dict: Dict structure of conditions.
+
+     Returns:
+         Stringified JSONLogic for BPA.
+
+     Raises:
+         JSONLogicError: If the structure is invalid.
+     """
+     condition = build_from_dict(condition_dict)
+     return build_jsonlogic_string(condition)
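
Usage note: a minimal sketch of how the dict-based helpers above can be exercised; the input mirrors the example in the build_from_dict docstring, and the printed string is the compact form the module produces for BPA's jsonDeterminants field.

    from mcp_eregistrations_bpa.tools.jsonlogic_builder import build_jsonlogic_from_dict

    condition_dict = {
        "and": [
            {"field_key": "age", "operator": ">=", "value": 18},
            {
                "or": [
                    {"field_key": "status", "operator": "==", "value": "approved"},
                    {"field_key": "status", "operator": "==", "value": "pending"},
                ]
            },
        ]
    }

    # Expected output:
    # [{"and":[{">=":[{"var":"data.age"},18]},{"or":[{"==":[{"var":"data.status"},"approved"]},{"==":[{"var":"data.status"},"pending"]}]}]}]
    print(build_jsonlogic_from_dict(condition_dict))
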
mcp_eregistrations_bpa/tools/large_response.py
@@ -0,0 +1,163 @@
+ """Centralized large response handling for MCP tools.
+
+ This module provides a decorator pattern for handling large tool responses
+ that might exceed Claude Code's token limits. When a response exceeds a
+ configurable threshold (default 100KB), it:
+
+ 1. Writes the full response to a JSON file
+ 2. Returns lightweight metadata with navigation hints
+
+ This catches large responses BEFORE Claude Code's limit, providing a
+ controlled "success" path with better navigation guidance.
+ """
+
+ from __future__ import annotations
+
+ import json
+ import os
+ import tempfile
+ from collections.abc import Callable
+ from datetime import datetime
+ from functools import wraps
+ from pathlib import Path
+ from typing import Any, TypeVar
+
+ __all__ = [
+     "large_response_handler",
+     "LARGE_RESPONSE_THRESHOLD_BYTES",
+     "RESPONSE_DIR",
+ ]
+
+ # Configurable threshold (default 100KB)
+ LARGE_RESPONSE_THRESHOLD_BYTES = int(
+     os.getenv("MCP_LARGE_RESPONSE_THRESHOLD_BYTES", 100 * 1024)
+ )
+
+ RESPONSE_DIR = Path(tempfile.gettempdir()) / "bpa-responses"
+
+ F = TypeVar("F", bound=Callable[..., Any])
+
+
+ def large_response_handler(
+     threshold_bytes: int | None = None,
+     navigation: dict[str, str] | None = None,
+ ) -> Callable[[F], F]:
+     """Decorator that handles large responses by writing to file.
+
+     When a tool's response exceeds the threshold, saves to a JSON file
+     and returns metadata with navigation hints for AI agents.
+
+     Args:
+         threshold_bytes: Size threshold in bytes (default: 100KB).
+         navigation: Dict of navigation hints (name -> jq/grep command).
+
+     Returns:
+         Decorated function that returns file_path for large responses.
+     """
+     effective_threshold = threshold_bytes or LARGE_RESPONSE_THRESHOLD_BYTES
+
+     def decorator(func: F) -> F:
+         @wraps(func)
+         async def wrapper(*args: Any, **kwargs: Any) -> dict[str, Any]:
+             result = await func(*args, **kwargs)
+
+             # Measure serialized size
+             json_str = json.dumps(result, default=str)
+             size_bytes = len(json_str.encode("utf-8"))
+
+             if size_bytes < effective_threshold:
+                 return dict(result)
+
+             # Save to file
+             file_path = _save_response_to_file(result, func.__name__)
+
+             return {
+                 "file_path": str(file_path),
+                 "size_kb": size_bytes // 1024,
+                 "record_count": _count_records(result),
+                 "schema": _infer_schema(result),
+                 "navigation": navigation or _default_navigation(),
+                 "message": (
+                     f"Large response ({size_bytes // 1024}KB) saved to file. "
+                     "Use Read tool with offset/limit, Grep, or jq to query."
+                 ),
+             }
+
+         return wrapper  # type: ignore[return-value]
+
+     return decorator
+
+
+ def _save_response_to_file(data: dict[str, Any], tool_name: str) -> Path:
+     """Save response data to a temp JSON file.
+
+     Args:
+         data: The response data to save.
+         tool_name: Name of the tool (used in filename).
+
+     Returns:
+         Path to the saved file.
+     """
+     RESPONSE_DIR.mkdir(exist_ok=True)
+
+     timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
+     file_path = RESPONSE_DIR / f"{tool_name}_{timestamp}.json"
+
+     file_path.write_text(json.dumps(data, indent=2, default=str))
+     return file_path
+
+
+ def _infer_schema(data: dict[str, Any], max_depth: int = 2) -> dict[str, str]:
+     """Infer a simple schema from the response data.
+
+     Args:
+         data: The response data to analyze.
+         max_depth: Maximum depth for schema inference (unused, reserved).
+
+     Returns:
+         Dict mapping top-level keys to type descriptions.
+     """
+     schema: dict[str, str] = {}
+     for key, value in data.items():
+         if isinstance(value, list):
+             if value and isinstance(value[0], dict):
+                 schema[key] = f"[{{...}}] ({len(value)} items)"
+             else:
+                 schema[key] = f"[...] ({len(value)} items)"
+         elif isinstance(value, dict):
+             schema[key] = "{...}"
+         else:
+             schema[key] = type(value).__name__
+     return schema
+
+
+ def _count_records(data: dict[str, Any]) -> dict[str, int]:
+     """Count array lengths in the response for quick reference.
+
+     Args:
+         data: The response data to analyze.
+
+     Returns:
+         Dict mapping array field names to their lengths.
+     """
+     counts: dict[str, int] = {}
+     for key, value in data.items():
+         if isinstance(value, list):
+             counts[key] = len(value)
+     return counts
+
+
+ def _default_navigation() -> dict[str, str]:
+     """Default navigation hints for querying saved JSON files.
+
+     Returns:
+         Dict of navigation hint name -> jq command.
+     """
+     return {
+         "view_structure": "jq 'keys'",
+         "first_10_items": "jq '.[] | limit(10; .)'",
+         "count_arrays": (
+             'jq \'to_entries | map(select(.value | type == "array")) '
+             "| from_entries | map_values(length)'"
+         ),
+     }
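
Usage note: a minimal sketch of how the decorator above might wrap an async MCP tool. The tool name, payload, and navigation hint are invented for illustration; only large_response_handler and the metadata keys come from the module itself.

    import asyncio

    from mcp_eregistrations_bpa.tools.large_response import large_response_handler

    # "fetch_registrations" is a hypothetical tool used only for illustration.
    @large_response_handler(navigation={"names": "jq '.items[].name'"})
    async def fetch_registrations() -> dict:
        # Stand-in for a BPA API call that can return a very large payload.
        return {"items": [{"name": f"reg-{i}", "steps": list(range(40))} for i in range(3000)]}

    result = asyncio.run(fetch_registrations())
    if "file_path" in result:
        # Payload exceeded the threshold: the full JSON was written to the
        # bpa-responses folder under the system temp directory, and only
        # metadata plus navigation hints came back.
        print(result["file_path"], result["size_kb"], result["record_count"])
    else:
        print(len(result["items"]))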