digitalkin 0.2.25rc0__py3-none-any.whl → 0.3.2.dev14__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (122) hide show
  1. base_server/server_async_insecure.py +6 -5
  2. base_server/server_async_secure.py +6 -5
  3. base_server/server_sync_insecure.py +5 -4
  4. base_server/server_sync_secure.py +5 -4
  5. digitalkin/__version__.py +1 -1
  6. digitalkin/core/__init__.py +1 -0
  7. digitalkin/core/common/__init__.py +9 -0
  8. digitalkin/core/common/factories.py +156 -0
  9. digitalkin/core/job_manager/__init__.py +1 -0
  10. digitalkin/{modules → core}/job_manager/base_job_manager.py +138 -32
  11. digitalkin/core/job_manager/single_job_manager.py +373 -0
  12. digitalkin/{modules → core}/job_manager/taskiq_broker.py +121 -26
  13. digitalkin/core/job_manager/taskiq_job_manager.py +541 -0
  14. digitalkin/core/task_manager/__init__.py +1 -0
  15. digitalkin/core/task_manager/base_task_manager.py +539 -0
  16. digitalkin/core/task_manager/local_task_manager.py +108 -0
  17. digitalkin/core/task_manager/remote_task_manager.py +87 -0
  18. digitalkin/core/task_manager/surrealdb_repository.py +266 -0
  19. digitalkin/core/task_manager/task_executor.py +249 -0
  20. digitalkin/core/task_manager/task_session.py +368 -0
  21. digitalkin/grpc_servers/__init__.py +1 -19
  22. digitalkin/grpc_servers/_base_server.py +3 -3
  23. digitalkin/grpc_servers/module_server.py +120 -195
  24. digitalkin/grpc_servers/module_servicer.py +81 -44
  25. digitalkin/grpc_servers/utils/__init__.py +1 -0
  26. digitalkin/grpc_servers/utils/exceptions.py +0 -8
  27. digitalkin/grpc_servers/utils/grpc_client_wrapper.py +25 -9
  28. digitalkin/grpc_servers/utils/grpc_error_handler.py +53 -0
  29. digitalkin/grpc_servers/utils/utility_schema_extender.py +100 -0
  30. digitalkin/logger.py +64 -27
  31. digitalkin/mixins/__init__.py +19 -0
  32. digitalkin/mixins/base_mixin.py +10 -0
  33. digitalkin/mixins/callback_mixin.py +24 -0
  34. digitalkin/mixins/chat_history_mixin.py +110 -0
  35. digitalkin/mixins/cost_mixin.py +76 -0
  36. digitalkin/mixins/file_history_mixin.py +93 -0
  37. digitalkin/mixins/filesystem_mixin.py +46 -0
  38. digitalkin/mixins/logger_mixin.py +51 -0
  39. digitalkin/mixins/storage_mixin.py +79 -0
  40. digitalkin/models/__init__.py +1 -1
  41. digitalkin/models/core/__init__.py +1 -0
  42. digitalkin/{modules/job_manager → models/core}/job_manager_models.py +3 -11
  43. digitalkin/models/core/task_monitor.py +74 -0
  44. digitalkin/models/grpc_servers/__init__.py +1 -0
  45. digitalkin/{grpc_servers/utils → models/grpc_servers}/models.py +92 -7
  46. digitalkin/models/module/__init__.py +18 -11
  47. digitalkin/models/module/base_types.py +61 -0
  48. digitalkin/models/module/module.py +9 -1
  49. digitalkin/models/module/module_context.py +282 -6
  50. digitalkin/models/module/module_types.py +29 -105
  51. digitalkin/models/module/setup_types.py +490 -0
  52. digitalkin/models/module/tool_cache.py +68 -0
  53. digitalkin/models/module/tool_reference.py +117 -0
  54. digitalkin/models/module/utility.py +167 -0
  55. digitalkin/models/services/__init__.py +9 -0
  56. digitalkin/models/services/cost.py +1 -0
  57. digitalkin/models/services/registry.py +35 -0
  58. digitalkin/models/services/storage.py +39 -5
  59. digitalkin/modules/__init__.py +5 -1
  60. digitalkin/modules/_base_module.py +265 -167
  61. digitalkin/modules/archetype_module.py +6 -1
  62. digitalkin/modules/tool_module.py +16 -3
  63. digitalkin/modules/trigger_handler.py +7 -6
  64. digitalkin/modules/triggers/__init__.py +8 -0
  65. digitalkin/modules/triggers/healthcheck_ping_trigger.py +45 -0
  66. digitalkin/modules/triggers/healthcheck_services_trigger.py +63 -0
  67. digitalkin/modules/triggers/healthcheck_status_trigger.py +52 -0
  68. digitalkin/services/__init__.py +4 -0
  69. digitalkin/services/communication/__init__.py +7 -0
  70. digitalkin/services/communication/communication_strategy.py +76 -0
  71. digitalkin/services/communication/default_communication.py +101 -0
  72. digitalkin/services/communication/grpc_communication.py +234 -0
  73. digitalkin/services/cost/__init__.py +9 -2
  74. digitalkin/services/cost/grpc_cost.py +9 -42
  75. digitalkin/services/filesystem/default_filesystem.py +0 -2
  76. digitalkin/services/filesystem/grpc_filesystem.py +10 -39
  77. digitalkin/services/registry/__init__.py +22 -1
  78. digitalkin/services/registry/default_registry.py +135 -4
  79. digitalkin/services/registry/exceptions.py +47 -0
  80. digitalkin/services/registry/grpc_registry.py +306 -0
  81. digitalkin/services/registry/registry_models.py +15 -0
  82. digitalkin/services/registry/registry_strategy.py +88 -4
  83. digitalkin/services/services_config.py +25 -3
  84. digitalkin/services/services_models.py +5 -1
  85. digitalkin/services/setup/default_setup.py +6 -7
  86. digitalkin/services/setup/grpc_setup.py +52 -15
  87. digitalkin/services/storage/grpc_storage.py +4 -4
  88. digitalkin/services/user_profile/__init__.py +12 -0
  89. digitalkin/services/user_profile/default_user_profile.py +55 -0
  90. digitalkin/services/user_profile/grpc_user_profile.py +69 -0
  91. digitalkin/services/user_profile/user_profile_strategy.py +25 -0
  92. digitalkin/utils/__init__.py +28 -0
  93. digitalkin/utils/arg_parser.py +1 -1
  94. digitalkin/utils/development_mode_action.py +2 -2
  95. digitalkin/utils/dynamic_schema.py +483 -0
  96. digitalkin/utils/package_discover.py +1 -2
  97. digitalkin/utils/schema_splitter.py +207 -0
  98. {digitalkin-0.2.25rc0.dist-info → digitalkin-0.3.2.dev14.dist-info}/METADATA +11 -30
  99. digitalkin-0.3.2.dev14.dist-info/RECORD +143 -0
  100. {digitalkin-0.2.25rc0.dist-info → digitalkin-0.3.2.dev14.dist-info}/top_level.txt +1 -0
  101. modules/archetype_with_tools_module.py +244 -0
  102. modules/cpu_intensive_module.py +1 -1
  103. modules/dynamic_setup_module.py +338 -0
  104. modules/minimal_llm_module.py +1 -1
  105. modules/text_transform_module.py +1 -1
  106. monitoring/digitalkin_observability/__init__.py +46 -0
  107. monitoring/digitalkin_observability/http_server.py +150 -0
  108. monitoring/digitalkin_observability/interceptors.py +176 -0
  109. monitoring/digitalkin_observability/metrics.py +201 -0
  110. monitoring/digitalkin_observability/prometheus.py +137 -0
  111. monitoring/tests/test_metrics.py +172 -0
  112. services/filesystem_module.py +7 -5
  113. services/storage_module.py +4 -2
  114. digitalkin/grpc_servers/registry_server.py +0 -65
  115. digitalkin/grpc_servers/registry_servicer.py +0 -456
  116. digitalkin/grpc_servers/utils/factory.py +0 -180
  117. digitalkin/modules/job_manager/single_job_manager.py +0 -294
  118. digitalkin/modules/job_manager/taskiq_job_manager.py +0 -290
  119. digitalkin-0.2.25rc0.dist-info/RECORD +0 -89
  120. /digitalkin/{grpc_servers/utils → models/grpc_servers}/types.py +0 -0
  121. {digitalkin-0.2.25rc0.dist-info → digitalkin-0.3.2.dev14.dist-info}/WHEEL +0 -0
  122. {digitalkin-0.2.25rc0.dist-info → digitalkin-0.3.2.dev14.dist-info}/licenses/LICENSE +0 -0
@@ -0,0 +1,483 @@
1
+ """Dynamic schema utilities for runtime value refresh in Pydantic models.
2
+
3
+ This module provides a clean way to mark fields as dynamic using Annotated metadata,
4
+ allowing their schema values to be refreshed at runtime via sync or async fetchers.
5
+
6
+ Example:
7
+ from typing import Annotated
8
+ from digitalkin.utils import DynamicField
9
+
10
+ class AgentSetup(SetupModel):
11
+ model_name: Annotated[str, DynamicField(enum=fetch_models)] = Field(default="gpt-4")
12
+
13
+ See Also:
14
+ - Documentation: docs/api/dynamic_schema.md
15
+ - Tests: tests/utils/test_dynamic_schema.py
16
+ """
17
+
18
+ from __future__ import annotations
19
+
20
+ import asyncio
21
+ import time
22
+ import traceback
23
+ from collections.abc import Awaitable, Callable
24
+ from dataclasses import dataclass, field
25
+ from itertools import starmap
26
+ from typing import TYPE_CHECKING, Any, TypeVar
27
+
28
+ from digitalkin.logger import logger
29
+
30
+ if TYPE_CHECKING:
31
+ from pydantic.fields import FieldInfo
32
+
33
# Generic result type shared by Fetcher and ResolveResult.get().
T = TypeVar("T")

# Fetcher callable type: sync or async function with no arguments
Fetcher = Callable[[], T | Awaitable[T]]

# Default timeout for fetcher resolution (None = no timeout)
DEFAULT_TIMEOUT: float | None = None
40
+
41
+
42
@dataclass
class ResolveResult:
    """Outcome of a batch of dynamic fetcher resolutions.

    Exposes successfully resolved values alongside any exceptions raised
    during resolution, so callers can react to partial failures instead of
    an all-or-nothing error.

    Attributes:
        values: Dict mapping key names to successfully resolved values.
        errors: Dict mapping key names to exceptions that occurred during resolution.
    """

    values: dict[str, Any] = field(default_factory=dict)
    errors: dict[str, Exception] = field(default_factory=dict)

    @property
    def success(self) -> bool:
        """Whether every fetcher resolved without error.

        Returns:
            True if no errors occurred, False otherwise.
        """
        return not self.errors

    @property
    def partial(self) -> bool:
        """Whether resolution succeeded for some keys but not all.

        Returns:
            True if there are both values and errors, False otherwise.
        """
        return bool(self.values) and bool(self.errors)

    def get(self, key: str, default: T | None = None) -> T | None:
        """Look up a resolved value, falling back to a default.

        Args:
            key: The fetcher key name.
            default: Default value if key not found or errored.

        Returns:
            The resolved value or default.
        """
        return self.values.get(key, default)  # type: ignore[return-value]
86
+
87
+
88
class DynamicField:
    """Annotated-metadata marker that attaches runtime value fetchers to a field.

    Combine with typing.Annotated to flag fields whose schema values must be
    resolved at runtime. Each fetcher is a zero-argument callable (sync or
    async) producing the value for its key.

    Args:
        **fetchers: Mapping of key names to fetcher callables.
            Each fetcher is a function (sync or async) that takes no arguments
            and returns the value for that key (e.g., enum values, defaults).

    Example:
        from typing import Annotated

        async def fetch_models() -> list[str]:
            return await api.get_models()

        class Setup(SetupModel):
            model: Annotated[str, DynamicField(enum=fetch_models)] = Field(default="gpt-4")
    """

    __slots__ = ("fetchers",)

    def __init__(self, **fetchers: Fetcher[Any]) -> None:
        """Store the fetcher callables keyed by name."""
        self.fetchers: dict[str, Fetcher[Any]] = fetchers

    def __repr__(self) -> str:
        """Return string representation."""
        return "DynamicField({})".format(", ".join(self.fetchers))

    def __eq__(self, other: object) -> bool:
        """Compare by fetcher mapping.

        Returns:
            True if fetchers are equal, NotImplemented for non-DynamicField types.
        """
        if isinstance(other, DynamicField):
            return self.fetchers == other.fetchers
        return NotImplemented

    def __hash__(self) -> int:
        """Hash based on fetcher keys (fetchers themselves aren't hashable).

        Returns:
            Hash value based on sorted fetcher keys.
        """
        return hash(tuple(sorted(self.fetchers)))
137
+
138
+
139
def get_dynamic_metadata(field_info: FieldInfo) -> DynamicField | None:
    """Return the DynamicField annotation attached to a FieldInfo, if any.

    Args:
        field_info: The Pydantic FieldInfo object to inspect.

    Returns:
        The DynamicField metadata instance if found, None otherwise.
    """
    # First matching metadata entry wins, mirroring a linear scan.
    return next(
        (meta for meta in field_info.metadata if isinstance(meta, DynamicField)),
        None,
    )
152
+
153
+
154
def has_dynamic(field_info: FieldInfo) -> bool:
    """Check if a field carries DynamicField metadata.

    Args:
        field_info: The Pydantic FieldInfo object to check.

    Returns:
        True if the field has DynamicField metadata, False otherwise.
    """
    meta = get_dynamic_metadata(field_info)
    return meta is not None
164
+
165
+
166
def get_fetchers(field_info: FieldInfo) -> dict[str, Fetcher[Any]]:
    """Return the fetcher mapping from a field's DynamicField metadata.

    Args:
        field_info: The Pydantic FieldInfo object to extract from.

    Returns:
        Dict mapping key names to fetcher callables, empty if no DynamicField metadata.
    """
    meta = get_dynamic_metadata(field_info)
    return {} if meta is None else meta.fetchers
179
+
180
+
181
+ def _get_fetcher_info(fetcher: Fetcher[Any]) -> str:
182
+ """Get descriptive info about a fetcher for logging.
183
+
184
+ Args:
185
+ fetcher: The fetcher callable.
186
+
187
+ Returns:
188
+ A string describing the fetcher (module.name or repr).
189
+ """
190
+ if hasattr(fetcher, "__module__") and hasattr(fetcher, "__qualname__"):
191
+ return f"{fetcher.__module__}.{fetcher.__qualname__}"
192
+ if hasattr(fetcher, "__name__"):
193
+ return fetcher.__name__
194
+ return repr(fetcher)
195
+
196
+
197
async def _resolve_one(key: str, fetcher: Fetcher[Any]) -> tuple[str, Any]:
    """Run a single fetcher (sync or async) and return its keyed result.

    Args:
        key: The fetcher key name.
        fetcher: The fetcher callable.

    Returns:
        Tuple of (key, resolved_value).

    Raises:
        Exception: If the fetcher raises an exception.
    """
    info = _get_fetcher_info(fetcher)
    logger.debug(
        "Resolving fetcher '%s' using %s",
        key,
        info,
        extra={"fetcher_key": key, "fetcher": info},
    )

    started = time.perf_counter()

    try:
        value = fetcher()
        # Sync fetchers return the value directly; async ones hand back a
        # coroutine that still needs to be awaited.
        if asyncio.iscoroutine(value):
            logger.debug(
                "Fetcher '%s' returned coroutine, awaiting...",
                key,
                extra={"fetcher_key": key, "is_async": True},
            )
            value = await value
    except Exception as exc:
        spent_ms = (time.perf_counter() - started) * 1000
        logger.error(
            "Fetcher '%s' (%s) failed after %.2fms: %s: %s",
            key,
            info,
            spent_ms,
            type(exc).__name__,
            str(exc) or "(no message)",
            extra={
                "fetcher_key": key,
                "fetcher": info,
                "elapsed_ms": spent_ms,
                "error_type": type(exc).__name__,
                "error_message": str(exc),
                "traceback": traceback.format_exc(),
            },
        )
        raise

    spent_ms = (time.perf_counter() - started) * 1000
    logger.debug(
        "Fetcher '%s' resolved successfully in %.2fms, result type: %s",
        key,
        spent_ms,
        type(value).__name__,
        extra={
            "fetcher_key": key,
            "elapsed_ms": spent_ms,
            "result_type": type(value).__name__,
        },
    )

    return key, value
267
+
268
+
269
async def resolve(
    fetchers: dict[str, Fetcher[Any]],
    *,
    timeout: float | None = DEFAULT_TIMEOUT,
) -> dict[str, Any]:
    """Resolve all dynamic fetchers to their actual values in parallel.

    Every fetcher runs concurrently via asyncio.gather(), which speeds up
    resolution when several async fetchers are involved.

    Args:
        fetchers: Dict mapping key names to fetcher callables.
        timeout: Optional timeout in seconds for all fetchers combined.
            If None (default), no timeout is applied.

    Returns:
        Dict mapping key names to resolved values.

    Raises:
        asyncio.TimeoutError: If timeout is exceeded.
        Exception: If any fetcher raises an exception, it is propagated.

    Example:
        fetchers = {"enum": fetch_models, "default": get_default}
        resolved = await resolve(fetchers, timeout=5.0)
        # resolved = {"enum": ["gpt-4", "gpt-3.5"], "default": "gpt-4"}
    """
    if not fetchers:
        logger.debug("resolve() called with empty fetchers, returning {}")
        return {}

    keys = list(fetchers)
    infos = {name: _get_fetcher_info(fn) for name, fn in fetchers.items()}

    logger.info(
        "resolve() starting parallel resolution of %d fetcher(s): %s",
        len(fetchers),
        keys,
        extra={
            "fetcher_count": len(fetchers),
            "fetcher_keys": keys,
            "fetcher_infos": infos,
            "timeout": timeout,
        },
    )

    started = time.perf_counter()

    # One coroutine per fetcher; gather() drives them concurrently.
    coros = [_resolve_one(name, fn) for name, fn in fetchers.items()]

    try:
        aggregate = asyncio.gather(*coros)
        if timeout is None:
            results = await aggregate
        else:
            results = await asyncio.wait_for(aggregate, timeout=timeout)
    except asyncio.TimeoutError:
        spent_ms = (time.perf_counter() - started) * 1000
        logger.error(
            "resolve() timed out after %.2fms (timeout=%.2fs)",
            spent_ms,
            timeout,
            extra={"elapsed_ms": spent_ms, "timeout": timeout},
        )
        raise

    spent_ms = (time.perf_counter() - started) * 1000
    logger.info(
        "resolve() completed successfully in %.2fms, resolved %d fetcher(s)",
        spent_ms,
        len(results),
        extra={"elapsed_ms": spent_ms, "resolved_count": len(results)},
    )

    # _resolve_one yields (key, value) pairs; rebuild the mapping.
    return dict(results)
345
+
346
+
347
async def resolve_safe(
    fetchers: dict[str, Fetcher[Any]],
    *,
    timeout: float | None = DEFAULT_TIMEOUT,
) -> ResolveResult:
    """Resolve fetchers with structured error handling.

    Unlike `resolve()`, this function catches individual fetcher errors
    and returns them in a structured result, allowing partial success.

    Args:
        fetchers: Dict mapping key names to fetcher callables.
        timeout: Optional timeout in seconds for all fetchers combined.
            If None (default), no timeout is applied. Note: timeout applies
            to the entire operation, not individual fetchers.

    Returns:
        ResolveResult with values and any errors that occurred.

    Example:
        result = await resolve_safe(fetchers, timeout=5.0)
        if result.success:
            print("All resolved:", result.values)
        elif result.partial:
            print("Partial success:", result.values)
            print("Errors:", result.errors)
        else:
            print("All failed:", result.errors)
    """
    # Fast path: nothing to resolve.
    if not fetchers:
        logger.debug("resolve_safe() called with empty fetchers, returning empty ResolveResult")
        return ResolveResult()

    fetcher_keys = list(fetchers.keys())
    fetcher_infos = {k: _get_fetcher_info(f) for k, f in fetchers.items()}

    logger.info(
        "resolve_safe() starting parallel resolution of %d fetcher(s): %s",
        len(fetchers),
        fetcher_keys,
        extra={
            "fetcher_count": len(fetchers),
            "fetcher_keys": fetcher_keys,
            "fetcher_infos": fetcher_infos,
            "timeout": timeout,
        },
    )

    start_time = time.perf_counter()
    # Shared accumulator: each concurrent worker below writes its outcome into
    # result.values or result.errors. Safe without a lock because workers run
    # on one event loop and each mutation is a single dict assignment.
    result = ResolveResult()

    async def safe_resolve_one(key: str, fetcher: Fetcher[Any]) -> None:
        """Resolve one fetcher, capturing errors."""
        try:
            _, value = await _resolve_one(key, fetcher)
            result.values[key] = value
        # NOTE: asyncio.CancelledError is a BaseException (Python 3.8+), so a
        # timeout cancellation is NOT swallowed here — it surfaces through
        # wait_for() below and the key is recorded as timed out instead.
        except Exception as e:
            # Error already logged in _resolve_one, just capture it
            result.errors[key] = e

    # Create tasks for parallel execution
    tasks = list(starmap(safe_resolve_one, fetchers.items()))

    try:
        if timeout is not None:
            await asyncio.wait_for(asyncio.gather(*tasks), timeout=timeout)
        else:
            await asyncio.gather(*tasks)
    except asyncio.TimeoutError as e:
        elapsed_ms = (time.perf_counter() - start_time) * 1000
        # Add timeout error for any keys that didn't complete: a key absent
        # from both values and errors never finished before cancellation.
        resolved_keys = set(result.values.keys()) | set(result.errors.keys())
        timed_out_keys = [key for key in fetchers if key not in resolved_keys]
        for key in timed_out_keys:
            result.errors[key] = e

        logger.error(
            "resolve_safe() timed out after %.2fms (timeout=%.2fs), %d succeeded, %d failed, %d timed out",
            elapsed_ms,
            timeout,
            len(result.values),
            # errors now includes the timed-out keys; subtract to report only
            # genuine fetcher failures.
            len(result.errors) - len(timed_out_keys),
            len(timed_out_keys),
            extra={
                "elapsed_ms": elapsed_ms,
                "timeout": timeout,
                "succeeded_keys": list(result.values.keys()),
                "failed_keys": [k for k in result.errors if k not in timed_out_keys],
                "timed_out_keys": timed_out_keys,
            },
        )

    elapsed_ms = (time.perf_counter() - start_time) * 1000

    # Log summary
    if result.success:
        logger.info(
            "resolve_safe() completed successfully in %.2fms, all %d fetcher(s) resolved",
            elapsed_ms,
            len(result.values),
            extra={
                "elapsed_ms": elapsed_ms,
                "success": True,
                "resolved_count": len(result.values),
            },
        )
    elif result.partial:
        logger.warning(
            "resolve_safe() completed with partial success in %.2fms: %d succeeded, %d failed",
            elapsed_ms,
            len(result.values),
            len(result.errors),
            extra={
                "elapsed_ms": elapsed_ms,
                "success": False,
                "partial": True,
                "resolved_count": len(result.values),
                "error_count": len(result.errors),
                "succeeded_keys": list(result.values.keys()),
                "failed_keys": list(result.errors.keys()),
            },
        )
    else:
        logger.error(
            "resolve_safe() completed with all failures in %.2fms: %d failed",
            elapsed_ms,
            len(result.errors),
            extra={
                "elapsed_ms": elapsed_ms,
                "success": False,
                "partial": False,
                "error_count": len(result.errors),
                "failed_keys": list(result.errors.keys()),
            },
        )

    return result
@@ -1,4 +1,4 @@
1
- """."""
1
+ """Secure module discovery and import utility for trigger handlers."""
2
2
 
3
3
  import importlib
4
4
  import importlib.util
@@ -276,7 +276,6 @@ class ModuleDiscoverer:
276
276
  Args:
277
277
  packages: List of package names to scan.
278
278
  file_pattern: Glob pattern for matching modules.
279
- safe_mode: If True, blocks modules with forbidden names.
280
279
  max_file_size: Limit for module file sizes in bytes.
281
280
  """
282
281
  self.packages = packages
@@ -0,0 +1,207 @@
1
+ """Schema splitter for react-jsonschema-form."""
2
+
3
+ from typing import Any
4
+
5
+
6
class SchemaSplitter:
    """Splits a combined JSON schema into jsonschema and uischema for react-jsonschema-form."""

    @classmethod
    def split(cls, combined_schema: dict[str, Any]) -> tuple[dict[str, Any], dict[str, Any]]:
        """Split schema into (jsonschema, uischema).

        Args:
            combined_schema: Combined JSON schema with ui:* properties.

        Returns:
            Tuple of (jsonschema, uischema).
        """
        # Pre-collect UI metadata from every $defs entry so each $ref site can
        # merge the referenced definition's ui:* keys into its own uischema.
        ui_by_def: dict[str, dict[str, Any]] = {}
        for def_name, definition in combined_schema.get("$defs", {}).items():
            if isinstance(definition, dict):
                ui_by_def[def_name] = {}
                cls._extract_ui_properties(definition, ui_by_def[def_name])

        schema_part: dict[str, Any] = {}
        ui_part: dict[str, Any] = {}
        cls._process_object(combined_schema, schema_part, ui_part, ui_by_def)
        return schema_part, ui_part

    @classmethod
    def _extract_ui_properties(cls, source: dict[str, Any], ui_target: dict[str, Any]) -> None:  # noqa: C901
        """Recursively collect ui:* entries from source into ui_target.

        Args:
            source: Source dict to extract from.
            ui_target: Target dict for ui properties.
        """
        for key, value in source.items():
            if key.startswith("ui:"):
                ui_target[key] = value
                continue
            if key == "properties" and isinstance(value, dict):
                for name, subschema in value.items():
                    if not isinstance(subschema, dict):
                        continue
                    collected: dict[str, Any] = {}
                    cls._extract_ui_properties(subschema, collected)
                    if collected:
                        ui_target[name] = collected
            elif key == "items" and isinstance(value, dict):
                item_ui: dict[str, Any] = {}
                cls._extract_ui_properties(value, item_ui)
                if item_ui:
                    ui_target["items"] = item_ui
            elif key == "allOf" and isinstance(value, list):
                # allOf branches contribute their ui:* keys directly to this level.
                for branch in value:
                    if isinstance(branch, dict):
                        cls._extract_ui_properties(branch, ui_target)

    @classmethod
    def _process_object(  # noqa: C901, PLR0912
        cls,
        source: dict[str, Any],
        json_target: dict[str, Any],
        ui_target: dict[str, Any],
        defs_ui: dict[str, dict[str, Any]],
    ) -> None:
        """Split a top-level object into its JSON-schema and UI-schema halves.

        Args:
            source: Source object to process.
            json_target: Target dict for json schema.
            ui_target: Target dict for ui schema.
            defs_ui: Pre-extracted UI properties from $defs.
        """
        for key, value in source.items():
            if key.startswith("ui:"):
                ui_target[key] = value
            elif key == "properties" and isinstance(value, dict):
                out_props: dict[str, Any] = {}
                json_target["properties"] = out_props
                for name, subschema in value.items():
                    if isinstance(subschema, dict):
                        out_props[name] = {}
                        sub_ui: dict[str, Any] = {}
                        cls._process_property(subschema, out_props[name], sub_ui, defs_ui)
                        if sub_ui:
                            ui_target[name] = sub_ui
                    else:
                        out_props[name] = subschema
            elif key == "$defs" and isinstance(value, dict):
                out_defs: dict[str, Any] = {}
                json_target["$defs"] = out_defs
                for name, definition in value.items():
                    if isinstance(definition, dict):
                        out_defs[name] = {}
                        cls._strip_ui_properties(definition, out_defs[name])
                    else:
                        out_defs[name] = definition
            elif key == "items" and isinstance(value, dict):
                json_target["items"] = {}
                item_ui: dict[str, Any] = {}
                cls._process_property(value, json_target["items"], item_ui, defs_ui)
                if item_ui:
                    ui_target["items"] = item_ui
            elif key == "allOf" and isinstance(value, list):
                cleaned: list[Any] = []
                json_target["allOf"] = cleaned
                for branch in value:
                    if isinstance(branch, dict):
                        stripped: dict[str, Any] = {}
                        cls._strip_ui_properties(branch, stripped)
                        cleaned.append(stripped)
                    else:
                        cleaned.append(branch)
            elif key in {"if", "then", "else"} and isinstance(value, dict):
                json_target[key] = {}
                cls._strip_ui_properties(value, json_target[key])
            else:
                json_target[key] = value

    @classmethod
    def _process_property(  # noqa: C901, PLR0912
        cls,
        source: dict[str, Any],
        json_target: dict[str, Any],
        ui_target: dict[str, Any],
        defs_ui: dict[str, dict[str, Any]],
    ) -> None:
        """Split one property schema, merging UI metadata from any $ref target.

        Args:
            source: Source property dict.
            json_target: Target dict for json schema.
            ui_target: Target dict for ui schema.
            defs_ui: Pre-extracted UI properties from $defs.
        """
        # A $ref to a local definition pulls in the UI metadata collected for it.
        if "$ref" in source:
            ref = source["$ref"]
            if ref.startswith("#/$defs/"):
                referenced_ui = defs_ui.get(ref[len("#/$defs/"):])
                if referenced_ui is not None:
                    ui_target.update(referenced_ui)

        for key, value in source.items():
            if key.startswith("ui:"):
                ui_target[key] = value
            elif key == "properties" and isinstance(value, dict):
                out_props: dict[str, Any] = {}
                json_target["properties"] = out_props
                for name, subschema in value.items():
                    if isinstance(subschema, dict):
                        out_props[name] = {}
                        sub_ui: dict[str, Any] = {}
                        cls._process_property(subschema, out_props[name], sub_ui, defs_ui)
                        if sub_ui:
                            ui_target[name] = sub_ui
                    else:
                        out_props[name] = subschema
            elif key == "items" and isinstance(value, dict):
                json_target["items"] = {}
                item_ui: dict[str, Any] = {}
                cls._process_property(value, json_target["items"], item_ui, defs_ui)
                if item_ui:
                    ui_target["items"] = item_ui
            else:
                # Everything else (including the $ref itself) is plain JSON schema.
                json_target[key] = value

    @classmethod
    def _strip_ui_properties(cls, source: dict[str, Any], json_target: dict[str, Any]) -> None:  # noqa: C901, PLR0912
        """Deep-copy source into json_target while dropping every ui:* key.

        Args:
            source: Source dict.
            json_target: Target dict without ui:* properties.
        """
        for key, value in source.items():
            if key.startswith("ui:"):
                continue
            if key == "properties" and isinstance(value, dict):
                out_props: dict[str, Any] = {}
                json_target["properties"] = out_props
                for name, subschema in value.items():
                    if isinstance(subschema, dict):
                        out_props[name] = {}
                        cls._strip_ui_properties(subschema, out_props[name])
                    else:
                        out_props[name] = subschema
            elif key == "$defs" and isinstance(value, dict):
                out_defs: dict[str, Any] = {}
                json_target["$defs"] = out_defs
                for name, definition in value.items():
                    if isinstance(definition, dict):
                        out_defs[name] = {}
                        cls._strip_ui_properties(definition, out_defs[name])
                    else:
                        out_defs[name] = definition
            elif key == "items" and isinstance(value, dict):
                json_target["items"] = {}
                cls._strip_ui_properties(value, json_target["items"])
            elif key == "allOf" and isinstance(value, list):
                cleaned: list[Any] = []
                json_target["allOf"] = cleaned
                for branch in value:
                    if isinstance(branch, dict):
                        stripped: dict[str, Any] = {}
                        cls._strip_ui_properties(branch, stripped)
                        cleaned.append(stripped)
                    else:
                        cleaned.append(branch)
            elif key in {"if", "then", "else"} and isinstance(value, dict):
                json_target[key] = {}
                cls._strip_ui_properties(value, json_target[key])
            else:
                json_target[key] = value