digitalkin 0.3.1.dev2__py3-none-any.whl → 0.3.2.dev14__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (79)
  1. base_server/server_async_insecure.py +6 -5
  2. base_server/server_async_secure.py +6 -5
  3. base_server/server_sync_insecure.py +5 -4
  4. base_server/server_sync_secure.py +5 -4
  5. digitalkin/__version__.py +1 -1
  6. digitalkin/core/job_manager/base_job_manager.py +1 -1
  7. digitalkin/core/job_manager/single_job_manager.py +28 -9
  8. digitalkin/core/job_manager/taskiq_broker.py +7 -6
  9. digitalkin/core/job_manager/taskiq_job_manager.py +1 -1
  10. digitalkin/core/task_manager/surrealdb_repository.py +7 -7
  11. digitalkin/core/task_manager/task_session.py +60 -98
  12. digitalkin/grpc_servers/module_server.py +109 -168
  13. digitalkin/grpc_servers/module_servicer.py +38 -16
  14. digitalkin/grpc_servers/utils/grpc_client_wrapper.py +24 -8
  15. digitalkin/grpc_servers/utils/utility_schema_extender.py +100 -0
  16. digitalkin/models/__init__.py +1 -1
  17. digitalkin/models/core/job_manager_models.py +0 -8
  18. digitalkin/models/core/task_monitor.py +4 -0
  19. digitalkin/models/grpc_servers/models.py +91 -6
  20. digitalkin/models/module/__init__.py +18 -13
  21. digitalkin/models/module/base_types.py +61 -0
  22. digitalkin/models/module/module_context.py +173 -13
  23. digitalkin/models/module/module_types.py +28 -392
  24. digitalkin/models/module/setup_types.py +490 -0
  25. digitalkin/models/module/tool_cache.py +68 -0
  26. digitalkin/models/module/tool_reference.py +117 -0
  27. digitalkin/models/module/utility.py +167 -0
  28. digitalkin/models/services/registry.py +35 -0
  29. digitalkin/modules/__init__.py +5 -1
  30. digitalkin/modules/_base_module.py +154 -61
  31. digitalkin/modules/archetype_module.py +6 -1
  32. digitalkin/modules/tool_module.py +6 -1
  33. digitalkin/modules/triggers/__init__.py +8 -0
  34. digitalkin/modules/triggers/healthcheck_ping_trigger.py +45 -0
  35. digitalkin/modules/triggers/healthcheck_services_trigger.py +63 -0
  36. digitalkin/modules/triggers/healthcheck_status_trigger.py +52 -0
  37. digitalkin/services/__init__.py +4 -0
  38. digitalkin/services/communication/__init__.py +7 -0
  39. digitalkin/services/communication/communication_strategy.py +76 -0
  40. digitalkin/services/communication/default_communication.py +101 -0
  41. digitalkin/services/communication/grpc_communication.py +234 -0
  42. digitalkin/services/cost/grpc_cost.py +1 -1
  43. digitalkin/services/filesystem/grpc_filesystem.py +1 -1
  44. digitalkin/services/registry/__init__.py +22 -1
  45. digitalkin/services/registry/default_registry.py +135 -4
  46. digitalkin/services/registry/exceptions.py +47 -0
  47. digitalkin/services/registry/grpc_registry.py +306 -0
  48. digitalkin/services/registry/registry_models.py +15 -0
  49. digitalkin/services/registry/registry_strategy.py +88 -4
  50. digitalkin/services/services_config.py +25 -3
  51. digitalkin/services/services_models.py +5 -1
  52. digitalkin/services/setup/default_setup.py +1 -1
  53. digitalkin/services/setup/grpc_setup.py +1 -1
  54. digitalkin/services/storage/grpc_storage.py +1 -1
  55. digitalkin/services/user_profile/__init__.py +11 -0
  56. digitalkin/services/user_profile/grpc_user_profile.py +2 -2
  57. digitalkin/services/user_profile/user_profile_strategy.py +0 -15
  58. digitalkin/utils/schema_splitter.py +207 -0
  59. {digitalkin-0.3.1.dev2.dist-info → digitalkin-0.3.2.dev14.dist-info}/METADATA +5 -5
  60. digitalkin-0.3.2.dev14.dist-info/RECORD +143 -0
  61. {digitalkin-0.3.1.dev2.dist-info → digitalkin-0.3.2.dev14.dist-info}/top_level.txt +1 -0
  62. modules/archetype_with_tools_module.py +244 -0
  63. modules/cpu_intensive_module.py +1 -1
  64. modules/dynamic_setup_module.py +5 -29
  65. modules/minimal_llm_module.py +1 -1
  66. modules/text_transform_module.py +1 -1
  67. monitoring/digitalkin_observability/__init__.py +46 -0
  68. monitoring/digitalkin_observability/http_server.py +150 -0
  69. monitoring/digitalkin_observability/interceptors.py +176 -0
  70. monitoring/digitalkin_observability/metrics.py +201 -0
  71. monitoring/digitalkin_observability/prometheus.py +137 -0
  72. monitoring/tests/test_metrics.py +172 -0
  73. services/filesystem_module.py +7 -5
  74. services/storage_module.py +4 -2
  75. digitalkin/grpc_servers/registry_server.py +0 -65
  76. digitalkin/grpc_servers/registry_servicer.py +0 -456
  77. digitalkin-0.3.1.dev2.dist-info/RECORD +0 -119
  78. {digitalkin-0.3.1.dev2.dist-info → digitalkin-0.3.2.dev14.dist-info}/WHEEL +0 -0
  79. {digitalkin-0.3.1.dev2.dist-info → digitalkin-0.3.2.dev14.dist-info}/licenses/LICENSE +0 -0
digitalkin/models/module/module_types.py
@@ -1,393 +1,29 @@
-"""Types for module models."""
-
-from __future__ import annotations
-
-import copy
-import types
-import typing
-from datetime import datetime, timezone
-from typing import TYPE_CHECKING, Any, ClassVar, Generic, TypeVar, cast, get_args, get_origin
-
-from pydantic import BaseModel, ConfigDict, Field, create_model
-
-from digitalkin.logger import logger
-from digitalkin.utils.dynamic_schema import (
-    DynamicField,
-    get_fetchers,
-    has_dynamic,
-    resolve_safe,
+"""Types for module models - backward compatibility re-exports.
+
+This module re-exports types from their new locations for backward compatibility.
+New code should import directly from the specific modules:
+- digitalkin.models.module.base_types for DataTrigger, DataModel, TypeVars
+- digitalkin.models.module.setup_types for SetupModel
+"""
+
+from digitalkin.models.module.base_types import (
+    DataModel,
+    DataTrigger,
+    DataTriggerT,
+    InputModelT,
+    OutputModelT,
+    SecretModelT,
+    SetupModelT,
 )
-
-if TYPE_CHECKING:
-    from pydantic.fields import FieldInfo
-
-
-class DataTrigger(BaseModel):
-    """Defines the root input/output model exposing the protocol.
-
-    The mandatory protocol is important to define the module beahvior following the user or agent input/output.
-
-    Example:
-        class MyInput(DataModel):
-            root: DataTrigger
-            user_define_data: Any
-
-        # Usage
-        my_input = MyInput(root=DataTrigger(protocol="message"))
-        print(my_input.root.protocol) # Output: message
-    """
-
-    protocol: ClassVar[str]
-    created_at: str = Field(
-        default_factory=lambda: datetime.now(tz=timezone.utc).isoformat(),
-        title="Created At",
-        description="Timestamp when the payload was created.",
-    )
-
-
-DataTriggerT = TypeVar("DataTriggerT", bound=DataTrigger)
-
-
-class DataModel(BaseModel, Generic[DataTriggerT]):
-    """Base definition of input/output model showing mandatory root fields.
-
-    The Model define the Module Input/output, usually referring to multiple input/output type defined by an union.
-
-    Example:
-        class ModuleInput(DataModel):
-            root: FileInput | MessageInput
-    """
-
-    root: DataTriggerT
-    annotations: dict[str, str] = Field(
-        default={},
-        title="Annotations",
-        description="Additional metadata or annotations related to the output. ex {'role': 'user'}",
-    )
-
-
-InputModelT = TypeVar("InputModelT", bound=DataModel)
-OutputModelT = TypeVar("OutputModelT", bound=DataModel)
-SecretModelT = TypeVar("SecretModelT", bound=BaseModel)
-SetupModelT = TypeVar("SetupModelT", bound="SetupModel")
-
-
-class SetupModel(BaseModel):
-    """Base definition of setup model showing mandatory root fields.
-
-    Optionally, the setup model can define a config option in json_schema_extra
-    to be used to initialize the Kin. Supports dynamic schema providers for
-    runtime value generation.
-
-    Attributes:
-        model_fields: Inherited from Pydantic BaseModel, contains field definitions.
-
-    See Also:
-        - Documentation: docs/api/dynamic_schema.md
-        - Tests: tests/modules/test_setup_model.py
-    """
-
-    @classmethod
-    async def get_clean_model(
-        cls,
-        *,
-        config_fields: bool,
-        hidden_fields: bool,
-        force: bool = False,
-    ) -> type[SetupModelT]:
-        """Dynamically builds and returns a new BaseModel subclass with filtered fields.
-
-        This method filters fields based on their `json_schema_extra` metadata:
-        - Fields with `{"config": True}` are included only when `config_fields=True`
-        - Fields with `{"hidden": True}` are included only when `hidden_fields=True`
-
-        When `force=True`, fields with dynamic schema providers will have their
-        providers called to fetch fresh values for schema metadata like enums.
-        This includes recursively processing nested BaseModel fields.
-
-        Args:
-            config_fields: If True, include fields marked with `{"config": True}`.
-                These are typically initial configuration fields.
-            hidden_fields: If True, include fields marked with `{"hidden": True}`.
-                These are typically runtime-only fields not shown in initial config.
-            force: If True, refresh dynamic schema fields by calling their providers.
-                Use this when you need up-to-date values from external sources like
-                databases or APIs. Default is False for performance.
-
-        Returns:
-            A new BaseModel subclass with filtered fields.
-        """
-        clean_fields: dict[str, Any] = {}
-
-        for name, field_info in cls.model_fields.items():
-            extra = getattr(field_info, "json_schema_extra", {}) or {}
-            is_config = bool(extra.get("config", False))
-            is_hidden = bool(extra.get("hidden", False))
-
-            # Skip config unless explicitly included
-            if is_config and not config_fields:
-                logger.debug("Skipping '%s' (config-only)", name)
-                continue
-
-            # Skip hidden unless explicitly included
-            if is_hidden and not hidden_fields:
-                logger.debug("Skipping '%s' (hidden-only)", name)
-                continue
-
-            # Refresh dynamic schema fields when force=True
-            current_field_info = field_info
-            current_annotation = field_info.annotation
-
-            if force:
-                # Check if this field has DynamicField metadata
-                if has_dynamic(field_info):
-                    current_field_info = await cls._refresh_field_schema(name, field_info)
-
-                # Check if the annotation is a nested BaseModel that might have dynamic fields
-                nested_model = cls._get_base_model_type(current_annotation)
-                if nested_model is not None:
-                    refreshed_nested = await cls._refresh_nested_model(nested_model)
-                    if refreshed_nested is not nested_model:
-                        # Update annotation to use refreshed nested model
-                        current_annotation = refreshed_nested
-                        # Create new field_info with updated annotation (deep copy for safety)
-                        current_field_info = copy.deepcopy(current_field_info)
-                        setattr(current_field_info, "annotation", current_annotation)
-
-            clean_fields[name] = (current_annotation, current_field_info)
-
-        # Dynamically create a model e.g. "SetupModel"
-        m = create_model(
-            f"{cls.__name__}",
-            __base__=BaseModel,
-            __config__=ConfigDict(arbitrary_types_allowed=True),
-            **clean_fields,
-        )
-        return cast("type[SetupModelT]", m)
-
-    @classmethod
-    def _get_base_model_type(cls, annotation: type | None) -> type[BaseModel] | None:
-        """Extract BaseModel type from an annotation.
-
-        Handles direct types, Optional, Union, list, dict, set, tuple, and other generics.
-
-        Args:
-            annotation: The type annotation to inspect.
-
-        Returns:
-            The BaseModel subclass if found, None otherwise.
-        """
-        if annotation is None:
-            return None
-
-        # Direct BaseModel subclass check
-        if isinstance(annotation, type) and issubclass(annotation, BaseModel):
-            return annotation
-
-        origin = get_origin(annotation)
-        if origin is None:
-            return None
-
-        args = get_args(annotation)
-        return cls._extract_base_model_from_args(origin, args)
-
-    @classmethod
-    def _extract_base_model_from_args(
-        cls,
-        origin: type,
-        args: tuple[type, ...],
-    ) -> type[BaseModel] | None:
-        """Extract BaseModel from generic type arguments.
-
-        Args:
-            origin: The generic origin type (list, dict, Union, etc.).
-            args: The type arguments.
-
-        Returns:
-            The BaseModel subclass if found, None otherwise.
-        """
-        # Union/Optional: check each arg (supports both typing.Union and types.UnionType)
-        # Python 3.10+ uses types.UnionType for X | Y syntax
-        if origin is typing.Union or origin is types.UnionType:
-            return cls._find_base_model_in_args(args)
-
-        # list, set, frozenset: check first arg
-        if origin in {list, set, frozenset} and args:
-            return cls._check_base_model(args[0])
-
-        # dict: check value type (second arg)
-        dict_value_index = 1
-        if origin is dict and len(args) > dict_value_index:
-            return cls._check_base_model(args[dict_value_index])
-
-        # tuple: check first non-ellipsis arg
-        if origin is tuple:
-            return cls._find_base_model_in_args(args, skip_ellipsis=True)
-
-        return None
-
-    @classmethod
-    def _check_base_model(cls, arg: type) -> type[BaseModel] | None:
-        """Check if arg is a BaseModel subclass.
-
-        Returns:
-            The BaseModel subclass if arg is one, None otherwise.
-        """
-        if isinstance(arg, type) and issubclass(arg, BaseModel):
-            return arg
-        return None
-
-    @classmethod
-    def _find_base_model_in_args(
-        cls,
-        args: tuple[type, ...],
-        *,
-        skip_ellipsis: bool = False,
-    ) -> type[BaseModel] | None:
-        """Find first BaseModel in args.
-
-        Returns:
-            The first BaseModel subclass found, None otherwise.
-        """
-        for arg in args:
-            if arg is type(None):
-                continue
-            if skip_ellipsis and arg is ...:
-                continue
-            result = cls._check_base_model(arg)
-            if result is not None:
-                return result
-        return None
-
-    @classmethod
-    async def _refresh_nested_model(cls, model_cls: type[BaseModel]) -> type[BaseModel]:
-        """Refresh dynamic fields in a nested BaseModel.
-
-        Creates a new model class with all DynamicField metadata resolved.
-
-        Args:
-            model_cls: The nested model class to refresh.
-
-        Returns:
-            A new model class with refreshed fields, or the original if no changes.
-        """
-        has_changes = False
-        clean_fields: dict[str, Any] = {}
-
-        for name, field_info in model_cls.model_fields.items():
-            current_field_info = field_info
-            current_annotation = field_info.annotation
-
-            # Check if field has DynamicField metadata
-            if has_dynamic(field_info):
-                current_field_info = await cls._refresh_field_schema(name, field_info)
-                has_changes = True
-
-            # Recursively check nested models
-            nested_model = cls._get_base_model_type(current_annotation)
-            if nested_model is not None:
-                refreshed_nested = await cls._refresh_nested_model(nested_model)
-                if refreshed_nested is not nested_model:
-                    current_annotation = refreshed_nested
-                    current_field_info = copy.deepcopy(current_field_info)
-                    setattr(current_field_info, "annotation", current_annotation)
-                    has_changes = True
-
-            clean_fields[name] = (current_annotation, current_field_info)
-
-        if not has_changes:
-            return model_cls
-
-        # Create new model with refreshed fields
-        logger.debug("Creating refreshed nested model for '%s'", model_cls.__name__)
-        return create_model(
-            model_cls.__name__,
-            __base__=BaseModel,
-            __config__=ConfigDict(arbitrary_types_allowed=True),
-            **clean_fields,
-        )
-
-    @classmethod
-    async def _refresh_field_schema(cls, field_name: str, field_info: FieldInfo) -> FieldInfo:
-        """Refresh a field's json_schema_extra with fresh values from dynamic providers.
-
-        This method calls all dynamic providers registered for a field (via Annotated
-        metadata) and creates a new FieldInfo with the resolved values. The original
-        field_info is not modified.
-
-        Uses `resolve_safe()` for structured error handling, allowing partial success
-        when some fetchers fail. Successfully resolved values are still applied.
-
-        Args:
-            field_name: The name of the field being refreshed (used for logging).
-            field_info: The original FieldInfo object containing the dynamic providers.
-
-        Returns:
-            A new FieldInfo object with the same attributes as the original, but with
-            `json_schema_extra` containing resolved values and Dynamic metadata removed.
-
-        Note:
-            If all fetchers fail, the original field_info is returned unchanged.
-            If some fetchers fail, successfully resolved values are still applied.
-        """
-        fetchers = get_fetchers(field_info)
-
-        if not fetchers:
-            return field_info
-
-        fetcher_keys = list(fetchers.keys())
-        logger.debug(
-            "Refreshing dynamic schema for field '%s' with fetchers: %s",
-            field_name,
-            fetcher_keys,
-            extra={"field_name": field_name, "fetcher_keys": fetcher_keys},
-        )
-
-        # Resolve all fetchers with structured error handling
-        result = await resolve_safe(fetchers)
-
-        # Log any errors that occurred with full details
-        if result.errors:
-            for key, error in result.errors.items():
-                logger.warning(
-                    "Failed to resolve '%s' for field '%s': %s: %s",
-                    key,
-                    field_name,
-                    type(error).__name__,
-                    str(error) or "(no message)",
-                    extra={
-                        "field_name": field_name,
-                        "fetcher_key": key,
-                        "error_type": type(error).__name__,
-                        "error_message": str(error),
-                        "error_repr": repr(error),
-                    },
-                )
-
-        # If no values were resolved, return original field_info
-        if not result.values:
-            logger.warning(
-                "All fetchers failed for field '%s', keeping original",
-                field_name,
-            )
-            return field_info
-
-        # Build new json_schema_extra with resolved values merged
-        extra = getattr(field_info, "json_schema_extra", {}) or {}
-        new_extra = {**extra, **result.values}
-
-        # Create a deep copy of the FieldInfo to avoid shared mutable state
-        new_field_info = copy.deepcopy(field_info)
-        setattr(new_field_info, "json_schema_extra", new_extra)
-
-        # Remove Dynamic from metadata (it's been resolved)
-        new_metadata = [m for m in new_field_info.metadata if not isinstance(m, DynamicField)]
-        setattr(new_field_info, "metadata", new_metadata)
-
-        logger.debug(
-            "Refreshed '%s' with dynamic values: %s",
-            field_name,
-            list(result.values.keys()),
-        )
-
-        return new_field_info
+from digitalkin.models.module.setup_types import SetupModel
+
+__all__ = [
+    "DataModel",
+    "DataTrigger",
+    "DataTriggerT",
+    "InputModelT",
+    "OutputModelT",
+    "SecretModelT",
+    "SetupModel",
+    "SetupModelT",
+]
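
The new module body above is a pure re-export shim: imports that target digitalkin.models.module.module_types keep resolving after the upgrade, while the docstring points new code at base_types and setup_types. A minimal sketch of both import paths, assuming digitalkin 0.3.2.dev14 is installed; PingTrigger, PingInput, PingSetup and their fields are hypothetical illustrations, not part of the package:

from typing import ClassVar

from pydantic import Field

# Old location still resolves, thanks to the re-export shim shown in the diff.
from digitalkin.models.module.module_types import SetupModel as LegacySetupModel

# New, preferred locations per the new module docstring.
from digitalkin.models.module.base_types import DataModel, DataTrigger
from digitalkin.models.module.setup_types import SetupModel

# Both names point at the same class object, since module_types only re-exports it.
assert LegacySetupModel is SetupModel


class PingTrigger(DataTrigger):
    """Hypothetical trigger; protocol is a ClassVar on DataTrigger."""

    protocol: ClassVar[str] = "ping"


class PingInput(DataModel[PingTrigger]):
    """Hypothetical input model; root and annotations are inherited from DataModel."""


class PingSetup(SetupModel):
    """Hypothetical setup model reusing the config/hidden markers from the old docstrings."""

    api_url: str = Field(default="", json_schema_extra={"config": True})
    session_token: str = Field(default="", json_schema_extra={"hidden": True})

Note that the get_clean_model filtering and dynamic schema refresh described in the removed docstrings is not dropped by this diff; per the file list it presumably moves with SetupModel into digitalkin/models/module/setup_types.py (+490 lines), which is where that behavior should be verified.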