digitalkin 0.3.1__py3-none-any.whl → 0.3.1.dev1__py3-none-any.whl
This diff shows the changes between two publicly released versions of this package, as they appear in their public registry, and is provided for informational purposes only.
- digitalkin/__version__.py +1 -1
- digitalkin/core/job_manager/taskiq_broker.py +1 -1
- digitalkin/core/task_manager/task_session.py +2 -12
- digitalkin/grpc_servers/module_servicer.py +8 -12
- digitalkin/models/grpc_servers/models.py +4 -4
- digitalkin/models/module/module_context.py +0 -5
- digitalkin/models/module/module_types.py +15 -299
- digitalkin/modules/_base_module.py +28 -66
- digitalkin/services/services_config.py +0 -11
- digitalkin/services/services_models.py +1 -3
- digitalkin/services/user_profile/__init__.py +0 -11
- digitalkin/services/user_profile/grpc_user_profile.py +2 -2
- digitalkin/utils/__init__.py +0 -28
- {digitalkin-0.3.1.dist-info → digitalkin-0.3.1.dev1.dist-info}/METADATA +4 -4
- {digitalkin-0.3.1.dist-info → digitalkin-0.3.1.dev1.dist-info}/RECORD +18 -20
- digitalkin/utils/dynamic_schema.py +0 -483
- modules/dynamic_setup_module.py +0 -362
- {digitalkin-0.3.1.dist-info → digitalkin-0.3.1.dev1.dist-info}/WHEEL +0 -0
- {digitalkin-0.3.1.dist-info → digitalkin-0.3.1.dev1.dist-info}/licenses/LICENSE +0 -0
- {digitalkin-0.3.1.dist-info → digitalkin-0.3.1.dev1.dist-info}/top_level.txt +0 -0
digitalkin/__version__.py CHANGED
@@ -208,7 +208,7 @@ async def run_start_module(
     # Reconstruct Pydantic models from dicts for type safety
     try:
         input_model = module_class.create_input_model(input_data)
-        setup_model =
+        setup_model = module_class.create_setup_model(setup_data)
     except Exception as e:
         logger.error("Failed to reconstruct models for job %s: %s", job_id, e, exc_info=True)
         raise
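The worker rebuilds typed Pydantic models from the plain dicts that travel through the job queue. A minimal sketch of that round trip with vanilla Pydantic (the `JobInput` model is hypothetical; the SDK's `create_input_model`/`create_setup_model` classmethods presumably wrap a similar validate step):

```python
from pydantic import BaseModel


class JobInput(BaseModel):
    """Hypothetical input model standing in for a module's input type."""

    prompt: str
    temperature: float = 0.7


# Producer side: dump to a JSON-safe dict before enqueueing the job payload.
payload = JobInput(prompt="hello").model_dump(mode="json")

# Worker side: rebuild the typed model and get validation back for free.
input_model = JobInput.model_validate(payload)
assert input_model.temperature == 0.7
```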
@@ -49,24 +49,14 @@ class TaskSession:
         db: SurrealDBConnection,
         module: BaseModule,
         heartbeat_interval: datetime.timedelta = datetime.timedelta(seconds=2),
-        queue_maxsize: int = 1000,
     ) -> None:
-        """Initialize Task Session.
-
-        Args:
-            task_id: Unique task identifier
-            mission_id: Mission identifier
-            db: SurrealDB connection
-            module: Module instance
-            heartbeat_interval: Interval between heartbeats
-            queue_maxsize: Maximum size for the queue (0 = unlimited)
-        """
+        """Initialize Task Session."""
         self.db = db
         self.module = module

         self.status = TaskStatus.PENDING
         # Bounded queue to prevent unbounded memory growth (max 1000 items)
-        self.queue: asyncio.Queue = asyncio.Queue(maxsize=
+        self.queue: asyncio.Queue = asyncio.Queue(maxsize=1000)

         self.task_id = task_id
         self.mission_id = mission_id
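With the `queue_maxsize` parameter removed, every `TaskSession` now gets a queue bounded at 1000 items. The sketch below (with a deliberately tiny `maxsize` so the effect is visible) shows the backpressure behaviour a bounded `asyncio.Queue` implies for producers:

```python
import asyncio


async def main() -> None:
    # Bounded queue; TaskSession pins this at 1000, 2 here only to demo the behaviour.
    queue: asyncio.Queue[str] = asyncio.Queue(maxsize=2)

    await queue.put("a")
    await queue.put("b")

    # When the queue is full, put() suspends until a consumer calls get();
    # put_nowait() raises instead. Either way memory stays bounded.
    try:
        queue.put_nowait("c")
    except asyncio.QueueFull:
        print("queue full, producer has to wait for a consumer")

    print(await queue.get())  # "a"


asyncio.run(main())
```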
@@ -112,7 +112,7 @@ class ModuleServicer(module_service_pb2_grpc.ModuleServiceServicer, ArgParser):
         # TODO: Secret should be used here as well
         setup_version = request.setup_version
         config_setup_data = self.module_class.create_config_setup_model(json_format.MessageToDict(request.content))
-        setup_version_data =
+        setup_version_data = self.module_class.create_setup_model(
             json_format.MessageToDict(request.setup_version.content),
             config_fields=True,
         )
@@ -185,7 +185,7 @@ class ModuleServicer(module_service_pb2_grpc.ModuleServiceServicer, ArgParser):
             msg = "No setup data returned."
             raise ServicerError(msg)

-        setup_data =
+        setup_data = self.module_class.create_setup_model(setup_data_class.current_setup_version.content)

         # create a task to run the module in background
         job_id = await self.job_manager.create_module_instance_job(
@@ -350,9 +350,7 @@ class ModuleServicer(module_service_pb2_grpc.ModuleServiceServicer, ArgParser):
         # Get input schema if available
         try:
             # Convert schema to proto format
-            input_schema_proto =
-                llm_format=request.llm_format,
-            )
+            input_schema_proto = self.module_class.get_input_format(llm_format=request.llm_format)
             input_format_struct = json_format.Parse(
                 text=input_schema_proto,
                 message=struct_pb2.Struct(),  # pylint: disable=no-member
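Each of the `GetModule*Format` handlers follows the same pattern: the module class serializes a schema to JSON text, and the servicer parses it into a `google.protobuf.Struct` for the response. A small sketch of that step with a hypothetical Pydantic model (only the `json_format.Parse` call mirrors what the servicer actually does):

```python
import json

from google.protobuf import json_format, struct_pb2
from pydantic import BaseModel


class ExampleInput(BaseModel):
    """Hypothetical model; stands in for whatever schema get_input_format emits."""

    query: str
    top_k: int = 5


# A JSON object rendered as text, e.g. a model's JSON Schema...
schema_text = json.dumps(ExampleInput.model_json_schema())

# ...parses directly into a protobuf Struct that can be set on the gRPC response.
schema_struct = json_format.Parse(text=schema_text, message=struct_pb2.Struct())
print(schema_struct["title"])  # "ExampleInput"
```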
@@ -388,9 +386,7 @@ class ModuleServicer(module_service_pb2_grpc.ModuleServiceServicer, ArgParser):
         # Get output schema if available
         try:
             # Convert schema to proto format
-            output_schema_proto =
-                llm_format=request.llm_format,
-            )
+            output_schema_proto = self.module_class.get_output_format(llm_format=request.llm_format)
             output_format_struct = json_format.Parse(
                 text=output_schema_proto,
                 message=struct_pb2.Struct(),  # pylint: disable=no-member
@@ -426,7 +422,7 @@ class ModuleServicer(module_service_pb2_grpc.ModuleServiceServicer, ArgParser):
         # Get setup schema if available
         try:
             # Convert schema to proto format
-            setup_schema_proto =
+            setup_schema_proto = self.module_class.get_setup_format(llm_format=request.llm_format)
             setup_format_struct = json_format.Parse(
                 text=setup_schema_proto,
                 message=struct_pb2.Struct(),  # pylint: disable=no-member
@@ -443,7 +439,7 @@ class ModuleServicer(module_service_pb2_grpc.ModuleServiceServicer, ArgParser):
             setup_schema=setup_format_struct,
         )

-
+    def GetModuleSecret(  # noqa: N802
         self,
         request: information_pb2.GetModuleSecretRequest,
         context: grpc.ServicerContext,
@@ -462,7 +458,7 @@ class ModuleServicer(module_service_pb2_grpc.ModuleServiceServicer, ArgParser):
         # Get secret schema if available
         try:
             # Convert schema to proto format
-            secret_schema_proto =
+            secret_schema_proto = self.module_class.get_secret_format(llm_format=request.llm_format)
             secret_format_struct = json_format.Parse(
                 text=secret_schema_proto,
                 message=struct_pb2.Struct(),  # pylint: disable=no-member
@@ -498,7 +494,7 @@ class ModuleServicer(module_service_pb2_grpc.ModuleServiceServicer, ArgParser):
         # Get setup schema if available
         try:
             # Convert schema to proto format
-            config_setup_schema_proto =
+            config_setup_schema_proto = self.module_class.get_config_setup_format(llm_format=request.llm_format)
             config_setup_format_struct = json_format.Parse(
                 text=config_setup_schema_proto,
                 message=struct_pb2.Struct(),  # pylint: disable=no-member
@@ -175,8 +175,8 @@ class ClientConfig(ChannelConfig):
     credentials: ClientCredentials | None = Field(None, description="Client credentials for secure mode")
     channel_options: list[tuple[str, Any]] = Field(
         default_factory=lambda: [
-            ("grpc.max_receive_message_length",
-            ("grpc.max_send_message_length",
+            ("grpc.max_receive_message_length", 50 * 1024 * 1024),  # 50MB
+            ("grpc.max_send_message_length", 50 * 1024 * 1024),  # 50MB
         ],
         description="Additional channel options",
     )
@@ -223,8 +223,8 @@ class ServerConfig(ChannelConfig):
     credentials: ServerCredentials | None = Field(None, description="Server credentials for secure mode")
     server_options: list[tuple[str, Any]] = Field(
         default_factory=lambda: [
-            ("grpc.max_receive_message_length",
-            ("grpc.max_send_message_length",
+            ("grpc.max_receive_message_length", 50 * 1024 * 1024),  # 50MB
+            ("grpc.max_send_message_length", 50 * 1024 * 1024),  # 50MB
         ],
         description="Additional server options",
     )
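Both configs now pin the gRPC message limits to 50MB by default. Option tuples like these are consumed verbatim by grpcio when the channel or server is created, roughly as below (the address and worker count are placeholders):

```python
from concurrent import futures

import grpc

MAX_MESSAGE_LENGTH = 50 * 1024 * 1024  # 50MB, matching the defaults above

options = [
    ("grpc.max_receive_message_length", MAX_MESSAGE_LENGTH),
    ("grpc.max_send_message_length", MAX_MESSAGE_LENGTH),
]

# Client side: the option tuples go straight into the channel constructor.
channel = grpc.insecure_channel("localhost:50051", options=options)

# Server side: the same list is passed to grpc.server().
server = grpc.server(futures.ThreadPoolExecutor(max_workers=4), options=options)
```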
@@ -10,7 +10,6 @@ from digitalkin.services.identity.identity_strategy import IdentityStrategy
 from digitalkin.services.registry.registry_strategy import RegistryStrategy
 from digitalkin.services.snapshot.snapshot_strategy import SnapshotStrategy
 from digitalkin.services.storage.storage_strategy import StorageStrategy
-from digitalkin.services.user_profile.user_profile_strategy import UserProfileStrategy


 class Session(SimpleNamespace):
@@ -90,7 +89,6 @@ class ModuleContext:
     registry: RegistryStrategy
     snapshot: SnapshotStrategy
     storage: StorageStrategy
-    user_profile: UserProfileStrategy

     session: Session
     callbacks: SimpleNamespace
@@ -107,7 +105,6 @@ class ModuleContext:
         registry: RegistryStrategy,
         snapshot: SnapshotStrategy,
         storage: StorageStrategy,
-        user_profile: UserProfileStrategy,
         session: dict[str, Any],
         metadata: dict[str, Any] = {},
         helpers: dict[str, Any] = {},
@@ -123,7 +120,6 @@ class ModuleContext:
             registry: RegistryStrategy.
             snapshot: SnapshotStrategy.
             storage: StorageStrategy.
-            user_profile: UserProfileStrategy.
             metadata: dict defining differents Module metadata.
             helpers: dict different user defined helpers.
             session: dict referring the session IDs or informations.
@@ -137,7 +133,6 @@ class ModuleContext:
         self.registry = registry
         self.snapshot = snapshot
         self.storage = storage
-        self.user_profile = user_profile

         self.metadata = SimpleNamespace(**metadata)
         self.session = Session(**session)
@@ -1,25 +1,11 @@
 """Types for module models."""

-from __future__ import annotations
-
-import copy
-import types
-import typing
 from datetime import datetime, timezone
-from typing import
+from typing import Any, ClassVar, Generic, TypeVar, cast

 from pydantic import BaseModel, ConfigDict, Field, create_model

 from digitalkin.logger import logger
-from digitalkin.utils.dynamic_schema import (
-    DynamicField,
-    get_fetchers,
-    has_dynamic,
-    resolve_safe,
-)
-
-if TYPE_CHECKING:
-    from pydantic.fields import FieldInfo


 class DataTrigger(BaseModel):
@@ -75,50 +61,27 @@ SetupModelT = TypeVar("SetupModelT", bound="SetupModel")
 class SetupModel(BaseModel):
     """Base definition of setup model showing mandatory root fields.

-    Optionally, the setup model can define a config option in json_schema_extra
-    to be used to initialize the Kin. Supports dynamic schema providers for
-    runtime value generation.
-
-    Attributes:
-        model_fields: Inherited from Pydantic BaseModel, contains field definitions.
+    Optionally, the setup model can define a config option in json_schema_extra to be used to initialize the Kin.

-
-
-
+    Example:
+        class MySetup(SetupModel):
+            name: str = Field()
+            number: int = Field(..., json_schema_extra={"config": True})
     """

     @classmethod
-
-
-        *,
-        config_fields: bool,
-        hidden_fields: bool,
-        force: bool = False,
-    ) -> type[SetupModelT]:
-        """Dynamically builds and returns a new BaseModel subclass with filtered fields.
-
-        This method filters fields based on their `json_schema_extra` metadata:
-        - Fields with `{"config": True}` are included only when `config_fields=True`
-        - Fields with `{"hidden": True}` are included only when `hidden_fields=True`
-
-        When `force=True`, fields with dynamic schema providers will have their
-        providers called to fetch fresh values for schema metadata like enums.
-        This includes recursively processing nested BaseModel fields.
+    def get_clean_model(cls, *, config_fields: bool, hidden_fields: bool) -> type[SetupModelT]:  # type: ignore
+        """Dynamically builds and returns a new BaseModel subclass.

-
-            config_fields: If True, include fields marked with `{"config": True}`.
-                These are typically initial configuration fields.
-            hidden_fields: If True, include fields marked with `{"hidden": True}`.
-                These are typically runtime-only fields not shown in initial config.
-            force: If True, refresh dynamic schema fields by calling their providers.
-                Use this when you need up-to-date values from external sources like
-                databases or APIs. Default is False for performance.
+        containing only those fields where json_schema_extra["config"] == True.

         Returns:
-            A new BaseModel subclass with filtered fields.
+            Type[BaseModel]: A new BaseModel subclass with the filtered fields.
+
+        Raises:
+            ValueError: If both config_fields and hidden_fields are set to True.
         """
         clean_fields: dict[str, Any] = {}
-
         for name, field_info in cls.model_fields.items():
             extra = getattr(field_info, "json_schema_extra", {}) or {}
             is_config = bool(extra.get("config", False))
@@ -134,27 +97,7 @@ class SetupModel(BaseModel):
                 logger.debug("Skipping '%s' (hidden-only)", name)
                 continue

-
-            current_field_info = field_info
-            current_annotation = field_info.annotation
-
-            if force:
-                # Check if this field has DynamicField metadata
-                if has_dynamic(field_info):
-                    current_field_info = await cls._refresh_field_schema(name, field_info)
-
-                # Check if the annotation is a nested BaseModel that might have dynamic fields
-                nested_model = cls._get_base_model_type(current_annotation)
-                if nested_model is not None:
-                    refreshed_nested = await cls._refresh_nested_model(nested_model)
-                    if refreshed_nested is not nested_model:
-                        # Update annotation to use refreshed nested model
-                        current_annotation = refreshed_nested
-                        # Create new field_info with updated annotation (deep copy for safety)
-                        current_field_info = copy.deepcopy(current_field_info)
-                        setattr(current_field_info, "annotation", current_annotation)
-
-            clean_fields[name] = (current_annotation, current_field_info)
+            clean_fields[name] = (field_info.annotation, field_info)

         # Dynamically create a model e.g. "SetupModel"
         m = create_model(
@@ -163,231 +106,4 @@
             __config__=ConfigDict(arbitrary_types_allowed=True),
             **clean_fields,
         )
-        return cast("type[SetupModelT]", m)
-
-    @classmethod
-    def _get_base_model_type(cls, annotation: type | None) -> type[BaseModel] | None:
-        """Extract BaseModel type from an annotation.
-
-        Handles direct types, Optional, Union, list, dict, set, tuple, and other generics.
-
-        Args:
-            annotation: The type annotation to inspect.
-
-        Returns:
-            The BaseModel subclass if found, None otherwise.
-        """
-        if annotation is None:
-            return None
-
-        # Direct BaseModel subclass check
-        if isinstance(annotation, type) and issubclass(annotation, BaseModel):
-            return annotation
-
-        origin = get_origin(annotation)
-        if origin is None:
-            return None
-
-        args = get_args(annotation)
-        return cls._extract_base_model_from_args(origin, args)
-
-    @classmethod
-    def _extract_base_model_from_args(
-        cls,
-        origin: type,
-        args: tuple[type, ...],
-    ) -> type[BaseModel] | None:
-        """Extract BaseModel from generic type arguments.
-
-        Args:
-            origin: The generic origin type (list, dict, Union, etc.).
-            args: The type arguments.
-
-        Returns:
-            The BaseModel subclass if found, None otherwise.
-        """
-        # Union/Optional: check each arg (supports both typing.Union and types.UnionType)
-        # Python 3.10+ uses types.UnionType for X | Y syntax
-        if origin is typing.Union or origin is types.UnionType:
-            return cls._find_base_model_in_args(args)
-
-        # list, set, frozenset: check first arg
-        if origin in {list, set, frozenset} and args:
-            return cls._check_base_model(args[0])
-
-        # dict: check value type (second arg)
-        dict_value_index = 1
-        if origin is dict and len(args) > dict_value_index:
-            return cls._check_base_model(args[dict_value_index])
-
-        # tuple: check first non-ellipsis arg
-        if origin is tuple:
-            return cls._find_base_model_in_args(args, skip_ellipsis=True)
-
-        return None
-
-    @classmethod
-    def _check_base_model(cls, arg: type) -> type[BaseModel] | None:
-        """Check if arg is a BaseModel subclass.
-
-        Returns:
-            The BaseModel subclass if arg is one, None otherwise.
-        """
-        if isinstance(arg, type) and issubclass(arg, BaseModel):
-            return arg
-        return None
-
-    @classmethod
-    def _find_base_model_in_args(
-        cls,
-        args: tuple[type, ...],
-        *,
-        skip_ellipsis: bool = False,
-    ) -> type[BaseModel] | None:
-        """Find first BaseModel in args.
-
-        Returns:
-            The first BaseModel subclass found, None otherwise.
-        """
-        for arg in args:
-            if arg is type(None):
-                continue
-            if skip_ellipsis and arg is ...:
-                continue
-            result = cls._check_base_model(arg)
-            if result is not None:
-                return result
-        return None
-
-    @classmethod
-    async def _refresh_nested_model(cls, model_cls: type[BaseModel]) -> type[BaseModel]:
-        """Refresh dynamic fields in a nested BaseModel.
-
-        Creates a new model class with all DynamicField metadata resolved.
-
-        Args:
-            model_cls: The nested model class to refresh.
-
-        Returns:
-            A new model class with refreshed fields, or the original if no changes.
-        """
-        has_changes = False
-        clean_fields: dict[str, Any] = {}
-
-        for name, field_info in model_cls.model_fields.items():
-            current_field_info = field_info
-            current_annotation = field_info.annotation
-
-            # Check if field has DynamicField metadata
-            if has_dynamic(field_info):
-                current_field_info = await cls._refresh_field_schema(name, field_info)
-                has_changes = True
-
-            # Recursively check nested models
-            nested_model = cls._get_base_model_type(current_annotation)
-            if nested_model is not None:
-                refreshed_nested = await cls._refresh_nested_model(nested_model)
-                if refreshed_nested is not nested_model:
-                    current_annotation = refreshed_nested
-                    current_field_info = copy.deepcopy(current_field_info)
-                    setattr(current_field_info, "annotation", current_annotation)
-                    has_changes = True
-
-            clean_fields[name] = (current_annotation, current_field_info)
-
-        if not has_changes:
-            return model_cls
-
-        # Create new model with refreshed fields
-        logger.debug("Creating refreshed nested model for '%s'", model_cls.__name__)
-        return create_model(
-            model_cls.__name__,
-            __base__=BaseModel,
-            __config__=ConfigDict(arbitrary_types_allowed=True),
-            **clean_fields,
-        )
-
-    @classmethod
-    async def _refresh_field_schema(cls, field_name: str, field_info: FieldInfo) -> FieldInfo:
-        """Refresh a field's json_schema_extra with fresh values from dynamic providers.
-
-        This method calls all dynamic providers registered for a field (via Annotated
-        metadata) and creates a new FieldInfo with the resolved values. The original
-        field_info is not modified.
-
-        Uses `resolve_safe()` for structured error handling, allowing partial success
-        when some fetchers fail. Successfully resolved values are still applied.
-
-        Args:
-            field_name: The name of the field being refreshed (used for logging).
-            field_info: The original FieldInfo object containing the dynamic providers.
-
-        Returns:
-            A new FieldInfo object with the same attributes as the original, but with
-            `json_schema_extra` containing resolved values and Dynamic metadata removed.
-
-        Note:
-            If all fetchers fail, the original field_info is returned unchanged.
-            If some fetchers fail, successfully resolved values are still applied.
-        """
-        fetchers = get_fetchers(field_info)
-
-        if not fetchers:
-            return field_info
-
-        fetcher_keys = list(fetchers.keys())
-        logger.debug(
-            "Refreshing dynamic schema for field '%s' with fetchers: %s",
-            field_name,
-            fetcher_keys,
-            extra={"field_name": field_name, "fetcher_keys": fetcher_keys},
-        )
-
-        # Resolve all fetchers with structured error handling
-        result = await resolve_safe(fetchers)
-
-        # Log any errors that occurred with full details
-        if result.errors:
-            for key, error in result.errors.items():
-                logger.warning(
-                    "Failed to resolve '%s' for field '%s': %s: %s",
-                    key,
-                    field_name,
-                    type(error).__name__,
-                    str(error) or "(no message)",
-                    extra={
-                        "field_name": field_name,
-                        "fetcher_key": key,
-                        "error_type": type(error).__name__,
-                        "error_message": str(error),
-                        "error_repr": repr(error),
-                    },
-                )
-
-        # If no values were resolved, return original field_info
-        if not result.values:
-            logger.warning(
-                "All fetchers failed for field '%s', keeping original",
-                field_name,
-            )
-            return field_info
-
-        # Build new json_schema_extra with resolved values merged
-        extra = getattr(field_info, "json_schema_extra", {}) or {}
-        new_extra = {**extra, **result.values}
-
-        # Create a deep copy of the FieldInfo to avoid shared mutable state
-        new_field_info = copy.deepcopy(field_info)
-        setattr(new_field_info, "json_schema_extra", new_extra)
-
-        # Remove Dynamic from metadata (it's been resolved)
-        new_metadata = [m for m in new_field_info.metadata if not isinstance(m, DynamicField)]
-        setattr(new_field_info, "metadata", new_metadata)
-
-        logger.debug(
-            "Refreshed '%s' with dynamic values: %s",
-            field_name,
-            list(result.values.keys()),
-        )
-
-        return new_field_info
+        return cast("type[SetupModelT]", m)  # type: ignore