pyrmute 0.1.0__py3-none-any.whl → 0.3.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pyrmute/__init__.py +35 -8
- pyrmute/_migration_manager.py +112 -37
- pyrmute/_registry.py +24 -11
- pyrmute/_schema_manager.py +31 -34
- pyrmute/_version.py +2 -2
- pyrmute/exceptions.py +55 -0
- pyrmute/migration_testing.py +161 -0
- pyrmute/model_diff.py +272 -0
- pyrmute/model_manager.py +529 -68
- pyrmute/model_version.py +16 -8
- pyrmute/types.py +17 -5
- pyrmute-0.3.0.dist-info/METADATA +352 -0
- pyrmute-0.3.0.dist-info/RECORD +17 -0
- pyrmute-0.1.0.dist-info/METADATA +0 -130
- pyrmute-0.1.0.dist-info/RECORD +0 -14
- {pyrmute-0.1.0.dist-info → pyrmute-0.3.0.dist-info}/WHEEL +0 -0
- {pyrmute-0.1.0.dist-info → pyrmute-0.3.0.dist-info}/licenses/LICENSE +0 -0
- {pyrmute-0.1.0.dist-info → pyrmute-0.3.0.dist-info}/top_level.txt +0 -0
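The new modules in 0.3.0 (exceptions.py, migration_testing.py, model_diff.py) and the expanded model_manager.py add batch migration, model diffing, and migration testing on top of the 0.1.0 registration/migration API. A minimal usage sketch pieced together from the docstrings in the diff below; the top-level import path is an assumption and exact signatures may differ from the released wheel:

from pydantic import BaseModel

from pyrmute import ModelManager  # import path assumed, not verified against the wheel

manager = ModelManager()


@manager.model("User", "1.0.0")
class UserV1(BaseModel):
    name: str


@manager.model("User", "2.0.0")
class UserV2(BaseModel):
    name: str
    email: str


@manager.migration("User", "1.0.0", "2.0.0")
def add_email(data: dict) -> dict:
    # Backfill the field introduced in 2.0.0, as in the docstring example.
    return {**data, "email": "unknown@example.com"}


# Single-record migration returns a validated UserV2 instance.
user = manager.migrate({"name": "Alice"}, "User", "1.0.0", "2.0.0")

# Batch migration with a worker pool is new in 0.3.0 (see migrate_batch in the diff below).
users = manager.migrate_batch(
    [{"name": "Bob"}, {"name": "Charlie"}],
    "User",
    from_version="1.0.0",
    to_version="2.0.0",
    parallel=True,
    max_workers=4,
)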
pyrmute/model_manager.py
CHANGED
@@ -1,6 +1,7 @@
 """Model manager."""
 
-from collections.abc import Callable
+from collections.abc import Callable, Iterable
+from concurrent.futures import ProcessPoolExecutor, ThreadPoolExecutor
 from pathlib import Path
 from typing import Any, Self
 
@@ -9,31 +10,40 @@ from pydantic import BaseModel
 from ._migration_manager import MigrationManager
 from ._registry import Registry
 from ._schema_manager import SchemaManager
+from .exceptions import MigrationError, ModelNotFoundError
+from .migration_testing import (
+    MigrationTestCase,
+    MigrationTestResult,
+    MigrationTestResults,
+)
+from .model_diff import ModelDiff
 from .model_version import ModelVersion
 from .types import (
     DecoratedBaseModel,
     JsonSchema,
     JsonSchemaGenerator,
-    MigrationData,
     MigrationFunc,
-
+    ModelData,
+    NestedModelInfo,
 )
 
 
 class ModelManager:
     """High-level interface for versioned model management.
 
-
-
+    ModelManager provides a unified API for managing schema evolution across different
+    versions of Pydantic models. It handles model registration, automatic migration
+    between versions, schema generation, and batch processing operations.
 
     Attributes:
-        registry: Registry instance.
-        migration_manager: MigrationManager instance.
-        schema_manager: SchemaManager instance.
+        registry: Registry instance managing all registered model versions.
+        migration_manager: MigrationManager instance handling migration logic and paths.
+        schema_manager: SchemaManager instance for JSON schema generation and export.
 
-
+    Basic Usage:
         >>> manager = ModelManager()
         >>>
+        >>> # Register model versions
        >>> @manager.model("User", "1.0.0")
         ... class UserV1(BaseModel):
         ...     name: str
@@ -43,16 +53,47 @@ class ModelManager:
         ...     name: str
         ...     email: str
         >>>
+        >>> # Define migration between versions
         >>> @manager.migration("User", "1.0.0", "2.0.0")
-        ... def migrate(data:
+        ... def migrate(data: ModelData) -> ModelData:
         ...     return {**data, "email": "unknown@example.com"}
-
+        >>>
+        >>> # Migrate legacy data
+        >>> old_data = {"name": "Alice"}
+        >>> user = manager.migrate(old_data, "User", "1.0.0", "2.0.0")
+        >>> # Result: UserV2(name="Alice", email="unknown@example.com")
+
+    Advanced Features:
+        >>> # Batch migration with parallel processing
+        >>> users = manager.migrate_batch(
+        ...     legacy_users, "User", "1.0.0", "2.0.0",
+        ...     parallel=True, max_workers=4
+        ... )
+        >>>
+        >>> # Stream large datasets efficiently
+        >>> for user in manager.migrate_batch_streaming(large_dataset, "User", "1.0.0", "2.0.0"):
+        ...     save_to_database(user)
+        >>>
+        >>> # Compare versions and export schemas
+        >>> diff = manager.diff("User", "1.0.0", "2.0.0")
+        >>> print(diff.to_markdown())
+        >>> manager.dump_schemas("schemas/", separate_definitions=True)
+        >>>
+        >>> # Test migrations with validation
+        >>> results = manager.test_migration(
+        ...     "User", "1.0.0", "2.0.0",
+        ...     test_cases=[
+        ...         ({"name": "Alice"}, {"name": "Alice", "email": "unknown@example.com"})
+        ...     ]
+        ... )
+        >>> results.assert_all_passed()
+    """  # noqa: E501
 
     def __init__(self: Self) -> None:
         """Initialize the versioned model manager."""
-        self.
-        self.
-        self.
+        self._registry = Registry()
+        self._migration_manager = MigrationManager(self._registry)
+        self._schema_manager = SchemaManager(self._registry)
 
     def model(
         self: Self,
@@ -60,6 +101,7 @@ class ModelManager:
         version: str | ModelVersion,
         schema_generator: JsonSchemaGenerator | None = None,
         enable_ref: bool = False,
+        backward_compatible: bool = False,
     ) -> Callable[[type[DecoratedBaseModel]], type[DecoratedBaseModel]]:
         """Register a versioned model.
 
@@ -67,8 +109,11 @@ class ModelManager:
             name: Name of the model.
             version: Semantic version.
             schema_generator: Optional custom schema generator.
-            enable_ref: If True, this model can be referenced via $ref in
-
+            enable_ref: If True, this model can be referenced via $ref in separate
+                schema files. If False, it will always be inlined.
+            backward_compatible: If True, this model does not need a migration function
+                to migrate to the next version. If a migration function is defined it
+                will use it.
 
         Returns:
             Decorator function for model class.
@@ -84,7 +129,9 @@ class ModelManager:
             ... class CityV1(BaseModel):
             ...     city: City
         """
-        return self.
+        return self._registry.register(
+            name, version, schema_generator, enable_ref, backward_compatible
+        )
 
     def migration(
         self: Self,
@@ -102,11 +149,11 @@ class ModelManager:
         Returns:
             Decorator function for migration function.
         """
-        return self.
+        return self._migration_manager.register_migration(
+            name, from_version, to_version
+        )
 
-    def get(
-        self: Self, name: str, version: str | ModelVersion | None = None
-    ) -> type[BaseModel]:
+    def get(self: Self, name: str, version: str | ModelVersion) -> type[BaseModel]:
         """Get a model by name and version.
 
         Args:
@@ -116,13 +163,94 @@ class ModelManager:
         Returns:
             Model class.
         """
-
-
-
+        return self._registry.get_model(name, version)
+
+    def get_latest(self: Self, name: str) -> type[BaseModel]:
+        """Get the latest version of a model by name.
+
+        Args:
+            name: Name of the model.
+
+        Returns:
+            Model class.
+        """
+        return self._registry.get_latest(name)
+
+    def has_migration_path(
+        self: Self,
+        name: str,
+        from_version: str | ModelVersion,
+        to_version: str | ModelVersion,
+    ) -> bool:
+        """Check if a migration path exists between two versions.
+
+        Args:
+            name: Name of the model.
+            from_version: Source version.
+            to_version: Target version.
+
+        Returns:
+            True if a migration path exists, False otherwise.
+
+        Example:
+            >>> if manager.has_migration_path("User", "1.0.0", "3.0.0"):
+            ...     users = manager.migrate_batch(old_users, "User", "1.0.0", "3.0.0")
+            ... else:
+            ...     logger.error("Cannot migrate users to v3.0.0")
+        """
+        from_ver = (
+            ModelVersion.parse(from_version)
+            if isinstance(from_version, str)
+            else from_version
+        )
+        to_ver = (
+            ModelVersion.parse(to_version)
+            if isinstance(to_version, str)
+            else to_version
+        )
+        try:
+            self._migration_manager.validate_migration_path(name, from_ver, to_ver)
+            return True
+        except (KeyError, ModelNotFoundError, MigrationError):
+            return False
+
+    def validate_data(
+        self: Self,
+        data: ModelData,
+        name: str,
+        version: str | ModelVersion,
+    ) -> bool:
+        """Check if data is valid for a specific model version.
+
+        Validates whether the provided data conforms to the schema of the specified
+        model version without raising an exception.
+
+        Args:
+            data: Data dictionary to validate.
+            name: Name of the model.
+            version: Semantic version to validate against.
+
+        Returns:
+            True if data is valid for the model version, False otherwise.
+
+        Example:
+            >>> data = {"name": "Alice"}
+            >>> is_valid = manager.validate_data(data, "User", "1.0.0")
+            >>> # Returns: True
+            >>>
+            >>> is_valid = manager.validate_data(data, "User", "2.0.0")
+            >>> # Returns: False, missing required field 'email'
+        """
+        try:
+            model = self.get(name, version)
+            model.model_validate(data)
+            return True
+        except Exception:
+            return False
 
     def migrate(
         self: Self,
-        data:
+        data: ModelData,
         name: str,
         from_version: str | ModelVersion,
         to_version: str | ModelVersion,
@@ -130,7 +258,7 @@ class ModelManager:
         """Migrate data between versions.
 
         Args:
-            data: Data dictionary
+            data: Data dictionary to migrate.
             name: Name of the model.
             from_version: Source version.
             to_version: Target version.
@@ -138,12 +266,281 @@ class ModelManager:
         Returns:
             Migrated BaseModel.
         """
-        migrated_data = self.
-            data, name, from_version, to_version
-        )
+        migrated_data = self.migrate_data(data, name, from_version, to_version)
         target_model = self.get(name, to_version)
         return target_model.model_validate(migrated_data)
 
+    def migrate_data(
+        self: Self,
+        data: ModelData,
+        name: str,
+        from_version: str | ModelVersion,
+        to_version: str | ModelVersion,
+    ) -> ModelData:
+        """Migrate data between versions.
+
+        Args:
+            data: Data dictionary to migrate.
+            name: Name of the model.
+            from_version: Source version.
+            to_version: Target version.
+
+        Returns:
+            Raw migrated dictionary.
+        """
+        return self._migration_manager.migrate(data, name, from_version, to_version)
+
+    def migrate_batch(  # noqa: PLR0913
+        self: Self,
+        data_list: Iterable[ModelData],
+        name: str,
+        from_version: str | ModelVersion,
+        to_version: str | ModelVersion,
+        parallel: bool = False,
+        max_workers: int | None = None,
+        use_processes: bool = False,
+    ) -> list[BaseModel]:
+        """Migrate multiple data items between versions.
+
+        Args:
+            data_list: Iterable of data dictionaries to migrate.
+            name: Name of the model.
+            from_version: Source version.
+            to_version: Target version.
+            parallel: If True, use parallel processing.
+            max_workers: Maximum number of workers for parallel processing. Defaults to
+                None (uses executor default).
+            use_processes: If True, use ProcessPoolExecutor instead of
+                ThreadPoolExecutor. Useful for CPU-intensive migrations.
+
+        Returns:
+            List of migrated BaseModel instances.
+
+        Example:
+            >>> legacy_users = [
+            ...     {"name": "Alice"},
+            ...     {"name": "Bob"},
+            ...     {"name": "Charlie"}
+            ... ]
+            >>> users = manager.migrate_batch(
+            ...     legacy_users,
+            ...     "User",
+            ...     from_version="1.0.0",
+            ...     to_version="3.0.0",
+            ...     parallel=True
+            ... )
+        """
+        data_list = list(data_list)
+
+        if not data_list:
+            return []
+
+        if not parallel:
+            return [
+                self.migrate(item, name, from_version, to_version) for item in data_list
+            ]
+
+        executor_class = ProcessPoolExecutor if use_processes else ThreadPoolExecutor
+        with executor_class(max_workers=max_workers) as executor:
+            futures = [
+                executor.submit(self.migrate, item, name, from_version, to_version)
+                for item in data_list
+            ]
+            return [future.result() for future in futures]
+
+    def migrate_batch_data(  # noqa: PLR0913
+        self: Self,
+        data_list: Iterable[ModelData],
+        name: str,
+        from_version: str | ModelVersion,
+        to_version: str | ModelVersion,
+        parallel: bool = False,
+        max_workers: int | None = None,
+        use_processes: bool = False,
+    ) -> list[ModelData]:
+        """Migrate multiple data items between versions, returning raw dictionaries.
+
+        Args:
+            data_list: Iterable of data dictionaries to migrate.
+            name: Name of the model.
+            from_version: Source version.
+            to_version: Target version.
+            parallel: If True, use parallel processing.
+            max_workers: Maximum number of workers for parallel processing.
+            use_processes: If True, use ProcessPoolExecutor.
+
+        Returns:
+            List of raw migrated dictionaries.
+
+        Example:
+            >>> legacy_data = [{"name": "Alice"}, {"name": "Bob"}]
+            >>> migrated_data = manager.migrate_batch_data(
+            ...     legacy_data,
+            ...     "User",
+            ...     from_version="1.0.0",
+            ...     to_version="2.0.0"
+            ... )
+        """
+        data_list = list(data_list)
+
+        if not data_list:
+            return []
+
+        if not parallel:
+            return [
+                self.migrate_data(item, name, from_version, to_version)
+                for item in data_list
+            ]
+
+        executor_class = ProcessPoolExecutor if use_processes else ThreadPoolExecutor
+        with executor_class(max_workers=max_workers) as executor:
+            futures = [
+                executor.submit(self.migrate_data, item, name, from_version, to_version)
+                for item in data_list
+            ]
+            return [future.result() for future in futures]
+
+    def migrate_batch_streaming(
+        self: Self,
+        data_list: Iterable[ModelData],
+        name: str,
+        from_version: str | ModelVersion,
+        to_version: str | ModelVersion,
+        chunk_size: int = 100,
+    ) -> Iterable[BaseModel]:
+        """Migrate data in chunks, yielding results as they complete.
+
+        Useful for large datasets where you want to start processing results before all
+        migrations complete.
+
+        Args:
+            data_list: Iterable of data dictionaries to migrate.
+            name: Name of the model.
+            from_version: Source version.
+            to_version: Target version.
+            chunk_size: Number of items to process in each chunk.
+
+        Yields:
+            Migrated BaseModel instances.
+
+        Example:
+            >>> legacy_users = load_large_dataset()
+            >>> for user in manager.migrate_batch_streaming(
+            ...     legacy_users,
+            ...     "User",
+            ...     from_version="1.0.0",
+            ...     to_version="3.0.0"
+            ... ):
+            ...     # Process each user as it's migrated
+            ...     save_to_database(user)
+        """
+        chunk = []
+
+        for item in data_list:
+            chunk.append(item)
+
+            if len(chunk) >= chunk_size:
+                yield from self.migrate_batch(chunk, name, from_version, to_version)
+                chunk = []
+
+        if chunk:
+            yield from self.migrate_batch(chunk, name, from_version, to_version)
+
+    def migrate_batch_data_streaming(
+        self: Self,
+        data_list: Iterable[ModelData],
+        name: str,
+        from_version: str | ModelVersion,
+        to_version: str | ModelVersion,
+        chunk_size: int = 100,
+    ) -> Iterable[ModelData]:
+        """Migrate data in chunks, yielding raw dictionaries as they complete.
+
+        Useful for large datasets where you want to start processing results before all
+        migrations complete, without the validation overhead.
+
+        Args:
+            data_list: Iterable of data dictionaries to migrate.
+            name: Name of the model.
+            from_version: Source version.
+            to_version: Target version.
+            chunk_size: Number of items to process in each chunk.
+
+        Yields:
+            Raw migrated dictionaries.
+
+        Example:
+            >>> legacy_data = load_large_dataset()
+            >>> for data in manager.migrate_batch_data_streaming(
+            ...     legacy_data,
+            ...     "User",
+            ...     from_version="1.0.0",
+            ...     to_version="3.0.0"
+            ... ):
+            ...     # Process raw data as it's migrated
+            ...     bulk_insert_to_database(data)
+        """
+        chunk = []
+
+        for item in data_list:
+            chunk.append(item)
+
+            if len(chunk) >= chunk_size:
+                yield from self.migrate_batch_data(
+                    chunk, name, from_version, to_version
+                )
+                chunk = []
+
+        if chunk:
+            yield from self.migrate_batch_data(chunk, name, from_version, to_version)
+
+    def diff(
+        self: Self,
+        name: str,
+        from_version: str | ModelVersion,
+        to_version: str | ModelVersion,
+    ) -> ModelDiff:
+        """Get a detailed diff between two model versions.
+
+        Compares field names, types, requirements, and default values to provide a
+        comprehensive view of what changed between versions.
+
+        Args:
+            name: Name of the model.
+            from_version: Source version.
+            to_version: Target version.
+
+        Returns:
+            ModelDiff with detailed change information.
+
+        Example:
+            >>> diff = manager.diff("User", "1.0.0", "2.0.0")
+            >>> print(diff.to_markdown())
+            >>> print(f"Added: {diff.added_fields}")
+            >>> print(f"Removed: {diff.removed_fields}")
+        """
+        from_ver_str = str(
+            ModelVersion.parse(from_version)
+            if isinstance(from_version, str)
+            else from_version
+        )
+        to_ver_str = str(
+            ModelVersion.parse(to_version)
+            if isinstance(to_version, str)
+            else to_version
+        )
+
+        from_model = self.get(name, from_version)
+        to_model = self.get(name, to_version)
+
+        return ModelDiff.from_models(
+            name=name,
+            from_model=from_model,
+            to_model=to_model,
+            from_version=from_ver_str,
+            to_version=to_ver_str,
+        )
+
     def get_schema(
         self: Self,
         name: str,
@@ -160,7 +557,7 @@ class ModelManager:
         Returns:
             JSON schema dictionary.
         """
-        return self.
+        return self._schema_manager.get_schema(name, version, **kwargs)
 
     def list_models(self: Self) -> list[str]:
         """Get list of all registered models.
@@ -168,7 +565,7 @@ class ModelManager:
         Returns:
             List of model names.
         """
-        return self.
+        return self._registry.list_models()
 
     def list_versions(self: Self, name: str) -> list[ModelVersion]:
         """Get all versions for a model.
@@ -179,7 +576,7 @@ class ModelManager:
         Returns:
             Sorted list of versions.
         """
-        return self.
+        return self._registry.get_versions(name)
 
     def dump_schemas(
         self: Self,
@@ -193,8 +590,9 @@ class ModelManager:
         Args:
             output_dir: Directory path for output.
             indent: JSON indentation level.
-            separate_definitions: If True, create separate schema files for
-
+            separate_definitions: If True, create separate schema files for nested
+                models and use $ref to reference them. Only applies to models with
+                'enable_ref=True'.
             ref_template: Template for $ref URLs when separate_definitions=True.
                 Defaults to relative file references if not provided.
 
@@ -212,53 +610,116 @@ class ModelManager:
             ...     ref_template="https://example.com/schemas/{model}_v{version}.json"
             ... )
         """
-        self.
+        self._schema_manager.dump_schemas(
             output_dir, indent, separate_definitions, ref_template
         )
 
-    def
+    def get_nested_models(
         self: Self,
-
-
-
-
-        """Export schemas with separate files for nested models.
-
-        This is a convenience method that calls dump_schemas with
-        separate_definitions=True.
+        name: str,
+        version: str | ModelVersion,
+    ) -> list[NestedModelInfo]:
+        """Get all nested models used by a model.
 
         Args:
-
-
-                {version} placeholders. Defaults to relative file refs.
-            indent: JSON indentation level.
+            name: Name of the model.
+            version: Semantic version.
 
-
-
-            >>> manager.dump_schemas_with_refs("schemas/")
-            >>>
-            >>> # Absolute URL references
-            >>> manager.dump_schemas_with_refs(
-            ...     "schemas/",
-            ...     ref_template="https://example.com/schemas/{model}_v{version}.json"
-            ... )
+        Returns:
+            List of NestedModelInfo.
         """
-        self.
-            output_dir, indent, separate_definitions=True, ref_template=ref_template
-        )
+        return self._schema_manager.get_nested_models(name, version)
 
-    def
+    def test_migration(
         self: Self,
         name: str,
-
-
-
+        from_version: str | ModelVersion,
+        to_version: str | ModelVersion,
+        test_cases: list[tuple[ModelData, ModelData] | MigrationTestCase],
+    ) -> MigrationTestResults:
+        """Test a migration with multiple test cases.
+
+        Executes a migration on multiple test inputs and validates the outputs match
+        expected values. Useful for regression testing and validating migration logic.
 
         Args:
             name: Name of the model.
-
+            from_version: Source version to migrate from.
+            to_version: Target version to migrate to.
+            test_cases: List of test cases, either as (source, target) tuples or
+                MigrationTestCase objects. If target is None, only verifies the
+                migration completes without errors.
 
         Returns:
-
-
-
+            MigrationTestResults containing individual results for each test case.
+
+        Example:
+            >>> # Using tuples (source, target)
+            >>> results = manager.test_migration(
+            ...     "User", "1.0.0", "2.0.0",
+            ...     test_cases=[
+            ...         ({"name": "Alice"}, {"name": "Alice", "email": "alice@example.com"}),
+            ...         ({"name": "Bob"}, {"name": "Bob", "email": "bob@example.com"})
+            ...     ]
+            ... )
+            >>> assert results.all_passed
+            >>>
+            >>> # Using MigrationTestCase objects
+            >>> results = manager.test_migration(
+            ...     "User", "1.0.0", "2.0.0",
+            ...     test_cases=[
+            ...         MigrationTestCase(
+            ...             source={"name": "Alice"},
+            ...             target={"name": "Alice", "email": "alice@example.com"},
+            ...             description="Standard user migration"
+            ...         )
+            ...     ]
+            ... )
+            >>>
+            >>> # Use in pytest
+            >>> def test_user_migration():
+            ...     results = manager.test_migration("User", "1.0.0", "2.0.0", test_cases)
+            ...     results.assert_all_passed()  # Raises AssertionError with details if failed
+            >>>
+            >>> # Inspect failures
+            >>> if not results.all_passed:
+            ...     for failure in results.failures:
+            ...         print(f"Failed: {failure.test_case.description}")
+            ...         print(f" Error: {failure.error}")
+        """  # noqa: E501
+        results = []
+
+        for test_case_input in test_cases:
+            if isinstance(test_case_input, tuple):
+                test_case = MigrationTestCase(
+                    source=test_case_input[0], target=test_case_input[1]
+                )
+            else:
+                test_case = test_case_input
+
+            try:
+                actual = self.migrate_data(
+                    test_case.source, name, from_version, to_version
+                )
+
+                if test_case.target is not None:
+                    passed = actual == test_case.target
+                    error = None if passed else "Output mismatch"
+                else:
+                    # Just verify it doesn't crash
+                    passed = True
+                    error = None
+
+                results.append(
+                    MigrationTestResult(
+                        test_case=test_case, actual=actual, passed=passed, error=error
+                    )
+                )
+            except Exception as e:
+                results.append(
+                    MigrationTestResult(
+                        test_case=test_case, actual={}, passed=False, error=str(e)
+                    )
+                )
+
        return MigrationTestResults(results)
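The migration-testing and diff helpers added above lend themselves to a regression suite. A short sketch continuing the User example from the docstrings; pytest-style test functions and the exact contents of ModelDiff.added_fields are assumptions:

def test_user_migration_1_to_2() -> None:
    # Each tuple pairs a 1.0.0 payload with the expected 2.0.0 payload.
    results = manager.test_migration(
        "User",
        "1.0.0",
        "2.0.0",
        test_cases=[
            ({"name": "Alice"}, {"name": "Alice", "email": "unknown@example.com"}),
        ],
    )
    results.assert_all_passed()  # raises AssertionError with details on failure


def test_user_schema_diff() -> None:
    # Field-level comparison between registered versions.
    diff = manager.diff("User", "1.0.0", "2.0.0")
    assert "email" in diff.added_fields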