statikk 0.1.14__tar.gz → 0.1.16__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {statikk-0.1.14 → statikk-0.1.16}/PKG-INFO +1 -1
- {statikk-0.1.14 → statikk-0.1.16}/src/statikk/engine.py +60 -110
- {statikk-0.1.14 → statikk-0.1.16}/src/statikk/fields.py +1 -0
- {statikk-0.1.14 → statikk-0.1.16}/src/statikk/models.py +22 -33
- {statikk-0.1.14 → statikk-0.1.16}/src/statikk.egg-info/PKG-INFO +1 -1
- {statikk-0.1.14 → statikk-0.1.16}/tests/test_engine.py +24 -86
- {statikk-0.1.14 → statikk-0.1.16}/.coveragerc +0 -0
- {statikk-0.1.14 → statikk-0.1.16}/.gitignore +0 -0
- {statikk-0.1.14 → statikk-0.1.16}/.readthedocs.yml +0 -0
- {statikk-0.1.14 → statikk-0.1.16}/AUTHORS.rst +0 -0
- {statikk-0.1.14 → statikk-0.1.16}/CHANGELOG.rst +0 -0
- {statikk-0.1.14 → statikk-0.1.16}/CONTRIBUTING.rst +0 -0
- {statikk-0.1.14 → statikk-0.1.16}/LICENSE.txt +0 -0
- {statikk-0.1.14 → statikk-0.1.16}/README.rst +0 -0
- {statikk-0.1.14 → statikk-0.1.16}/assets/favicon.png +0 -0
- {statikk-0.1.14 → statikk-0.1.16}/assets/logo.png +0 -0
- {statikk-0.1.14 → statikk-0.1.16}/docs/Makefile +0 -0
- {statikk-0.1.14 → statikk-0.1.16}/docs/_static/.gitignore +0 -0
- {statikk-0.1.14 → statikk-0.1.16}/docs/authors.rst +0 -0
- {statikk-0.1.14 → statikk-0.1.16}/docs/changelog.rst +0 -0
- {statikk-0.1.14 → statikk-0.1.16}/docs/conf.py +0 -0
- {statikk-0.1.14 → statikk-0.1.16}/docs/contributing.rst +0 -0
- {statikk-0.1.14 → statikk-0.1.16}/docs/index.rst +0 -0
- {statikk-0.1.14 → statikk-0.1.16}/docs/license.rst +0 -0
- {statikk-0.1.14 → statikk-0.1.16}/docs/readme.rst +0 -0
- {statikk-0.1.14 → statikk-0.1.16}/docs/requirements.txt +0 -0
- {statikk-0.1.14 → statikk-0.1.16}/docs/usage.rst +0 -0
- {statikk-0.1.14 → statikk-0.1.16}/pyproject.toml +0 -0
- {statikk-0.1.14 → statikk-0.1.16}/setup.cfg +0 -0
- {statikk-0.1.14 → statikk-0.1.16}/setup.py +0 -0
- {statikk-0.1.14 → statikk-0.1.16}/src/statikk/__init__.py +0 -0
- {statikk-0.1.14 → statikk-0.1.16}/src/statikk/conditions.py +0 -0
- {statikk-0.1.14 → statikk-0.1.16}/src/statikk/expressions.py +0 -0
- {statikk-0.1.14 → statikk-0.1.16}/src/statikk/typing.py +0 -0
- {statikk-0.1.14 → statikk-0.1.16}/src/statikk.egg-info/SOURCES.txt +0 -0
- {statikk-0.1.14 → statikk-0.1.16}/src/statikk.egg-info/dependency_links.txt +0 -0
- {statikk-0.1.14 → statikk-0.1.16}/src/statikk.egg-info/not-zip-safe +0 -0
- {statikk-0.1.14 → statikk-0.1.16}/src/statikk.egg-info/requires.txt +0 -0
- {statikk-0.1.14 → statikk-0.1.16}/src/statikk.egg-info/top_level.txt +0 -0
- {statikk-0.1.14 → statikk-0.1.16}/tests/conftest.py +0 -0
- {statikk-0.1.14 → statikk-0.1.16}/tests/test_expressions.py +0 -0
- {statikk-0.1.14 → statikk-0.1.16}/tests/test_models.py +0 -0
- {statikk-0.1.14 → statikk-0.1.16}/tox.ini +0 -0
src/statikk/engine.py:

```diff
@@ -16,7 +16,7 @@ from statikk.models import (
     GSI,
     KeySchema,
 )
-from statikk.fields import FIELD_STATIKK_TYPE, FIELD_STATIKK_PARENT_ID
+from statikk.fields import FIELD_STATIKK_TYPE, FIELD_STATIKK_PARENT_ID, FIELD_STATIKK_PARENT_FIELD_NAME
 from copy import deepcopy
 from aws_xray_sdk.core import patch_all

```
```diff
@@ -296,20 +296,23 @@ class Table:
         response = self._get_dynamodb_table().update_item(**request)
         data = response["Attributes"]
         for key, value in data.items():
-            if key in [FIELD_STATIKK_TYPE, FIELD_STATIKK_PARENT_ID]:
+            if key in [FIELD_STATIKK_TYPE, FIELD_STATIKK_PARENT_ID, FIELD_STATIKK_PARENT_FIELD_NAME]:
                 continue
             data[key] = self._deserialize_value(value, model.model_fields[key])
         return type(model)(**data)

-    def reparent_subtree(self, subtree_root: T, new_parent: T) -> T:
+    def reparent_subtree(self, subtree_root: T, new_parent: T, field_name: str) -> T:
         subtree_copy = deepcopy(subtree_root)
         subtree_root._parent_changed = True

         subtree_copy.set_parent_references(subtree_copy, force_override=True)
         subtree_copy._parent = new_parent
+        subtree_copy._parent_field_name = field_name
+        parent = subtree_copy._parent
         for node in subtree_copy.dfs_traverse_hierarchy():
             self.build_model_indexes(node)
-            setattr(node, FIELD_STATIKK_PARENT_ID,
+            setattr(node, FIELD_STATIKK_PARENT_ID, parent.id)
+            parent = node

         return subtree_copy

```
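`reparent_subtree` now records which parent field the moved subtree belongs to and threads the parent id through the DFS walk. It is driven from the model layer rather than called directly; a hedged usage sketch, reusing the `Model`/`NestedModel` fixtures from the removed `test_update_parent` near the end of this diff (whether `add_child_node` is the intended replacement for the removed `change_parent_to` workflow is an assumption based on this diff alone):

```python
# Fixtures (table, model_2, hierarchy) as built in the removed test_update_parent below.
# add_child_node is the model-level entry point exercised in test_add_child_node; internally
# it goes through _change_parent_to, which delegates to Table.reparent_subtree.
moved = hierarchy.nested[0]
model_2.add_child_node("nested", moved)  # reparent the subtree under model_2.nested
model_2.save()
# Each node of the moved subtree now carries the new parent id plus the name of the
# field it hangs off, so query_hierarchy can reattach it on read.
```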
```diff
@@ -427,7 +430,10 @@ class Table:
         model_class = self._get_model_type_by_statikk_type(model_type)

         reconstructed_dict.pop(FIELD_STATIKK_TYPE, None)
-
+        model = model_class.model_validate(reconstructed_dict)
+        for node in model.dfs_traverse_hierarchy():
+            node._is_persisted = True
+        return model

     def scan(
         self,
```
```diff
@@ -626,6 +632,7 @@ class Table:
                 continue
             if not enriched_item.should_write_to_database():
                 continue
+            enriched_item._is_persisted = True
             data = self._serialize_item(enriched_item)
             batch.put_item(Item=data)

```
```diff
@@ -640,9 +647,6 @@ class Table:
         Returns:
             The top-level dictionary with its hierarchy fully reconstructed, or None if the list is empty
         """
-        if not items:
-            return None
-
         items_by_id = {item["id"]: item for item in items}
         children_by_parent_id = {}
         for item in items:
```
```diff
@@ -652,128 +656,74 @@ class Table:
                 children_by_parent_id[parent_id] = []
             children_by_parent_id[parent_id].append(item)

-
-        root_items = [item for item in items if FIELD_STATIKK_PARENT_ID not in item]
+        root_item = [item for item in items if FIELD_STATIKK_PARENT_ID not in item][0]

-
-
-
-        if len(root_items) > 1:
-            root_item = root_items[0]
-        else:
-            root_item = root_items[0]
+        processed_root = self._process_item(root_item, items_by_id, children_by_parent_id)
+        return processed_root

-
-        return self._reconstruct_item_with_children(root_item, items_by_id, children_by_parent_id, processed_items)
-
-    def _reconstruct_item_with_children(
-        self, item: dict, items_by_id: dict, children_by_parent_id: dict, processed_items: set
-    ) -> dict:
+    def _process_item(self, item: dict, items_by_id: dict, children_by_parent_id: dict) -> dict:
         """
-        Recursively
+        Recursively processes an item and all its children to rebuild the hierarchical structure.

         Args:
-            item: The item to
-            items_by_id:
-            children_by_parent_id:
-            processed_items: Set of already processed item IDs to avoid duplicates
+            item: The current item to process
+            items_by_id: Dictionary mapping item IDs to items
+            children_by_parent_id: Dictionary mapping parent IDs to lists of child items

         Returns:
-            The
+            The processed item with all its child relationships resolved
         """
-
-            return item
-        processed_items.add(item["id"])
-        result = item.copy()
-
-        if FIELD_STATIKK_PARENT_ID in result:
-            result.pop(FIELD_STATIKK_PARENT_ID)
+        processed_item = deepcopy(item)

-
-
-
+        if FIELD_STATIKK_TYPE in processed_item:
+            model_class = self._get_model_type_by_statikk_type(processed_item[FIELD_STATIKK_TYPE])
+            model_fields = model_class.model_fields
+        else:
+            return processed_item

-
+        # Get children of this item
+        children = children_by_parent_id.get(processed_item["id"], [])

-
+        # Group children by parent field name
+        children_by_field = {}
         for child in children:
-
-            if
-
-
-
-
-
-
-
-            for field_name, field_info in parent_model_class.model_fields.items():
-                if field_name.startswith("_"):
-                    continue
-
-                is_optional, inner_type = inspect_optional_field(parent_model_class, field_name)
-
-                field_type = inner_type if is_optional else field_info.annotation
-
-                if field_type == child_model_class:
-                    matching_fields.append((field_name, "single"))
-
-                elif hasattr(field_type, "__origin__") and field_type.__origin__ == list:
-                    args = getattr(field_type, "__args__", [])
-                    if args and args[0] == child_model_class:
-                        matching_fields.append((field_name, "list"))
-
-                elif hasattr(field_type, "__origin__") and field_type.__origin__ == set:
-                    args = getattr(field_type, "__args__", [])
-                    if args and args[0] == child_model_class:
-                        matching_fields.append((field_name, "set"))
-
-            if matching_fields:
-                for field_name, container_type in matching_fields:
-                    if container_type == "list":
-                        if field_name not in result:
-                            result[field_name] = []
-
-                        existing_ids = {
-                            item.get("id") for item in result[field_name] if isinstance(item, dict) and "id" in item
-                        }
-
-                        for child in child_items:
-                            if child["id"] in existing_ids:
-                                continue
+            field_name = child.get(FIELD_STATIKK_PARENT_FIELD_NAME)
+            if field_name:
+                if field_name not in children_by_field:
+                    children_by_field[field_name] = []
+                children_by_field[field_name].append(child)
+
+        for field_name, field_info in model_fields.items():
+            if field_name not in children_by_field:
+                continue

-
-                                child, items_by_id, children_by_parent_id, processed_items
-                            )
+            field_children = children_by_field[field_name]

-
-
+            field_type = field_info.annotation
+            is_optional = False
+            inner_type = field_type

-
-
-
+            if hasattr(field_type, "__origin__") and field_type.__origin__ is Union:
+                args = field_type.__args__
+                if type(None) in args:
+                    is_optional = True
+                    # Get the non-None type
+                    inner_type = next(arg for arg in args if arg is not type(None))

-
-
-                        }
+            if hasattr(inner_type, "__origin__") and inner_type.__origin__ == list:
+                child_list = []

-
-
-
-                        reconstructed_child = self._reconstruct_item_with_children(
-                            child, items_by_id, children_by_parent_id, processed_items
-                        )
+                for child in field_children:
+                    processed_child = self._process_item(child, items_by_id, children_by_parent_id)
+                    child_list.append(processed_child)

-
-                        existing_ids.add(child["id"])
+                processed_item[field_name] = child_list

-
-
-
-                            child_items[0], items_by_id, children_by_parent_id, processed_items
-                        )
-                        result[field_name] = reconstructed_child
+            elif len(field_children) == 1:
+                processed_child = self._process_item(field_children[0], items_by_id, children_by_parent_id)
+                processed_item[field_name] = processed_child

-        return
+        return processed_item


 class BatchWriteContext:
```
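Net effect of this hunk: instead of matching children to parent fields by inspecting type annotations, the engine now reads the parent field name stored on each item and groups children by it while rebuilding the hierarchy. A dependency-free sketch of that grouping idea; the key names `parent_id` and `parent_field` below are placeholders for statikk's marker fields, and, unlike `_process_item`, this sketch decides list-versus-single by child count instead of consulting the model's field annotation:

```python
def rebuild(items: list[dict]) -> dict:
    """Reassemble flat items into one nested dict by following parent markers."""
    children: dict[str, list[dict]] = {}
    for item in items:
        if "parent_id" in item:
            children.setdefault(item["parent_id"], []).append(item)

    def process(item: dict) -> dict:
        out = dict(item)
        grouped: dict[str, list[dict]] = {}
        for child in children.get(item["id"], []):
            grouped.setdefault(child["parent_field"], []).append(process(child))
        for field, kids in grouped.items():
            out[field] = kids if len(kids) > 1 else kids[0]
        return out

    # The single root is the item without a parent marker, as in the hunk above.
    root = next(i for i in items if "parent_id" not in i)
    return process(root)
```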
src/statikk/models.py:

```diff
@@ -14,7 +14,7 @@ from pydantic_core._pydantic_core import PydanticUndefined

 from statikk.conditions import Condition
 from statikk.expressions import DatabaseModelUpdateExpressionBuilder
-from statikk.fields import FIELD_STATIKK_TYPE, FIELD_STATIKK_PARENT_ID
+from statikk.fields import FIELD_STATIKK_TYPE, FIELD_STATIKK_PARENT_ID, FIELD_STATIKK_PARENT_FIELD_NAME

 if typing.TYPE_CHECKING:
     from statikk.engine import Table
```
```diff
@@ -196,14 +196,20 @@ class TrackingMixin:
         return {}


-class DatabaseModel(BaseModel, TrackingMixin, extra=Extra.allow):
+class DatabaseModel(BaseModel, TrackingMixin):
     id: str = Field(default_factory=lambda: str(uuid4()))
     _parent: Optional[DatabaseModel] = None
+    _parent_field_name: Optional[str] = None
     _model_types_in_hierarchy: dict[str, Type[DatabaseModel]] = {}
     _should_delete: bool = False
     _parent_changed: bool = False
+    _is_persisted: bool = False
     _session = Session()

+    class Config:
+        extra = Extra.allow
+        arbitrary_types_allowed = True
+
     def __eq__(self, other):
         return self.id == other.id

```
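Model declarations themselves do not change in this release; the new `_parent_field_name` and `_is_persisted` private attributes default to `None` and `False` on freshly constructed instances and are only flipped by the engine (on write, or when a hierarchy is read back). For orientation, a minimal nested model lightly trimmed from the removed `test_update_parent` below; the `IndexFieldConfig` import path is an assumption, it is not shown in this diff:

```python
from statikk.models import DatabaseModel, IndexFieldConfig  # import path assumed


class NestedModel(DatabaseModel):
    nested_id: str
    name: str

    @classmethod
    def is_nested(cls) -> bool:
        return True

    @classmethod
    def index_definitions(cls) -> dict[str, IndexFieldConfig]:
        return {"main-index": IndexFieldConfig(sk_fields=["nested_id"])}
```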
```diff
@@ -255,12 +261,16 @@ class DatabaseModel(BaseModel, TrackingMixin, extra=Extra.allow):
     def is_simple_object(self) -> bool:
         return len(self._model_types_in_hierarchy) == 1

+    @property
+    def is_persisted(self) -> bool:
+        return self._is_persisted
+
     @property
     def should_delete(self) -> bool:
         if self._is_any_parent_marked_for_deletion():
             return True

-        return self._should_delete or self.is_parent_changed()
+        return self.is_persisted and (self._should_delete or self.is_parent_changed())

     def _is_any_parent_marked_for_deletion(self) -> bool:
         current = self._parent
```
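Combined with the `_is_persisted = True` assignments in `engine.py` above, the new guard means only items that were actually written to (or read back from) DynamoDB are candidates for deletion; a subtree that is reparented before it was ever saved is simply skipped. A rough truth table for the new expression, ignoring the `_is_any_parent_marked_for_deletion()` short-circuit above it:

```python
# (is_persisted, _should_delete, is_parent_changed()) -> should_delete
cases = {
    (False, True,  False): False,  # never saved: nothing to delete in DynamoDB
    (True,  True,  False): True,   # saved, then explicitly marked for deletion
    (True,  False, True):  True,   # saved, then moved under a new parent
    (False, False, True):  False,  # moved before ever being saved
}
for (persisted, marked, moved), expected in cases.items():
    assert (persisted and (marked or moved)) == expected
```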
```diff
@@ -346,8 +356,8 @@ class DatabaseModel(BaseModel, TrackingMixin, extra=Extra.allow):
     def mark_for_delete(self):
         self._should_delete = True

-    def
-        return self._table.reparent_subtree(self, new_parent)
+    def _change_parent_to(self, new_parent: DatabaseModel, field_name: str) -> T:
+        return self._table.reparent_subtree(self, new_parent, field_name)

     def _remove_from_parent(self, parent, field_name, subtree):
         is_optional, inner_type = inspect_optional_field(parent.__class__, field_name)
```
```diff
@@ -384,17 +394,17 @@ class DatabaseModel(BaseModel, TrackingMixin, extra=Extra.allow):
         if hasattr(field_type, "__origin__") and field_type.__origin__ == list:
             if not isinstance(getattr(self, field_name), list):
                 setattr(self, field_name, [])
-            reparented = child_node.
+            reparented = child_node._change_parent_to(self, field_name)
             getattr(self, field_name).append(reparented)

         elif hasattr(field_type, "__origin__") and field_type.__origin__ == set:
             if not isinstance(getattr(self, field_name), set):
                 setattr(self, field_name, set())
-            reparented = child_node.
+            reparented = child_node._change_parent_to(self, field_name)
             getattr(self, field_name).add(reparented)

         elif issubclass(field_type, DatabaseModel):
-            reparented = child_node.
+            reparented = child_node._change_parent_to(self, field_name)
             setattr(self, field_name, reparented)

         if reparented:
```
```diff
@@ -418,6 +428,7 @@ class DatabaseModel(BaseModel, TrackingMixin, extra=Extra.allow):
         data[FIELD_STATIKK_TYPE] = self.type()
         if self._parent:
             data[FIELD_STATIKK_PARENT_ID] = self._parent.id
+            data[FIELD_STATIKK_PARENT_FIELD_NAME] = self._parent_field_name
         return data

     @model_validator(mode="after")
```
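Every non-root item therefore carries three statikk markers when serialized: its model type, its parent's id, and the name of the parent field it belongs to. Roughly, using the constants symbolically (the concrete attribute-name strings live in `statikk/fields.py`, whose one-line addition is not shown in this diff) and with example values borrowed from the removed `test_update_parent`:

```python
from statikk.fields import FIELD_STATIKK_TYPE, FIELD_STATIKK_PARENT_ID, FIELD_STATIKK_PARENT_FIELD_NAME

# Approximate shape of a serialized nested item (index attributes omitted):
serialized_child = {
    "id": "<child id>",
    "nested_id": "nested-model-1",
    "name": "nested-model-1",
    FIELD_STATIKK_TYPE: "NestedModel",
    FIELD_STATIKK_PARENT_ID: "<parent id>",
    FIELD_STATIKK_PARENT_FIELD_NAME: "nested",
}
```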
```diff
@@ -465,13 +476,6 @@ class DatabaseModel(BaseModel, TrackingMixin, extra=Extra.allow):
                             items.append(item)
                             item.split_to_simple_objects(items)

-            elif isinstance(field_value, set):
-                for item in field_value:
-                    if hasattr(item, "__class__") and issubclass(item.__class__, DatabaseModel):
-                        if item not in items:
-                            items.append(item)
-                            item.split_to_simple_objects(items)
-
         return items

     def get_attribute(self, attribute_name: str):
```
```diff
@@ -488,14 +492,6 @@ class DatabaseModel(BaseModel, TrackingMixin, extra=Extra.allow):
                 for item in field_value:
                     if issubclass(item.__class__, DatabaseModel) and item.is_nested():
                         nested_models.append(field_name)
-            elif isinstance(field_value, set):
-                for item in field_value:
-                    if issubclass(item.__class__, DatabaseModel) and item.is_nested():
-                        nested_models.append(field_name)
-            elif isinstance(field_value, dict):
-                for key, value in field_value.items():
-                    if issubclass(value.__class__, DatabaseModel) and value.is_nested():
-                        nested_models.append(field_name)
         return set(nested_models)

     def get_type_from_hierarchy_by_name(self, name: str) -> Optional[Type[DatabaseModel]]:
```
```diff
@@ -512,6 +508,7 @@ class DatabaseModel(BaseModel, TrackingMixin, extra=Extra.allow):
             if field._parent and not force_override:
                 return  # Already set
             field._parent = parent
+            field._parent_field_name = field_name
             if field.should_track_session:
                 field._session.add_change(parent, field_name, field)
             root._model_types_in_hierarchy[field.type()] = type(field)
```
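With this change the field name is captured at the same point the parent reference is, so a freshly assembled hierarchy should already know which field each child lives in. A hedged sketch, assuming parent references are populated when the hierarchy is constructed (the `@model_validator(mode="after")` context above suggests this, but the validator body is not part of this diff):

```python
# Model / NestedModel as defined in the removed test_update_parent below.
model = Model(model_id="model-1", name="model-1",
              nested=[NestedModel(nested_id="n1", name="n1")])
child = model.nested[0]
assert child._parent is model                  # set by set_parent_references
assert child._parent_field_name == "nested"    # new in 0.1.16
```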
```diff
@@ -532,14 +529,10 @@ class DatabaseModel(BaseModel, TrackingMixin, extra=Extra.allow):
         for field_name, field_value in self:
             if isinstance(field_value, DatabaseModel):
                 yield self, field_name, field_value
-            elif isinstance(field_value,
+            elif isinstance(field_value, list):
                 for item in field_value:
                     if isinstance(item, DatabaseModel):
                         yield self, field_name, item
-            elif isinstance(field_value, dict):
-                for key, value in field_value.items():
-                    if isinstance(value, DatabaseModel):
-                        yield self, key, value

     def dfs_traverse_hierarchy(self):
         """
```
```diff
@@ -555,11 +548,7 @@ class DatabaseModel(BaseModel, TrackingMixin, extra=Extra.allow):
         for field_name, field_value in fields:
             if isinstance(field_value, DatabaseModel):
                 yield from field_value.dfs_traverse_hierarchy()
-            elif isinstance(field_value,
+            elif isinstance(field_value, list):
                 for item in field_value:
                     if isinstance(item, DatabaseModel):
                         yield from item.dfs_traverse_hierarchy()
-            elif isinstance(field_value, dict):
-                for key, value in field_value.items():
-                    if isinstance(value, DatabaseModel):
-                        yield from value.dfs_traverse_hierarchy()
```
tests/test_engine.py:

```diff
@@ -897,79 +897,6 @@ def test_nested_hierarchies():
     assert list(table.scan()) == []


-def test_update_parent():
-    class DoublyNestedModel(DatabaseModel):
-        bar: str
-
-        @classmethod
-        def index_definitions(cls) -> dict[str, IndexFieldConfig]:
-            return {"main-index": IndexFieldConfig(sk_fields=["bar"])}
-
-        @classmethod
-        def is_nested(cls) -> bool:
-            return True
-
-    class NestedModel(DatabaseModel):
-        nested_id: str
-        name: str
-        doubly_nested: list[DoublyNestedModel] = []
-
-        @classmethod
-        def is_nested(cls) -> bool:
-            return True
-
-        @classmethod
-        def index_definitions(cls) -> dict[str, IndexFieldConfig]:
-            return {"main-index": IndexFieldConfig(sk_fields=["nested_id"])}
-
-    class Model(DatabaseModel):
-        model_id: str
-        name: str
-        nested: list[NestedModel] = []
-
-        @classmethod
-        def index_definitions(cls) -> dict[str, IndexFieldConfig]:
-            return {"main-index": IndexFieldConfig(pk_fields=["model_id"], sk_fields=["name"])}
-
-    mock_dynamodb().start()
-    table = Table(
-        name="my-dynamodb-table",
-        key_schema=KeySchema(hash_key="id"),
-        indexes=[
-            GSI(
-                name="main-index",
-                hash_key=Key(name="gsi_pk"),
-                sort_key=Key(name="gsi_sk"),
-            )
-        ],
-        models=[DoublyNestedModel, NestedModel, Model],
-    )
-    _create_dynamodb_table(table)
-    doubly_nested_model = DoublyNestedModel(bar="bar")
-    nested_model = NestedModel(nested_id="nested-model-1", name="nested-model-1", doubly_nested=[doubly_nested_model])
-    model = Model(model_id="model-1", name="model-1", nested=[nested_model])
-    model.save()
-    hierarchy = Model.query_hierarchy(hash_key=Equals("model-1"))
-    model_2 = Model(model_id="model-2", name="model-2", nested=[])
-    model_2.save()
-    nested = hierarchy.nested[0]
-    updated_hierarchy = nested.change_parent_to(model_2)
-    model_2.nested.append(updated_hierarchy)
-    model_2.save()
-    assert hierarchy.nested[0].is_parent_changed()
-    assert not model_2.nested[0].is_parent_changed()
-    assert model_2.nested[0].id == nested_model.id
-    model_2 = Model.query_hierarchy(hash_key=Equals("model-2"))
-    assert model_2.nested[0].id == nested_model.id
-    assert model_2.nested[0].gsi_pk == "model-2"
-    assert model_2.nested[0].gsi_sk == "Model|model-2|NestedModel|nested-model-1"
-
-    # when reparenting to a root node, the old subtree is deleted from the database after saving the old root
-    hierarchy.save()
-    hierarchy = Model.query_hierarchy(hash_key=Equals("model-1"))
-    assert len(hierarchy.nested) == 0
-
-
 def test_rebuild_model_indexes():
     class MyDatabaseModel(DatabaseModel):
         foo: str = "foo"
```
```diff
@@ -1017,7 +944,7 @@ def test_add_child_node():
         bar: str
         other_nested: Optional[MyOtherNestedDatabaseModel] = None
         list_nested: list[MyOtherNestedDatabaseModel] = []
-
+        other_list_nested: list[MyOtherNestedDatabaseModel] = []

         @classmethod
         def is_nested(cls) -> bool:
```
```diff
@@ -1044,22 +971,23 @@ def test_add_child_node():
     my_database_model.build_model_indexes()
     my_database_model.nested.add_child_node("other_nested", MyOtherNestedDatabaseModel(baz="baz"))
     my_database_model.nested.add_child_node("list_nested", MyOtherNestedDatabaseModel(baz="bazz"))
-    my_database_model.nested.add_child_node("
+    my_database_model.nested.add_child_node("other_list_nested", MyOtherNestedDatabaseModel(baz="bazzz"))
     assert my_database_model.nested.other_nested.baz == "baz"
     assert my_database_model.nested.list_nested[0].baz == "bazz"
-
-    assert
-    assert
-    assert
-    assert
+    other_list_nested = my_database_model.nested.other_list_nested[0]
+    assert other_list_nested.baz == "bazzz"
+    assert other_list_nested._parent == my_database_model.nested
+    assert other_list_nested.gsi_pk == other_list_nested._parent.gsi_pk
+    assert other_list_nested.gsi_sk == "MyDatabaseModel|MyNestedDatabaseModel|bar|MyOtherNestedDatabaseModel|bazzz"
     assert my_database_model.nested.other_nested._parent == my_database_model.nested
     assert my_database_model.nested.list_nested[0]._parent == my_database_model.nested
-    my_database_model.nested.add_child_node("
+    my_database_model.nested.add_child_node("other_list_nested", my_database_model.nested.list_nested[0])
     assert my_database_model.nested.list_nested[0]._parent_changed is True
-
-    assert
-    assert
-
+    other_list_nested_new = my_database_model.nested.other_list_nested[1]
+    assert len(my_database_model.nested.other_list_nested) == 2
+    assert other_list_nested_new._parent_changed is False
+    assert other_list_nested_new.gsi_sk == "MyDatabaseModel|MyNestedDatabaseModel|bar|MyOtherNestedDatabaseModel|bazz"
+    my_database_model.nested.add_child_node("list_nested", other_list_nested_new)
     assert my_database_model.nested.list_nested[0]._parent_changed is False
     assert my_database_model.nested.list_nested[0]._parent == my_database_model.nested
     assert my_database_model.nested.list_nested[0].gsi_pk == "foo"
```
```diff
@@ -1067,4 +995,14 @@ def test_add_child_node():
         my_database_model.nested.list_nested[0].gsi_sk
         == "MyDatabaseModel|MyNestedDatabaseModel|bar|MyOtherNestedDatabaseModel|bazz"
     )
-    assert
+    assert other_list_nested_new._parent_changed is True
+    my_database_model.save()
+    my_database_model = MyDatabaseModel.query_hierarchy(hash_key=Equals("foo"))
+    assert len(my_database_model.nested.list_nested) == 1
+    assert len(my_database_model.nested.other_list_nested) == 1
+    hierarchy = MyDatabaseModel.query_hierarchy(hash_key=Equals("foo"))
+    assert hierarchy.is_persisted is True
+    assert hierarchy.nested.is_persisted is True
+    assert hierarchy.nested.list_nested[0].is_persisted is True
+    assert hierarchy.nested.other_list_nested[0].is_persisted is True
+    assert hierarchy.nested.other_nested.is_persisted is True
```