nuql 0.0.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (65) hide show
  1. nuql/__init__.py +3 -0
  2. nuql/api/__init__.py +13 -0
  3. nuql/api/adapter.py +34 -0
  4. nuql/api/batch_get/__init__.py +2 -0
  5. nuql/api/batch_get/batch_get.py +40 -0
  6. nuql/api/batch_get/queue.py +120 -0
  7. nuql/api/batch_write.py +99 -0
  8. nuql/api/condition_check.py +39 -0
  9. nuql/api/create.py +25 -0
  10. nuql/api/delete.py +88 -0
  11. nuql/api/get.py +30 -0
  12. nuql/api/put_item.py +112 -0
  13. nuql/api/put_update.py +25 -0
  14. nuql/api/query/__init__.py +4 -0
  15. nuql/api/query/condition.py +157 -0
  16. nuql/api/query/condition_builder.py +211 -0
  17. nuql/api/query/key_condition.py +200 -0
  18. nuql/api/query/query.py +166 -0
  19. nuql/api/transaction.py +145 -0
  20. nuql/api/update/__init__.py +3 -0
  21. nuql/api/update/expression_builder.py +33 -0
  22. nuql/api/update/update_item.py +139 -0
  23. nuql/api/update/utils.py +126 -0
  24. nuql/api/upsert.py +32 -0
  25. nuql/client.py +88 -0
  26. nuql/connection.py +43 -0
  27. nuql/exceptions.py +66 -0
  28. nuql/fields/__init__.py +11 -0
  29. nuql/fields/boolean.py +29 -0
  30. nuql/fields/datetime.py +49 -0
  31. nuql/fields/datetime_timestamp.py +45 -0
  32. nuql/fields/float.py +40 -0
  33. nuql/fields/integer.py +40 -0
  34. nuql/fields/key.py +207 -0
  35. nuql/fields/list.py +90 -0
  36. nuql/fields/map.py +67 -0
  37. nuql/fields/string.py +184 -0
  38. nuql/fields/ulid.py +39 -0
  39. nuql/fields/uuid.py +42 -0
  40. nuql/generators/__init__.py +3 -0
  41. nuql/generators/datetime.py +37 -0
  42. nuql/generators/ulid.py +10 -0
  43. nuql/generators/uuid.py +19 -0
  44. nuql/resources/__init__.py +4 -0
  45. nuql/resources/fields/__init__.py +3 -0
  46. nuql/resources/fields/field.py +153 -0
  47. nuql/resources/fields/field_map.py +85 -0
  48. nuql/resources/fields/value.py +5 -0
  49. nuql/resources/records/__init__.py +3 -0
  50. nuql/resources/records/projections.py +49 -0
  51. nuql/resources/records/serialiser.py +144 -0
  52. nuql/resources/records/validator.py +48 -0
  53. nuql/resources/tables/__init__.py +2 -0
  54. nuql/resources/tables/indexes.py +140 -0
  55. nuql/resources/tables/table.py +151 -0
  56. nuql/resources/utils/__init__.py +2 -0
  57. nuql/resources/utils/dict.py +21 -0
  58. nuql/resources/utils/validators.py +165 -0
  59. nuql/types/__init__.py +3 -0
  60. nuql/types/config.py +27 -0
  61. nuql/types/fields.py +27 -0
  62. nuql/types/serialisation.py +10 -0
  63. nuql-0.0.1.dist-info/METADATA +12 -0
  64. nuql-0.0.1.dist-info/RECORD +65 -0
  65. nuql-0.0.1.dist-info/WHEEL +4 -0
@@ -0,0 +1,85 @@
1
+ __all__ = ['create_field_map', 'get_field_types']
2
+
3
+ import inspect
4
+ from typing import Dict, List, Type, Any, Callable
5
+
6
+ import nuql
7
+ from nuql import resources, types
8
+
9
+
10
def create_field_map(
    fields: Dict[str, 'types.FieldConfig'],
    parent: 'resources.Table',
    field_types: List[Type['types.FieldType']] | None = None
) -> Dict[str, 'types.FieldType']:
    """
    Generates a dict of table field instances for the serialisation process.

    :arg fields: Dict of field configurations.
    :arg parent: Parent Table instance.
    :param field_types: Additional field types that are defined outside the library.
    :return: Field map dict.
    :raises nuql.NuqlError: If a configured field type is not registered.
    """
    registry = get_field_types(field_types)

    field_map: Dict[str, 'types.FieldType'] = {}
    deferred: List[Callable[[Dict[str, Any]], None]] = []

    for name, config in fields.items():
        type_name = config['type']

        if type_name not in registry:
            raise nuql.NuqlError(
                code='InvalidFieldType',
                message=f'Field type \'{type_name}\' is not defined.'
            )

        # Fields may register post-construction hooks via init_callback;
        # passing the list's append method satisfies that contract directly.
        field_map[name] = registry[type_name](name, config, parent, init_callback=deferred.append)

    # Run any applicable callbacks on the completed field map
    for hook in deferred:
        hook(field_map)

    return field_map
47
+
48
+
49
def get_field_types(field_types: List[Type['types.FieldType']] | None = None) -> Dict[str, Type['types.FieldType']]:
    """
    Dynamically generates a dict of all available field types.

    :param field_types: Additional field types that are defined outside the library.
    :return: Field type dict, keyed by each field class's `type` attribute.
    """
    from nuql import fields as builtin_fields

    if not isinstance(field_types, list):
        field_types = []

    output: Dict[str, Type['types.FieldType']] = {}

    def is_valid(candidate: Any) -> bool:
        """Check the provided object is a valid field type (a FieldBase subclass)."""
        # BUG FIX: this previously inspected the enclosing loop variable `obj`
        # instead of its own argument, so custom field types were validated
        # against whatever object the built-in scan happened to bind last.
        if not inspect.isclass(candidate):
            return False

        if not issubclass(candidate, resources.FieldBase):
            return False

        return True

    # Import built-in field types
    for name in dir(builtin_fields):
        obj = getattr(builtin_fields, name)

        if is_valid(obj):
            output[obj.type] = obj

    # Import custom-defined field types (these override built-ins on clash)
    for field_type in field_types:
        if is_valid(field_type):
            output[field_type.type] = field_type

    return output
@@ -0,0 +1,5 @@
1
+ __all__ = ['EmptyValue']
2
+
3
+
4
class EmptyValue:
    """Sentinel type used to represent a field value that was not provided."""
@@ -0,0 +1,3 @@
1
+ from .validator import *
2
+ from .serialiser import *
3
+ from .projections import *
@@ -0,0 +1,49 @@
1
+ from typing import Any, Dict
2
+
3
+ from nuql import resources, types
4
+ from nuql.fields import Key, String
5
+
6
+
7
class Projections:
    def __init__(self, parent: 'resources.Table', serialiser: 'resources.Serialiser') -> None:
        """
        Helper for handling projected fields.

        :arg parent: Parent Table instance.
        :arg serialiser: Serialiser instance.
        """
        self.parent = parent
        self.serialiser = serialiser
        # Maps each target key field name -> {projected field name: value}
        self._store = {}

    def add(self, name: str, value: Any) -> None:
        """
        Adds a projection to the store.

        :arg name: Projected field name.
        :arg value: Value to project.
        """
        field = self.serialiser.get_field(name)

        # Record the value under every key field that projects this field
        for target in field.projected_from:
            self._store.setdefault(target, {})[name] = value

    def merge(self, data: Dict[str, Any], action: 'types.SerialisationType', validator: 'resources.Validator') -> None:
        """
        Merges serialised projections into the record.

        :arg data: Current serialised record.
        :arg action: Serialisation type.
        :arg validator: Validator instance.
        """
        for key, field in self.parent.fields.items():
            # Only Key fields and template String fields consume projections
            is_key_field = isinstance(field, Key) or (isinstance(field, String) and field.is_template)
            if not is_key_field:
                continue

            data[key] = field(self._store.get(key, {}), action, validator)
@@ -0,0 +1,144 @@
1
+ from typing import Dict, Any, Optional, Union
2
+
3
+ import nuql
4
+ from nuql import resources, types, fields
5
+
6
+
7
+ class Serialiser:
8
+ def __init__(self, parent: Union['resources.Table', 'fields.Map']) -> None:
9
+ """
10
+ Helper object to serialise a record.
11
+
12
+ :arg parent: Parent Table or Map.
13
+ """
14
+ self.parent = parent
15
+
16
+ def get_field(self, key: str) -> 'resources.FieldBase':
17
+ """
18
+ Get a field instance from the schema.
19
+
20
+ :arg key: Field key.
21
+ :return: FieldBase instance.
22
+ """
23
+ if key not in self.parent.fields:
24
+ raise nuql.NuqlError(
25
+ code='FieldNotFound',
26
+ message=f'Field \'{key}\' is not defined in the schema.'
27
+ )
28
+ return self.parent.fields[key]
29
+
30
+ def serialise(
31
+ self,
32
+ action: 'types.SerialisationType',
33
+ data: Dict[str, Any] | None = None,
34
+ validator: Optional['resources.Validator'] = None
35
+ ):
36
+ """
37
+ Serialises/marshals a record based on the data provided.
38
+
39
+ :arg action: Serialisation type.
40
+ :param data: Data to serialise.
41
+ :param validator: Validator instance.
42
+ :return:
43
+ """
44
+ validator = resources.Validator() if validator is None else validator
45
+ projections = resources.Projections(self.parent, self)
46
+ output = {}
47
+
48
+ # Serialise provided fields
49
+ for key, deserialised_value in data.items():
50
+ field = self.get_field(key)
51
+
52
+ if not field:
53
+ raise nuql.NuqlError(
54
+ code='SchemaError',
55
+ message=f'Field \'{key}\' is not defined in the schema.'
56
+ )
57
+
58
+ # Skip serialisation for projected fields as this is to be handled at
59
+ # the end of the serialisation process
60
+ if field.projected_from:
61
+ projections.add(key, deserialised_value)
62
+ else:
63
+ serialised_value = field(deserialised_value, action, validator)
64
+ output[key] = serialised_value
65
+
66
+ # Serialise fields not provided (i.e. could have defaults)
67
+ untouched = {name: field for name, field in self.parent.fields.items() if name not in data}
68
+ for name, field in untouched.items():
69
+ if field.projects_fields:
70
+ continue
71
+
72
+ if field.projected_from:
73
+ continue
74
+
75
+ serialised_value = field(resources.EmptyValue(), action, validator)
76
+ output[name] = serialised_value
77
+
78
+ # Set projections
79
+ projections.merge(output, action, validator)
80
+
81
+ if action in ['create', 'update', 'write']:
82
+ validator.raise_for_validation_errors()
83
+
84
+ return output
85
+
86
+ def serialise_key(self, key: Dict[str, Any], index_name: str = 'primary') -> Dict[str, Any]:
87
+ """
88
+ Serialises the key for an item on a given index.
89
+
90
+ :arg key: Key to serialise.
91
+ :param index_name: Index name to serialise key for.
92
+ :return: Serialised key.
93
+ """
94
+ # Check parent is of a valid type
95
+ if not isinstance(self.parent, resources.Table):
96
+ raise nuql.NuqlError(
97
+ code='InvalidTable',
98
+ message='Serialisation of keys is only supported for Table resources.'
99
+ )
100
+
101
+ # Get applicable index
102
+ if index_name == 'primary':
103
+ index = self.parent.indexes.primary
104
+ else:
105
+ index = self.parent.indexes.get_index(index_name)
106
+
107
+ # Serialise provided data according the the schema
108
+ serialised_key = self.serialise('query', key)
109
+
110
+ # Produce a key from the serialised result and for the given index
111
+ return {
112
+ key: value
113
+ for key, value in serialised_key.items()
114
+ if key == index['hash'] or ('sort' not in index or key == index['sort'])
115
+ }
116
+
117
+ def deserialise(self, data: Dict[str, Any]) -> Dict[str, Any]:
118
+ """
119
+ Deserialises/unmarshalls data from DynamoDB.
120
+
121
+ :arg data: Data to deserialise.
122
+ :return: Deserialised data.
123
+ """
124
+ record = {}
125
+
126
+ for name, field in self.parent.fields.items():
127
+ # Special Case: string templates
128
+ if hasattr(field, 'deserialise_template') and getattr(field, 'is_template', True):
129
+ deserialised_value = field.deserialise_template(data.get(name))
130
+ else:
131
+ deserialised_value = field.deserialise(data.get(name))
132
+
133
+ if field.projected_from:
134
+ continue
135
+
136
+ # Directly set field
137
+ record[name] = deserialised_value
138
+
139
+ # Handle projected fields
140
+ if field.projects_fields:
141
+ for projected_key in field.projects_fields:
142
+ record[projected_key] = deserialised_value.get(projected_key)
143
+
144
+ return record
@@ -0,0 +1,48 @@
1
+ __all__ = ['Validator']
2
+
3
+ import nuql
4
+
5
+
6
+ class Validator:
7
+ def __init__(self, parent: 'Validator' = None, path: str | None = None) -> None:
8
+ """
9
+ Validation helper class to pick up serialisation errors.
10
+
11
+ :param parent: Parent Validator instance if applicable.
12
+ :param path: Path (where nested).
13
+ """
14
+ self.parent = parent
15
+ self.path = path
16
+ self.children = []
17
+ self._errors = []
18
+
19
+ @property
20
+ def errors(self):
21
+ """Recursively provide errors."""
22
+ return [*self._errors, *[x.errors for x in self.children]]
23
+
24
+ def spawn_new(self, path: str) -> 'Validator':
25
+ """
26
+ Spawns a new validator instance for nested validation.
27
+
28
+ :arg path: Path of new validator.
29
+ :return: Validator instance.
30
+ """
31
+ full_path = self.path + '.' + path if self.path else path
32
+ validator = Validator(parent=self, path=full_path)
33
+ self.children.append(validator)
34
+ return validator
35
+
36
+ def add(self, name: str, message: str) -> None:
37
+ """
38
+ Adds a validation error.
39
+
40
+ :arg name: Field name.
41
+ :arg message: Error message.
42
+ """
43
+ self._errors.append({'name': self.path + '.' + name if self.path else name, 'message': message})
44
+
45
+ def raise_for_validation_errors(self):
46
+ """Raises a ValidationError exception if there are any errors."""
47
+ if self._errors:
48
+ raise nuql.ValidationError(self.errors)
@@ -0,0 +1,2 @@
1
+ from .indexes import *
2
+ from .table import *
@@ -0,0 +1,140 @@
1
+ __all__ = ['Indexes']
2
+
3
+ from typing import Dict, Any, cast
4
+
5
+ import nuql
6
+ from nuql import types
7
+
8
+
9
# DynamoDB per-table quotas for secondary indexes
MAX_LSI = 5
MAX_GSI = 20


class Indexes:
    def __init__(self, indexes: 'types.IndexesType') -> None:
        """
        Wrapper class to validate and use indexes for the overall table.

        :arg indexes: List of indexes.
        """
        # Every attribute name used as a hash or sort key on any index
        self.index_keys = set()
        self._indexes = self.validate_indexes(indexes)

    @property
    def primary(self) -> 'types.PrimaryIndex':
        """Retrieve the primary index for the table."""
        return cast(types.PrimaryIndex, self._indexes['primary'])

    def validate_indexes(self, indexes: 'types.IndexesType') -> Dict[str, Dict[str, Any]]:
        """
        Processes, validates and generates index dict for the table.

        :arg indexes: List of indexes.
        :return: Index dict keyed by index name ('primary' for the primary index).
        :raises nuql.NuqlError: If any index configuration is invalid.
        """
        index_dict = {}

        local_count = 0
        global_count = 0

        if not isinstance(indexes, list):
            raise nuql.NuqlError(code='IndexValidation', message='Indexes must be a list')

        for index in indexes:
            if not isinstance(index, dict):
                raise nuql.NuqlError(code='IndexValidation', message='Indexes must be a list of dicts')

            if 'hash' not in index:
                raise nuql.NuqlError(code='IndexValidation', message='\'hash\' is required for all indexes')

            index_name = index.get('name', 'primary')
            self.index_keys.add(index['hash'])

            if 'sort' in index:
                self.index_keys.add(index['sort'])

            # Validate only one primary index
            if index_name == 'primary' and 'primary' in index_dict:
                raise nuql.NuqlError(
                    code='IndexValidation',
                    message='More than one primary index cannot be defined. Did you mean to add \'name\' and \'type\'?'
                )

            # Validate index has a type set
            if index_name != 'primary' and index.get('type') not in ['local', 'global']:
                raise nuql.NuqlError(
                    code='IndexValidation',
                    message='Index type is required for all indexes except the primary index'
                )

            # Set index follow rule
            if index_name != 'primary' and 'follow' in index and not isinstance(index['follow'], bool):
                raise nuql.NuqlError(
                    code='IndexValidation',
                    message='Index \'follow\' must be a boolean value if provided.'
                )

            # Validate index projection
            if index_name != 'primary' and 'projection' in index and index['projection'] not in ['all', 'keys']:
                raise nuql.NuqlError(
                    code='IndexValidation',
                    message='Index \'projection\' must be \'all\' or \'keys\' if provided.'
                )

            # Count LSIs
            if index.get('type') == 'local':
                local_count += 1

            # Count GSIs
            if index.get('type') == 'global':
                global_count += 1

            accepted_keys = ['hash', 'sort', 'name', 'type', 'follow', 'projection']
            extra_keys = [x for x in index.keys() if x not in accepted_keys]
            if extra_keys:
                raise nuql.NuqlError(
                    code='IndexValidation',
                    message=f'Index \'{index_name}\' contains invalid keys: {", ".join(extra_keys)}\n\n'
                            f'Accepted index keys are: {", ".join(accepted_keys)}'
                )

            index_dict[index_name] = index

        # BUG FIX: both limit checks previously used >=, rejecting a table with
        # exactly MAX_LSI local or MAX_GSI global indexes even though the error
        # messages (and DynamoDB's own quotas) allow that many.

        # Throw on more than 5 LSIs
        if local_count > MAX_LSI:
            raise nuql.NuqlError(
                code='IndexValidation',
                message='More than 5 local indexes cannot be defined'
            )

        # Throw on more than 20 GSIs
        if global_count > MAX_GSI:
            raise nuql.NuqlError(
                code='IndexValidation',
                message='More than 20 global indexes cannot be defined'
            )

        return index_dict

    def get_index(self, name: str) -> 'types.SecondaryIndex':
        """
        Get a secondary index by name.

        :arg name: Index name.
        :return: SecondaryIndex dict.
        :raises nuql.NuqlError: If 'primary' is requested or the index is undefined.
        """
        # Throw on accessing primary to keep logical separation
        if name == 'primary':
            raise nuql.NuqlError(
                code='InvalidIndex',
                message='The primary index cannot be accessed using get_index, please use the primary attribute instead'
            )

        # Validate index exists
        if name not in self._indexes:
            raise nuql.NuqlError(
                code='InvalidIndex',
                message=f'Index \'{name}\' is not defined for this DynamoDB table'
            )

        return cast(types.SecondaryIndex, self._indexes[name])
@@ -0,0 +1,151 @@
1
+ __all__ = ['Table']
2
+
3
+ from typing import Dict, Any, List
4
+
5
+ import nuql
6
+ from nuql import resources, types, api
7
+
8
+
9
class Table:
    def __init__(
        self,
        provider: 'nuql.Nuql',
        name: str,
        schema: Dict[str, 'types.FieldConfig'],
        indexes: 'resources.Indexes',
    ) -> None:
        """
        Main Table API for performing actions against a single table.

        :arg provider: Nuql instance.
        :arg name: Table name.
        :arg schema: Field schema.
        :arg indexes: Table indexes.
        """
        self.name = name
        self.provider = provider
        self.indexes = indexes
        self.fields = resources.create_field_map(schema, self, provider.fields)
        self.serialiser = resources.Serialiser(self)

    def query(
        self,
        key_condition: Dict[str, Any] | None = None,
        condition: Dict[str, Any] | None = None,
        index_name: str = 'primary',
        limit: int | None = None,
        scan_index_forward: bool = True,
        exclusive_start_key: Dict[str, Any] | None = None,
        consistent_read: bool = False,
    ) -> Dict[str, Any]:
        """
        Synchronously invokes a query against the table.

        :param key_condition: Key condition expression as a dict.
        :param condition: Filter condition expression as a dict.
        :param index_name: Index to perform query against.
        :param limit: Number of items to retrieve.
        :param scan_index_forward: Direction of scan.
        :param exclusive_start_key: Exclusive start key.
        :param consistent_read: Perform query as a consistent read.
        :return: Query result.
        """
        return api.Query(self.provider, self).invoke_sync(
            key_condition=key_condition,
            condition=condition,
            index_name=index_name,
            limit=limit,
            scan_index_forward=scan_index_forward,
            exclusive_start_key=exclusive_start_key,
            consistent_read=consistent_read,
        )

    def get(self, key: Dict[str, Any], consistent_read: bool = False) -> Dict[str, Any]:
        """
        Retrieves a record from the table using the key.

        :arg key: Record key as a dict.
        :param consistent_read: Perform a consistent read.
        :return: Deserialised record dict.
        """
        return api.Get(self.provider, self).invoke_sync(key=key, consistent_read=consistent_read)

    def create(self, data: Dict[str, Any], condition: Dict[str, Any] | None = None) -> Dict[str, Any]:
        """
        Create a new item on the table.

        :arg data: Data to create.
        :param condition: Optional condition expression dict.
        :return: New item dict.
        """
        return api.Create(self.provider, self).invoke_sync(data=data, condition=condition)

    def delete(
        self,
        key: Dict[str, Any],
        condition: Dict[str, Any] | None = None,
    ) -> None:
        """
        Performs a delete operation for an item on the table.

        :arg key: Record key as a dict.
        :param condition: Condition expression as a dict.
        """
        return api.Delete(self.provider, self).invoke_sync(key=key, condition=condition)

    def update(
        self,
        data: Dict[str, Any],
        condition: Dict[str, Any] | None = None,
        shallow: bool = False
    ) -> Dict[str, Any]:
        """
        Updates an item in the table.

        :arg data: Data to update.
        :param condition: Optional condition expression.
        :param shallow: Activates shallow update mode (so that whole nested items are updated at once).
        :return: New item dict.
        """
        return api.UpdateItem(self.provider, self).invoke_sync(data=data, condition=condition, shallow=shallow)

    def put_item(self, data: Dict[str, Any], condition: Dict[str, Any] | None = None) -> Dict[str, Any]:
        """
        Perform a put operation against the table.

        :arg data: Data to put.
        :param condition: Optional condition expression dict.
        :return: New item dict.
        """
        return api.PutItem(self.provider, self).invoke_sync(data=data, condition=condition)

    def upsert(self, data: Dict[str, Any], shallow: bool = False) -> Dict[str, Any]:
        """
        Updates an item in the table if it exists, otherwise creates a new one.

        [NOTE]
        Conditions aren't allowed for this API to avoid ambiguous
        ConditionCheckFailedException (as this is a catch-all for any condition).

        :arg data: Data to upsert.
        :param shallow: Activates shallow update mode (so that whole nested items are updated at once).
        :return: New item dict.
        """
        return api.Upsert(self.provider, self).invoke_sync(data=data, shallow=shallow)

    def batch_get(self, keys: List[Dict[str, Any]]) -> Dict[str, Any]:
        """
        Performs a batch get operation against the table.

        :arg keys: List of keys to get.
        :return: Batch get result.
        """
        return api.BatchGet(self.provider, self).invoke_sync(keys=keys)
@@ -0,0 +1,2 @@
1
+ from .dict import *
2
+ from .validators import *
@@ -0,0 +1,21 @@
1
+ __all__ = ['merge_dicts']
2
+
3
+ from collections.abc import Mapping
4
+ from typing import Dict, Any
5
+
6
+
7
+ def merge_dicts(d1: Dict[str, Any], d2: Dict[str, Any] | Mapping):
8
+ """
9
+ Deeply merge two dicts.
10
+
11
+ :param d1: First dict.
12
+ :param d2: Second dict.
13
+ :return: Merged dict.
14
+ """
15
+ result = d1.copy()
16
+ for k, v in d2.items():
17
+ if k in result and isinstance(result[k], Mapping) and isinstance(v, Mapping):
18
+ result[k] = merge_dicts(result[k], v)
19
+ else:
20
+ result[k] = v
21
+ return result