boto3-assist 0.30.0__py3-none-any.whl → 0.31.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- boto3_assist/dynamodb/dynamodb.py +692 -34
- boto3_assist/dynamodb/dynamodb_index.py +258 -0
- boto3_assist/dynamodb/dynamodb_key.py +19 -0
- boto3_assist/dynamodb/dynamodb_model_base.py +10 -3
- boto3_assist/version.py +1 -1
- {boto3_assist-0.30.0.dist-info → boto3_assist-0.31.0.dist-info}/METADATA +1 -1
- {boto3_assist-0.30.0.dist-info → boto3_assist-0.31.0.dist-info}/RECORD +10 -10
- {boto3_assist-0.30.0.dist-info → boto3_assist-0.31.0.dist-info}/WHEEL +0 -0
- {boto3_assist-0.30.0.dist-info → boto3_assist-0.31.0.dist-info}/licenses/LICENSE-EXPLAINED.txt +0 -0
- {boto3_assist-0.30.0.dist-info → boto3_assist-0.31.0.dist-info}/licenses/LICENSE.txt +0 -0
boto3_assist/dynamodb/dynamodb.py
CHANGED
@@ -86,9 +86,13 @@ class DynamoDB(DynamoDBConnection):
         table_name: str,
         source: Optional[str] = None,
         fail_if_exists: bool = False,
+        condition_expression: Optional[str] = None,
+        expression_attribute_names: Optional[dict] = None,
+        expression_attribute_values: Optional[dict] = None,
     ) -> dict:
         """
-        Save an item to the database
+        Save an item to the database with optional conditional expressions.
+
         Args:
             item (dict): DynamoDB Dictionary Object or DynamoDBModelBase.
                 Supports the "client" or "resource" syntax
@@ -97,14 +101,37 @@ class DynamoDB(DynamoDBConnection):
             fail_if_exists (bool, optional): Only allow it to insert once.
                 Fail if it already exits. This is useful for loggers, historical records,
                 tasks, etc. that should only be created once
+            condition_expression (str, optional): Custom condition expression.
+                Example: "attribute_not_exists(#pk)" or "#version = :expected_version"
+            expression_attribute_names (dict, optional): Attribute name mappings.
+                Example: {"#version": "version", "#status": "status"}
+            expression_attribute_values (dict, optional): Attribute value mappings.
+                Example: {":expected_version": 1, ":active": "active"}
 
         Raises:
             ClientError: Client specific errors
+            RuntimeError: Conditional check failed
             Exception: Any Error Raised
 
         Returns:
             dict: The Response from DynamoDB's put_item actions.
                 It does not return the saved object, only the response.
+
+        Examples:
+            >>> # Simple save
+            >>> db.save(item=user, table_name="users")
+
+            >>> # Prevent duplicates
+            >>> db.save(item=user, table_name="users", fail_if_exists=True)
+
+            >>> # Optimistic locking with version check
+            >>> db.save(
+            ...     item=user,
+            ...     table_name="users",
+            ...     condition_expression="#version = :expected_version",
+            ...     expression_attribute_names={"#version": "version"},
+            ...     expression_attribute_values={":expected_version": 5}
+            ... )
         """
         response: Dict[str, Any] = {}
 
@@ -128,6 +155,10 @@ class DynamoDB(DynamoDBConnection):
 
             if isinstance(item, dict):
                 self.__log_item_size(item=item)
+
+                # Convert native numeric types to Decimal for DynamoDB
+                # (DynamoDB doesn't accept float, requires Decimal)
+                item = DecimalConversionUtility.convert_native_types_to_decimals(item)
 
             if isinstance(item, dict) and isinstance(next(iter(item.values())), dict):
                 # Use boto3.client syntax
@@ -136,33 +167,65 @@ class DynamoDB(DynamoDBConnection):
                     "TableName": table_name,
                     "Item": item,
                 }
-
+
+                # Handle conditional expressions
+                if condition_expression:
+                    # Custom condition provided
+                    params["ConditionExpression"] = condition_expression
+                    if expression_attribute_names:
+                        params["ExpressionAttributeNames"] = expression_attribute_names
+                    if expression_attribute_values:
+                        params["ExpressionAttributeValues"] = expression_attribute_values
+                elif fail_if_exists:
                     # only insert if the item does *not* already exist
                     params["ConditionExpression"] = (
                         "attribute_not_exists(#pk) AND attribute_not_exists(#sk)"
                     )
                     params["ExpressionAttributeNames"] = {"#pk": "pk", "#sk": "sk"}
+
                 response = dict(self.dynamodb_client.put_item(**params))
 
             else:
                 # Use boto3.resource syntax
                 table = self.dynamodb_resource.Table(table_name)
-
-
-
-
-
-
+
+                # Build put_item parameters
+                put_params = {"Item": item}
+
+                # Handle conditional expressions
+                if condition_expression:
+                    # Custom condition provided
+                    # Convert string condition to boto3 condition object if needed
+                    put_params["ConditionExpression"] = condition_expression
+                    if expression_attribute_names:
+                        put_params["ExpressionAttributeNames"] = expression_attribute_names
+                    if expression_attribute_values:
+                        put_params["ExpressionAttributeValues"] = expression_attribute_values
+                elif fail_if_exists:
+                    put_params["ConditionExpression"] = (
+                        Attr("pk").not_exists() & Attr("sk").not_exists()
+                    )
+
+                response = dict(table.put_item(**put_params))
 
         except ClientError as e:
-
-
-
-
-
-
-
-
+            error_code = e.response["Error"]["Code"]
+
+            if error_code == "ConditionalCheckFailedException":
+                # Enhanced error message for conditional check failures
+                if fail_if_exists:
+                    raise RuntimeError(
+                        f"Item with pk={item['pk']} already exists in {table_name}"
+                    ) from e
+                elif condition_expression:
+                    raise RuntimeError(
+                        f"Conditional check failed for item in {table_name}. "
+                        f"Condition: {condition_expression}"
+                    ) from e
+                else:
+                    raise RuntimeError(
+                        f"Conditional check failed for item in {table_name}"
+                    ) from e
 
             logger.exception(
                 {"source": f"{source}", "metric_filter": "put_item", "error": str(e)}
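
The conditional-save additions above can be exercised from calling code roughly as follows. This is a hedged, caller-side sketch and is not part of the package diff; it assumes a DynamoDB() instance can be constructed with default connection settings, that the table uses pk/sk keys, and that the item carries a numeric "version" attribute.

    # Hypothetical optimistic-locking retry around the new save() parameters.
    from boto3_assist.dynamodb.dynamodb import DynamoDB

    db = DynamoDB()  # assumed default-constructible connection
    user = {"pk": "user#123", "sk": "user#123", "name": "Alice", "version": 5}

    try:
        db.save(
            item={**user, "version": user["version"] + 1},
            table_name="users",
            condition_expression="#version = :expected_version",
            expression_attribute_names={"#version": "version"},
            expression_attribute_values={":expected_version": user["version"]},
        )
    except RuntimeError:
        # Per the new error handling above, a failed conditional check is
        # re-raised as RuntimeError; reload the item and retry, or surface a conflict.
        pass

Note that fail_if_exists keeps its previous behavior and is only consulted when no custom condition_expression is supplied.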
@@ -309,29 +372,110 @@ class DynamoDB(DynamoDBConnection):
         table_name: str,
         key: dict,
         update_expression: str,
-        expression_attribute_values: dict,
+        expression_attribute_values: Optional[dict] = None,
+        expression_attribute_names: Optional[dict] = None,
+        condition_expression: Optional[str] = None,
+        return_values: str = "NONE",
     ) -> dict:
-        """
-
+        """
+        Update an item in DynamoDB with an update expression.
+
+        Update expressions allow you to modify specific attributes without replacing
+        the entire item. Supports SET, ADD, REMOVE, and DELETE operations.
+
         Args:
-            table_name
-            key
-            update_expression
-            expression_attribute_values
-
+            table_name: The DynamoDB table name
+            key: Primary key dict, e.g., {"pk": "user#123", "sk": "user#123"}
+            update_expression: Update expression string, e.g., "SET #name = :name, age = age + :inc"
+            expression_attribute_values: Value mappings, e.g., {":name": "Alice", ":inc": 1}
+            expression_attribute_names: Attribute name mappings for reserved words, e.g., {"#name": "name"}
+            condition_expression: Optional condition that must be met, e.g., "attribute_exists(pk)"
+            return_values: What to return after update:
+                - "NONE" (default): Nothing
+                - "ALL_OLD": All attributes before update
+                - "UPDATED_OLD": Only updated attributes before update
+                - "ALL_NEW": All attributes after update
+                - "UPDATED_NEW": Only updated attributes after update
+
         Returns:
-            dict:
+            dict: DynamoDB response with optional Attributes based on return_values
+
+        Raises:
+            RuntimeError: If condition expression fails
+            ClientError: For other DynamoDB errors
+
+        Examples:
+            >>> # Simple SET operation
+            >>> db.update_item(
+            ...     table_name="users",
+            ...     key={"pk": "user#123", "sk": "user#123"},
+            ...     update_expression="SET email = :email",
+            ...     expression_attribute_values={":email": "new@example.com"}
+            ... )
+
+            >>> # Atomic counter
+            >>> db.update_item(
+            ...     table_name="users",
+            ...     key={"pk": "user#123", "sk": "user#123"},
+            ...     update_expression="ADD view_count :inc",
+            ...     expression_attribute_values={":inc": 1}
+            ... )
+
+            >>> # Multiple operations with reserved word
+            >>> db.update_item(
+            ...     table_name="users",
+            ...     key={"pk": "user#123", "sk": "user#123"},
+            ...     update_expression="SET #status = :status, updated_at = :now REMOVE temp_field",
+            ...     expression_attribute_names={"#status": "status"},
+            ...     expression_attribute_values={":status": "active", ":now": "2024-10-15"}
+            ... )
+
+            >>> # Conditional update with return value
+            >>> response = db.update_item(
+            ...     table_name="users",
+            ...     key={"pk": "user#123", "sk": "user#123"},
+            ...     update_expression="SET email = :email",
+            ...     expression_attribute_values={":email": "new@example.com"},
+            ...     condition_expression="attribute_exists(pk)",
+            ...     return_values="ALL_NEW"
+            ... )
+            >>> updated_user = response['Attributes']
         """
         table = self.dynamodb_resource.Table(table_name)
-
-
-
-
-
-
-
-
-
+
+        # Build update parameters
+        params = {
+            "Key": key,
+            "UpdateExpression": update_expression,
+            "ReturnValues": return_values
+        }
+
+        if expression_attribute_values:
+            params["ExpressionAttributeValues"] = expression_attribute_values
+
+        if expression_attribute_names:
+            params["ExpressionAttributeNames"] = expression_attribute_names
+
+        if condition_expression:
+            params["ConditionExpression"] = condition_expression
+
+        try:
+            response = dict(table.update_item(**params))
+
+            # Apply decimal conversion if response contains attributes
+            return self._apply_decimal_conversion(response)
+
+        except ClientError as e:
+            error_code = e.response["Error"]["Code"]
+
+            if error_code == "ConditionalCheckFailedException":
+                raise RuntimeError(
+                    f"Conditional check failed for update in {table_name}. "
+                    f"Condition: {condition_expression}"
+                ) from e
+
+            logger.exception(f"Error in update_item: {str(e)}")
+            raise
 
     def query(
         self,
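
A minimal caller-side sketch for the reworked update_item signature (not part of the diff; it assumes the same db instance and pk/sk key layout used in the docstring examples): bump an atomic counter and read the new value back via return_values.

    # Hypothetical usage of the new return_values parameter.
    response = db.update_item(
        table_name="users",
        key={"pk": "user#123", "sk": "user#123"},
        update_expression="ADD view_count :inc",
        expression_attribute_values={":inc": 1},
        return_values="UPDATED_NEW",
    )
    # The new _apply_decimal_conversion call normalizes numeric types in the
    # response before it is returned.
    new_count = response.get("Attributes", {}).get("view_count")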
@@ -546,3 +690,517 @@ class DynamoDB(DynamoDBConnection):
         """
 
         return response.get("Item", {})
+
+    def batch_get_item(
+        self,
+        keys: list[dict],
+        table_name: str,
+        *,
+        projection_expression: Optional[str] = None,
+        expression_attribute_names: Optional[dict] = None,
+        consistent_read: bool = False,
+    ) -> dict:
+        """
+        Retrieve multiple items from DynamoDB in a single request.
+
+        DynamoDB allows up to 100 items per batch_get_item call. This method
+        automatically chunks larger requests and handles unprocessed keys with
+        exponential backoff retry logic.
+
+        Args:
+            keys: List of key dictionaries. Each dict must contain the primary key
+                (and sort key if applicable) for the items to retrieve.
+                Example: [{"pk": "user#1", "sk": "user#1"}, {"pk": "user#2", "sk": "user#2"}]
+            table_name: The DynamoDB table name
+            projection_expression: Optional comma-separated list of attributes to retrieve
+            expression_attribute_names: Optional dict mapping attribute name placeholders to actual names
+            consistent_read: If True, uses strongly consistent reads (costs more RCUs)
+
+        Returns:
+            dict: Response containing:
+                - 'Items': List of retrieved items (with Decimal conversion applied)
+                - 'UnprocessedKeys': Any keys that couldn't be processed after retries
+                - 'ConsumedCapacity': Capacity units consumed (if available)
+
+        Example:
+            >>> keys = [
+            ...     {"pk": "user#user-001", "sk": "user#user-001"},
+            ...     {"pk": "user#user-002", "sk": "user#user-002"},
+            ...     {"pk": "user#user-003", "sk": "user#user-003"}
+            ... ]
+            >>> response = db.batch_get_item(keys=keys, table_name="users")
+            >>> items = response['Items']
+            >>> print(f"Retrieved {len(items)} items")
+
+        Note:
+            - Maximum 100 items per request (automatically chunked)
+            - Each item can be up to 400 KB
+            - Maximum 16 MB total response size
+            - Unprocessed keys are automatically retried with exponential backoff
+        """
+        import time
+
+        all_items = []
+        unprocessed_keys = []
+
+        # DynamoDB limit: 100 items per batch_get_item call
+        BATCH_SIZE = 100
+
+        # Chunk keys into batches of 100
+        for i in range(0, len(keys), BATCH_SIZE):
+            batch_keys = keys[i:i + BATCH_SIZE]
+
+            # Build request parameters
+            request_items = {
+                table_name: {
+                    'Keys': batch_keys,
+                    'ConsistentRead': consistent_read
+                }
+            }
+
+            # Add projection if provided
+            if projection_expression:
+                request_items[table_name]['ProjectionExpression'] = projection_expression
+            if expression_attribute_names:
+                request_items[table_name]['ExpressionAttributeNames'] = expression_attribute_names
+
+            # Retry logic for unprocessed keys
+            max_retries = 5
+            retry_count = 0
+            backoff_time = 0.1  # Start with 100ms
+
+            while retry_count <= max_retries:
+                try:
+                    response = self.dynamodb_resource.meta.client.batch_get_item(
+                        RequestItems=request_items
+                    )
+
+                    # Collect items from this batch
+                    if 'Responses' in response and table_name in response['Responses']:
+                        batch_items = response['Responses'][table_name]
+                        all_items.extend(batch_items)
+
+                    # Check for unprocessed keys
+                    if 'UnprocessedKeys' in response and response['UnprocessedKeys']:
+                        if table_name in response['UnprocessedKeys']:
+                            unprocessed = response['UnprocessedKeys'][table_name]
+
+                            if retry_count < max_retries:
+                                # Retry with exponential backoff
+                                logger.warning(
+                                    f"Batch get has {len(unprocessed['Keys'])} unprocessed keys. "
+                                    f"Retrying in {backoff_time}s (attempt {retry_count + 1}/{max_retries})"
+                                )
+                                time.sleep(backoff_time)
+                                request_items = {table_name: unprocessed}
+                                backoff_time *= 2  # Exponential backoff
+                                retry_count += 1
+                                continue
+                            else:
+                                # Max retries reached, collect remaining unprocessed keys
+                                logger.error(
+                                    f"Max retries reached. {len(unprocessed['Keys'])} keys remain unprocessed"
+                                )
+                                unprocessed_keys.extend(unprocessed['Keys'])
+                                break
+                    else:
+                        # No unprocessed keys, we're done with this batch
+                        break
+
+                except ClientError as e:
+                    error_code = e.response['Error']['Code']
+                    if error_code == 'ProvisionedThroughputExceededException' and retry_count < max_retries:
+                        logger.warning(
+                            f"Throughput exceeded. Retrying in {backoff_time}s (attempt {retry_count + 1}/{max_retries})"
+                        )
+                        time.sleep(backoff_time)
+                        backoff_time *= 2
+                        retry_count += 1
+                        continue
+                    else:
+                        logger.exception(f"Error in batch_get_item: {str(e)}")
+                        raise
+
+        # Apply decimal conversion to all items
+        result = {
+            'Items': all_items,
+            'Count': len(all_items),
+            'UnprocessedKeys': unprocessed_keys
+        }
+
+        return self._apply_decimal_conversion(result)
+
|
|
834
|
+
self,
|
|
835
|
+
items: list[dict],
|
|
836
|
+
table_name: str,
|
|
837
|
+
*,
|
|
838
|
+
operation: str = "put"
|
|
839
|
+
) -> dict:
|
|
840
|
+
"""
|
|
841
|
+
Write or delete multiple items in a single request.
|
|
842
|
+
|
|
843
|
+
DynamoDB allows up to 25 write operations per batch_write_item call.
|
|
844
|
+
This method automatically chunks larger requests and handles unprocessed
|
|
845
|
+
items with exponential backoff retry logic.
|
|
846
|
+
|
|
847
|
+
Args:
|
|
848
|
+
items: List of items to write or delete
|
|
849
|
+
- For 'put': Full item dictionaries
|
|
850
|
+
- For 'delete': Key-only dictionaries (pk, sk)
|
|
851
|
+
table_name: The DynamoDB table name
|
|
852
|
+
operation: Either 'put' (default) or 'delete'
|
|
853
|
+
|
|
854
|
+
Returns:
|
|
855
|
+
dict: Response containing:
|
|
856
|
+
- 'UnprocessedItems': Items that couldn't be processed after retries
|
|
857
|
+
- 'ProcessedCount': Number of successfully processed items
|
|
858
|
+
- 'UnprocessedCount': Number of unprocessed items
|
|
859
|
+
|
|
860
|
+
Example (Put):
|
|
861
|
+
>>> items = [
|
|
862
|
+
... {"pk": "user#1", "sk": "user#1", "name": "Alice"},
|
|
863
|
+
... {"pk": "user#2", "sk": "user#2", "name": "Bob"},
|
|
864
|
+
... {"pk": "user#3", "sk": "user#3", "name": "Charlie"}
|
|
865
|
+
... ]
|
|
866
|
+
>>> response = db.batch_write_item(items=items, table_name="users")
|
|
867
|
+
>>> print(f"Processed {response['ProcessedCount']} items")
|
|
868
|
+
|
|
869
|
+
Example (Delete):
|
|
870
|
+
>>> keys = [
|
|
871
|
+
... {"pk": "user#1", "sk": "user#1"},
|
|
872
|
+
... {"pk": "user#2", "sk": "user#2"}
|
|
873
|
+
... ]
|
|
874
|
+
>>> response = db.batch_write_item(
|
|
875
|
+
... items=keys,
|
|
876
|
+
... table_name="users",
|
|
877
|
+
... operation="delete"
|
|
878
|
+
... )
|
|
879
|
+
|
|
880
|
+
Note:
|
|
881
|
+
- Maximum 25 operations per request (automatically chunked)
|
|
882
|
+
- Each item can be up to 400 KB
|
|
883
|
+
- Maximum 16 MB total request size
|
|
884
|
+
- No conditional writes in batch operations
|
|
885
|
+
- Unprocessed items are automatically retried with exponential backoff
|
|
886
|
+
"""
|
|
887
|
+
import time
|
|
888
|
+
|
|
889
|
+
if operation not in ['put', 'delete']:
|
|
890
|
+
raise ValueError(f"Invalid operation '{operation}'. Must be 'put' or 'delete'")
|
|
891
|
+
|
|
892
|
+
# DynamoDB limit: 25 operations per batch_write_item call
|
|
893
|
+
BATCH_SIZE = 25
|
|
894
|
+
|
|
895
|
+
total_processed = 0
|
|
896
|
+
all_unprocessed = []
|
|
897
|
+
|
|
898
|
+
# Chunk items into batches of 25
|
|
899
|
+
for i in range(0, len(items), BATCH_SIZE):
|
|
900
|
+
batch_items = items[i:i + BATCH_SIZE]
|
|
901
|
+
|
|
902
|
+
# Build request items
|
|
903
|
+
write_requests = []
|
|
904
|
+
for item in batch_items:
|
|
905
|
+
if operation == 'put':
|
|
906
|
+
write_requests.append({'PutRequest': {'Item': item}})
|
|
907
|
+
else: # delete
|
|
908
|
+
write_requests.append({'DeleteRequest': {'Key': item}})
|
|
909
|
+
|
|
910
|
+
request_items = {table_name: write_requests}
|
|
911
|
+
|
|
912
|
+
# Retry logic for unprocessed items
|
|
913
|
+
max_retries = 5
|
|
914
|
+
retry_count = 0
|
|
915
|
+
backoff_time = 0.1 # Start with 100ms
|
|
916
|
+
|
|
917
|
+
while retry_count <= max_retries:
|
|
918
|
+
try:
|
|
919
|
+
response = self.dynamodb_resource.meta.client.batch_write_item(
|
|
920
|
+
RequestItems=request_items
|
|
921
|
+
)
|
|
922
|
+
|
|
923
|
+
# Count processed items from this batch
|
|
924
|
+
processed_in_batch = len(batch_items)
|
|
925
|
+
|
|
926
|
+
# Check for unprocessed items
|
|
927
|
+
if 'UnprocessedItems' in response and response['UnprocessedItems']:
|
|
928
|
+
if table_name in response['UnprocessedItems']:
|
|
929
|
+
unprocessed = response['UnprocessedItems'][table_name]
|
|
930
|
+
unprocessed_count = len(unprocessed)
|
|
931
|
+
processed_in_batch -= unprocessed_count
|
|
932
|
+
|
|
933
|
+
if retry_count < max_retries:
|
|
934
|
+
# Retry with exponential backoff
|
|
935
|
+
logger.warning(
|
|
936
|
+
f"Batch write has {unprocessed_count} unprocessed items. "
|
|
937
|
+
f"Retrying in {backoff_time}s (attempt {retry_count + 1}/{max_retries})"
|
|
938
|
+
)
|
|
939
|
+
time.sleep(backoff_time)
|
|
940
|
+
request_items = {table_name: unprocessed}
|
|
941
|
+
backoff_time *= 2 # Exponential backoff
|
|
942
|
+
retry_count += 1
|
|
943
|
+
continue
|
|
944
|
+
else:
|
|
945
|
+
# Max retries reached
|
|
946
|
+
logger.error(
|
|
947
|
+
f"Max retries reached. {unprocessed_count} items remain unprocessed"
|
|
948
|
+
)
|
|
949
|
+
all_unprocessed.extend(unprocessed)
|
|
950
|
+
break
|
|
951
|
+
|
|
952
|
+
# Successfully processed this batch
|
|
953
|
+
total_processed += processed_in_batch
|
|
954
|
+
break
|
|
955
|
+
|
|
956
|
+
except ClientError as e:
|
|
957
|
+
error_code = e.response['Error']['Code']
|
|
958
|
+
if error_code == 'ProvisionedThroughputExceededException' and retry_count < max_retries:
|
|
959
|
+
logger.warning(
|
|
960
|
+
f"Throughput exceeded. Retrying in {backoff_time}s (attempt {retry_count + 1}/{max_retries})"
|
|
961
|
+
)
|
|
962
|
+
time.sleep(backoff_time)
|
|
963
|
+
backoff_time *= 2
|
|
964
|
+
retry_count += 1
|
|
965
|
+
continue
|
|
966
|
+
else:
|
|
967
|
+
logger.exception(f"Error in batch_write_item: {str(e)}")
|
|
968
|
+
raise
|
|
969
|
+
|
|
970
|
+
return {
|
|
971
|
+
'ProcessedCount': total_processed,
|
|
972
|
+
'UnprocessedCount': len(all_unprocessed),
|
|
973
|
+
'UnprocessedItems': all_unprocessed
|
|
974
|
+
}
|
|
975
|
+
|
|
976
|
+
+    def transact_write_items(
+        self,
+        operations: list[dict],
+        *,
+        client_request_token: Optional[str] = None,
+        return_consumed_capacity: str = "NONE",
+        return_item_collection_metrics: str = "NONE"
+    ) -> dict:
+        """
+        Execute multiple write operations as an atomic transaction.
+
+        All operations succeed or all fail together. This is critical for
+        maintaining data consistency across multiple items. Supports up to
+        100 operations per transaction (increased from 25 in 2023).
+
+        Args:
+            operations: List of transaction operation dictionaries. Each dict must
+                have one of: 'Put', 'Update', 'Delete', or 'ConditionCheck'
+                Example:
+                    [
+                        {
+                            'Put': {
+                                'TableName': 'users',
+                                'Item': {'pk': 'user#1', 'sk': 'user#1', 'name': 'Alice'}
+                            }
+                        },
+                        {
+                            'Update': {
+                                'TableName': 'accounts',
+                                'Key': {'pk': 'account#1', 'sk': 'account#1'},
+                                'UpdateExpression': 'SET balance = balance - :amount',
+                                'ExpressionAttributeValues': {':amount': 100}
+                            }
+                        }
+                    ]
+            client_request_token: Optional idempotency token for retry safety
+            return_consumed_capacity: 'INDEXES', 'TOTAL', or 'NONE' (default)
+            return_item_collection_metrics: 'SIZE' or 'NONE' (default)
+
+        Returns:
+            dict: Transaction response containing:
+                - 'ConsumedCapacity': Capacity consumed (if requested)
+                - 'ItemCollectionMetrics': Metrics (if requested)
+
+        Raises:
+            TransactionCanceledException: If transaction fails due to:
+                - Conditional check failure
+                - Item size too large
+                - Throughput exceeded
+                - Duplicate request
+
+        Example:
+            >>> # Transfer money between accounts atomically
+            >>> operations = [
+            ...     {
+            ...         'Update': {
+            ...             'TableName': 'accounts',
+            ...             'Key': {'pk': 'account#123', 'sk': 'account#123'},
+            ...             'UpdateExpression': 'SET balance = balance - :amount',
+            ...             'ExpressionAttributeValues': {':amount': 100},
+            ...             'ConditionExpression': 'balance >= :amount'
+            ...         }
+            ...     },
+            ...     {
+            ...         'Update': {
+            ...             'TableName': 'accounts',
+            ...             'Key': {'pk': 'account#456', 'sk': 'account#456'},
+            ...             'UpdateExpression': 'SET balance = balance + :amount',
+            ...             'ExpressionAttributeValues': {':amount': 100}
+            ...         }
+            ...     }
+            ... ]
+            >>> response = db.transact_write_items(operations=operations)
+
+        Note:
+            - Maximum 100 operations per transaction (AWS limit as of 2023)
+            - Each item can be up to 400 KB
+            - Maximum 4 MB total transaction size
+            - Cannot target same item multiple times in one transaction
+            - All operations must succeed or all fail (atomic)
+            - Uses strongly consistent reads for condition checks
+        """
+        if not operations:
+            raise ValueError("At least one operation is required")
+
+        if len(operations) > 100:
+            raise ValueError(
+                f"Transaction supports maximum 100 operations, got {len(operations)}. "
+                "Consider splitting into multiple transactions."
+            )
+
+        params = {
+            'TransactItems': operations,
+            'ReturnConsumedCapacity': return_consumed_capacity,
+            'ReturnItemCollectionMetrics': return_item_collection_metrics
+        }
+
+        if client_request_token:
+            params['ClientRequestToken'] = client_request_token
+
+        try:
+            response = self.dynamodb_resource.meta.client.transact_write_items(**params)
+            return response
+
+        except ClientError as e:
+            error_code = e.response['Error']['Code']
+
+            if error_code == 'TransactionCanceledException':
+                # Parse cancellation reasons
+                reasons = e.response.get('CancellationReasons', [])
+                logger.error(f"Transaction cancelled. Reasons: {reasons}")
+
+                # Enhance error message with specific reason
+                if reasons:
+                    reason_messages = []
+                    for idx, reason in enumerate(reasons):
+                        if reason.get('Code'):
+                            reason_messages.append(
+                                f"Operation {idx}: {reason['Code']} - {reason.get('Message', '')}"
+                            )
+
+                    raise RuntimeError(
+                        f"Transaction failed: {'; '.join(reason_messages)}"
+                    ) from e
+
+            logger.exception(f"Error in transact_write_items: {str(e)}")
+            raise
+
+    def transact_get_items(
+        self,
+        keys: list[dict],
+        *,
+        return_consumed_capacity: str = "NONE"
+    ) -> dict:
+        """
+        Retrieve multiple items with strong consistency as a transaction.
+
+        Unlike batch_get_item, this provides a consistent snapshot across all items
+        using strongly consistent reads. Maximum 100 items per transaction.
+
+        Args:
+            keys: List of get operation dictionaries. Each dict must specify:
+                - 'Key': The item's primary key
+                - 'TableName': The table name
+                - 'ProjectionExpression': Optional projection
+                - 'ExpressionAttributeNames': Optional attribute names
+                Example:
+                    [
+                        {
+                            'Key': {'pk': 'user#1', 'sk': 'user#1'},
+                            'TableName': 'users'
+                        },
+                        {
+                            'Key': {'pk': 'order#123', 'sk': 'order#123'},
+                            'TableName': 'orders',
+                            'ProjectionExpression': 'id,total,#status',
+                            'ExpressionAttributeNames': {'#status': 'status'}
+                        }
+                    ]
+            return_consumed_capacity: 'INDEXES', 'TOTAL', or 'NONE' (default)
+
+        Returns:
+            dict: Response containing:
+                - 'Items': List of retrieved items (with Decimal conversion)
+                - 'ConsumedCapacity': Capacity consumed (if requested)
+
+        Example:
+            >>> keys = [
+            ...     {
+            ...         'Key': {'pk': 'user#123', 'sk': 'user#123'},
+            ...         'TableName': 'users'
+            ...     },
+            ...     {
+            ...         'Key': {'pk': 'account#123', 'sk': 'account#123'},
+            ...         'TableName': 'accounts'
+            ...     }
+            ... ]
+            >>> response = db.transact_get_items(keys=keys)
+            >>> items = response['Items']
+
+        Note:
+            - Maximum 100 items per transaction
+            - Always uses strongly consistent reads
+            - More expensive than batch_get_item (2x RCUs)
+            - Provides snapshot isolation across items
+            - Cannot be combined with transact_write_items
+        """
+        if not keys:
+            raise ValueError("At least one key is required")
+
+        if len(keys) > 100:
+            raise ValueError(
+                f"Transaction supports maximum 100 items, got {len(keys)}. "
+                "Use batch_get_item for larger requests."
+            )
+
+        # Build transaction get items
+        transact_items = []
+        for key_spec in keys:
+            get_item = {'Get': key_spec}
+            transact_items.append(get_item)
+
+        params = {
+            'TransactItems': transact_items,
+            'ReturnConsumedCapacity': return_consumed_capacity
+        }
+
+        try:
+            response = self.dynamodb_resource.meta.client.transact_get_items(**params)
+
+            # Extract items from response
+            items = []
+            if 'Responses' in response:
+                for item_response in response['Responses']:
+                    if 'Item' in item_response:
+                        items.append(item_response['Item'])
+
+            result = {
+                'Items': items,
+                'Count': len(items)
+            }
+
+            if 'ConsumedCapacity' in response:
+                result['ConsumedCapacity'] = response['ConsumedCapacity']
+
+            # Apply decimal conversion
+            return self._apply_decimal_conversion(result)
+
+        except ClientError as e:
+            logger.exception(f"Error in transact_get_items: {str(e)}")
+            raise
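
The new batch and transaction helpers chunk and retry automatically, as the docstrings above describe. The following is a hedged caller-side sketch (not part of the diff) of how they might fit together; it reuses the assumed db instance and pk/sk key layout from the earlier examples, and the 120-item lists simply exceed the 25/100-item limits so the automatic chunking is exercised.

    # Hypothetical: seed a table, then read the rows back.
    items = [{"pk": f"user#{i}", "sk": f"user#{i}", "name": f"user-{i}"} for i in range(120)]
    write_result = db.batch_write_item(items=items, table_name="users")
    print(write_result["ProcessedCount"], write_result["UnprocessedCount"])

    keys = [{"pk": f"user#{i}", "sk": f"user#{i}"} for i in range(120)]
    read_result = db.batch_get_item(keys=keys, table_name="users")
    print(read_result["Count"], len(read_result["UnprocessedKeys"]))

    # For all-or-nothing writes, transact_write_items wraps TransactWriteItems;
    # a failed ConditionExpression cancels every operation in the transaction.
    db.transact_write_items(
        operations=[
            {"Put": {"TableName": "users", "Item": {"pk": "user#1", "sk": "user#1"}}},
            {"Delete": {"TableName": "users", "Key": {"pk": "user#2", "sk": "user#2"}}},
        ]
    )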
boto3_assist/dynamodb/dynamodb_index.py
CHANGED
@@ -149,6 +149,149 @@ class DynamoDBIndex:
     def sort_key(self, value: DynamoDBKey | None):
         self.__sk = value
 
+    def to_dict(self, include_sort_key: bool = True) -> dict[str, str]:
+        """
+        Return a dictionary representation of this index's keys for debugging.
+
+        This is particularly useful for:
+        - Debugging key generation logic
+        - Logging DynamoDB operations
+        - Verifying composite key structure
+        - Testing key values
+
+        Args:
+            include_sort_key: Whether to include the sort key (default: True)
+
+        Returns:
+            Dictionary with partition key and optionally sort key.
+
+        Example:
+            >>> index = DynamoDBIndex()
+            >>> index.partition_key.attribute_name = "pk"
+            >>> index.partition_key.value = lambda: "user#123"
+            >>> index.sort_key.attribute_name = "sk"
+            >>> index.sort_key.value = lambda: "user#123"
+            >>> index.to_dict()
+            {'pk': 'user#123', 'sk': 'user#123'}
+
+            >>> # Partition key only
+            >>> index.to_dict(include_sort_key=False)
+            {'pk': 'user#123'}
+
+            >>> # Useful for debugging
+            >>> print(f"Querying with key: {index.to_dict()}")
+            Querying with key: {'pk': 'user#123', 'sk': 'user#123'}
+        """
+        result = {}
+
+        # Always include partition key
+        if self.__pk:
+            result[self.partition_key.attribute_name] = self.partition_key.value
+
+        # Optionally include sort key
+        if include_sort_key and self.__sk and self.sort_key.attribute_name:
+            try:
+                result[self.sort_key.attribute_name] = self.sort_key.value
+            except ValueError:
+                # Sort key value not set, skip it
+                pass
+
+        return result
+
+    def debug_info(
+        self,
+        *,
+        include_sort_key: bool = True,
+        condition: str = "begins_with",
+        low_value: Any = None,
+        high_value: Any = None,
+    ) -> dict[str, Any]:
+        """
+        Return detailed debugging information about this index and how it would be queried.
+
+        This is useful for understanding:
+        - What keys are defined
+        - What condition would be used in a query
+        - What the actual key values are
+        - What index name would be used
+
+        Args:
+            include_sort_key: Whether to include the sort key (default: True)
+            condition: The condition type being used (default: "begins_with")
+            low_value: Low value for "between" condition
+            high_value: High value for "between" condition
+
+        Returns:
+            Dictionary with debugging information including keys, condition, and index details.
+
+        Example:
+            >>> index = product.indexes.get("gsi1")
+            >>> debug = index.debug_info(condition="begins_with")
+            >>> print(debug)
+            {
+                'index_name': 'gsi1',
+                'partition_key': {
+                    'attribute': 'gsi1_pk',
+                    'value': 'category#electronics'
+                },
+                'sort_key': {
+                    'attribute': 'gsi1_sk',
+                    'value': 'product#prod_123',
+                    'condition': 'begins_with'
+                },
+                'keys_dict': {'gsi1_pk': 'category#electronics', 'gsi1_sk': 'product#prod_123'},
+                'query_type': 'GSI' or 'Primary'
+            }
+
+            >>> # Check condition type
+            >>> if debug['sort_key']['condition'] == 'begins_with':
+            ...     print("This query uses begins_with")
+        """
+        result = {
+            'index_name': self.name,
+            'query_type': 'Primary' if self.name == DynamoDBIndexes.PRIMARY_INDEX else 'GSI/LSI'
+        }
+
+        # Partition key info
+        if self.__pk:
+            result['partition_key'] = {
+                'attribute': self.partition_key.attribute_name,
+                'value': self.partition_key.value
+            }
+
+        # Sort key info with condition
+        if include_sort_key and self.__sk and self.sort_key.attribute_name:
+            try:
+                sk_info = {
+                    'attribute': self.sort_key.attribute_name,
+                    'value': self.sort_key.value,
+                    'condition': condition
+                }
+
+                # Add range info for between condition
+                if condition == "between" and low_value is not None and high_value is not None:
+                    sk_info['low_value'] = low_value
+                    sk_info['high_value'] = high_value
+                    sk_info['full_range'] = {
+                        'low': f"{self.sort_key.value}{low_value}",
+                        'high': f"{self.sort_key.value}{high_value}"
+                    }
+
+                result['sort_key'] = sk_info
+            except ValueError:
+                # Sort key value not set
+                result['sort_key'] = {
+                    'attribute': self.sort_key.attribute_name,
+                    'value': None,
+                    'condition': condition,
+                    'note': 'Sort key value not set'
+                }
+
+        # Include the keys dictionary for convenience
+        result['keys_dict'] = self.to_dict(include_sort_key=include_sort_key)
+
+        return result
+
     def key(
         self,
         *,
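
A short hedged sketch (not part of the diff) of how the new to_dict() and debug_info() helpers might be used for logging before a query; it assumes a model object whose indexes collection exposes the index, as in the docstring example, and a standard logger.

    # Hypothetical: log the key an index would produce before querying.
    index = product.indexes.get("gsi1")   # product is assumed to be a DynamoDBModelBase
    logger.debug({"index_keys": index.to_dict(), "details": index.debug_info()})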
@@ -247,3 +390,118 @@ class DynamoDBIndex:
             )
 
         return key
+
+    @staticmethod
+    def extract_key_values(
+        key_expression: And | Equals,
+        index: Optional[str | DynamoDBIndex] = None
+    ) -> dict[str, Any]:
+        """
+        Extract key values and condition information from a boto3 Key condition expression.
+
+        This is useful for debugging queries at runtime to see exactly what values
+        are being used in the KeyConditionExpression.
+
+        Args:
+            key_expression: The Key condition expression (from key() or _build_query_key())
+            index: Optional index name (str) or DynamoDBIndex object to include in results
+
+        Returns:
+            Dictionary containing:
+            - index_name: str (if index parameter provided)
+            - partition_key: {'attribute': str, 'value': str}
+            - sort_key: {'attribute': str, 'value': str, 'operator': str, 'format': str} (if present)
+
+        Example:
+            >>> index = model.indexes.get("gsi1")
+            >>> key_expr = index.key(query_key=True, condition="begins_with")
+            >>> debug = DynamoDBIndex.extract_key_values(key_expr, index)
+            >>> print(debug)
+            {
+                'index_name': 'gsi1',
+                'partition_key': {
+                    'attribute': 'gsi1_pk',
+                    'value': 'inbox#support#status#open'
+                },
+                'sort_key': {
+                    'attribute': 'gsi1_sk',
+                    'value': 'priority#medium#ts#',
+                    'operator': 'begins_with',
+                    'format': '{operator}({0}, {1})'
+                }
+            }
+
+            >>> # Or pass just the index name
+            >>> debug = DynamoDBIndex.extract_key_values(key_expr, "gsi1")
+
+            >>> # Quick access to values
+            >>> pk_value = debug['partition_key']['value']
+            >>> sk_value = debug['sort_key']['value']
+            >>> condition = debug['sort_key']['operator']
+            >>> index_name = debug.get('index_name')
+        """
+        result = {}
+
+        # Include index name if provided
+        if index is not None:
+            if isinstance(index, str):
+                result['index_name'] = index
+            elif isinstance(index, DynamoDBIndex):
+                result['index_name'] = index.name
+
+        try:
+            # The key_expression._values is a list of conditions
+            # [0] is the partition key (Equals condition)
+            # [1] is the sort key (ComparisonCondition) if present
+
+            if hasattr(key_expression, '_values') and len(key_expression._values) > 0:
+                # Extract partition key
+                pk_condition = key_expression._values[0]
+                if hasattr(pk_condition, '_values') and len(pk_condition._values) >= 2:
+                    pk_attr = pk_condition._values[0]
+                    result['partition_key'] = {
+                        'attribute': pk_attr.name if hasattr(pk_attr, 'name') else str(pk_attr),
+                        'value': pk_condition._values[1]
+                    }
+
+                # Extract sort key if present
+                if len(key_expression._values) > 1:
+                    sk_condition = key_expression._values[1]
+                    if hasattr(sk_condition, '_values'):
+                        sk_attr = sk_condition._values[0] if len(sk_condition._values) > 0 else None
+                        sk_info = {
+                            'attribute': sk_attr.name if (sk_attr and hasattr(sk_attr, 'name')) else str(sk_attr),
+                        }
+
+                        # Get value(s)
+                        if len(sk_condition._values) > 1:
+                            sk_info['value'] = sk_condition._values[1]
+
+                            # For 'between' condition, there are two values
+                            if len(sk_condition._values) > 2:
+                                sk_info['value_low'] = sk_condition._values[1]
+                                sk_info['value_high'] = sk_condition._values[2]
+                                del sk_info['value']  # Remove single value key
+
+                        # Get operator and format
+                        if hasattr(sk_condition, 'expression_operator'):
+                            sk_info['operator'] = sk_condition.expression_operator
+                        if hasattr(sk_condition, 'expression_format'):
+                            sk_info['format'] = sk_condition.expression_format
+
+                        result['sort_key'] = sk_info
+
+            # If no _values found, handle single Equals condition (no sort key)
+            elif isinstance(key_expression, Equals):
+                if hasattr(key_expression, '_values') and len(key_expression._values) >= 2:
+                    pk_attr = key_expression._values[0]
+                    result['partition_key'] = {
+                        'attribute': pk_attr.name if hasattr(pk_attr, 'name') else str(pk_attr),
+                        'value': key_expression._values[1]
+                    }
+
+        except (AttributeError, IndexError) as e:
+            result['error'] = f"Unable to extract key values: {str(e)}"
+            result['note'] = "The Key expression structure may have changed"
+
+        return result
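
A hedged sketch (not part of the diff) of inspecting a KeyConditionExpression at runtime with the new static helper; it reuses the index.key(query_key=True, condition="begins_with") call shown in the docstring and assumes a standard logger is available.

    # Hypothetical: log the exact partition/sort key values a query will use.
    key_expr = model.indexes.get("gsi1").key(query_key=True, condition="begins_with")
    debug = DynamoDBIndex.extract_key_values(key_expr, "gsi1")
    logger.debug({"key_condition": debug})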
boto3_assist/dynamodb/dynamodb_key.py
CHANGED
@@ -45,6 +45,25 @@ class DynamoDBKey:
     def value(self, value: Optional[str | Callable[[], str]]):
         self.__value = value
 
+    def to_dict(self) -> dict[str, str]:
+        """
+        Return a dictionary representation of this key for debugging.
+
+        Returns:
+            Dictionary with attribute name as key and value as the value.
+
+        Example:
+            >>> key = DynamoDBKey(attribute_name="pk", value="user#123")
+            >>> key.to_dict()
+            {'pk': 'user#123'}
+
+            >>> # With lambda
+            >>> key = DynamoDBKey(attribute_name="pk", value=lambda: "user#456")
+            >>> key.to_dict()
+            {'pk': 'user#456'}
+        """
+        return {self.attribute_name: self.value}
+
     @staticmethod
     def build_key(*key_value_pairs) -> str:
         """
boto3_assist/dynamodb/dynamodb_model_base.py
CHANGED
@@ -164,11 +164,13 @@ class DynamoDBModelBase(SerializableModel):
         if response is None:
             response = {}
             item = response
-        elif "Item" in item and not any(
+        elif "Item" in item and not any(
+            key in item for key in ["id", "name", "pk", "sk"]
+        ):
            # Response with Item key but no direct model attributes (likely a DynamoDB response)
            # This handles cases like {'Item': {...}} or {'Item': {...}, 'Count': 1}
            item = item.get("Item", {})
-
+
        # Convert any Decimal objects to native Python types for easier handling
        item = DecimalConversionUtility.convert_decimals_to_native_types(item)
 
@@ -195,6 +197,12 @@ class DynamoDBModelBase(SerializableModel):
             self, include_indexes=include_indexes, include_none=include_none
         )
 
+    def to_dict(self, include_none: bool = True):
+        """
+        Convert the instance to a dictionary suitable for DynamoDB client.
+        """
+        return self.to_client_dictionary(include_none=include_none)
+
     def to_dictionary(self, include_none: bool = True):
         """
         Convert the instance to a dictionary without an indexes/keys.
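
A hedged sketch (not part of the diff) of the new to_dict() alias: it simply delegates to to_client_dictionary(), so a model instance can hand its client-format dictionary straight to DynamoDB.save(). The User class and the db instance here are assumptions for illustration.

    # Hypothetical: round-trip a model through the alias.
    user = User(id="123", name="Alice")   # assumed to extend DynamoDBModelBase
    db.save(item=user.to_dict(), table_name="users")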
@@ -290,7 +298,6 @@ class DynamoDBSerializer:
 
         return mapped
 
-
     @staticmethod
     def to_client_dictionary(
         instance: DynamoDBModelBase, include_indexes: bool = True
boto3_assist/version.py
CHANGED
@@ -1 +1 @@
-__version__ = "0.30.0"
+__version__ = "0.31.0"
{boto3_assist-0.30.0.dist-info → boto3_assist-0.31.0.dist-info}/RECORD
RENAMED
@@ -6,7 +6,7 @@ boto3_assist/connection_tracker.py,sha256=UgfR9RlvXf3A4ssMr3gDMpw89ka8mSRvJn4M34
 boto3_assist/http_status_codes.py,sha256=G0zRSWenwavYKETvDF9tNVUXQz3Ae2gXdBETYbjvJe8,3284
 boto3_assist/role_assumption_mixin.py,sha256=PMUU5yC2FUBjFD1UokVkRY3CPB5zTw85AhIB5BMtbc8,1031
 boto3_assist/session_setup_mixin.py,sha256=X-JQKyyaWNA8Z8kKgf2V2I5vsiLAH8udLTX_xepnsdQ,3140
-boto3_assist/version.py,sha256=
+boto3_assist/version.py,sha256=TJt1pYzTJuPE6GzDP1gxaeeVQlzcoUyEmSVSUPgYnIA,23
 boto3_assist/aws_lambda/event_info.py,sha256=OkZ4WzuGaHEu_T8sB188KBgShAJhZpWASALKRGBOhMg,14648
 boto3_assist/aws_lambda/mock_context.py,sha256=LPjHP-3YSoY6iPl1kPqJDwSVf1zLNTcukUunDtYcbK0,116
 boto3_assist/cloudwatch/cloudwatch_connection.py,sha256=mnGWaLSQpHh5EeY7Ek_2o9JKHJxOELIYtQVMX1IaHn4,2480
@@ -19,14 +19,14 @@ boto3_assist/cognito/cognito_connection.py,sha256=deuXR3cNHz0mCYff2k0LfAvK--9Okq
 boto3_assist/cognito/cognito_utility.py,sha256=IVZAg58nHG1U7uxe7FsTYpqwwZiwwdIBGiVTZuLCFqg,18417
 boto3_assist/cognito/jwks_cache.py,sha256=1Y9r-YfQ8qrgZN5xYPvjUEEV0vthbdcPdAIaPbZP7kU,373
 boto3_assist/cognito/user.py,sha256=qc44qLx3gwq6q2zMxcPQze1EjeZwy5Kuav93vbe-4WU,820
-boto3_assist/dynamodb/dynamodb.py,sha256=
+boto3_assist/dynamodb/dynamodb.py,sha256=MElfzO0kDKHdMye5vtIOC4sGH32P2xxvPlxj76NEDe4,48632
 boto3_assist/dynamodb/dynamodb_connection.py,sha256=D4KmVpMpE0OuVOwW5g4JBWllUNkwy0hMXEGUiToAMBc,3608
 boto3_assist/dynamodb/dynamodb_helpers.py,sha256=BYJEuXaQVCPbDfbtPswWA_OvV_yC3fVoTtKvIoZeIBc,12092
 boto3_assist/dynamodb/dynamodb_importer.py,sha256=nCKsyRQeMqDSf0Q5mQ_X_oVIg4PRnu0hcUzZnBli610,3471
-boto3_assist/dynamodb/dynamodb_index.py,sha256=
+boto3_assist/dynamodb/dynamodb_index.py,sha256=2AKxHo8HrRbaxL0ePj7S6ek36_sy5cHkDp5I9wIp8Kw,19797
 boto3_assist/dynamodb/dynamodb_iservice.py,sha256=O9Aj0PFEvcuk2vhARifWTFnUwcQW5EXzwZS478Hm-N0,796
-boto3_assist/dynamodb/dynamodb_key.py,sha256=
-boto3_assist/dynamodb/dynamodb_model_base.py,sha256=
+boto3_assist/dynamodb/dynamodb_key.py,sha256=3VPFBGLXSLNGol5WodLiOFGU60VU9ZAdLjd2oqZ1YH4,3928
+boto3_assist/dynamodb/dynamodb_model_base.py,sha256=AqMfMbjwGHUByRFVmptfjqP9gxgIxJmarvKyn8vtNfw,13187
 boto3_assist/dynamodb/dynamodb_model_base_interfaces.py,sha256=SFw-yK7TDPL4cK52bpn2zMm5G4mX7eYNU7eFytEw0-A,749
 boto3_assist/dynamodb/dynamodb_re_indexer.py,sha256=D9gCGTJMS1R-ovAbqXK9gMbkl7a9zkBwA8_pxOAkHSY,6164
 boto3_assist/dynamodb/dynamodb_reindexer.py,sha256=bCj6KIU0fQOgjkkiq9yF51PFZZr4Y9Lu3-hPlmsPG0Y,6164
@@ -60,8 +60,8 @@ boto3_assist/utilities/logging_utility.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5N
 boto3_assist/utilities/numbers_utility.py,sha256=wzv9d0uXT_2_ZHHio7LBzibwxPqhGpvbq9HinrVn_4A,10160
 boto3_assist/utilities/serialization_utility.py,sha256=m5wRZNeWW9VltQPVNziR27OGKO3MDJm6mFmcDHwN-n4,24479
 boto3_assist/utilities/string_utility.py,sha256=XxUIz19L2LFFTRDAAmdPa8Qhn40u9yO7g4nULFuvg0M,11033
-boto3_assist-0.
-boto3_assist-0.
-boto3_assist-0.
-boto3_assist-0.
-boto3_assist-0.
+boto3_assist-0.31.0.dist-info/METADATA,sha256=c4rLRDjs2-j-xjTzSQNFWU9cU5icj9SMf-5LAJR_JWY,2879
+boto3_assist-0.31.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+boto3_assist-0.31.0.dist-info/licenses/LICENSE-EXPLAINED.txt,sha256=WFREvTpfTjPjDHpOLADxJpCKpIla3Ht87RUUGii4ODU,606
+boto3_assist-0.31.0.dist-info/licenses/LICENSE.txt,sha256=PXDhFWS5L5aOTkVhNvoitHKbAkgxqMI2uUPQyrnXGiI,1105
+boto3_assist-0.31.0.dist-info/RECORD,,
{boto3_assist-0.30.0.dist-info → boto3_assist-0.31.0.dist-info}/WHEEL
RENAMED
File without changes
{boto3_assist-0.30.0.dist-info → boto3_assist-0.31.0.dist-info}/licenses/LICENSE-EXPLAINED.txt
RENAMED
File without changes
{boto3_assist-0.30.0.dist-info → boto3_assist-0.31.0.dist-info}/licenses/LICENSE.txt
RENAMED
File without changes