clear-skies-aws 1.9.18__py3-none-any.whl → 1.10.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {clear_skies_aws-1.9.18.dist-info → clear_skies_aws-1.10.0.dist-info}/METADATA +5 -1
- {clear_skies_aws-1.9.18.dist-info → clear_skies_aws-1.10.0.dist-info}/RECORD +17 -13
- clearskies_aws/actions/assume_role_test.py +5 -2
- clearskies_aws/backends/__init__.py +10 -0
- clearskies_aws/backends/dynamo_db_backend.py +10 -7
- clearskies_aws/backends/dynamo_db_backend_test.py +71 -65
- clearskies_aws/backends/dynamo_db_condition_parser.py +365 -0
- clearskies_aws/backends/dynamo_db_condition_parser_test.py +266 -0
- clearskies_aws/backends/dynamo_db_parti_ql_backend.py +1000 -0 (new module; shown in full below)
- clearskies_aws/backends/dynamo_db_parti_ql_backend_test.py +540 -0
- clearskies_aws/contexts/lambda_sqs_standard_partial_batch_test.py +11 -16
- clearskies_aws/di/standard_dependencies.py +47 -5
- clearskies_aws/input_outputs/lambda_api_gateway_test.py +27 -19
- clearskies_aws/secrets/parameter_store_test.py +5 -2
- clearskies_aws/secrets/secrets_manager_test.py +5 -2
- {clear_skies_aws-1.9.18.dist-info → clear_skies_aws-1.10.0.dist-info}/LICENSE +0 -0
- {clear_skies_aws-1.9.18.dist-info → clear_skies_aws-1.10.0.dist-info}/WHEEL +0 -0
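The bulk of this release is the new PartiQL-based DynamoDB backend (clearskies_aws/backends/dynamo_db_parti_ql_backend.py, reproduced in full below), its companion condition parser, and the dependency-injection wiring that exposes them. Under the hood the backend drives DynamoDB's ExecuteStatement API through boto3. As rough orientation only — this snippet is not taken from the package, and the table and column names are invented — the low-level call that the new DynamoDBPartiQLCursor wraps looks roughly like this:

    import boto3

    # Hypothetical table/column names, for illustration only.
    client = boto3.Session(region_name="us-east-1").client("dynamodb")
    response = client.execute_statement(
        Statement='SELECT * FROM "orders" WHERE "status" = ?',
        Parameters=[{"S": "shipped"}],  # PartiQL placeholders are bound positionally
        Limit=50,
    )
    items = response.get("Items", [])       # records in DynamoDB AttributeValue form
    next_token = response.get("NextToken")  # present when more pages are available

The cursor class in the diff below wraps exactly this call and adds logging and error handling, while the backend class translates clearskies query configurations into such statements.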
@@ -0,0 +1,1000 @@
+import base64
+import binascii
+import json
+import logging
+from decimal import Decimal, DecimalException
+from typing import Any, Callable, Dict, Generator, List, Optional, Tuple, Union
+
+from boto3.session import Session as Boto3Session
+from botocore.exceptions import ClientError
+from clearskies import Model as ClearSkiesModel
+from clearskies.autodoc.schema import String as AutoDocString
+from clearskies.backends import CursorBackend
+from types_boto3_dynamodb.client import DynamoDBClient
+from types_boto3_dynamodb.type_defs import (
+    AttributeValueTypeDef,
+    ExecuteStatementInputTypeDef,
+    ExecuteStatementOutputTypeDef,
+    GlobalSecondaryIndexDescriptionTypeDef,
+    KeySchemaElementTypeDef,
+)
+
+from clearskies_aws.backends.dynamo_db_condition_parser import DynamoDBConditionParser
+
+logger = logging.getLogger(__name__)
+
+
+class DynamoDBPartiQLCursor:
+    """
+    Cursor for executing PartiQL statements against DynamoDB.
+
+    This class wraps a Boto3 DynamoDB client to provide a simplified interface
+    for statement execution and error handling.
+    """
+
+    def __init__(self, boto3_session: Boto3Session) -> None:
+        """
+        Initializes the DynamoDBPartiQLCursor.
+
+        Args:
+            boto3_session: An initialized Boto3 Session object.
+        """
+        self._session: Boto3Session = boto3_session
+        self._client: DynamoDBClient = self._session.client("dynamodb")
+
+    def execute(
+        self,
+        statement: str,
+        parameters: Optional[List[AttributeValueTypeDef]] = None,
+        Limit: Optional[int] = None,
+        NextToken: Optional[str] = None,
+        ConsistentRead: Optional[bool] = None,
+    ) -> ExecuteStatementOutputTypeDef:
+        """
+        Execute a PartiQL statement against DynamoDB.
+
+        Args:
+            statement: The PartiQL statement string to execute.
+            parameters: An optional list of parameters for the PartiQL statement.
+            Limit: Optional limit for the number of items DynamoDB evaluates.
+            NextToken: Optional token for paginating results from DynamoDB.
+            ConsistentRead: Optional flag for strongly consistent reads.
+
+        Returns:
+            The output from the boto3 client's execute_statement method.
+
+        Raises:
+            ClientError: If the execution fails due to a client-side error.
+        """
+        try:
+            call_args: ExecuteStatementInputTypeDef = {"Statement": statement}
+            if parameters is not None:
+                call_args["Parameters"] = parameters
+            if Limit is not None:
+                call_args["Limit"] = Limit
+            if NextToken is not None:
+                call_args["NextToken"] = NextToken
+            if ConsistentRead is not None:
+                call_args["ConsistentRead"] = ConsistentRead
+
+            output: ExecuteStatementOutputTypeDef = self._client.execute_statement(
+                **call_args
+            )
+        except ClientError as err:
+            error_response: Dict[str, Any] = err.response.get("Error", {})
+            error_code: str = error_response.get("Code", "UnknownCode")
+            error_message: str = error_response.get("Message", "Unknown error")
+
+            parameters_str = str(parameters) if parameters is not None else "None"
+
+            if error_code == "ResourceNotFoundException":
+                logger.error(
+                    "Couldn't execute PartiQL '%s' with parameters '%s' because the table or index does not exist.",
+                    statement,
+                    parameters_str,
+                )
+            else:
+                logger.error(
+                    "Couldn't execute PartiQL '%s' with parameters '%s'. Here's why: %s: %s",
+                    statement,
+                    parameters_str,
+                    error_code,
+                    error_message,
+                )
+            raise
+        else:
+            return output
+
+
+class DynamoDBPartiQLBackend(CursorBackend):
+    """
+    DynamoDB backend implementation that uses PartiQL for database interactions.
+    Supports querying base tables and attempts to use Global Secondary Indexes (GSIs)
+    when appropriate based on query conditions and sorting.
+    The count() method uses native DynamoDB Query/Scan operations for accuracy.
+    """
+
+    _allowed_configs: List[str] = [
+        "table_name",
+        "wheres",
+        "sorts",
+        "limit",
+        "pagination",
+        "model_columns",
+        "selects",
+        "select_all",
+        "group_by_column",
+        "joins",
+    ]
+    _required_configs: List[str] = ["table_name"]
+
+    def __init__(self, dynamo_db_parti_ql_cursor: DynamoDBPartiQLCursor) -> None:
+        """
+        Initializes the DynamoDBPartiQLBackend.
+        """
+        super().__init__(dynamo_db_parti_ql_cursor)
+        self.condition_parser: DynamoDBConditionParser = DynamoDBConditionParser()
+        self._table_descriptions_cache: Dict[str, Dict[str, Any]] = {}
+
+    def _get_table_description(self, table_name: str) -> Dict[str, Any]:
+        """
+        Retrieves and caches the DynamoDB table description.
+        """
+        if table_name not in self._table_descriptions_cache:
+            try:
+                self._table_descriptions_cache[table_name] = self._cursor._client.describe_table(TableName=table_name)  # type: ignore
+            except ClientError as e:
+                logger.error(f"Failed to describe table '{table_name}': {e}")
+                raise
+        return self._table_descriptions_cache[table_name].get("Table", {})
+
+    def _table_escape_character(self) -> str:
+        """Returns the character used to escape table/index names."""
+        return '"'
+
+    def _column_escape_character(self) -> str:
+        """Returns the character used to escape column names."""
+        return '"'
+
+    def _finalize_table_name(
+        self, table_name: str, index_name: Optional[str] = None
+    ) -> str:
+        """
+        Escapes a table name and optionally an index name for use in a PartiQL FROM clause.
+        """
+        if not table_name:
+            return ""
+        esc: str = self._table_escape_character()
+        final_name = f"{esc}{table_name.strip(esc)}{esc}"
+        if index_name:
+            final_name += f".{esc}{index_name.strip(esc)}{esc}"
+        return final_name
+
+    def _conditions_as_wheres_and_parameters(
+        self, conditions: List[Dict[str, Any]], default_table_name: str
+    ) -> Tuple[str, List[AttributeValueTypeDef]]:
+        """
+        Converts where conditions into a PartiQL WHERE clause and parameters.
+        """
+        if not conditions:
+            return "", []
+
+        where_parts: List[str] = []
+        parameters: List[AttributeValueTypeDef] = []
+
+        for where in conditions:
+            if not isinstance(where, dict):
+                logger.warning(f"Skipping non-dictionary where condition: {where}")
+                continue
+
+            column: Optional[str] = where.get("column")
+            operator: Optional[str] = where.get("operator")
+            values: Optional[List[Any]] = where.get("values")
+
+            if not column or not operator or values is None:
+                logger.warning(
+                    f"Skipping malformed structured where condition: {where}"
+                )
+                continue
+
+            value_parts: List[str] = []
+            for v in values:
+                if isinstance(v, str):
+                    value_parts.append(f"'{v}'")
+                elif isinstance(v, bool):
+                    value_parts.append(str(v).lower())
+                elif isinstance(v, (int, float, Decimal, type(None))):
+                    value_parts.append(str(v))
+                else:
+                    value_parts.append(f"'{str(v)}'")
+
+            condition_string: str = ""
+            op_lower: str = operator.lower()
+            if op_lower == "in":
+                condition_string = f"{column} {operator} ({', '.join(value_parts)})"
+            elif op_lower in self.condition_parser.operators_without_placeholders:
+                condition_string = f"{column} {operator}"
+            else:
+                condition_string = (
+                    f"{column} {operator} {value_parts[0] if value_parts else ''}"
+                )
+
+            try:
+                parsed: Dict[str, Any] = self.condition_parser.parse_condition(
+                    condition_string
+                )
+                where_parts.append(parsed["parsed"])
+                parameters.extend(parsed["values"])
+            except ValueError as e:
+                logger.error(f"Error parsing condition '{condition_string}': {e}")
+                continue
+
+        if not where_parts:
+            return "", []
+        return " WHERE " + " AND ".join(where_parts), parameters
+
+    def as_sql(
+        self, configuration: Dict[str, Any]
+    ) -> Tuple[str, List[AttributeValueTypeDef], Optional[int], Optional[str]]:
+        """
+        Constructs a PartiQL statement and parameters from a query configuration.
+        """
+        escape: str = self._column_escape_character()
+        table_name: str = configuration.get("table_name", "")
+        chosen_index_name: Optional[str] = configuration.get("_chosen_index_name")
+
+        wheres, parameters = self._conditions_as_wheres_and_parameters(
+            configuration.get("wheres", []), table_name
+        )
+
+        from_clause_target: str = self._finalize_table_name(
+            table_name, chosen_index_name
+        )
+
+        selects: Optional[List[str]] = configuration.get("selects")
+        select_clause: str
+        if selects:
+            select_clause = ", ".join(
+                [f"{escape}{s.strip(escape)}{escape}" for s in selects]
+            )
+            if configuration.get("select_all"):
+                logger.warning(
+                    "Both 'select_all=True' and specific 'selects' were provided. Using specific 'selects'."
+                )
+        else:
+            select_clause = "*"
+
+        order_by: str = ""
+        sorts: Optional[List[Dict[str, str]]] = configuration.get("sorts")
+        if sorts:
+            sort_parts: List[str] = []
+            for sort in sorts:
+                column_name: str = sort["column"]
+                direction: str = sort.get("direction", "ASC").upper()
+                sort_parts.append(
+                    f"{escape}{column_name.strip(escape)}{escape} {direction}"
+                )
+            if sort_parts:
+                order_by = " ORDER BY " + ", ".join(sort_parts)
+
+        if configuration.get("group_by_column"):
+            logger.warning(
+                f"Configuration included 'group_by_column={configuration.get('group_by_column')}', "
+                "but GROUP BY is not supported by this DynamoDB PartiQL backend and will be ignored for SQL generation."
+            )
+
+        if configuration.get("joins"):
+            logger.warning(
+                f"Configuration included 'joins={configuration.get('joins')}', "
+                "but JOINs are not supported by this DynamoDB PartiQL backend and will be ignored for SQL generation."
+            )
+
+        limit: Optional[int] = configuration.get("limit")
+        if limit is not None:
+            limit = int(limit)
+
+        pagination: Dict[str, Any] = configuration.get("pagination", {})
+        next_token: Optional[str] = pagination.get("next_token")
+        if next_token is not None:
+            next_token = str(next_token)
+
+        if not from_clause_target:
+            raise ValueError("Table name is required for constructing SQL query.")
+
+        statement: str = (
+            f"SELECT {select_clause} FROM {from_clause_target}{wheres}{order_by}".strip()
+        )
+
+        return statement, parameters, limit, next_token
+
+    def records(
+        self,
+        configuration: Dict[str, Any],
+        model: ClearSkiesModel,
+        next_page_data: dict[str, Any] = {},
+    ) -> Generator[Dict[str, Any], None, None]:
+        """
+        Fetches records from DynamoDB based on the provided configuration using PartiQL.
+        """
+        configuration = self._check_query_configuration(configuration, model)
+
+        statement, params, limit, client_next_token_from_as_sql = self.as_sql(
+            configuration
+        )
+
+        ddb_token_for_this_call: Optional[str] = self.restore_next_token_from_config(
+            client_next_token_from_as_sql
+        )
+
+        cursor_limit: Optional[int] = None
+        if limit is not None and limit > 0:
+            cursor_limit = limit
+
+        try:
+            response: ExecuteStatementOutputTypeDef = self._cursor.execute(
+                statement=statement,
+                parameters=params,
+                Limit=cursor_limit,
+                NextToken=ddb_token_for_this_call,
+            )
+        except Exception as e:
+            logger.error(
+                f"Error executing PartiQL statement in records(): {statement}, error: {e}"
+            )
+            next_page_data = {}
+            raise
+
+        items_from_response: List[Dict[str, Any]] = response.get("Items", [])
+
+        for item_raw in items_from_response:
+            yield self._map_from_boto3(item_raw)
+
+        next_token_from_ddb: Optional[str] = response.get("NextToken")
+        if next_token_from_ddb:
+            next_page_data["next_token"] = self.serialize_next_token_for_response(
+                next_token_from_ddb
+            )
+
+    def _wheres_to_native_dynamo_expressions(
+        self,
+        conditions: List[Dict[str, Any]],
+        partition_key_name: Optional[str],
+        sort_key_name: Optional[str] = None,
+    ) -> Dict[str, Any]:
+        """
+        Converts a list of 'where' condition dictionaries into DynamoDB native
+        expression strings and attribute maps.
+        This is a simplified implementation.
+        """
+        expression_attribute_names: Dict[str, str] = {}
+        expression_attribute_values: Dict[str, AttributeValueTypeDef] = {}
+        key_condition_parts: List[str] = []
+        filter_expression_parts: List[str] = []
+
+        name_counter = 0
+        value_counter = 0
+
+        partition_key_condition_found = False
+        if partition_key_name:
+            processed_indices = set()  # To avoid processing a condition twice
+            for i, cond in enumerate(conditions):
+                if i in processed_indices:
+                    continue
+                if (
+                    cond.get("column") == partition_key_name
+                    and cond.get("operator") == "="
+                    and cond.get("values")
+                ):
+                    name_placeholder = f"#pk{name_counter}"
+                    value_placeholder = f":pk_val{value_counter}"
+                    expression_attribute_names[name_placeholder] = partition_key_name
+                    expression_attribute_values[value_placeholder] = (
+                        self.condition_parser.to_dynamodb_attribute_value(
+                            cond["values"][0]
+                        )
+                    )
+                    key_condition_parts.append(
+                        f"{name_placeholder} = {value_placeholder}"
+                    )
+                    name_counter += 1
+                    value_counter += 1
+                    partition_key_condition_found = True
+                    processed_indices.add(i)
+                    break
+
+            # Example for sort key condition (simplified)
+            if (
+                sort_key_name and partition_key_condition_found
+            ):  # Sort key only relevant if PK is there
+                for i, cond in enumerate(conditions):
+                    if i in processed_indices:
+                        continue
+                    if cond.get("column") == sort_key_name and cond.get("values"):
+                        # Simplified: only handle '=' for sort key for now
+                        if cond.get("operator") == "=":
+                            name_placeholder = f"#sk{name_counter}"
+                            value_placeholder = f":sk_val{value_counter}"
+                            expression_attribute_names[name_placeholder] = sort_key_name
+                            expression_attribute_values[value_placeholder] = (
+                                self.condition_parser.to_dynamodb_attribute_value(
+                                    cond["values"][0]
+                                )
+                            )
+                            key_condition_parts.append(
+                                f"{name_placeholder} = {value_placeholder}"
+                            )
+                            name_counter += 1
+                            value_counter += 1
+                            processed_indices.add(i)
+                            break  # Assuming one sort key condition for KeyConditionExpression
+
+        # Remaining conditions go to FilterExpression
+        for i, cond in enumerate(conditions):
+            if i in (
+                locals().get("processed_indices") or set()
+            ):  # Check if already processed for KeyCondition
+                continue
+
+            col_name = cond.get("column")
+            op = cond.get("operator")
+            vals = cond.get("values")
+
+            if not col_name or not op or vals is None:
+                continue
+
+            # This is a very simplified filter builder. A real one would handle all operators.
+            if op == "=" and vals:  # Example: only equality
+                name_placeholder = f"#fn{name_counter}"
+                value_placeholder = f":fv{value_counter}"
+                expression_attribute_names[name_placeholder] = col_name
+                expression_attribute_values[value_placeholder] = (
+                    self.condition_parser.to_dynamodb_attribute_value(vals[0])
+                )
+                filter_expression_parts.append(
+                    f"{name_placeholder} = {value_placeholder}"
+                )
+                name_counter += 1
+                value_counter += 1
+            # Add more operator handling here for a complete filter expression builder
+
+        result: Dict[str, Any] = {}
+        if (
+            key_condition_parts and partition_key_condition_found
+        ):  # Only add KeyConditionExpression if PK equality was found
+            result["KeyConditionExpression"] = " AND ".join(key_condition_parts)
+        elif (
+            key_condition_parts
+        ):  # If we built key_condition_parts but PK equality wasn't the one, move to filter
+            filter_expression_parts.extend(key_condition_parts)
+
+        if filter_expression_parts:
+            result["FilterExpression"] = " AND ".join(filter_expression_parts)
+        if expression_attribute_names:
+            result["ExpressionAttributeNames"] = expression_attribute_names
+        if expression_attribute_values:
+            result["ExpressionAttributeValues"] = expression_attribute_values
+
+        return result
+
+    def count(self, configuration: Dict[str, Any], model: ClearSkiesModel) -> int:
+        """
+        Counts records in DynamoDB using native Query or Scan operations.
+        """
+        configuration = self._check_query_configuration(configuration, model)
+
+        table_name: str = configuration["table_name"]
+        chosen_index_name: Optional[str] = configuration.get("_chosen_index_name")
+        partition_key_for_target: Optional[str] = configuration.get(
+            "_partition_key_for_target"
+        )
+        # Get sort key for the chosen target (base table or GSI)
+        sort_key_for_target: Optional[str] = None
+        table_description = self._get_table_description(table_name)
+        if chosen_index_name:
+            gsi_definitions: List[GlobalSecondaryIndexDescriptionTypeDef] = (
+                table_description.get("GlobalSecondaryIndexes", [])
+            )
+            for gsi in gsi_definitions:
+                if gsi["IndexName"] == chosen_index_name:
+                    for key_element in gsi["KeySchema"]:
+                        if key_element["KeyType"] == "RANGE":
+                            sort_key_for_target = key_element["AttributeName"]
+                            break
+                    break
+        else:
+            base_table_key_schema: List[KeySchemaElementTypeDef] = (
+                table_description.get("KeySchema", [])
+            )
+            for key_element in base_table_key_schema:
+                if key_element["KeyType"] == "RANGE":
+                    sort_key_for_target = key_element["AttributeName"]
+                    break
+
+        wheres_config = configuration.get("wheres", [])
+
+        native_expressions = self._wheres_to_native_dynamo_expressions(
+            wheres_config, partition_key_for_target, sort_key_for_target
+        )
+
+        params_for_native_call: Dict[str, Any] = {  # Renamed from params
+            "TableName": table_name,
+            "Select": "COUNT",
+        }
+        if chosen_index_name:
+            params_for_native_call["IndexName"] = chosen_index_name
+
+        can_use_query_for_count = False
+        if (
+            native_expressions.get("KeyConditionExpression")
+            and partition_key_for_target
+            and any(
+                w.get("column") == partition_key_for_target and w.get("operator") == "="
+                for w in wheres_config
+                if isinstance(w, dict)
+            )
+        ):
+            can_use_query_for_count = True
+            params_for_native_call["KeyConditionExpression"] = native_expressions[
+                "KeyConditionExpression"
+            ]
+            if native_expressions.get(
+                "FilterExpression"
+            ):  # Query can also have FilterExpression
+                params_for_native_call["FilterExpression"] = native_expressions[
+                    "FilterExpression"
+                ]
+        else:  # Fall back to Scan
+            # If KeyConditionExpression was built but not for the PK, it becomes part of Filter for Scan
+            all_filter_parts = []
+            if native_expressions.get("KeyConditionExpression"):
+                all_filter_parts.append(native_expressions["KeyConditionExpression"])
+            if native_expressions.get("FilterExpression"):
+                all_filter_parts.append(native_expressions["FilterExpression"])
+            if all_filter_parts:
+                params_for_native_call["FilterExpression"] = " AND ".join(
+                    all_filter_parts
+                )
+
+        if native_expressions.get("ExpressionAttributeNames"):
+            params_for_native_call["ExpressionAttributeNames"] = native_expressions[
+                "ExpressionAttributeNames"
+            ]
+        if native_expressions.get("ExpressionAttributeValues"):
+            params_for_native_call["ExpressionAttributeValues"] = native_expressions[
+                "ExpressionAttributeValues"
+            ]
+
+        total_count = 0
+        exclusive_start_key: Optional[Dict[str, AttributeValueTypeDef]] = None
+
+        while True:
+            if exclusive_start_key:
+                params_for_native_call["ExclusiveStartKey"] = exclusive_start_key
+
+            try:
+                if can_use_query_for_count:
+                    logger.debug(
+                        f"Executing native DynamoDB Query (for count) with params: {params_for_native_call}"
+                    )
+                    response = self._cursor._client.query(**params_for_native_call)  # type: ignore
+                else:
+                    logger.debug(
+                        f"Executing native DynamoDB Scan (for count) with params: {params_for_native_call}"
+                    )
+                    response = self._cursor._client.scan(**params_for_native_call)  # type: ignore
+            except ClientError as e:
+                logger.error(
+                    f"Error executing native DynamoDB operation for count: {e}. Params: {params_for_native_call}"
+                )
+                raise
+
+            total_count += response.get("Count", 0)
+            exclusive_start_key = response.get("LastEvaluatedKey")
+            if not exclusive_start_key:
+                break
+
+        return total_count
+
+    def create(self, data: Dict[str, Any], model: ClearSkiesModel) -> Dict[str, Any]:
+        """
+        Creates a new record in DynamoDB using PartiQL INSERT.
+        """
+        table_name: str = self._finalize_table_name(model.table_name)  # type: ignore
+
+        item_to_insert: Dict[str, AttributeValueTypeDef] = {
+            key: self.condition_parser.to_dynamodb_attribute_value(value)
+            for key, value in data.items()
+        }
+
+        parameters: List[AttributeValueTypeDef] = [item_to_insert]  # type: ignore
+        statement = f"INSERT INTO {table_name} VALUE ?"
+
+        try:
+            self._cursor.execute(statement=statement, parameters=parameters)
+            return data
+        except Exception as e:
+            logger.error(
+                f"Error executing INSERT PartiQL statement: {statement}, data: {data}, error: {e}"
+            )
+            raise
+
+    def update(
+        self, id_value: Any, data: Dict[str, Any], model: ClearSkiesModel
+    ) -> Dict[str, Any]:
+        """
+        Updates an existing record in DynamoDB using PartiQL UPDATE.
+        """
+        table_name: str = self._finalize_table_name(model.table_name)  # type: ignore
+        id_column_name: str = model.id_column_name  # type: ignore
+        escaped_id_column: str = (
+            f"{self._column_escape_character()}{id_column_name}{self._column_escape_character()}"
+        )
+
+        if not data:
+            logger.warning(
+                f"Update called with empty data for ID {id_value}. Returning ID only."
+            )
+            return {id_column_name: id_value}
+
+        set_clauses: List[str] = []
+        parameters: List[AttributeValueTypeDef] = []
+        col_esc: str = self._column_escape_character()
+
+        for key, value in data.items():
+            if key == id_column_name:
+                continue
+            set_clauses.append(f"{col_esc}{key}{col_esc} = ?")
+            parameters.append(self.condition_parser.to_dynamodb_attribute_value(value))
+
+        if not set_clauses:
+            logger.warning(
+                f"Update called for ID {id_value} but no updatable fields found in data. Returning ID only."
+            )
+            return {id_column_name: id_value}
+
+        parameters.append(self.condition_parser.to_dynamodb_attribute_value(id_value))
+
+        set_statement: str = ", ".join(set_clauses)
+        statement: str = (
+            f"UPDATE {table_name} SET {set_statement} WHERE {escaped_id_column} = ? RETURNING ALL NEW *"
+        )
+
+        try:
+            response = self._cursor.execute(statement=statement, parameters=parameters)
+            items = response.get("Items", [])
+            if items:
+                return self._map_from_boto3(items[0])
+            logger.warning(
+                f"UPDATE statement did not return items for ID {id_value}. Returning input data merged with ID."
+            )
+            return {**data, id_column_name: id_value}
+
+        except Exception as e:
+            logger.error(
+                f"Error executing UPDATE PartiQL statement: {statement}, data: {data}, id: {id_value}, error: {e}"
+            )
+            raise
+
+    def delete(self, id_value: Any, model: ClearSkiesModel) -> bool:
+        """
+        Deletes a record from DynamoDB using PartiQL DELETE.
+        """
+        table_name: str = self._finalize_table_name(model.table_name)  # type: ignore
+        id_column_name: str = model.id_column_name  # type: ignore
+        escaped_id_column: str = (
+            f"{self._column_escape_character()}{id_column_name}{self._column_escape_character()}"
+        )
+
+        parameters: List[AttributeValueTypeDef] = [self.condition_parser.to_dynamodb_attribute_value(id_value)]  # type: ignore
+        statement: str = f"DELETE FROM {table_name} WHERE {escaped_id_column} = ?"
+
+        try:
+            self._cursor.execute(statement=statement, parameters=parameters)
+            return True
+        except Exception as e:
+            logger.error(
+                f"Error executing DELETE PartiQL statement: {statement}, id: {id_value}, error: {e}"
+            )
+            raise
+
+    def _map_from_boto3(self, record: Dict[str, Any]) -> Dict[str, Any]:
+        """
+        Maps a raw record from DynamoDB (which uses AttributeValueTypeDef for values)
+        to a dictionary with Python-native types.
+
+        Args:
+            record: A dictionary representing a record item from DynamoDB,
+                where values are in AttributeValueTypeDef format.
+
+        Returns:
+            A dictionary with values unwrapped to Python native types.
+        """
+        return {
+            key: self._map_from_boto3_value(value) for (key, value) in record.items()
+        }
+
+    def _map_from_boto3_value(self, attribute_value: AttributeValueTypeDef) -> Any:
+        """
+        Converts a single DynamoDB AttributeValueTypeDef to its Python native equivalent.
+
+        Args:
+            attribute_value: A DynamoDB AttributeValueTypeDef dictionary.
+
+        Returns:
+            The unwrapped Python native value.
+        """
+        if not isinstance(attribute_value, dict):
+            return attribute_value
+
+        if "S" in attribute_value:
+            return attribute_value["S"]
+        if "N" in attribute_value:
+            try:
+                return Decimal(attribute_value["N"])
+            except DecimalException:
+                logger.warning(
+                    f"Could not convert N value '{attribute_value['N']}' to Decimal."
+                )
+                return attribute_value["N"]
+        if "BOOL" in attribute_value:
+            return attribute_value["BOOL"]
+        if "NULL" in attribute_value:
+            return None
+        if "B" in attribute_value:
+            return base64.b64decode(attribute_value["B"])
+        if "L" in attribute_value:
+            return [self._map_from_boto3_value(item) for item in attribute_value["L"]]
+        if "M" in attribute_value:
+            return {
+                key: self._map_from_boto3_value(val)
+                for key, val in attribute_value["M"].items()
+            }
+        if "SS" in attribute_value:
+            return set(attribute_value["SS"])
+        if "NS" in attribute_value:
+            try:
+                return set(Decimal(n_val) for n_val in attribute_value["NS"])
+            except DecimalException:
+                logger.warning(
+                    f"Could not convert one or more NS values in '{attribute_value['NS']}' to Decimal."
+                )
+                return set(attribute_value["NS"])
+        if "BS" in attribute_value:
+            return set(base64.b64decode(b_val) for b_val in attribute_value["BS"])
+
+        logger.warning(f"Unrecognized DynamoDB attribute type: {attribute_value}")
+        return attribute_value
+
+    def _check_query_configuration(
+        self, configuration: Dict[str, Any], model: ClearSkiesModel
+    ) -> Dict[str, Any]:
+        """
+        Validates the query configuration, applies default values, and attempts to
+        select an appropriate GSI if sorting is requested and conditions allow.
+        It also stores the determined partition key for the target in the configuration.
+        """
+        for key in list(configuration.keys()):
+            if key not in self._allowed_configs:
+                raise KeyError(
+                    f"DynamoDBBackend does not support config '{key}'. You may be using the wrong backend"
+                )
+        for key in self._required_configs:
+            if not configuration.get(key):
+                raise KeyError(f"Missing required configuration key {key}")
+
+        if "wheres" not in configuration:
+            configuration["wheres"] = []
+        if "sorts" not in configuration:
+            configuration["sorts"] = []
+        if "selects" not in configuration:
+            configuration["selects"] = []
+        if "model_columns" not in configuration:
+            configuration["model_columns"] = []
+        if "pagination" not in configuration:
+            configuration["pagination"] = {}
+        if "limit" not in configuration:
+            configuration["limit"] = None
+        if "select_all" not in configuration:
+            configuration["select_all"] = False
+        if "group_by_column" not in configuration:
+            configuration["group_by_column"] = None
+        if "joins" not in configuration:
+            configuration["joins"] = []
+
+        configuration["_chosen_index_name"] = None
+        configuration["_partition_key_for_target"] = None
+
+        if configuration.get("sorts") or configuration.get(
+            "wheres"
+        ):  # Check for index even if not sorting, for count
+            table_name_from_config: str = configuration.get("table_name", "")
+            table_description = self._get_table_description(table_name_from_config)
+
+            wheres = configuration.get("wheres", [])
+            sort_column = (
+                configuration.get("sorts")[0]["column"]
+                if configuration.get("sorts")
+                else None
+            )
+
+            key_to_check_for_equality: Optional[str] = None
+            target_name_for_error_msg: str = table_name_from_config
+            chosen_index_for_query: Optional[str] = None
+            partition_key_for_chosen_target: Optional[str] = None
+
+            gsi_definitions: List[GlobalSecondaryIndexDescriptionTypeDef] = (
+                table_description.get("GlobalSecondaryIndexes", [])
+            )
+            if gsi_definitions:
+                for gsi in gsi_definitions:
+                    gsi_name: str = gsi["IndexName"]
+                    gsi_key_schema: List[KeySchemaElementTypeDef] = gsi["KeySchema"]
+                    gsi_partition_key: Optional[str] = None
+                    gsi_sort_key: Optional[str] = None
+
+                    for key_element in gsi_key_schema:
+                        if key_element["KeyType"] == "HASH":
+                            gsi_partition_key = key_element["AttributeName"]
+                        elif key_element["KeyType"] == "RANGE":
+                            gsi_sort_key = key_element["AttributeName"]
+
+                    if gsi_partition_key and any(
+                        w.get("column") == gsi_partition_key
+                        and w.get("operator") == "="
+                        for w in wheres
+                        if isinstance(w, dict)
+                    ):
+                        if configuration.get("sorts"):
+                            if sort_column == gsi_partition_key and not gsi_sort_key:
+                                key_to_check_for_equality = gsi_partition_key
+                                chosen_index_for_query = gsi_name
+                                target_name_for_error_msg = (
+                                    f"{table_name_from_config} (index: {gsi_name})"
+                                )
+                                partition_key_for_chosen_target = gsi_partition_key
+                                break
+                            if sort_column == gsi_sort_key:
+                                key_to_check_for_equality = gsi_partition_key
+                                chosen_index_for_query = gsi_name
+                                target_name_for_error_msg = (
+                                    f"{table_name_from_config} (index: {gsi_name})"
+                                )
+                                partition_key_for_chosen_target = gsi_partition_key
+                                break
+                        else:
+                            key_to_check_for_equality = gsi_partition_key
+                            chosen_index_for_query = gsi_name
+                            target_name_for_error_msg = (
+                                f"{table_name_from_config} (index: {gsi_name})"
+                            )
+                            partition_key_for_chosen_target = gsi_partition_key
+                            break
+
+            if not chosen_index_for_query:
+                base_table_key_schema: List[KeySchemaElementTypeDef] = (
+                    table_description.get("KeySchema", [])
+                )
+                if base_table_key_schema:
+                    for key_element in base_table_key_schema:
+                        if key_element["KeyType"] == "HASH":
+                            key_to_check_for_equality = key_element["AttributeName"]
+                            partition_key_for_chosen_target = key_element[
+                                "AttributeName"
+                            ]
+                            break
+
+            configuration["_chosen_index_name"] = chosen_index_for_query
+            configuration["_partition_key_for_target"] = partition_key_for_chosen_target
+
+            if configuration.get("sorts"):
+                if not key_to_check_for_equality:
+                    logger.warning(
+                        f"Could not determine the required partition key for table/index '{target_name_for_error_msg}' "
+                        f"to validate ORDER BY clause. The query may fail in DynamoDB."
+                    )
+                else:
+                    has_required_key_equality = any(
+                        w.get("column") == key_to_check_for_equality
+                        and w.get("operator") == "="
+                        for w in wheres
+                        if isinstance(w, dict)
+                    )
+                    if not has_required_key_equality:
+                        raise ValueError(
+                            f"DynamoDB PartiQL queries with ORDER BY on '{target_name_for_error_msg}' require an equality "
+                            f"condition on its partition key ('{key_to_check_for_equality}') in the WHERE clause."
+                        )
+        return configuration
+
+    def validate_pagination_kwargs(
+        self, kwargs: Dict[str, Any], case_mapping: Callable[[str], str]
+    ) -> str:
+        """
+        Validates pagination keyword arguments.
+        """
+        extra_keys: set[str] = set(kwargs.keys()) - set(self.allowed_pagination_keys())
+        key_name: str = case_mapping("next_token")
+        if len(extra_keys):
+            return (
+                f"Invalid pagination key(s): '{','.join(sorted(list(extra_keys)))}'. "
+                f"Only '{key_name}' is allowed"
+            )
+        if "next_token" not in kwargs:
+            return f"You must specify '{key_name}' when setting pagination"
+        try:
+            token: Any = kwargs["next_token"]
+            if not isinstance(token, str) or not token:
+                raise ValueError("Token must be a non-empty string.")
+            json.loads(base64.urlsafe_b64decode(token))
+        except (TypeError, ValueError, binascii.Error, json.JSONDecodeError):
+            return f"The provided '{key_name}' appears to be invalid."
+        return ""
+
+    def allowed_pagination_keys(self) -> List[str]:
+        """
+        Returns a list of allowed keys for pagination.
+        """
+        return ["next_token"]
+
+    def restore_next_token_from_config(
+        self, next_token: Optional[str]
+    ) -> Optional[Any]:
+        """
+        Decodes a base64 encoded JSON string (next_token) into its original form.
+        """
+        if not next_token or not isinstance(next_token, str):
+            return None
+        try:
+            decoded_bytes: bytes = base64.urlsafe_b64decode(next_token)
+            restored_token: Any = json.loads(decoded_bytes)
+            return restored_token
+        except (TypeError, ValueError, binascii.Error, json.JSONDecodeError):
+            logger.warning(f"Failed to restore next_token: {next_token}")
+            return None
+
+    def serialize_next_token_for_response(
+        self, ddb_next_token: Optional[str]
+    ) -> Optional[str]:
+        """
+        Serializes a DynamoDB PartiQL NextToken string into a base64 encoded JSON string.
+        """
+        if ddb_next_token is None:
+            return None
+        try:
+            json_string: str = json.dumps(ddb_next_token)
+            encoded_bytes: bytes = base64.urlsafe_b64encode(json_string.encode("utf-8"))
+            return encoded_bytes.decode("utf8")
+        except (TypeError, ValueError) as e:
+            logger.error(
+                f"Error serializing DDB next_token: {ddb_next_token}, error: {e}"
+            )
+            return None
+
+    def documentation_pagination_next_page_response(
+        self, case_mapping: Callable[[str], str]
+    ) -> List[AutoDocString]:
+        """
+        Provides documentation for the 'next_page' (pagination token) in API responses.
+        """
+        return [AutoDocString(case_mapping("next_token"))]
+
+    def documentation_pagination_next_page_example(
+        self, case_mapping: Callable[[str], str]
+    ) -> Dict[str, str]:
+        """
+        Provides an example value for the 'next_page' (pagination token) in API responses.
+        """
+        return {case_mapping("next_token"): ""}
+
+    def documentation_pagination_parameters(
+        self, case_mapping: Callable[[str], str]
+    ) -> List[Tuple[AutoDocString, str]]:
+        """
+        Provides documentation for pagination parameters in API requests.
+        """
+        return [
+            (
+                AutoDocString(case_mapping("next_token"), example=""),
+                "A token to fetch the next page of results",
+            )
+        ]
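For orientation, here is a hypothetical usage sketch (not taken from the package's documentation or tests) of how the backend above turns a clearskies-style query configuration into a PartiQL statement. The table and column names are invented, and in a real application the model classes and the DI additions in standard_dependencies.py would construct these objects instead:

    from boto3.session import Session as Boto3Session

    cursor = DynamoDBPartiQLCursor(Boto3Session(region_name="us-east-1"))
    backend = DynamoDBPartiQLBackend(cursor)

    # Inspect the generated statement without touching the clearskies model layer.
    statement, parameters, limit, next_token = backend.as_sql({
        "table_name": "orders",
        "wheres": [{"column": "customer_id", "operator": "=", "values": ["abc-123"]}],
        "sorts": [{"column": "created_at", "direction": "DESC"}],
        "limit": 25,
    })
    # statement  -> something like: SELECT * FROM "orders" WHERE ... ORDER BY "created_at" DESC
    # parameters -> DynamoDB AttributeValue dicts produced by DynamoDBConditionParser
    # next_token -> taken from configuration["pagination"]["next_token"] when provided

Note that when records() or count() run the full pipeline, _check_query_configuration() only accepts an ORDER BY if the WHERE clause contains an equality condition on the partition key of the base table or of a matching GSI, mirroring DynamoDB's own restriction on sorted queries.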