clear-skies-aws 2.0.1__py3-none-any.whl → 2.0.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {clear_skies_aws-2.0.1.dist-info → clear_skies_aws-2.0.2.dist-info}/METADATA +1 -1
- clear_skies_aws-2.0.2.dist-info/RECORD +63 -0
- clearskies_aws/__init__.py +15 -0
- clearskies_aws/actions/__init__.py +15 -0
- clearskies_aws/actions/action_aws.py +135 -0
- clearskies_aws/actions/assume_role.py +115 -0
- clearskies_aws/actions/ses.py +203 -0
- clearskies_aws/actions/sns.py +61 -0
- clearskies_aws/actions/sqs.py +81 -0
- clearskies_aws/actions/step_function.py +73 -0
- clearskies_aws/backends/__init__.py +19 -0
- clearskies_aws/backends/backend.py +106 -0
- clearskies_aws/backends/dynamo_db_backend.py +609 -0
- clearskies_aws/backends/dynamo_db_condition_parser.py +325 -0
- clearskies_aws/backends/dynamo_db_parti_ql_backend.py +965 -0
- clearskies_aws/backends/sqs_backend.py +61 -0
- clearskies_aws/configs/__init__.py +0 -0
- clearskies_aws/contexts/__init__.py +23 -0
- clearskies_aws/contexts/cli_web_socket_mock.py +19 -0
- clearskies_aws/contexts/lambda_alb.py +76 -0
- clearskies_aws/contexts/lambda_api_gateway.py +77 -0
- clearskies_aws/contexts/lambda_api_gateway_web_socket.py +57 -0
- clearskies_aws/contexts/lambda_invocation.py +19 -0
- clearskies_aws/contexts/lambda_sns.py +18 -0
- clearskies_aws/contexts/lambda_sqs_standard_partial_batch.py +29 -0
- clearskies_aws/di/__init__.py +6 -0
- clearskies_aws/di/aws_additional_config_auto_import.py +37 -0
- clearskies_aws/di/inject/__init__.py +6 -0
- clearskies_aws/di/inject/boto3.py +15 -0
- clearskies_aws/di/inject/boto3_session.py +13 -0
- clearskies_aws/di/inject/parameter_store.py +15 -0
- clearskies_aws/endpoints/__init__.py +2 -0
- clearskies_aws/endpoints/secrets_manager_rotation.py +195 -0
- clearskies_aws/endpoints/simple_body_routing.py +41 -0
- clearskies_aws/input_outputs/__init__.py +21 -0
- clearskies_aws/input_outputs/cli_web_socket_mock.py +18 -0
- clearskies_aws/input_outputs/lambda_alb.py +53 -0
- clearskies_aws/input_outputs/lambda_api_gateway.py +123 -0
- clearskies_aws/input_outputs/lambda_api_gateway_web_socket.py +71 -0
- clearskies_aws/input_outputs/lambda_input_output.py +87 -0
- clearskies_aws/input_outputs/lambda_invocation.py +85 -0
- clearskies_aws/input_outputs/lambda_sns.py +79 -0
- clearskies_aws/input_outputs/lambda_sqs_standard.py +84 -0
- clearskies_aws/mocks/__init__.py +1 -0
- clearskies_aws/mocks/actions/__init__.py +6 -0
- clearskies_aws/mocks/actions/ses.py +34 -0
- clearskies_aws/mocks/actions/sns.py +29 -0
- clearskies_aws/mocks/actions/sqs.py +29 -0
- clearskies_aws/mocks/actions/step_function.py +32 -0
- clearskies_aws/models/__init__.py +0 -0
- clearskies_aws/models/web_socket_connection_model.py +182 -0
- clearskies_aws/secrets/__init__.py +13 -0
- clearskies_aws/secrets/additional_configs/__init__.py +62 -0
- clearskies_aws/secrets/additional_configs/iam_db_auth.py +39 -0
- clearskies_aws/secrets/additional_configs/iam_db_auth_with_ssm.py +96 -0
- clearskies_aws/secrets/additional_configs/mysql_connection_dynamic_producer_via_ssh_cert_bastion.py +80 -0
- clearskies_aws/secrets/additional_configs/mysql_connection_dynamic_producer_via_ssm_bastion.py +162 -0
- clearskies_aws/secrets/akeyless_with_ssm_cache.py +60 -0
- clearskies_aws/secrets/parameter_store.py +52 -0
- clearskies_aws/secrets/secrets.py +16 -0
- clearskies_aws/secrets/secrets_manager.py +96 -0
- clear_skies_aws-2.0.1.dist-info/RECORD +0 -4
- {clear_skies_aws-2.0.1.dist-info → clear_skies_aws-2.0.2.dist-info}/WHEEL +0 -0
- {clear_skies_aws-2.0.1.dist-info → clear_skies_aws-2.0.2.dist-info}/licenses/LICENSE +0 -0
|
@@ -0,0 +1,965 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import base64
|
|
4
|
+
import binascii
|
|
5
|
+
import json
|
|
6
|
+
import logging
|
|
7
|
+
import re
|
|
8
|
+
from decimal import Decimal, InvalidOperation
|
|
9
|
+
from typing import Any, Callable, Generator
|
|
10
|
+
|
|
11
|
+
from boto3.session import Session as Boto3Session
|
|
12
|
+
from botocore.exceptions import ClientError
|
|
13
|
+
from clearskies import Model as ClearSkiesModel
|
|
14
|
+
from clearskies.autodoc.schema import String as AutoDocString
|
|
15
|
+
from clearskies.backends import CursorBackend
|
|
16
|
+
from types_boto3_dynamodb.client import DynamoDBClient
|
|
17
|
+
from types_boto3_dynamodb.type_defs import (
|
|
18
|
+
AttributeValueTypeDef,
|
|
19
|
+
ExecuteStatementInputTypeDef,
|
|
20
|
+
ExecuteStatementOutputTypeDef,
|
|
21
|
+
GlobalSecondaryIndexDescriptionTypeDef,
|
|
22
|
+
KeySchemaElementTypeDef,
|
|
23
|
+
)
|
|
24
|
+
|
|
25
|
+
from clearskies_aws.backends.dynamo_db_condition_parser import DynamoDBConditionParser
|
|
26
|
+
|
|
27
|
+
logger = logging.getLogger(__name__)
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
class DynamoDBPartiQLCursor:
    """
    Thin cursor abstraction for running PartiQL statements against DynamoDB.

    Wraps a boto3 DynamoDB client so callers get a single `execute` entry
    point with consistent logging on failure.
    """

    def __init__(self, boto3_session: Boto3Session) -> None:
        """
        Create the DynamoDBPartiQLCursor.

        Args:
            boto3_session: An initialized Boto3 Session object.
        """
        self._session: Boto3Session = boto3_session
        self._client: DynamoDBClient = self._session.client("dynamodb")

    def execute(
        self,
        statement: str,
        parameters: list[AttributeValueTypeDef] | None = None,
        Limit: int | None = None,
        NextToken: str | None = None,
        ConsistentRead: bool | None = None,
    ) -> ExecuteStatementOutputTypeDef:
        """
        Execute a PartiQL statement against DynamoDB.

        Args:
            statement: The PartiQL statement string to execute.
            parameters: An optional list of parameters for the PartiQL statement.
            Limit: Optional limit for the number of items DynamoDB evaluates.
            NextToken: Optional token for paginating results from DynamoDB.
            ConsistentRead: Optional flag for strongly consistent reads.

        Returns:
            The output from the boto3 client's execute_statement method.

        Raises:
            ClientError: If the execution fails due to a client-side error.
        """
        request: ExecuteStatementInputTypeDef = {"Statement": statement}
        # boto3 rejects both a missing-value None and an empty parameter list,
        # so a plain truthiness check covers both cases at once.
        if parameters:
            request["Parameters"] = parameters
        if Limit is not None:
            request["Limit"] = Limit
        if NextToken is not None:
            request["NextToken"] = NextToken
        if ConsistentRead is not None:
            request["ConsistentRead"] = ConsistentRead

        try:
            return self._client.execute_statement(**request)
        except ClientError as err:
            details: dict[str, Any] = err.response.get("Error", {})  # type: ignore
            code: str = details.get("Code", "UnknownCode")
            message: str = details.get("Message", "Unknown error")
            shown_parameters = str(parameters) if parameters is not None else "None"

            # A missing table/index gets its own, more actionable log line.
            if code == "ResourceNotFoundException":
                logger.error(
                    "Couldn't execute PartiQL '%s' with parameters '%s' because the table or index does not exist.",
                    statement,
                    shown_parameters,
                )
            else:
                logger.error(
                    "Couldn't execute PartiQL '%s' with parameters '%s'. Here's why: %s: %s",
                    statement,
                    shown_parameters,
                    code,
                    message,
                )
            raise
|
|
109
|
+
|
|
110
|
+
|
|
111
|
+
class DynamoDBPartiQLBackend(CursorBackend):
|
|
112
|
+
"""
|
|
113
|
+
DynamoDB backend implementation that uses PartiQL for database interactions.
|
|
114
|
+
|
|
115
|
+
Supports querying base tables and attempts to use Global Secondary Indexes (GSIs)
|
|
116
|
+
when appropriate based on query conditions and sorting.
|
|
117
|
+
The count() method uses native DynamoDB Query/Scan operations for accuracy.
|
|
118
|
+
"""
|
|
119
|
+
|
|
120
|
+
_cursor: DynamoDBPartiQLCursor
|
|
121
|
+
_allowed_configs: list[str] = [
|
|
122
|
+
"table_name",
|
|
123
|
+
"wheres",
|
|
124
|
+
"sorts",
|
|
125
|
+
"limit",
|
|
126
|
+
"pagination",
|
|
127
|
+
"model_columns",
|
|
128
|
+
"selects",
|
|
129
|
+
"select_all",
|
|
130
|
+
"group_by_column",
|
|
131
|
+
"joins",
|
|
132
|
+
]
|
|
133
|
+
_required_configs: list[str] = ["table_name"]
|
|
134
|
+
|
|
135
|
+
def __init__(self, dynamo_db_parti_ql_cursor: DynamoDBPartiQLCursor) -> None:
    """
    Initialize the DynamoDBPartiQLBackend.

    Args:
        dynamo_db_parti_ql_cursor: Cursor used for all PartiQL execution.
    """
    super().__init__(dynamo_db_parti_ql_cursor)
    # Shared parser used to turn condition strings into PartiQL fragments
    # and DynamoDB attribute values.
    self.condition_parser: DynamoDBConditionParser = DynamoDBConditionParser()
    # Per-instance cache of describe_table responses, keyed by table name,
    # so key-schema lookups don't hit the API repeatedly.
    self._table_descriptions_cache: dict[str, dict[str, Any]] = {}
|
|
140
|
+
|
|
141
|
+
def _get_table_description(self, table_name: str) -> dict[str, Any]:
    """
    Retrieve and cache the DynamoDB table description.

    The full describe_table response is cached per table name; only the
    "Table" portion of the response is returned to callers.
    """
    cached = self._table_descriptions_cache.get(table_name)
    if cached is None:
        try:
            cached = self._cursor._client.describe_table(TableName=table_name)  # type: ignore
        except ClientError as e:
            logger.error(f"Failed to describe table '{table_name}': {e}")
            raise
        self._table_descriptions_cache[table_name] = cached
    return cached.get("Table", {})
|
|
150
|
+
|
|
151
|
+
def _table_escape_character(self) -> str:
    """Return the character used to escape table/index names."""
    # Double quotes are used by _finalize_table_name to wrap table and
    # index identifiers in the PartiQL FROM clause.
    return '"'
|
|
154
|
+
|
|
155
|
+
def _column_escape_character(self) -> str:
    """Return the character used to escape column names."""
    # Same quoting character as for tables; used when building SELECT and
    # ORDER BY fragments.
    return '"'
|
|
158
|
+
|
|
159
|
+
def _finalize_table_name(self, table_name: str, index_name: str | None = None) -> str:
    """
    Escapes a table name and optionally an index name for a PartiQL FROM clause.

    Returns an empty string when no table name is given. Any pre-existing
    escape characters on the inputs are stripped before re-quoting, so
    already-quoted names are not double-quoted.
    """
    if not table_name:
        return ""
    quote = self._table_escape_character()
    segments = [str(table_name).strip(quote)]
    if index_name:
        segments.append(index_name.strip(quote))
    # "table"."index" addresses a GSI; a lone "table" addresses the base table.
    return ".".join(f"{quote}{segment}{quote}" for segment in segments)
|
|
168
|
+
|
|
169
|
+
def _conditions_as_wheres_and_parameters(
    self, conditions: list[dict[str, Any]], default_table_name: str
) -> tuple[str, list[AttributeValueTypeDef]]:
    """
    Convert where conditions into a PartiQL WHERE clause and parameters.

    Each condition dict is expected to carry "column", "operator", and
    "values" keys; malformed entries are logged and skipped rather than
    raising. Conditions are re-serialized to a condition string and handed
    to self.condition_parser.parse_condition, which returns the
    placeholder-ized fragment plus its attribute values.

    Args:
        conditions: List of structured condition dicts.
        default_table_name: Currently unused by this method.
            # NOTE(review): parameter is accepted but never referenced — confirm
            # whether callers rely on it before removing.

    Returns:
        A tuple of (" WHERE ..." clause or "", list of DynamoDB attribute
        values for the placeholders). Both are empty when no usable
        conditions remain.
    """
    if not conditions:
        return "", []

    where_parts: list[str] = []
    parameters: list[AttributeValueTypeDef] = []

    for where in conditions:
        if not isinstance(where, dict):
            logger.warning(f"Skipping non-dictionary where condition: {where}")
            continue

        column: str | None = where.get("column")
        operator: str | None = where.get("operator")
        values: list[Any] | None = where.get("values")

        # "values is None" (not falsiness) so an empty list still passes through
        # for operators that take no operand.
        if not column or not operator or values is None:
            logger.warning(f"Skipping malformed structured where condition: {where}")
            continue

        # Render each value as a PartiQL literal for the intermediate
        # condition string that parse_condition will re-parse.
        value_parts: list[str] = []
        for v in values:
            if isinstance(v, str):
                # NOTE(review): string values are embedded between single quotes
                # without escaping — a value containing "'" would corrupt the
                # condition string. Presumably parse_condition replaces literals
                # with placeholders afterwards; verify handling of quotes.
                value_parts.append(f"'{v}'")
            elif isinstance(v, bool):
                # bool must be tested before int/float: bool is an int subclass.
                value_parts.append(str(v).lower())
            elif isinstance(v, (int, float, Decimal, type(None))):
                value_parts.append(str(v))
            else:
                value_parts.append(f"'{str(v)}'")

        condition_string: str = ""
        op_lower: str = operator.lower()
        if op_lower == "in":
            # IN takes the full value list: "col IN (v1, v2, ...)".
            condition_string = f"{column} {operator} ({', '.join(value_parts)})"
        elif op_lower in self.condition_parser.operators_without_placeholders:
            # Operators like IS NULL take no operand at all.
            condition_string = f"{column} {operator}"
        else:
            # All other operators are binary; only the first value is used.
            condition_string = f"{column} {operator} {value_parts[0] if value_parts else ''}"

        try:
            parsed: dict[str, Any] = self.condition_parser.parse_condition(condition_string)
            where_parts.append(parsed["parsed"])
            parameters.extend(parsed["values"])
        except ValueError as e:
            # Unparseable conditions are dropped, not fatal.
            logger.error(f"Error parsing condition '{condition_string}': {e}")
            continue

    if not where_parts:
        return "", []
    return " WHERE " + " AND ".join(where_parts), parameters
|
|
223
|
+
|
|
224
|
+
def as_sql(self, configuration: dict[str, Any]) -> tuple[str, list[AttributeValueTypeDef], int | None, str | None]:
    """
    Construct a PartiQL statement and parameters from a query configuration.

    Builds "SELECT ... FROM ... [WHERE ...] [ORDER BY ...]" only; limit and
    pagination token are returned separately because they are passed to
    execute_statement as API arguments rather than embedded in the
    statement (see records()). GROUP BY and JOIN configuration is not
    supported and is warned about and ignored.

    Args:
        configuration: Query configuration; recognized keys include
            table_name, _chosen_index_name, wheres, selects, select_all,
            sorts, group_by_column, joins, limit, and pagination.

    Returns:
        A tuple of (statement, parameters, limit, next_token).

    Raises:
        ValueError: If no table name is present in the configuration.
    """
    escape: str = self._column_escape_character()
    table_name: str = configuration.get("table_name", "")
    chosen_index_name: str | None = configuration.get("_chosen_index_name")

    wheres, parameters = self._conditions_as_wheres_and_parameters(configuration.get("wheres", []), table_name)

    # Quoted "table"."index" (or just "table") for the FROM clause.
    from_clause_target: str = self._finalize_table_name(table_name, chosen_index_name)

    selects: list[str] | None = configuration.get("selects")
    select_clause: str
    if selects:
        select_clause = ", ".join([f"{escape}{s.strip(escape)}{escape}" for s in selects])
        if configuration.get("select_all"):
            logger.warning("Both 'select_all=True' and specific 'selects' were provided. Using specific 'selects'.")
    else:
        select_clause = "*"

    order_by: str = ""
    sorts: list[dict[str, str]] | None = configuration.get("sorts")
    if sorts:
        sort_parts: list[str] = []
        for sort in sorts:
            # "column" is required per sort entry; "direction" defaults to ASC.
            column_name: str = sort["column"]
            direction: str = sort.get("direction", "ASC").upper()
            sort_parts.append(f"{escape}{column_name.strip(escape)}{escape} {direction}")
        if sort_parts:
            order_by = " ORDER BY " + ", ".join(sort_parts)

    if configuration.get("group_by_column"):
        group_by_column = configuration.get("group_by_column")
        logger.warning(
            "Configuration included 'group_by_column="
            + (group_by_column if group_by_column is not None else "")
            + "', "
            + "but GROUP BY is not supported by this DynamoDB PartiQL backend and will be ignored for SQL generation."
        )

    if configuration.get("joins"):
        logger.warning(
            "Configuration included 'joins="
            + str(configuration.get("joins"))
            + "', "
            + "but JOINs are not supported by this DynamoDB PartiQL backend and will be ignored for SQL generation."
        )

    # Normalize limit/next_token types; both are returned, not embedded in SQL.
    limit: int | None = configuration.get("limit")
    if limit is not None:
        limit = int(limit)

    pagination: dict[str, Any] = configuration.get("pagination", {})
    next_token: str | None = pagination.get("next_token")
    if next_token is not None:
        next_token = str(next_token)

    if not from_clause_target:
        raise ValueError("Table name is required for constructing SQL query.")

    statement: str = f"SELECT {select_clause} FROM {from_clause_target}{wheres}{order_by}".strip()

    return statement, parameters, limit, next_token
|
|
286
|
+
|
|
287
|
+
def records(
    self,
    configuration: dict[str, Any],
    model: ClearSkiesModel,
    next_page_data: dict[str, Any] | None = None,
) -> Generator[dict[str, Any], None, None]:
    """
    Fetch records from DynamoDB based on the provided configuration using PartiQL.

    Args:
        configuration: Query configuration (see as_sql for recognized keys).
        model: The model whose records are being fetched.
        next_page_data: Optional dict that, when provided by the caller, is
            populated in place with a "next_token" entry if DynamoDB reports
            more results are available.

    Yields:
        One mapped record dict per item returned by DynamoDB.

    Raises:
        Exception: Re-raises any error from statement execution after logging.
    """
    # Fix: the original signature used a mutable default argument
    # (next_page_data={}), which is shared across calls and was mutated
    # below — pagination tokens could leak between unrelated queries.
    # Use None as the sentinel and create a fresh dict per call.
    if next_page_data is None:
        next_page_data = {}

    configuration = self._check_query_configuration(configuration, model)

    statement, params, limit, client_next_token_from_as_sql = self.as_sql(configuration)

    # Translate the client-facing pagination token back into DynamoDB's token.
    ddb_token_for_this_call: str | None = self.restore_next_token_from_config(client_next_token_from_as_sql)

    cursor_limit: int | None = None
    if limit is not None and limit > 0:
        cursor_limit = limit

    try:
        response: ExecuteStatementOutputTypeDef = self._cursor.execute(
            statement=statement,
            parameters=params,
            Limit=cursor_limit,
            NextToken=ddb_token_for_this_call,
        )
    except Exception as e:
        logger.error(f"Error executing PartiQL statement in records(): {statement}, error: {e}")
        # (The previous "next_page_data = {}" rebinding here was dead code:
        # rebinding the local never cleared the caller's dict.)
        raise

    items_from_response: list[dict[str, Any]] = response.get("Items", [])

    for item_raw in items_from_response:
        yield self._map_from_boto3(item_raw)

    # Surface DynamoDB's continuation token to the caller, in serialized form.
    next_token_from_ddb: str | None = response.get("NextToken")
    if next_token_from_ddb:
        next_page_data["next_token"] = self.serialize_next_token_for_response(next_token_from_ddb)
|
|
324
|
+
|
|
325
|
+
def _wheres_to_native_dynamo_expressions(
    self,
    conditions: list[dict[str, Any]],
    partition_key_name: str | None,
    sort_key_name: str | None = None,
) -> dict[str, Any]:
    """
    Convert 'where' conditions to DynamoDB expressions.

    Transforms a list of condition dictionaries into expression strings and
    attribute maps for native Query/Scan operations. A partition-key equality
    condition (and, when present, one supported sort-key condition) is
    extracted into KeyConditionExpression; everything else becomes part of
    FilterExpression.

    Fix over the previous version: name placeholders are now only registered
    once a condition is actually emitted. Previously, unsupported or
    malformed conditions (e.g. an unsupported sort-key operator, a BETWEEN
    with the wrong number of values, a LIKE with a non-string value) left
    unused entries in ExpressionAttributeNames, which DynamoDB rejects with
    a ValidationException ("unused expression attribute names").

    Returns:
        A dict with any of the keys KeyConditionExpression,
        FilterExpression, ExpressionAttributeNames, and
        ExpressionAttributeValues that were produced.
    """
    expression_attribute_names: dict[str, str] = {}
    expression_attribute_values: dict[str, AttributeValueTypeDef] = {}
    key_condition_parts: list[str] = []
    filter_expression_parts: list[str] = []

    name_counter = 0
    value_counter = 0

    # Helper to get unique placeholder names. Only call once the condition
    # is known to produce an expression (see fix note in the docstring).
    def get_name_placeholder(column_name: str) -> str:
        nonlocal name_counter
        # Sanitize column name for placeholder if it contains special characters
        sanitized_column_name = re.sub(r"[^a-zA-Z0-9_]", "", column_name)
        placeholder = f"#{sanitized_column_name}_{name_counter}"
        expression_attribute_names[placeholder] = column_name
        name_counter += 1
        return placeholder

    # Helper to get unique value placeholders and add values
    def get_value_placeholder(value: Any) -> str:
        nonlocal value_counter
        placeholder = f":val{value_counter}"
        expression_attribute_values[placeholder] = self.condition_parser.to_dynamodb_attribute_value(value)
        value_counter += 1
        return placeholder

    processed_condition_indices: set[int] = set()

    # Mapping of clearskies operators to DynamoDB expression operators for
    # the simple binary comparisons ("!=" is spelled "<>" in DynamoDB).
    comparison_ops: dict[str, str] = {"=": "=", "!=": "<>", ">": ">", "<": "<", ">=": ">=", "<=": "<="}
    # Comparison operators allowed in a sort-key KeyConditionExpression.
    key_range_ops = ("=", ">", "<", ">=", "<=")

    # First, try to build KeyConditionExpression for Partition Key and Sort Key:
    # find a partition key equality condition.
    pk_condition_index = -1
    if partition_key_name:
        for i, cond in enumerate(conditions):
            if cond.get("column") == partition_key_name and cond.get("operator") == "=" and cond.get("values"):
                pk_condition_index = i
                break

    if pk_condition_index != -1:
        pk_cond = conditions[pk_condition_index]
        pk_name_ph = get_name_placeholder(pk_cond["column"])
        pk_value_ph = get_value_placeholder(pk_cond["values"][0])
        key_condition_parts.append(f"{pk_name_ph} = {pk_value_ph}")
        processed_condition_indices.add(pk_condition_index)

        # If partition key found, check for a single sort key condition.
        if sort_key_name:
            for i, cond in enumerate(conditions):
                if i in processed_condition_indices:
                    continue
                if cond.get("column") == sort_key_name and cond.get("values"):
                    op_lower = cond["operator"].lower()

                    if op_lower in key_range_ops:
                        sk_name_ph = get_name_placeholder(cond["column"])
                        sk_value_ph = get_value_placeholder(cond["values"][0])
                        key_condition_parts.append(f"{sk_name_ph} {op_lower} {sk_value_ph}")
                    elif op_lower == "between":
                        if len(cond["values"]) == 2:
                            sk_name_ph = get_name_placeholder(cond["column"])
                            sk_value1_ph = get_value_placeholder(cond["values"][0])
                            sk_value2_ph = get_value_placeholder(cond["values"][1])
                            key_condition_parts.append(f"{sk_name_ph} BETWEEN {sk_value1_ph} AND {sk_value2_ph}")
                        else:
                            logger.warning(f"Skipping malformed BETWEEN condition for sort key: {cond}")
                    elif op_lower == "begins_with":
                        sk_name_ph = get_name_placeholder(cond["column"])
                        sk_value_ph = get_value_placeholder(cond["values"][0])
                        key_condition_parts.append(f"begins_with({sk_name_ph}, {sk_value_ph})")
                    else:
                        # Other operators for the sort key are not part of
                        # KeyConditionExpression; they fall through to the
                        # FilterExpression handling below.
                        continue
                    processed_condition_indices.add(i)
                    break  # Assume only one sort key condition for KeyConditionExpression

    # Process all remaining conditions for FilterExpression.
    for i, cond in enumerate(conditions):
        if i in processed_condition_indices:
            continue

        col_name = cond.get("column")
        op = cond.get("operator")
        vals = cond.get("values")

        if not col_name or not op or vals is None:
            continue

        op_lower = op.lower()

        if op_lower in comparison_ops:
            name_ph = get_name_placeholder(col_name)
            value_ph = get_value_placeholder(vals[0])
            filter_expression_parts.append(f"{name_ph} {comparison_ops[op_lower]} {value_ph}")
        elif op_lower == "between":
            if len(vals) == 2:
                name_ph = get_name_placeholder(col_name)
                value1_ph = get_value_placeholder(vals[0])
                value2_ph = get_value_placeholder(vals[1])
                filter_expression_parts.append(f"{name_ph} BETWEEN {value1_ph} AND {value2_ph}")
            else:
                logger.warning(f"Skipping malformed BETWEEN condition: {cond}")
        elif op_lower == "in":
            name_ph = get_name_placeholder(col_name)
            value_placeholders = ", ".join([get_value_placeholder(v) for v in vals])
            filter_expression_parts.append(f"{name_ph} IN ({value_placeholders})")
        elif op_lower == "contains":
            name_ph = get_name_placeholder(col_name)
            value_ph = get_value_placeholder(vals[0])
            filter_expression_parts.append(f"contains({name_ph}, {value_ph})")
        elif op_lower == "not contains":
            name_ph = get_name_placeholder(col_name)
            value_ph = get_value_placeholder(vals[0])
            filter_expression_parts.append(f"NOT contains({name_ph}, {value_ph})")
        elif op_lower == "begins_with":
            name_ph = get_name_placeholder(col_name)
            value_ph = get_value_placeholder(vals[0])
            filter_expression_parts.append(f"begins_with({name_ph}, {value_ph})")
        elif op_lower == "not begins_with":
            name_ph = get_name_placeholder(col_name)
            value_ph = get_value_placeholder(vals[0])
            filter_expression_parts.append(f"NOT begins_with({name_ph}, {value_ph})")
        elif op_lower == "is null":
            filter_expression_parts.append(f"attribute_not_exists({get_name_placeholder(col_name)})")
        elif op_lower == "is not null":
            filter_expression_parts.append(f"attribute_exists({get_name_placeholder(col_name)})")
        elif op_lower == "like":
            # Clearskies 'like' is approximated: '%x%' -> contains, 'x%' ->
            # begins_with, otherwise plain equality. A full implementation
            # would need to inspect the '%' positions more carefully.
            if len(vals) > 0 and isinstance(vals[0], str):
                like_value = vals[0]
                name_ph = get_name_placeholder(col_name)
                if like_value.startswith("%") and like_value.endswith("%"):
                    value_ph = get_value_placeholder(like_value.strip("%"))
                    filter_expression_parts.append(f"contains({name_ph}, {value_ph})")
                elif like_value.endswith("%"):
                    value_ph = get_value_placeholder(like_value.rstrip("%"))
                    filter_expression_parts.append(f"begins_with({name_ph}, {value_ph})")
                else:  # Treat as equality if no wildcards or complex pattern
                    value_ph = get_value_placeholder(like_value)
                    filter_expression_parts.append(f"{name_ph} = {value_ph}")
            else:
                logger.warning(f"Skipping unsupported LIKE condition: {cond}")
        else:
            logger.warning(f"Skipping unsupported operator '{op}' for native DynamoDB expressions: {cond}")

    result: dict[str, Any] = {}
    if key_condition_parts:
        result["KeyConditionExpression"] = " AND ".join(key_condition_parts)
    if filter_expression_parts:
        result["FilterExpression"] = " AND ".join(filter_expression_parts)
    if expression_attribute_names:
        result["ExpressionAttributeNames"] = expression_attribute_names
    if expression_attribute_values:
        result["ExpressionAttributeValues"] = expression_attribute_values

    return result
|
|
514
|
+
|
|
515
|
+
def count(self, configuration: dict[str, Any], model: ClearSkiesModel) -> int:
    """
    Count records in DynamoDB using native Query or Scan operations.

    PartiQL offers no COUNT aggregate, so this uses the native Query/Scan
    APIs with Select="COUNT" instead. A Query is used when the where
    conditions include a partition-key equality for the chosen target
    (base table or GSI); otherwise a full Scan is performed. Results are
    paginated with LastEvaluatedKey until exhausted, so this can be slow
    and read-unit-expensive on large tables.

    Args:
        configuration: Query configuration; uses table_name,
            _chosen_index_name, _partition_key_for_target, and wheres.
        model: The model being counted.

    Returns:
        The total matching item count across all result pages.

    Raises:
        ClientError: If the native Query/Scan call fails.
    """
    configuration = self._check_query_configuration(configuration, model)

    table_name: str = configuration["table_name"]
    chosen_index_name: str | None = configuration.get("_chosen_index_name")
    partition_key_for_target: str | None = configuration.get("_partition_key_for_target")
    # Get sort key for the chosen target (base table or GSI)
    sort_key_for_target: str | None = None
    table_description = self._get_table_description(table_name)
    if chosen_index_name:
        gsi_definitions: list[GlobalSecondaryIndexDescriptionTypeDef] = table_description.get(
            "GlobalSecondaryIndexes", []
        )
        for gsi in gsi_definitions:
            if gsi.get("IndexName", "") == chosen_index_name:
                # RANGE key of the matching GSI, if it has one.
                for key_element in gsi.get("KeySchema", []):
                    if key_element["KeyType"] == "RANGE":
                        sort_key_for_target = key_element["AttributeName"]
                        break
                break
    else:
        base_table_key_schema: list[KeySchemaElementTypeDef] = table_description.get("KeySchema", [])
        for key_element in base_table_key_schema:
            if key_element["KeyType"] == "RANGE":
                sort_key_for_target = key_element["AttributeName"]
                break

    wheres_config = configuration.get("wheres", [])

    native_expressions = self._wheres_to_native_dynamo_expressions(
        wheres_config, partition_key_for_target, sort_key_for_target
    )

    params_for_native_call: dict[str, Any] = {
        "TableName": table_name,
        "Select": "COUNT",
    }
    if chosen_index_name:
        params_for_native_call["IndexName"] = chosen_index_name

    can_use_query_for_count = False
    # A Query operation can be used for count if there is a KeyConditionExpression
    # that includes an equality condition on the partition key of the target (table or GSI).
    # We check if the partition key condition was successfully extracted into KeyConditionExpression.
    # NOTE(review): this is a brittle string match — it assumes the partition key
    # got the very first name/value placeholders ("..._0 = :val0"), which holds
    # only because _wheres_to_native_dynamo_expressions processes the partition
    # key first. Confirm before changing placeholder generation order.
    if (
        partition_key_for_target
        and f"#{re.sub(r'[^a-zA-Z0-9_]', '', partition_key_for_target)}_0"
        in native_expressions.get("ExpressionAttributeNames", {})
        and native_expressions.get("KeyConditionExpression")
        and f"#{re.sub(r'[^a-zA-Z0-9_]', '', partition_key_for_target)}_0 = :val0"
        in native_expressions["KeyConditionExpression"]  # Simplified check, assumes first value is PK
    ):
        can_use_query_for_count = True
        params_for_native_call["KeyConditionExpression"] = native_expressions["KeyConditionExpression"]
        if native_expressions.get("FilterExpression"):
            params_for_native_call["FilterExpression"] = native_expressions["FilterExpression"]
    else:
        # Fall back to Scan, and all conditions (including any potential key conditions that
        # couldn't be used for a Query) go into FilterExpression.
        # NOTE(review): if a KeyConditionExpression was built but is dropped here,
        # its placeholders may remain in ExpressionAttributeNames/Values below
        # while unused — DynamoDB rejects requests with unused placeholders.
        # Verify the Scan fallback path with key conditions present.
        if native_expressions.get("FilterExpression"):
            params_for_native_call["FilterExpression"] = native_expressions["FilterExpression"]
        # If there's a KeyConditionExpression but no PK equality, it should also be part of the filter for scan.
        # This logic is now handled more robustly within _wheres_to_native_dynamo_expressions
        # by ensuring only true PK/SK conditions go to KeyConditionExpression initially.

    if native_expressions.get("ExpressionAttributeNames"):
        params_for_native_call["ExpressionAttributeNames"] = native_expressions["ExpressionAttributeNames"]
    if native_expressions.get("ExpressionAttributeValues"):
        params_for_native_call["ExpressionAttributeValues"] = native_expressions["ExpressionAttributeValues"]

    total_count = 0
    exclusive_start_key: dict[str, AttributeValueTypeDef] | None = None

    # Page through results: Select="COUNT" still paginates, so sum each page's
    # Count until LastEvaluatedKey is no longer returned.
    while True:
        if exclusive_start_key:
            params_for_native_call["ExclusiveStartKey"] = exclusive_start_key

        try:
            if can_use_query_for_count:
                logger.debug(f"Executing native DynamoDB Query (for count) with params: {params_for_native_call}")
                response = self._cursor._client.query(**params_for_native_call)  # type: ignore
            else:
                logger.debug(f"Executing native DynamoDB Scan (for count) with params: {params_for_native_call}")
                response = self._cursor._client.scan(**params_for_native_call)  # type: ignore
        except ClientError as e:
            logger.error(
                f"Error executing native DynamoDB operation for count: {e}. Params: {params_for_native_call}"
            )
            raise

        total_count += response.get("Count", 0)
        exclusive_start_key = response.get("LastEvaluatedKey")
        if not exclusive_start_key:
            break

    return total_count
|
|
612
|
+
|
|
613
|
+
def create(self, data: dict[str, Any], model: ClearSkiesModel) -> dict[str, Any]:
    """Insert a new record via a PartiQL INSERT statement and return the stored data."""
    destination_table: str = self._finalize_table_name(model.get_table_name())

    if not data:
        logger.warning("Create called with empty data. Nothing to insert.")
        return {}

    # Each attribute becomes a `'name': ?` entry in the VALUE struct, with the
    # matching DynamoDB-typed value appended to the positional parameter list.
    placeholders: list[str] = []
    bound_values: list[AttributeValueTypeDef] = []
    for column, raw_value in data.items():
        # Single-quoted attribute names follow the PartiQL documentation examples.
        placeholders.append(f"'{column}': ?")
        bound_values.append(self.condition_parser.to_dynamodb_attribute_value(raw_value))

    statement = f"INSERT INTO {destination_table} VALUE {{{', '.join(placeholders)}}}"

    try:
        self._cursor.execute(
            statement=statement,
            parameters=bound_values,
        )
        return data
    except Exception as e:
        logger.error(f"Error executing INSERT PartiQL statement: {statement}, data: {data}, error: {e}")
        raise
|
|
644
|
+
|
|
645
|
+
def update(self, id_value: Any, data: dict[str, Any], model: ClearSkiesModel) -> dict[str, Any]:
    """Apply a PartiQL UPDATE to the record identified by id_value and return its new state."""
    destination_table: str = self._finalize_table_name(model.get_table_name())
    id_column: str = model.id_column_name
    escape: str = self._column_escape_character()
    quoted_id_column: str = f"{escape}{id_column}{escape}"

    if not data:
        logger.warning(f"Update called with empty data for ID {id_value}. Returning ID only.")
        return {id_column: id_value}

    # Build `"column" = ?` assignments, never overwriting the id column itself.
    assignments: list[str] = []
    bound_values: list[AttributeValueTypeDef] = []
    for column, raw_value in data.items():
        if column == id_column:
            continue
        assignments.append(f"{escape}{column}{escape} = ?")
        bound_values.append(self.condition_parser.to_dynamodb_attribute_value(raw_value))

    if not assignments:
        logger.warning(f"Update called for ID {id_value} but no updatable fields found in data. Returning ID only.")
        return {id_column: id_value}

    # The id value is bound last: it fills the WHERE-clause placeholder.
    bound_values.append(self.condition_parser.to_dynamodb_attribute_value(id_value))
    statement: str = (
        f"UPDATE {destination_table} SET {', '.join(assignments)} "
        f"WHERE {quoted_id_column} = ? RETURNING ALL NEW *"
    )

    try:
        response = self._cursor.execute(statement=statement, parameters=bound_values)
        items = response.get("Items", [])
        if items:
            # RETURNING ALL NEW * gives the post-update record in DynamoDB wire format.
            return self._map_from_boto3(items[0])
        logger.warning(
            f"UPDATE statement did not return items for ID {id_value}. Returning input data merged with ID."
        )
        return {**data, id_column: id_value}
    except Exception as e:
        logger.error(
            f"Error executing UPDATE PartiQL statement: {statement}, data: {data}, id: {id_value}, error: {e}"
        )
        raise
|
|
689
|
+
|
|
690
|
+
def delete(self, id_value: Any, model: ClearSkiesModel) -> bool:
    """Remove the record identified by id_value via PartiQL DELETE; return True on success."""
    destination_table: str = self._finalize_table_name(model.get_table_name())
    id_column: str = model.id_column_name
    escape: str = self._column_escape_character()
    quoted_id_column: str = f"{escape}{id_column}{escape}"

    # Only the id value is parameterized; it fills the single WHERE placeholder.
    bound_values: list[AttributeValueTypeDef] = [self.condition_parser.to_dynamodb_attribute_value(id_value)]
    statement: str = f"DELETE FROM {destination_table} WHERE {quoted_id_column} = ?"

    try:
        self._cursor.execute(statement=statement, parameters=bound_values)
        return True
    except Exception as e:
        logger.error(f"Error executing DELETE PartiQL statement: {statement}, id: {id_value}, error: {e}")
        raise
|
|
705
|
+
|
|
706
|
+
def _map_from_boto3(self, record: dict[str, Any]) -> dict[str, Any]:
|
|
707
|
+
"""
|
|
708
|
+
Convert DynamoDB record to Python-native dictionary.
|
|
709
|
+
|
|
710
|
+
Maps AttributeValueTypeDef values from DynamoDB to standard Python types for easier processing.
|
|
711
|
+
|
|
712
|
+
Args:
|
|
713
|
+
record: A dictionary representing a record item from DynamoDB,
|
|
714
|
+
where values are in AttributeValueTypeDef format.
|
|
715
|
+
|
|
716
|
+
Returns:
|
|
717
|
+
A dictionary with values unwrapped to Python native types.
|
|
718
|
+
"""
|
|
719
|
+
return {key: self._map_from_boto3_value(value) for (key, value) in record.items()}
|
|
720
|
+
|
|
721
|
+
def _map_from_boto3_value(self, attribute_value: AttributeValueTypeDef) -> Any:
|
|
722
|
+
"""
|
|
723
|
+
Convert a single DynamoDB AttributeValueTypeDef to its Python native equivalent.
|
|
724
|
+
|
|
725
|
+
Args:
|
|
726
|
+
attribute_value: A DynamoDB AttributeValueTypeDef dictionary.
|
|
727
|
+
|
|
728
|
+
Returns:
|
|
729
|
+
The unwrapped Python native value.
|
|
730
|
+
"""
|
|
731
|
+
if not isinstance(attribute_value, dict):
|
|
732
|
+
return attribute_value
|
|
733
|
+
|
|
734
|
+
if "S" in attribute_value:
|
|
735
|
+
return attribute_value["S"]
|
|
736
|
+
if "N" in attribute_value:
|
|
737
|
+
try:
|
|
738
|
+
return Decimal(attribute_value["N"])
|
|
739
|
+
except InvalidOperation: # Changed from DecimalException
|
|
740
|
+
logger.warning(f"Could not convert N value '{attribute_value['N']}' to Decimal.")
|
|
741
|
+
return attribute_value["N"]
|
|
742
|
+
if "BOOL" in attribute_value:
|
|
743
|
+
return attribute_value["BOOL"]
|
|
744
|
+
if "NULL" in attribute_value:
|
|
745
|
+
return None
|
|
746
|
+
if "B" in attribute_value:
|
|
747
|
+
try:
|
|
748
|
+
return base64.b64decode(attribute_value["B"])
|
|
749
|
+
except (binascii.Error, TypeError) as e:
|
|
750
|
+
logger.warning(f"Failed to decode base64 binary value: {attribute_value['B']}, error: {e}")
|
|
751
|
+
return attribute_value["B"] # Return raw if decoding fails
|
|
752
|
+
if "L" in attribute_value:
|
|
753
|
+
return [self._map_from_boto3_value(item) for item in attribute_value["L"]]
|
|
754
|
+
if "M" in attribute_value:
|
|
755
|
+
return {key: self._map_from_boto3_value(val) for key, val in attribute_value["M"].items()}
|
|
756
|
+
if "SS" in attribute_value:
|
|
757
|
+
return set(attribute_value["SS"])
|
|
758
|
+
if "NS" in attribute_value:
|
|
759
|
+
try:
|
|
760
|
+
return set(Decimal(n_val) for n_val in attribute_value["NS"])
|
|
761
|
+
except InvalidOperation: # Changed from DecimalException
|
|
762
|
+
logger.warning(f"Could not convert one or more NS values in '{attribute_value['NS']}' to Decimal.")
|
|
763
|
+
return set(attribute_value["NS"])
|
|
764
|
+
if "BS" in attribute_value:
|
|
765
|
+
try:
|
|
766
|
+
return set(base64.b64decode(b_val) for b_val in attribute_value["BS"])
|
|
767
|
+
except (binascii.Error, TypeError) as e:
|
|
768
|
+
logger.warning(
|
|
769
|
+
f"Failed to decode one or more base64 binary values in '{attribute_value['BS']}', error: {e}"
|
|
770
|
+
)
|
|
771
|
+
return set(attribute_value["BS"]) # Return raw if decoding fails
|
|
772
|
+
|
|
773
|
+
logger.warning(f"Unrecognized DynamoDB attribute type: {attribute_value}")
|
|
774
|
+
return attribute_value
|
|
775
|
+
|
|
776
|
+
def _check_query_configuration(self, configuration: dict[str, Any], model: ClearSkiesModel) -> dict[str, Any]:
    """
    Validate and update query configuration.

    Checks the configuration against the allowed/required config lists, sets
    defaults for all optional keys, and - when sorting or filtering is
    requested - selects an appropriate GSI if the conditions allow.

    It also stores the determined partition key for the target in the
    configuration (under '_partition_key_for_target') and the chosen index
    name (under '_chosen_index_name').

    Raises:
        KeyError: For an unsupported config key or a missing required one.
        ValueError: When an ORDER BY is requested without the partition-key
            equality condition DynamoDB requires.
    """
    # Reject any key this backend does not support, and require the mandatory ones.
    for key in list(configuration.keys()):
        if key not in self._allowed_configs:
            raise KeyError(f"DynamoDBBackend does not support config '{key}'. You may be using the wrong backend")
    for key in self._required_configs:
        if not configuration.get(key):
            raise KeyError(f"Missing required configuration key {key}")

    # Default every optional key so downstream code can rely on its presence.
    if "wheres" not in configuration:
        configuration["wheres"] = []
    if "sorts" not in configuration:
        configuration["sorts"] = []
    if "selects" not in configuration:
        configuration["selects"] = []
    if "model_columns" not in configuration:
        configuration["model_columns"] = []
    if "pagination" not in configuration:
        configuration["pagination"] = {}
    if "limit" not in configuration:
        configuration["limit"] = None
    if "select_all" not in configuration:
        configuration["select_all"] = False
    if "group_by_column" not in configuration:
        configuration["group_by_column"] = None
    if "joins" not in configuration:
        configuration["joins"] = []

    # Reset index-selection results; they are (re)computed below when needed.
    configuration["_chosen_index_name"] = None
    configuration["_partition_key_for_target"] = None

    if configuration.get("sorts") or configuration.get("wheres"):  # Check for index even if not sorting, for count
        table_name_from_config: str = configuration.get("table_name", "")
        table_description = self._get_table_description(table_name_from_config)

        wheres = configuration.get("wheres", [])
        sorts = configuration.get("sorts")
        # Only the first sort entry's column matters for index selection.
        sort_column = (
            sorts[0]["column"]
            if sorts and len(sorts) > 0 and sorts[0] is not None and "column" in sorts[0]
            else None
        )

        # The partition key whose equality condition must appear in the wheres.
        key_to_check_for_equality: str | None = None
        target_name_for_error_msg: str = table_name_from_config
        chosen_index_for_query: str | None = None
        partition_key_for_chosen_target: str | None = None

        # Prefer a GSI whose partition key has an equality condition in the wheres.
        gsi_definitions: list[GlobalSecondaryIndexDescriptionTypeDef] = table_description.get(
            "GlobalSecondaryIndexes", []
        )
        if gsi_definitions:
            for gsi in gsi_definitions:
                gsi_name: str = gsi["IndexName"]
                gsi_key_schema: list[KeySchemaElementTypeDef] = gsi["KeySchema"]
                gsi_partition_key: str | None = None
                gsi_sort_key: str | None = None

                for key_element in gsi_key_schema:
                    if key_element["KeyType"] == "HASH":
                        gsi_partition_key = key_element["AttributeName"]
                    elif key_element["KeyType"] == "RANGE":
                        gsi_sort_key = key_element["AttributeName"]

                if gsi_partition_key and any(
                    w.get("column") == gsi_partition_key and w.get("operator") == "="
                    for w in wheres
                    if isinstance(w, dict)
                ):
                    if configuration.get("sorts"):
                        # Sorting by the GSI partition key itself only works when the GSI has no sort key.
                        if sort_column == gsi_partition_key and not gsi_sort_key:
                            key_to_check_for_equality = gsi_partition_key
                            chosen_index_for_query = gsi_name
                            target_name_for_error_msg = f"{table_name_from_config} (index: {gsi_name})"
                            partition_key_for_chosen_target = gsi_partition_key
                            break
                        # Sorting by the GSI's own sort key: this index supports the requested ORDER BY.
                        if sort_column == gsi_sort_key:
                            key_to_check_for_equality = gsi_partition_key
                            chosen_index_for_query = gsi_name
                            target_name_for_error_msg = f"{table_name_from_config} (index: {gsi_name})"
                            partition_key_for_chosen_target = gsi_partition_key
                            break
                    else:
                        # No sorting requested: any GSI with a matching equality condition works (e.g. for counts).
                        key_to_check_for_equality = gsi_partition_key
                        chosen_index_for_query = gsi_name
                        target_name_for_error_msg = f"{table_name_from_config} (index: {gsi_name})"
                        partition_key_for_chosen_target = gsi_partition_key
                        break

        # Fall back to the base table's partition key when no GSI was chosen.
        if not chosen_index_for_query:
            base_table_key_schema: list[KeySchemaElementTypeDef] = table_description.get("KeySchema", [])
            if base_table_key_schema:
                for key_element in base_table_key_schema:
                    if key_element["KeyType"] == "HASH":
                        key_to_check_for_equality = key_element["AttributeName"]
                        partition_key_for_chosen_target = key_element["AttributeName"]
                        break

        configuration["_chosen_index_name"] = chosen_index_for_query
        configuration["_partition_key_for_target"] = partition_key_for_chosen_target

        # DynamoDB requires an equality condition on the partition key for any ORDER BY.
        if configuration.get("sorts"):
            if not key_to_check_for_equality:
                logger.warning(
                    f"Could not determine the required partition key for table/index '{target_name_for_error_msg}' "
                    f"to validate ORDER BY clause. The query may fail in DynamoDB."
                )
            else:
                has_required_key_equality = any(
                    w.get("column") == key_to_check_for_equality and w.get("operator") == "="
                    for w in wheres
                    if isinstance(w, dict)
                )
                if not has_required_key_equality:
                    raise ValueError(
                        f"DynamoDB PartiQL queries with ORDER BY on '{target_name_for_error_msg}' require an equality "
                        f"condition on its partition key ('{key_to_check_for_equality}') in the WHERE clause."
                    )
    return configuration
|
|
902
|
+
|
|
903
|
+
def validate_pagination_kwargs(self, kwargs: dict[str, Any], case_mapping: Callable[[str], str]) -> str:
    """Return an error message for bad pagination kwargs, or "" when they are valid."""
    key_name: str = case_mapping("next_token")
    unknown: set[str] = set(kwargs.keys()) - set(self.allowed_pagination_keys())
    if unknown:
        joined = ",".join(sorted(list(unknown)))
        return f"Invalid pagination key(s): '{joined}'. Only '{key_name}' is allowed"
    if "next_token" not in kwargs:
        return f"You must specify '{key_name}' when setting pagination"
    token: Any = kwargs["next_token"]
    # The token must be a non-empty, URL-safe base64-encoded JSON document;
    # anything else is reported as invalid rather than raising to the caller.
    try:
        if not isinstance(token, str) or not token:
            raise ValueError("Token must be a non-empty string.")
        json.loads(base64.urlsafe_b64decode(token))
    except (TypeError, ValueError, binascii.Error, json.JSONDecodeError):
        return f"The provided '{key_name}' appears to be invalid."
    return ""
|
|
919
|
+
|
|
920
|
+
def allowed_pagination_keys(self) -> list[str]:
    """List the pagination keyword arguments this backend understands."""
    # Pagination is purely token-driven for DynamoDB PartiQL queries.
    return ["next_token"]
|
|
923
|
+
|
|
924
|
+
def restore_next_token_from_config(self, next_token: str | None) -> Any | None:
    """Decode a base64-encoded JSON pagination token back into its original value."""
    if not next_token or not isinstance(next_token, str):
        return None
    try:
        payload: bytes = base64.urlsafe_b64decode(next_token)
        return json.loads(payload)
    except (TypeError, ValueError, binascii.Error, json.JSONDecodeError):
        # A malformed token is treated as "no token" rather than an error.
        logger.warning(f"Failed to restore next_token: {next_token}")
        return None
|
|
935
|
+
|
|
936
|
+
def serialize_next_token_for_response(self, ddb_next_token: str | None) -> str | None:
    """Wrap a DynamoDB PartiQL NextToken as a URL-safe base64-encoded JSON string."""
    if ddb_next_token is None:
        return None
    try:
        as_json: str = json.dumps(ddb_next_token)
        # JSON-encode first so the restore side can round-trip via json.loads.
        return base64.urlsafe_b64encode(as_json.encode("utf-8")).decode("utf8")
    except (TypeError, ValueError) as e:
        logger.error(f"Error serializing DDB next_token: {ddb_next_token}, error: {e}")
        return None
|
|
947
|
+
|
|
948
|
+
def documentation_pagination_next_page_response(self, case_mapping: Callable[[str], str]) -> list[AutoDocString]:
    """Describe the pagination token field returned in API responses."""
    token_field = case_mapping("next_token")
    return [AutoDocString(token_field)]
|
|
951
|
+
|
|
952
|
+
def documentation_pagination_next_page_example(self, case_mapping: Callable[[str], str]) -> dict[str, str]:
    """Give an example pagination-token entry for API response documentation."""
    # An empty string stands in for a real opaque continuation token.
    token_field = case_mapping("next_token")
    return {token_field: ""}
|
|
955
|
+
|
|
956
|
+
def documentation_pagination_parameters(
    self, case_mapping: Callable[[str], str]
) -> list[tuple[AutoDocString, str]]:
    """Describe the pagination parameters accepted in API requests."""
    token_doc = AutoDocString(case_mapping("next_token"), example="")
    return [(token_doc, "A token to fetch the next page of results")]
|