zae-limiter 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- zae_limiter/__init__.py +130 -0
- zae_limiter/aggregator/__init__.py +11 -0
- zae_limiter/aggregator/handler.py +54 -0
- zae_limiter/aggregator/processor.py +270 -0
- zae_limiter/bucket.py +291 -0
- zae_limiter/cli.py +608 -0
- zae_limiter/exceptions.py +214 -0
- zae_limiter/infra/__init__.py +10 -0
- zae_limiter/infra/cfn_template.yaml +255 -0
- zae_limiter/infra/lambda_builder.py +85 -0
- zae_limiter/infra/stack_manager.py +536 -0
- zae_limiter/lease.py +196 -0
- zae_limiter/limiter.py +925 -0
- zae_limiter/migrations/__init__.py +114 -0
- zae_limiter/migrations/v1_0_0.py +55 -0
- zae_limiter/models.py +302 -0
- zae_limiter/repository.py +656 -0
- zae_limiter/schema.py +163 -0
- zae_limiter/version.py +214 -0
- zae_limiter-0.1.0.dist-info/METADATA +470 -0
- zae_limiter-0.1.0.dist-info/RECORD +24 -0
- zae_limiter-0.1.0.dist-info/WHEEL +4 -0
- zae_limiter-0.1.0.dist-info/entry_points.txt +2 -0
- zae_limiter-0.1.0.dist-info/licenses/LICENSE +21 -0
zae_limiter/repository.py
@@ -0,0 +1,656 @@
"""DynamoDB repository for rate limiter data."""
|
|
2
|
+
|
|
3
|
+
import time
|
|
4
|
+
from typing import Any
|
|
5
|
+
|
|
6
|
+
import aioboto3 # type: ignore[import-untyped]
|
|
7
|
+
from botocore.exceptions import ClientError
|
|
8
|
+
|
|
9
|
+
from . import schema
|
|
10
|
+
from .exceptions import EntityExistsError
|
|
11
|
+
from .models import BucketState, Entity, Limit
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
class Repository:
|
|
15
|
+
"""
|
|
16
|
+
Async DynamoDB repository for rate limiter data.
|
|
17
|
+
|
|
18
|
+
Handles all DynamoDB operations including entities, buckets,
|
|
19
|
+
limit configs, and transactions.
|
|
20
|
+
"""
|
|
21
|
+
|
|
22
|
+
def __init__(
|
|
23
|
+
self,
|
|
24
|
+
table_name: str,
|
|
25
|
+
region: str | None = None,
|
|
26
|
+
endpoint_url: str | None = None,
|
|
27
|
+
) -> None:
|
|
28
|
+
self.table_name = table_name
|
|
29
|
+
self.region = region
|
|
30
|
+
self.endpoint_url = endpoint_url
|
|
31
|
+
self._session: aioboto3.Session | None = None
|
|
32
|
+
self._client: Any = None
|
|
33
|
+
|
|
34
|
+
async def _get_client(self) -> Any:
|
|
35
|
+
"""Get or create the DynamoDB client."""
|
|
36
|
+
if self._client is None:
|
|
37
|
+
self._session = aioboto3.Session()
|
|
38
|
+
self._client = await self._session.client(
|
|
39
|
+
"dynamodb",
|
|
40
|
+
region_name=self.region,
|
|
41
|
+
endpoint_url=self.endpoint_url,
|
|
42
|
+
).__aenter__()
|
|
43
|
+
return self._client
|
|
44
|
+
|
|
45
|
+
async def close(self) -> None:
|
|
46
|
+
"""Close the DynamoDB client."""
|
|
47
|
+
if self._client is not None:
|
|
48
|
+
await self._client.__aexit__(None, None, None)
|
|
49
|
+
self._client = None
|
|
50
|
+
self._session = None
|
|
51
|
+
|
|
52
|
+
def _now_ms(self) -> int:
|
|
53
|
+
"""Current time in milliseconds."""
|
|
54
|
+
return int(time.time() * 1000)
|
|
55
|
+
|
|
56
|
+
    # -------------------------------------------------------------------------
    # Table operations
    # -------------------------------------------------------------------------

    async def create_table(self) -> None:
        """Create the DynamoDB table if it doesn't exist."""
        client = await self._get_client()
        definition = schema.get_table_definition(self.table_name)

        try:
            await client.create_table(**definition)
            # Wait for table to be active
            waiter = client.get_waiter("table_exists")
            await waiter.wait(TableName=self.table_name)
        except ClientError as e:
            if e.response["Error"]["Code"] != "ResourceInUseException":
                raise

    async def delete_table(self) -> None:
        """Delete the DynamoDB table."""
        client = await self._get_client()
        try:
            await client.delete_table(TableName=self.table_name)
        except ClientError as e:
            if e.response["Error"]["Code"] != "ResourceNotFoundException":
                raise

    async def create_table_or_stack(
        self,
        use_cloudformation: bool = True,
        stack_parameters: dict[str, str] | None = None,
    ) -> None:
        """
        Create DynamoDB infrastructure via CloudFormation or direct API.

        Automatically detects local DynamoDB (via endpoint_url) and uses
        direct table creation instead of CloudFormation.

        Args:
            use_cloudformation: Use CloudFormation if True, else direct table creation
            stack_parameters: Parameters for CloudFormation stack (e.g.,
                {'snapshot_windows': 'hourly,daily', 'retention_days': '90'})

        Raises:
            StackCreationError: If CloudFormation stack creation fails
        """
        # If endpoint_url is set (local DynamoDB), always use direct creation
        if self.endpoint_url:
            await self.create_table()
            return

        if use_cloudformation:
            # Use stack manager for CloudFormation deployment
            from .infra.stack_manager import StackManager

            async with StackManager(self.table_name, self.region, self.endpoint_url) as manager:
                await manager.create_stack(parameters=stack_parameters)
        else:
            # Fallback to direct table creation
            await self.create_table()

    # -------------------------------------------------------------------------
    # Entity operations
    # -------------------------------------------------------------------------

    async def create_entity(
        self,
        entity_id: str,
        name: str | None = None,
        parent_id: str | None = None,
        metadata: dict[str, str] | None = None,
    ) -> Entity:
        """Create a new entity."""
        client = await self._get_client()
        now = time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime())

        item: dict[str, Any] = {
            "PK": {"S": schema.pk_entity(entity_id)},
            "SK": {"S": schema.sk_meta()},
            "entity_id": {"S": entity_id},
            "data": {
                "M": {
                    "name": {"S": name or entity_id},
                    "parent_id": {"S": parent_id} if parent_id else {"NULL": True},
                    "metadata": {"M": self._serialize_map(metadata or {})},
                    "created_at": {"S": now},
                }
            },
        }

        # Add GSI1 keys for parent lookup if this is a child
        if parent_id:
            item["GSI1PK"] = {"S": schema.gsi1_pk_parent(parent_id)}
            item["GSI1SK"] = {"S": schema.gsi1_sk_child(entity_id)}

        try:
            await client.put_item(
                TableName=self.table_name,
                Item=item,
                ConditionExpression="attribute_not_exists(PK)",
            )
        except ClientError as e:
            if e.response["Error"]["Code"] == "ConditionalCheckFailedException":
                raise EntityExistsError(entity_id)
            raise

        return Entity(
            id=entity_id,
            name=name or entity_id,
            parent_id=parent_id,
            metadata=metadata or {},
            created_at=now,
        )

    async def get_entity(self, entity_id: str) -> Entity | None:
        """Get an entity by ID."""
        client = await self._get_client()

        response = await client.get_item(
            TableName=self.table_name,
            Key={
                "PK": {"S": schema.pk_entity(entity_id)},
                "SK": {"S": schema.sk_meta()},
            },
        )

        item = response.get("Item")
        if not item:
            return None

        return self._deserialize_entity(item)

    async def delete_entity(self, entity_id: str) -> None:
        """Delete an entity and all its related records."""
        client = await self._get_client()

        # First, query all items for this entity
        response = await client.query(
            TableName=self.table_name,
            KeyConditionExpression="PK = :pk",
            ExpressionAttributeValues={":pk": {"S": schema.pk_entity(entity_id)}},
        )

        # Delete all items in batches
        items = response.get("Items", [])
        if not items:
            return

        # Build delete requests
        delete_requests = [
            {"DeleteRequest": {"Key": {"PK": item["PK"], "SK": item["SK"]}}} for item in items
        ]

        # BatchWriteItem in chunks of 25
        for i in range(0, len(delete_requests), 25):
            chunk = delete_requests[i : i + 25]
            await client.batch_write_item(RequestItems={self.table_name: chunk})

    async def get_children(self, parent_id: str) -> list[Entity]:
        """Get all children of a parent entity."""
        client = await self._get_client()

        response = await client.query(
            TableName=self.table_name,
            IndexName=schema.GSI1_NAME,
            KeyConditionExpression="GSI1PK = :pk",
            ExpressionAttributeValues={":pk": {"S": schema.gsi1_pk_parent(parent_id)}},
        )

        entities = []
        for item in response.get("Items", []):
            entity = self._deserialize_entity(item)
            if entity:
                entities.append(entity)

        return entities

    # -------------------------------------------------------------------------
    # Bucket operations
    # -------------------------------------------------------------------------

    async def get_bucket(
        self,
        entity_id: str,
        resource: str,
        limit_name: str,
    ) -> BucketState | None:
        """Get a bucket by entity/resource/limit."""
        client = await self._get_client()

        response = await client.get_item(
            TableName=self.table_name,
            Key={
                "PK": {"S": schema.pk_entity(entity_id)},
                "SK": {"S": schema.sk_bucket(resource, limit_name)},
            },
        )

        item = response.get("Item")
        if not item:
            return None

        return self._deserialize_bucket(item)

    async def get_buckets(
        self,
        entity_id: str,
        resource: str | None = None,
    ) -> list[BucketState]:
        """Get all buckets for an entity, optionally filtered by resource."""
        client = await self._get_client()

        key_condition = "PK = :pk AND begins_with(SK, :sk_prefix)"
        expression_values: dict[str, Any] = {
            ":pk": {"S": schema.pk_entity(entity_id)},
            ":sk_prefix": {"S": schema.SK_BUCKET + (f"{resource}#" if resource else "")},
        }

        response = await client.query(
            TableName=self.table_name,
            KeyConditionExpression=key_condition,
            ExpressionAttributeValues=expression_values,
        )

        return [self._deserialize_bucket(item) for item in response.get("Items", [])]

    def build_bucket_put_item(
        self,
        state: BucketState,
        ttl_seconds: int = 86400,
    ) -> dict[str, Any]:
        """Build a PutItem for a bucket (for use in transactions)."""
        now_ms = self._now_ms()
        return {
            "Put": {
                "TableName": self.table_name,
                "Item": {
                    "PK": {"S": schema.pk_entity(state.entity_id)},
                    "SK": {"S": schema.sk_bucket(state.resource, state.limit_name)},
                    "entity_id": {"S": state.entity_id},
                    "data": {
                        "M": {
                            "resource": {"S": state.resource},
                            "limit_name": {"S": state.limit_name},
                            "tokens_milli": {"N": str(state.tokens_milli)},
                            "last_refill_ms": {"N": str(state.last_refill_ms)},
                            "capacity_milli": {"N": str(state.capacity_milli)},
                            "burst_milli": {"N": str(state.burst_milli)},
                            "refill_amount_milli": {"N": str(state.refill_amount_milli)},
                            "refill_period_ms": {"N": str(state.refill_period_ms)},
                        }
                    },
                    "GSI2PK": {"S": schema.gsi2_pk_resource(state.resource)},
                    "GSI2SK": {"S": schema.gsi2_sk_bucket(state.entity_id, state.limit_name)},
                    "ttl": {"N": str(schema.calculate_ttl(now_ms, ttl_seconds))},
                },
            }
        }

    def build_bucket_update_item(
        self,
        entity_id: str,
        resource: str,
        limit_name: str,
        new_tokens_milli: int,
        new_last_refill_ms: int,
        expected_tokens_milli: int | None = None,
    ) -> dict[str, Any]:
        """Build an UpdateItem for a bucket (for use in transactions)."""
        update: dict[str, dict[str, Any]] = {
            "Update": {
                "TableName": self.table_name,
                "Key": {
                    "PK": {"S": schema.pk_entity(entity_id)},
                    "SK": {"S": schema.sk_bucket(resource, limit_name)},
                },
                "UpdateExpression": "SET #data.#tokens = :tokens, #data.#refill = :refill",
                "ExpressionAttributeNames": {
                    "#data": "data",
                    "#tokens": "tokens_milli",
                    "#refill": "last_refill_ms",
                },
                "ExpressionAttributeValues": {
                    ":tokens": {"N": str(new_tokens_milli)},
                    ":refill": {"N": str(new_last_refill_ms)},
                },
            }
        }

        # Add optimistic locking condition if provided
        if expected_tokens_milli is not None:
            update["Update"]["ConditionExpression"] = "#data.#tokens = :expected"
            update["Update"]["ExpressionAttributeValues"][":expected"] = {
                "N": str(expected_tokens_milli)
            }

        return update

    async def transact_write(self, items: list[dict[str, Any]]) -> None:
        """Execute a transactional write."""
        if not items:
            return

        client = await self._get_client()
        await client.transact_write_items(TransactItems=items)

    # -------------------------------------------------------------------------
    # Limit config operations
    # -------------------------------------------------------------------------

    async def set_limits(
        self,
        entity_id: str,
        limits: list[Limit],
        resource: str = schema.DEFAULT_RESOURCE,
    ) -> None:
        """Store limit configs for an entity."""
        client = await self._get_client()

        # Delete existing limits for this resource first
        await self._delete_limits_for_resource(entity_id, resource)

        # Write new limits
        for limit in limits:
            item = {
                "PK": {"S": schema.pk_entity(entity_id)},
                "SK": {"S": schema.sk_limit(resource, limit.name)},
                "entity_id": {"S": entity_id},
                "data": {
                    "M": {
                        "resource": {"S": resource},
                        "limit_name": {"S": limit.name},
                        "capacity": {"N": str(limit.capacity)},
                        "burst": {"N": str(limit.burst)},
                        "refill_amount": {"N": str(limit.refill_amount)},
                        "refill_period_seconds": {"N": str(limit.refill_period_seconds)},
                    }
                },
            }
            await client.put_item(TableName=self.table_name, Item=item)

    async def get_limits(
        self,
        entity_id: str,
        resource: str = schema.DEFAULT_RESOURCE,
    ) -> list[Limit]:
        """Get stored limit configs for an entity."""
        client = await self._get_client()

        response = await client.query(
            TableName=self.table_name,
            KeyConditionExpression="PK = :pk AND begins_with(SK, :sk_prefix)",
            ExpressionAttributeValues={
                ":pk": {"S": schema.pk_entity(entity_id)},
                ":sk_prefix": {"S": schema.sk_limit_prefix(resource)},
            },
        )

        limits = []
        for item in response.get("Items", []):
            data = self._deserialize_map(item.get("data", {}).get("M", {}))
            limits.append(
                Limit(
                    name=data["limit_name"],
                    capacity=int(data["capacity"]),
                    burst=int(data["burst"]),
                    refill_amount=int(data["refill_amount"]),
                    refill_period_seconds=int(data["refill_period_seconds"]),
                )
            )

        return limits

    async def delete_limits(
        self,
        entity_id: str,
        resource: str = schema.DEFAULT_RESOURCE,
    ) -> None:
        """Delete stored limit configs for an entity."""
        await self._delete_limits_for_resource(entity_id, resource)

    async def _delete_limits_for_resource(self, entity_id: str, resource: str) -> None:
        """Delete all limits for a specific resource."""
        client = await self._get_client()

        response = await client.query(
            TableName=self.table_name,
            KeyConditionExpression="PK = :pk AND begins_with(SK, :sk_prefix)",
            ExpressionAttributeValues={
                ":pk": {"S": schema.pk_entity(entity_id)},
                ":sk_prefix": {"S": schema.sk_limit_prefix(resource)},
            },
            ProjectionExpression="PK, SK",
        )

        items = response.get("Items", [])
        if not items:
            return

        delete_requests = [
            {"DeleteRequest": {"Key": {"PK": item["PK"], "SK": item["SK"]}}} for item in items
        ]

        for i in range(0, len(delete_requests), 25):
            chunk = delete_requests[i : i + 25]
            await client.batch_write_item(RequestItems={self.table_name: chunk})

    # -------------------------------------------------------------------------
    # Version record operations
    # -------------------------------------------------------------------------

    async def get_version_record(self) -> dict[str, Any] | None:
        """
        Get the infrastructure version record.

        Returns:
            Version record with schema_version, lambda_version, etc.
            None if no version record exists.
        """
        client = await self._get_client()

        response = await client.get_item(
            TableName=self.table_name,
            Key={
                "PK": {"S": schema.pk_system()},
                "SK": {"S": schema.sk_version()},
            },
        )

        item = response.get("Item")
        if not item:
            return None

        return self._deserialize_map(item.get("data", {}).get("M", {}))

    async def set_version_record(
        self,
        schema_version: str,
        lambda_version: str | None = None,
        client_min_version: str = "0.0.0",
        updated_by: str | None = None,
    ) -> None:
        """
        Set the infrastructure version record.

        Args:
            schema_version: Current schema version (e.g., "1.0.0")
            lambda_version: Currently deployed Lambda version
            client_min_version: Minimum compatible client version
            updated_by: Identifier of what performed the update
        """
        client = await self._get_client()
        now = time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime())

        data: dict[str, Any] = {
            "schema_version": {"S": schema_version},
            "client_min_version": {"S": client_min_version},
            "updated_at": {"S": now},
        }

        if lambda_version:
            data["lambda_version"] = {"S": lambda_version}
        else:
            data["lambda_version"] = {"NULL": True}

        if updated_by:
            data["updated_by"] = {"S": updated_by}
        else:
            data["updated_by"] = {"NULL": True}

        item = {
            "PK": {"S": schema.pk_system()},
            "SK": {"S": schema.sk_version()},
            "data": {"M": data},
        }

        await client.put_item(TableName=self.table_name, Item=item)

    # -------------------------------------------------------------------------
    # Resource aggregation
    # -------------------------------------------------------------------------

    async def get_resource_buckets(
        self,
        resource: str,
        limit_name: str | None = None,
    ) -> list[BucketState]:
        """Get all buckets for a resource across all entities."""
        client = await self._get_client()

        key_condition = "GSI2PK = :pk"
        expression_values: dict[str, Any] = {
            ":pk": {"S": schema.gsi2_pk_resource(resource)},
        }

        if limit_name:
            # The key condition only narrows the query to bucket rows; the
            # exact limit_name match is applied client-side in the loop below.
            key_condition += " AND begins_with(GSI2SK, :sk_prefix)"
            expression_values[":sk_prefix"] = {"S": "BUCKET#"}

        response = await client.query(
            TableName=self.table_name,
            IndexName=schema.GSI2_NAME,
            KeyConditionExpression=key_condition,
            ExpressionAttributeValues=expression_values,
        )

        buckets = []
        for item in response.get("Items", []):
            bucket = self._deserialize_bucket(item)
            if limit_name is None or bucket.limit_name == limit_name:
                buckets.append(bucket)

        return buckets

    # -------------------------------------------------------------------------
    # Serialization helpers
    # -------------------------------------------------------------------------

    def _serialize_map(self, data: dict[str, Any]) -> dict[str, Any]:
        """Serialize a Python dict to DynamoDB map format."""
        result: dict[str, Any] = {}
        for key, value in data.items():
            if isinstance(value, str):
                result[key] = {"S": value}
            elif isinstance(value, bool):
                result[key] = {"BOOL": value}
            elif isinstance(value, (int, float)):
                result[key] = {"N": str(value)}
            elif isinstance(value, dict):
                result[key] = {"M": self._serialize_map(value)}
            elif isinstance(value, list):
                result[key] = {"L": [self._serialize_value(v) for v in value]}
            elif value is None:
                result[key] = {"NULL": True}
        return result

    def _serialize_value(self, value: Any) -> dict[str, Any]:
        """Serialize a single value to DynamoDB format."""
        if isinstance(value, str):
            return {"S": value}
        elif isinstance(value, bool):
            return {"BOOL": value}
        elif isinstance(value, (int, float)):
            return {"N": str(value)}
        elif isinstance(value, dict):
            return {"M": self._serialize_map(value)}
        elif isinstance(value, list):
            return {"L": [self._serialize_value(v) for v in value]}
        elif value is None:
            return {"NULL": True}
        return {"S": str(value)}

    def _deserialize_map(self, data: dict[str, Any]) -> dict[str, Any]:
        """Deserialize a DynamoDB map to Python dict."""
        result = {}
        for key, value in data.items():
            result[key] = self._deserialize_value(value)
        return result

    def _deserialize_value(self, value: dict[str, Any]) -> Any:
        """Deserialize a single DynamoDB value."""
        if "S" in value:
            return value["S"]
        elif "N" in value:
            num_str = value["N"]
            return int(num_str) if "." not in num_str else float(num_str)
        elif "BOOL" in value:
            return value["BOOL"]
        elif "M" in value:
            return self._deserialize_map(value["M"])
        elif "L" in value:
            return [self._deserialize_value(v) for v in value["L"]]
        elif "NULL" in value:
            return None
        return None

    def _deserialize_entity(self, item: dict[str, Any]) -> Entity:
        """Deserialize a DynamoDB item to Entity."""
        data = self._deserialize_map(item.get("data", {}).get("M", {}))
        return Entity(
            id=item.get("entity_id", {}).get("S", ""),
            name=data.get("name"),
            parent_id=data.get("parent_id"),
            metadata=data.get("metadata", {}),
            created_at=data.get("created_at"),
        )

    def _deserialize_bucket(self, item: dict[str, Any]) -> BucketState:
        """Deserialize a DynamoDB item to BucketState."""
        data = self._deserialize_map(item.get("data", {}).get("M", {}))
        return BucketState(
            entity_id=item.get("entity_id", {}).get("S", ""),
            resource=data.get("resource", ""),
            limit_name=data.get("limit_name", ""),
            tokens_milli=int(data.get("tokens_milli", 0)),
            last_refill_ms=int(data.get("last_refill_ms", 0)),
            capacity_milli=int(data.get("capacity_milli", 0)),
            burst_milli=int(data.get("burst_milli", 0)),
            refill_amount_milli=int(data.get("refill_amount_milli", 0)),
            refill_period_ms=int(data.get("refill_period_ms", 0)),
        )
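
The file above is the package's storage layer: entities, buckets, limit configs, and the version record share one DynamoDB table, and bucket writes compose into transact_write_items calls via the build_bucket_* helpers. The sketch below is not part of the package; it exercises that lifecycle against a local DynamoDB endpoint. The import paths are taken from the wheel's file list above, and the constructor keywords for Limit and BucketState mirror how repository.py itself builds them; the table name, endpoint, resource name ("default"), and token numbers are illustrative, and a local DynamoDB on port 8000 is assumed (the endpoint_url branch of create_table_or_stack).

import asyncio

from zae_limiter.models import BucketState, Limit
from zae_limiter.repository import Repository


async def main() -> None:
    repo = Repository(
        table_name="rate-limits",
        endpoint_url="http://localhost:8000",  # local DynamoDB (assumed running)
    )
    try:
        # endpoint_url is set, so this skips CloudFormation and calls create_table()
        await repo.create_table_or_stack()

        entity = await repo.create_entity("tenant-1", name="Tenant One")
        await repo.set_limits(
            entity.id,
            [Limit(name="rpm", capacity=100, burst=120, refill_amount=100, refill_period_seconds=60)],
        )

        # Seed a bucket, then update it atomically; values are milli-token units.
        state = BucketState(
            entity_id=entity.id,
            resource="default",
            limit_name="rpm",
            tokens_milli=100_000,
            last_refill_ms=0,
            capacity_milli=100_000,
            burst_milli=120_000,
            refill_amount_milli=100_000,
            refill_period_ms=60_000,
        )
        await repo.transact_write([repo.build_bucket_put_item(state)])
        await repo.transact_write(
            [
                repo.build_bucket_update_item(
                    entity.id,
                    "default",
                    "rpm",
                    new_tokens_milli=99_000,
                    new_last_refill_ms=1,
                    expected_tokens_milli=100_000,  # optimistic-lock condition
                )
            ]
        )
        print(await repo.get_bucket(entity.id, "default", "rpm"))
    finally:
        await repo.close()


asyncio.run(main())

The update deliberately goes through transact_write even for a single item: the ConditionExpression added by expected_tokens_milli makes the write fail if another writer changed the token count first, which is the optimistic-locking pattern the builder methods are designed around.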