localstack-core 4.13.2.dev63__py3-none-any.whl → 4.13.2.dev67__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -3,10 +3,15 @@ from typing import TypedDict
 
 from localstack.aws.api.dynamodb import (
     AttributeMap,
+    BackupDetails,
+    ContinuousBackupsDescription,
+    GlobalTableDescription,
     Key,
+    KinesisDataStreamDestination,
     RegionName,
     ReplicaDescription,
     StreamViewType,
+    TableDescription,
     TableName,
     TimeToLiveSpecification,
 )
@@ -91,9 +96,20 @@ class TableRecords(TypedDict):
 RecordsMap = dict[TableName, TableRecords]
 
 
+class TableProperties(TypedDict, total=False):
+    ContinuousBackupsDescription: ContinuousBackupsDescription
+
+
+@dataclasses.dataclass
+class Backup:
+    details: BackupDetails
+    backup_file: str
+    table_name: str
+
+
 class DynamoDBStore(BaseStore):
     # maps global table names to configurations (for the legacy v.2017 tables)
-    GLOBAL_TABLES: dict[str, dict] = CrossRegionAttribute(default=dict)
+    GLOBAL_TABLES: dict[str, GlobalTableDescription] = CrossRegionAttribute(default=dict)
 
     # Maps table name to the region they exist in on DDBLocal (for v.2019 global tables)
     TABLE_REGION: dict[TableName, RegionName] = CrossRegionAttribute(default=dict)
@@ -104,19 +120,24 @@ class DynamoDBStore(BaseStore):
     )
 
     # cache table taggings - maps table ARN to tags dict
-    TABLE_TAGS: dict[str, dict] = CrossRegionAttribute(default=dict)
+    TABLE_TAGS: dict[str, dict[str, str]] = CrossRegionAttribute(default=dict)
 
     # maps table names to cached table definitions
-    table_definitions: dict[str, dict] = LocalAttribute(default=dict)
+    table_definitions: dict[str, TableDescription] = LocalAttribute(default=dict)
+
+    # map table name to streaming destinations
+    streaming_destinations: dict[str, list[KinesisDataStreamDestination]] = LocalAttribute(
+        default=dict
+    )
 
     # maps table names to additional table properties that are not stored upstream (e.g., ReplicaUpdates)
-    table_properties: dict[str, dict] = LocalAttribute(default=dict)
+    table_properties: dict[str, TableProperties] = LocalAttribute(default=dict)
 
     # maps table names to TTL specifications
     ttl_specifications: dict[str, TimeToLiveSpecification] = LocalAttribute(default=dict)
 
     # maps backups
-    backups: dict[str, dict] = LocalAttribute(default=dict)
+    backups: dict[str, Backup] = LocalAttribute(default=dict)
 
 
 dynamodb_stores = AccountRegionBundle("dynamodb", DynamoDBStore)
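For orientation, a minimal sketch of the shape the new streaming_destinations field holds (the table name and stream ARN are made-up placeholders; only names that appear in this diff are used, and localstack-core is assumed to be importable):

    from localstack.aws.api.dynamodb import (
        ApproximateCreationDateTimePrecision,
        DestinationStatus,
        KinesisDataStreamDestination,
    )

    # A plain dict stands in for DynamoDBStore.streaming_destinations here.
    streaming_destinations: dict[str, list[KinesisDataStreamDestination]] = {
        "my-table": [
            KinesisDataStreamDestination(
                StreamArn="arn:aws:kinesis:us-east-1:000000000000:stream/my-stream",
                DestinationStatus=DestinationStatus.ACTIVE,
                DestinationStatusDescription="Stream is active",
                ApproximateCreationDateTimePrecision=ApproximateCreationDateTimePrecision.MILLISECOND,
            )
        ]
    }

    # The provider forwards records to the most recently appended destination:
    latest_stream_arn = streaming_destinations["my-table"][-1]["StreamArn"]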
@@ -6,7 +6,6 @@ import random
 import re
 import threading
 import time
-import traceback
 from collections import defaultdict
 from concurrent.futures import ThreadPoolExecutor
 from contextlib import contextmanager
@@ -64,6 +63,7 @@ from localstack.aws.api.dynamodb import (
     GetItemOutput,
     GlobalTableAlreadyExistsException,
     GlobalTableNotFoundException,
+    KinesisDataStreamDestination,
     KinesisStreamingDestinationOutput,
     ListGlobalTablesOutput,
     ListTablesInputLimit,
@@ -274,7 +274,12 @@ class EventForwarder:
             table_arn = arns.dynamodb_table_arn(table_name, account_id, region_name)
             records = table_records["records"]
             table_def = store.table_definitions.get(table_name) or {}
-            stream_arn = table_def["KinesisDataStreamDestinations"][-1]["StreamArn"]
+            destinations = store.streaming_destinations.get(table_name)
+            if not destinations:
+                LOG.debug("Table %s has no Kinesis streaming destinations enabled", table_name)
+                continue
+
+            stream_arn = destinations[-1]["StreamArn"]
             for record in records:
                 kinesis_record = dict(
                     tableName=table_name,
@@ -1665,34 +1670,32 @@ class DynamoDBProvider(DynamodbApi, ServiceLifecycleHook):
         if not stream:
             raise ValidationException("User does not have a permission to use kinesis stream")
 
-        table_def = get_store(context.account_id, context.region).table_definitions.setdefault(
-            table_name, {}
-        )
-
-        dest_status = table_def.get("KinesisDataStreamDestinationStatus")
-        if dest_status not in ["DISABLED", "ENABLE_FAILED", None]:
-            raise ValidationException(
-                "Table is not in a valid state to enable Kinesis Streaming "
-                "Destination:EnableKinesisStreamingDestination must be DISABLED or ENABLE_FAILED "
-                "to perform ENABLE operation."
-            )
+        store = get_store(context.account_id, context.region)
+        streaming_destinations = store.streaming_destinations.get(table_name) or []
 
-        table_def.setdefault("KinesisDataStreamDestinations", [])
+        destinations = [d for d in streaming_destinations if d["StreamArn"] == stream_arn]
+        if destinations:
+            status = destinations[0].get("DestinationStatus", None)
+            if status not in ["DISABLED", "ENABLED_FAILED", None]:
+                raise ValidationException(
+                    "Table is not in a valid state to enable Kinesis Streaming "
+                    "Destination:EnableKinesisStreamingDestination must be DISABLED or ENABLE_FAILED "
+                    "to perform ENABLE operation."
+                )
 
         # remove the stream destination if already present
-        table_def["KinesisDataStreamDestinations"] = [
-            t for t in table_def["KinesisDataStreamDestinations"] if t["StreamArn"] != stream_arn
+        store.streaming_destinations[table_name] = [
+            _d for _d in streaming_destinations if _d["StreamArn"] != stream_arn
         ]
         # append the active stream destination at the end of the list
-        table_def["KinesisDataStreamDestinations"].append(
-            {
-                "DestinationStatus": DestinationStatus.ACTIVE,
-                "DestinationStatusDescription": "Stream is active",
-                "StreamArn": stream_arn,
-                "ApproximateCreationDateTimePrecision": ApproximateCreationDateTimePrecision.MILLISECOND,
-            }
+        store.streaming_destinations[table_name].append(
+            KinesisDataStreamDestination(
+                DestinationStatus=DestinationStatus.ACTIVE,
+                DestinationStatusDescription="Stream is active",
+                StreamArn=stream_arn,
+                ApproximateCreationDateTimePrecision=ApproximateCreationDateTimePrecision.MILLISECOND,
+            )
         )
-        table_def["KinesisDataStreamDestinationStatus"] = DestinationStatus.ACTIVE
         return KinesisStreamingDestinationOutput(
             DestinationStatus=DestinationStatus.ENABLING,
             StreamArn=stream_arn,
@@ -1715,34 +1718,25 @@ class DynamoDBProvider(DynamodbApi, ServiceLifecycleHook):
                 error_message=f"Requested resource not found: Table: {table_name} not found",
             )
 
-        # TODO: Must raise if invoked before KinesisStreamingDestination is ACTIVE
-
         stream = self._event_forwarder.is_kinesis_stream_exists(stream_arn=stream_arn)
         if not stream:
             raise ValidationException(
                 "User does not have a permission to use kinesis stream",
             )
 
-        table_def = get_store(context.account_id, context.region).table_definitions.setdefault(
-            table_name, {}
-        )
-
-        stream_destinations = table_def.get("KinesisDataStreamDestinations")
-        if stream_destinations:
-            if table_def["KinesisDataStreamDestinationStatus"] == DestinationStatus.ACTIVE:
-                for dest in stream_destinations:
-                    if (
-                        dest["StreamArn"] == stream_arn
-                        and dest["DestinationStatus"] == DestinationStatus.ACTIVE
-                    ):
-                        dest["DestinationStatus"] = DestinationStatus.DISABLED
-                        dest["DestinationStatusDescription"] = ("Stream is disabled",)
-                        table_def["KinesisDataStreamDestinationStatus"] = DestinationStatus.DISABLED
-                        return KinesisStreamingDestinationOutput(
-                            DestinationStatus=DestinationStatus.DISABLING,
-                            StreamArn=stream_arn,
-                            TableName=table_name,
-                        )
+        store = get_store(context.account_id, context.region)
+        streaming_destinations = store.streaming_destinations.get(table_name) or []
+
+        # Get the right destination based on the arn
+        destinations = [d for d in streaming_destinations if d["StreamArn"] == stream_arn]
+        if destinations:
+            destinations[0]["DestinationStatus"] = DestinationStatus.DISABLED
+            destinations[0]["DestinationStatusDescription"] = "Stream is disabled"
+            return KinesisStreamingDestinationOutput(
+                DestinationStatus=DestinationStatus.DISABLING,
+                StreamArn=stream_arn,
+                TableName=table_name,
+            )
         raise ValidationException(
             "Table is not in a valid state to disable Kinesis Streaming Destination:"
            "DisableKinesisStreamingDestination must be ACTIVE to perform DISABLE operation."
@@ -1753,12 +1747,9 @@ class DynamoDBProvider(DynamodbApi, ServiceLifecycleHook):
     ) -> DescribeKinesisStreamingDestinationOutput:
         self.ensure_table_exists(context.account_id, context.region, table_name)
 
-        table_def = (
-            get_store(context.account_id, context.region).table_definitions.get(table_name) or {}
-        )
-
-        stream_destinations = table_def.get("KinesisDataStreamDestinations") or []
-        stream_destinations = copy.deepcopy(stream_destinations)
+        store = get_store(context.account_id, context.region)
+        table_destinations = store.streaming_destinations.get(table_name) or []
+        stream_destinations = copy.deepcopy(table_destinations)
 
         for destination in stream_destinations:
             destination.pop("ApproximateCreationDateTimePrecision", None)
@@ -1799,23 +1790,21 @@ class DynamoDBProvider(DynamodbApi, ServiceLifecycleHook):
             )
 
         store = get_store(context.account_id, context.region)
+        table_destinations = store.streaming_destinations.get(table_name) or []
 
-        table_def = store.table_definitions.get(table_name) or {}
-        table_def.setdefault("KinesisDataStreamDestinations", [])
-
-        table_id = table_def["TableId"]
-
-        destination = None
-        for stream in table_def["KinesisDataStreamDestinations"]:
-            if stream["StreamArn"] == stream_arn:
-                destination = stream
-
-        if destination is None:
+        # filter the right destination based on the stream ARN
+        destinations = [d for d in table_destinations if d["StreamArn"] == stream_arn]
+        if not destinations:
             raise ValidationException(
                 "Table is not in a valid state to enable Kinesis Streaming Destination: "
                 f"No streaming destination with streamArn: {stream_arn} found for table with tableName: {table_name}"
             )
 
+        destination = destinations[0]
+        table_def = store.table_definitions.get(table_name) or {}
+        table_def.setdefault("KinesisDataStreamDestinations", [])
+
+        table_id = store.table_definitions.get(table_name, {}).get("TableId")
         if (
             existing_precision := destination["ApproximateCreationDateTimePrecision"]
         ) == update_kinesis_streaming_configuration["ApproximateCreationDateTimePrecision"]:
@@ -1823,7 +1812,6 @@ class DynamoDBProvider(DynamodbApi, ServiceLifecycleHook):
                 f"Invalid Request: Precision is already set to the desired value of {existing_precision} "
                 f"for tableId: {table_id}, kdsArn: {stream_arn}"
             )
-
         destination["ApproximateCreationDateTimePrecision"] = time_precision
 
         return UpdateKinesisStreamingDestinationOutput(
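The enable/disable/describe/update handlers above now track Kinesis streaming destinations exclusively via store.streaming_destinations. Below is a hedged end-to-end sketch of exercising that lifecycle with boto3 against a running LocalStack instance; the endpoint URL, credentials, table, and stream names are assumptions rather than part of this diff, and the table and Kinesis stream must already exist:

    import boto3

    # Assumed LocalStack defaults: edge port 4566 and dummy "test" credentials.
    client_kwargs = dict(
        endpoint_url="http://localhost:4566",
        region_name="us-east-1",
        aws_access_key_id="test",
        aws_secret_access_key="test",
    )
    ddb = boto3.client("dynamodb", **client_kwargs)
    kinesis = boto3.client("kinesis", **client_kwargs)

    stream_arn = kinesis.describe_stream(StreamName="my-stream")["StreamDescription"]["StreamARN"]

    # Enable: records an ACTIVE KinesisDataStreamDestination in the store.
    ddb.enable_kinesis_streaming_destination(TableName="my-table", StreamArn=stream_arn)

    # Describe: returns the destinations recorded for the table.
    out = ddb.describe_kinesis_streaming_destination(TableName="my-table")
    print(out["KinesisDataStreamDestinations"])

    # Disable: flips the matching destination to DISABLED.
    ddb.disable_kinesis_streaming_destination(TableName="my-table", StreamArn=stream_arn)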
@@ -2317,19 +2305,20 @@ def get_table_stream_type(
     :return: a TableStreamViewType object if the table has streams enabled. If not, return None
     """
     if not table_name_or_arn:
-        return
+        return None
 
     table_name = table_name_or_arn.split(":table/")[-1]
 
     is_kinesis = False
     stream_view_type = None
 
-    if table_definition := get_store(account_id, region_name).table_definitions.get(table_name):
-        if table_definition.get("KinesisDataStreamDestinationStatus") == "ACTIVE":
+    # To determine if stream to kinesis is enabled, we look for active kinesis destinations
+    destinations = get_store(account_id, region_name).streaming_destinations.get(table_name) or []
+    for destination in destinations:
+        if destination["DestinationStatus"] == DestinationStatus.ACTIVE:
            is_kinesis = True
 
    table_arn = arns.dynamodb_table_arn(table_name, account_id=account_id, region_name=region_name)
-
    if (
        stream := dynamodbstreams_api.get_stream_for_table(account_id, region_name, table_arn)
    ) and stream["StreamStatus"] in (StreamStatus.ENABLING, StreamStatus.ENABLED):
@@ -2337,6 +2326,7 @@ def get_table_stream_type(
 
     if is_kinesis or stream_view_type:
         return TableStreamType(stream_view_type, is_kinesis=is_kinesis)
+    return None
 
 
 def get_updated_records(
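The net effect of the get_table_stream_type change: whether Kinesis forwarding is on is now derived from the per-table destination list rather than a single status field on the cached table definition. A simplified, self-contained sketch of that decision follows (the helper and dataclass are illustrative stand-ins, not the provider's actual classes):

    from dataclasses import dataclass


    @dataclass
    class TableStreamTypeSketch:
        stream_view_type: str | None
        is_kinesis: bool


    def resolve_stream_type(
        destinations: list[dict], stream_view_type: str | None
    ) -> TableStreamTypeSketch | None:
        # Kinesis forwarding counts as enabled if any destination is ACTIVE.
        is_kinesis = any(d.get("DestinationStatus") == "ACTIVE" for d in destinations)
        if is_kinesis or stream_view_type:
            return TableStreamTypeSketch(stream_view_type, is_kinesis=is_kinesis)
        return None


    # An ACTIVE Kinesis destination but no DynamoDB stream view type:
    print(resolve_stream_type([{"DestinationStatus": "ACTIVE"}], None))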
@@ -2410,7 +2400,9 @@ def get_updated_records(
     return {table_name: TableRecords(records=result, table_stream_type=table_stream_type)}
 
 
-def create_dynamodb_stream(account_id: str, region_name: str, data, latest_stream_label):
+def create_dynamodb_stream(
+    account_id: str, region_name: str, data: CreateTableInput, latest_stream_label: str | None
+) -> None:
     stream = data["StreamSpecification"]
     enabled = stream.get("StreamEnabled")
 
@@ -2428,22 +2420,6 @@ def create_dynamodb_stream(account_id: str, region_name: str, data, latest_stream_label):
     )
 
 
-def dynamodb_get_table_stream_specification(account_id: str, region_name: str, table_name: str):
-    try:
-        table_schema = SchemaExtractor.get_table_schema(
-            table_name, account_id=account_id, region_name=region_name
-        )
-        return table_schema["Table"].get("StreamSpecification")
-    except Exception as e:
-        LOG.info(
-            "Unable to get stream specification for table %s: %s %s",
-            table_name,
-            e,
-            traceback.format_exc(),
-        )
-        raise e
-
-
 def find_item_for_keys_values_in_batch(
     table_name: str, item_keys: dict, batch: BatchGetResponseMap
 ) -> AttributeMap | None:
@@ -58,6 +58,7 @@ from localstack.aws.api.dynamodb import (
     GetItemOutput,
     GlobalTableAlreadyExistsException,
     GlobalTableNotFoundException,
+    KinesisDataStreamDestination,
     KinesisStreamingDestinationOutput,
     ListGlobalTablesOutput,
     ListTablesInputLimit,
@@ -1196,34 +1197,32 @@ class DynamoDBProvider(DynamodbApi, ServiceLifecycleHook):
         if not kinesis_stream_exists(stream_arn=stream_arn):
             raise ValidationException("User does not have a permission to use kinesis stream")
 
-        table_def = get_store(context.account_id, context.region).table_definitions.setdefault(
-            table_name, {}
-        )
-
-        dest_status = table_def.get("KinesisDataStreamDestinationStatus")
-        if dest_status not in ["DISABLED", "ENABLE_FAILED", None]:
-            raise ValidationException(
-                "Table is not in a valid state to enable Kinesis Streaming "
-                "Destination:EnableKinesisStreamingDestination must be DISABLED or ENABLE_FAILED "
-                "to perform ENABLE operation."
-            )
+        store = get_store(context.account_id, context.region)
+        streaming_destinations = store.streaming_destinations.get(table_name) or []
 
-        table_def.setdefault("KinesisDataStreamDestinations", [])
+        destinations = [d for d in streaming_destinations if d["StreamArn"] == stream_arn]
+        if destinations:
+            status = destinations[0].get("DestinationStatus", None)
+            if status not in ["DISABLED", "ENABLED_FAILED", None]:
+                raise ValidationException(
+                    "Table is not in a valid state to enable Kinesis Streaming "
+                    "Destination:EnableKinesisStreamingDestination must be DISABLED or ENABLE_FAILED "
+                    "to perform ENABLE operation."
+                )
 
         # remove the stream destination if already present
-        table_def["KinesisDataStreamDestinations"] = [
-            t for t in table_def["KinesisDataStreamDestinations"] if t["StreamArn"] != stream_arn
+        store.streaming_destinations[table_name] = [
+            _d for _d in streaming_destinations if _d["StreamArn"] != stream_arn
         ]
         # append the active stream destination at the end of the list
-        table_def["KinesisDataStreamDestinations"].append(
-            {
-                "DestinationStatus": DestinationStatus.ACTIVE,
-                "DestinationStatusDescription": "Stream is active",
-                "StreamArn": stream_arn,
-                "ApproximateCreationDateTimePrecision": ApproximateCreationDateTimePrecision.MILLISECOND,
-            }
+        store.streaming_destinations[table_name].append(
+            KinesisDataStreamDestination(
+                DestinationStatus=DestinationStatus.ACTIVE,
+                DestinationStatusDescription="Stream is active",
+                StreamArn=stream_arn,
+                ApproximateCreationDateTimePrecision=ApproximateCreationDateTimePrecision.MILLISECOND,
+            )
         )
-        table_def["KinesisDataStreamDestinationStatus"] = DestinationStatus.ACTIVE
         return KinesisStreamingDestinationOutput(
             DestinationStatus=DestinationStatus.ENABLING,
             StreamArn=stream_arn,
@@ -1251,26 +1250,19 @@ class DynamoDBProvider(DynamodbApi, ServiceLifecycleHook):
                 "User does not have a permission to use kinesis stream",
             )
 
-        table_def = get_store(context.account_id, context.region).table_definitions.setdefault(
-            table_name, {}
-        )
-
-        stream_destinations = table_def.get("KinesisDataStreamDestinations")
-        if stream_destinations:
-            if table_def["KinesisDataStreamDestinationStatus"] == DestinationStatus.ACTIVE:
-                for dest in stream_destinations:
-                    if (
-                        dest["StreamArn"] == stream_arn
-                        and dest["DestinationStatus"] == DestinationStatus.ACTIVE
-                    ):
-                        dest["DestinationStatus"] = DestinationStatus.DISABLED
-                        dest["DestinationStatusDescription"] = ("Stream is disabled",)
-                        table_def["KinesisDataStreamDestinationStatus"] = DestinationStatus.DISABLED
-                        return KinesisStreamingDestinationOutput(
-                            DestinationStatus=DestinationStatus.DISABLING,
-                            StreamArn=stream_arn,
-                            TableName=table_name,
-                        )
+        store = get_store(context.account_id, context.region)
+        streaming_destinations = store.streaming_destinations.get(table_name) or []
+
+        # Get the right destination based on the arn
+        destinations = [d for d in streaming_destinations if d["StreamArn"] == stream_arn]
+        if destinations:
+            destinations[0]["DestinationStatus"] = DestinationStatus.DISABLED
+            destinations[0]["DestinationStatusDescription"] = "Stream is disabled"
+            return KinesisStreamingDestinationOutput(
+                DestinationStatus=DestinationStatus.DISABLING,
+                StreamArn=stream_arn,
+                TableName=table_name,
+            )
         raise ValidationException(
             "Table is not in a valid state to disable Kinesis Streaming Destination:"
             "DisableKinesisStreamingDestination must be ACTIVE to perform DISABLE operation."
@@ -1281,12 +1273,9 @@ class DynamoDBProvider(DynamodbApi, ServiceLifecycleHook):
     ) -> DescribeKinesisStreamingDestinationOutput:
         self.ensure_table_exists(context.account_id, context.region, table_name)
 
-        table_def = (
-            get_store(context.account_id, context.region).table_definitions.get(table_name) or {}
-        )
-
-        stream_destinations = table_def.get("KinesisDataStreamDestinations") or []
-        stream_destinations = copy.deepcopy(stream_destinations)
+        store = get_store(context.account_id, context.region)
+        table_destinations = store.streaming_destinations.get(table_name) or []
+        stream_destinations = copy.deepcopy(table_destinations)
 
         for destination in stream_destinations:
             destination.pop("ApproximateCreationDateTimePrecision", None)
@@ -1327,23 +1316,21 @@ class DynamoDBProvider(DynamodbApi, ServiceLifecycleHook):
             )
 
         store = get_store(context.account_id, context.region)
+        table_destinations = store.streaming_destinations.get(table_name) or []
 
-        table_def = store.table_definitions.get(table_name) or {}
-        table_def.setdefault("KinesisDataStreamDestinations", [])
-
-        table_id = table_def["TableId"]
-
-        destination = None
-        for stream in table_def["KinesisDataStreamDestinations"]:
-            if stream["StreamArn"] == stream_arn:
-                destination = stream
-
-        if destination is None:
+        # filter the right destination based on the stream ARN
+        destinations = [d for d in table_destinations if d["StreamArn"] == stream_arn]
+        if not destinations:
             raise ValidationException(
                 "Table is not in a valid state to enable Kinesis Streaming Destination: "
                 f"No streaming destination with streamArn: {stream_arn} found for table with tableName: {table_name}"
             )
 
+        destination = destinations[0]
+        table_def = store.table_definitions.get(table_name) or {}
+        table_def.setdefault("KinesisDataStreamDestinations", [])
+
+        table_id = store.table_definitions.get(table_name, {}).get("TableId")
         if (
             existing_precision := destination["ApproximateCreationDateTimePrecision"]
         ) == update_kinesis_streaming_configuration["ApproximateCreationDateTimePrecision"]:
@@ -1351,7 +1338,6 @@ class DynamoDBProvider(DynamodbApi, ServiceLifecycleHook):
                 f"Invalid Request: Precision is already set to the desired value of {existing_precision} "
                 f"for tableId: {table_id}, kdsArn: {stream_arn}"
             )
-
         destination["ApproximateCreationDateTimePrecision"] = time_precision
 
         return UpdateKinesisStreamingDestinationOutput(
@@ -1625,7 +1611,7 @@ def is_index_query_valid(account_id: str, region_name: str, query_data: dict) ->
     return True
 
 
-def kinesis_stream_exists(stream_arn):
+def kinesis_stream_exists(stream_arn: str) -> bool:
     account_id = extract_account_id_from_arn(stream_arn)
     region_name = extract_region_from_arn(stream_arn)
 
@@ -6,10 +6,19 @@ from bson.json_util import dumps
 
 from localstack import config
 from localstack.aws.api import RequestContext
-from localstack.aws.api.dynamodbstreams import StreamStatus, StreamViewType, TableName
+from localstack.aws.api.dynamodbstreams import (
+    StreamDescription,
+    StreamStatus,
+    StreamViewType,
+    TableName,
+)
 from localstack.aws.connect import connect_to
 from localstack.services.dynamodb.v2.provider import DynamoDBProvider
-from localstack.services.dynamodbstreams.models import DynamoDbStreamsStore, dynamodbstreams_stores
+from localstack.services.dynamodbstreams.models import (
+    DynamoDbStreamsStore,
+    StreamWrapper,
+    dynamodbstreams_stores,
+)
 from localstack.utils.aws import arns, resources
 from localstack.utils.common import now_utc
 from localstack.utils.threads import FuncThread
@@ -65,28 +74,32 @@ def add_dynamodb_stream(
         stream_name=stream_name,
     )
     latest_stream_label = latest_stream_label or "latest"
-    stream = {
-        "StreamArn": arns.dynamodb_stream_arn(
-            table_name=table_name,
-            latest_stream_label=latest_stream_label,
-            account_id=account_id,
-            region_name=region_name,
-        ),
-        "TableName": table_name,
-        "StreamLabel": latest_stream_label,
-        "StreamStatus": StreamStatus.ENABLING,
-        "KeySchema": [],
-        "Shards": [],
-        "StreamViewType": view_type,
-        "shards_id_map": {},
-    }
-    store.ddb_streams[table_name] = stream
-
-
-def get_stream_for_table(account_id: str, region_name: str, table_arn: str) -> dict:
+    stream_arn = arns.dynamodb_stream_arn(
+        table_name=table_name,
+        latest_stream_label=latest_stream_label,
+        account_id=account_id,
+        region_name=region_name,
+    )
+    stream = StreamDescription(
+        TableName=table_name,
+        StreamArn=stream_arn,
+        StreamLabel=latest_stream_label,
+        StreamStatus=StreamStatus.ENABLING,
+        KeySchema=[],
+        Shards=[],
+        StreamViewType=view_type,
+    )
+    store.ddb_streams[table_name] = StreamWrapper(StreamDescription=stream)
+
+
+def get_stream_for_table(
+    account_id: str, region_name: str, table_arn: str
+) -> StreamDescription | None:
     store = get_dynamodbstreams_store(account_id, region_name)
     table_name = table_name_from_stream_arn(table_arn)
-    return store.ddb_streams.get(table_name)
+    if stream := store.ddb_streams.get(table_name):
+        return stream.StreamDescription
+    return None
 
 
 def _process_forwarded_records(
@@ -206,11 +219,11 @@ def kinesis_shard_id(dynamodbstream_shard_id: str) -> str:
     return f"{shard_params[0]}-{shard_params[-1]}"
 
 
-def get_shard_id(stream: dict, kinesis_shard_id: str) -> str:
-    ddb_stream_shard_id = stream.get("shards_id_map", {}).get(kinesis_shard_id)
+def get_shard_id(stream: StreamWrapper, kinesis_shard_id: str) -> str:
+    ddb_stream_shard_id = stream.shards_id_map.get(kinesis_shard_id)
     if not ddb_stream_shard_id:
         ddb_stream_shard_id = shard_id(kinesis_shard_id)
-        stream["shards_id_map"][kinesis_shard_id] = ddb_stream_shard_id
+        stream.shards_id_map[kinesis_shard_id] = ddb_stream_shard_id
 
     return ddb_stream_shard_id
 
@@ -1,9 +1,19 @@
+import dataclasses
+
+from localstack.aws.api.dynamodbstreams import StreamDescription
 from localstack.services.stores import AccountRegionBundle, BaseStore, LocalAttribute
 
 
+@dataclasses.dataclass
+class StreamWrapper:
+    """Wrapper for the API stub and additional information about a store"""
+
+    StreamDescription: StreamDescription
+    shards_id_map: dict[str, str] = dataclasses.field(default_factory=dict)
+
+
 class DynamoDbStreamsStore(BaseStore):
-    # maps table names to DynamoDB stream descriptions
-    ddb_streams: dict[str, dict] = LocalAttribute(default=dict)
+    ddb_streams: dict[str, StreamWrapper] = LocalAttribute(default=dict)
 
 
 dynamodbstreams_stores = AccountRegionBundle("dynamodbstreams", DynamoDbStreamsStore)
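A small sketch of how the new StreamWrapper is meant to be used, mirroring add_dynamodb_stream and get_shard_id above (localstack-core is assumed importable; the table name, ARN, and shard IDs are placeholders):

    from localstack.aws.api.dynamodbstreams import StreamDescription, StreamStatus
    from localstack.services.dynamodbstreams.models import StreamWrapper

    description = StreamDescription(
        TableName="my-table",
        StreamArn="arn:aws:dynamodb:us-east-1:000000000000:table/my-table/stream/latest",
        StreamLabel="latest",
        StreamStatus=StreamStatus.ENABLING,
        KeySchema=[],
        Shards=[],
    )
    wrapper = StreamWrapper(StreamDescription=description)

    # The shard-id mapping now lives next to the API-shaped description instead of
    # being stored as an extra key inside the stream dict.
    wrapper.shards_id_map["kinesis-shard-id"] = "ddb-stream-shard-id"
    print(wrapper.StreamDescription["TableName"], wrapper.shards_id_map)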
@@ -20,7 +20,6 @@ from localstack.aws.api.dynamodbstreams import (
     ShardIteratorType,
     Stream,
     StreamArn,
-    StreamDescription,
     StreamStatus,
     TableName,
 )
@@ -77,20 +76,21 @@ class DynamoDBStreamsProvider(DynamodbstreamsApi, ServiceLifecycleHook):
         store = get_dynamodbstreams_store(context.account_id, og_region)
         kinesis = get_kinesis_client(account_id=context.account_id, region_name=og_region)
         for stream in store.ddb_streams.values():
+            stream_description = stream.StreamDescription
             _stream_arn = stream_arn
             if context.region != og_region:
                 _stream_arn = change_region_in_ddb_stream_arn(_stream_arn, og_region)
-            if stream["StreamArn"] == _stream_arn:
+            if stream_description["StreamArn"] == _stream_arn:
                 # get stream details
                 dynamodb = connect_to(
                     aws_access_key_id=context.account_id, region_name=og_region
                 ).dynamodb
-                table_name = table_name_from_stream_arn(stream["StreamArn"])
+                table_name = table_name_from_stream_arn(stream_description["StreamArn"])
                 stream_name = get_kinesis_stream_name(table_name)
                 stream_details = kinesis.describe_stream(StreamName=stream_name)
                 table_details = dynamodb.describe_table(TableName=table_name)
-                stream["KeySchema"] = table_details["Table"]["KeySchema"]
-                stream["StreamStatus"] = STREAM_STATUS_MAP.get(
+                stream_description["KeySchema"] = table_details["Table"]["KeySchema"]
+                stream_description["StreamStatus"] = STREAM_STATUS_MAP.get(
                     stream_details["StreamDescription"]["StreamStatus"]
                 )
 
@@ -110,8 +110,7 @@ class DynamoDBStreamsProvider(DynamodbstreamsApi, ServiceLifecycleHook):
                 # slicing the resulting shards after the exclusive_start_shard_id parameters
                 stream_shards = stream_shards[start_index + 1 :]
 
-                stream["Shards"] = stream_shards
-                stream_description = select_from_typed_dict(StreamDescription, stream)
+                stream_description["Shards"] = stream_shards
                 stream_description["StreamArn"] = _stream_arn
                 return DescribeStreamOutput(StreamDescription=stream_description)
 
@@ -190,7 +189,10 @@ class DynamoDBStreamsProvider(DynamodbstreamsApi, ServiceLifecycleHook):
     ) -> ListStreamsOutput:
         og_region = get_original_region(context=context, table_name=table_name)
         store = get_dynamodbstreams_store(context.account_id, og_region)
-        result = [select_from_typed_dict(Stream, res) for res in store.ddb_streams.values()]
+        result = [
+            select_from_typed_dict(Stream, _s.StreamDescription)
+            for _s in store.ddb_streams.values()
+        ]
         if table_name:
             result: list[Stream] = [res for res in result if res["TableName"] == table_name]
         # If this is a stream from a table replica, we need to change the region in the stream ARN, as LocalStack
@@ -11,7 +11,7 @@ from localstack.utils.scheduler import Scheduler
 
 LOG = logging.getLogger(__name__)
 
-DEFAULT_FLUSH_INTERVAL_SECS = 15
+DEFAULT_FLUSH_INTERVAL_SECS = 60
 EVENT_NAME = "aws_request_agg"
 OPTIONAL_FIELDS = ["err_type"]
 
@@ -411,6 +411,8 @@ class CmdDockerClient(ContainerClient):
            raise AccessDenied(docker_image)
        if "failed to authorize: failed to fetch oauth token" in to_str(e.stdout):
            raise AccessDenied(docker_image)
+       if "insufficient_scope: authorization failed" in to_str(e.stdout):
+           raise AccessDenied(docker_image)
        if "does not exist" in to_str(e.stdout):
            raise NoSuchImage(docker_image)
        if "connection refused" in to_str(e.stdout):
@@ -390,6 +390,8 @@ class SdkDockerClient(ContainerClient):
            raise AccessDenied(docker_image)
        if "unauthorized: authentication required" in to_str(result):
            raise AccessDenied(docker_image)
+       if "insufficient_scope: authorization failed" in to_str(result):
+           raise AccessDenied(docker_image)
        if "connection refused" in to_str(result):
            raise RegistryConnectionError(result)
        if "failed to do request:" in to_str(result):
localstack/version.py CHANGED
@@ -28,7 +28,7 @@ version_tuple: VERSION_TUPLE
 commit_id: COMMIT_ID
 __commit_id__: COMMIT_ID
 
-__version__ = version = '4.13.2.dev63'
-__version_tuple__ = version_tuple = (4, 13, 2, 'dev63')
+__version__ = version = '4.13.2.dev67'
+__version_tuple__ = version_tuple = (4, 13, 2, 'dev67')
 
 __commit_id__ = commit_id = None
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: localstack-core
-Version: 4.13.2.dev63
+Version: 4.13.2.dev67
 Summary: The core library and runtime of LocalStack
 Author-email: LocalStack Contributors <info@localstack.cloud>
 License-Expression: Apache-2.0
@@ -4,7 +4,7 @@ localstack/deprecations.py,sha256=-3IYgCd6LEC3PjO7hbr3Dg-p0PIS6phjmv1qZnj1uo0,15
 localstack/openapi.yaml,sha256=jFUzv-NKkJttxb8HRrmKiNYOmJD-zVfPxG3DDMrRwfg,30865
 localstack/plugins.py,sha256=BIJC9dlo0WbP7lLKkCiGtd_2q5oeqiHZohvoRTcejXM,2457
 localstack/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-localstack/version.py,sha256=D3wnwO8fZjDKQB_D0OeZBRud8mwwGvytF3vh7OSuolE,721
+localstack/version.py,sha256=3esNR0t1TCuYgUH8jPLKK9-TUD3VWtpTLlweUOktgjU,721
 localstack/aws/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 localstack/aws/accounts.py,sha256=102zpGowOxo0S6UGMpfjw14QW7WCLVAGsnFK5xFMLoo,3043
 localstack/aws/app.py,sha256=n9bJCfJRuMz_gLGAH430c3bIQXgUXeWO5NPfcdL2MV8,5145
@@ -352,10 +352,10 @@ localstack/services/cloudwatch/resource_providers/aws_cloudwatch_compositealarm_
 localstack/services/configservice/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 localstack/services/configservice/provider.py,sha256=4sjKODGrTjUxzwvd_Q03U24CbEmtQiQGPnrFIYnGXoE,279
 localstack/services/dynamodb/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-localstack/services/dynamodb/models.py,sha256=HVCbm5TbF5QJ-OLs2lCRL0vAr5Yj5buvhl58xbOpNUM,4008
+localstack/services/dynamodb/models.py,sha256=Y5ushp-rwJv8K42cAHmWf9WpbFJ8l369jk-W_Z-aP2k,4584
 localstack/services/dynamodb/packages.py,sha256=TEOAyWzae5jTv0hiKf2SYRQK6zuLdFfS0v2EIZU8B3A,4108
 localstack/services/dynamodb/plugins.py,sha256=DraVGanzrytMltMMrDTg8CiDUosbnIuujjrT25Y7H3E,234
-localstack/services/dynamodb/provider.py,sha256=bgj0sIDyz1MvaA6KqgD6rrdyQMZieOa_C_uPxrIDz3U,100927
+localstack/services/dynamodb/provider.py,sha256=jArNEyw5e5GnSEWbScBtCOZU3dakIPPcIahpn-C7TJo,100331
 localstack/services/dynamodb/server.py,sha256=sjZNg5UIkX3Ld1hvlSDlJibg4lZYNmo0YnJzUIS7rIg,8355
 localstack/services/dynamodb/utils.py,sha256=_ww13-O7FEGLADvZNI4rt9AW8VTxo1jCBSRl7QMAt6g,14829
 localstack/services/dynamodb/resource_providers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -366,11 +366,11 @@ localstack/services/dynamodb/resource_providers/aws_dynamodb_table.py,sha256=3yE
 localstack/services/dynamodb/resource_providers/aws_dynamodb_table.schema.json,sha256=jFjMq5M2rLTtDRoJog7RnSofYuFJOXa_3RMOFfeIASc,12160
 localstack/services/dynamodb/resource_providers/aws_dynamodb_table_plugin.py,sha256=4_WJxaAS7kyOYyAFb-FoBIpXfZ8ToJrLnz8bl2JXUnU,527
 localstack/services/dynamodb/v2/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-localstack/services/dynamodb/v2/provider.py,sha256=0Xhd5N0LRXHhZ_1HA1ta1MbIgJzNYXJYoOHXBQ9vA7Y,65895
+localstack/services/dynamodb/v2/provider.py,sha256=hOxY2mpmXF52nrz9p_wLrDpxAsi6eDZXQ17VCI8nMes,65557
 localstack/services/dynamodbstreams/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-localstack/services/dynamodbstreams/dynamodbstreams_api.py,sha256=sMuB6PHbOmwsZq_brKtZNT1X-fjfsJqAC6ASUP8HhAk,9070
-localstack/services/dynamodbstreams/models.py,sha256=WU6AT0ttEO6K2k4tes3y8bhp1KJ9vEvhk4Fb8Uhbnvg,334
-localstack/services/dynamodbstreams/provider.py,sha256=pUY0xpLV9KvPncZiyy64lmXQw3YcsW1omE3AnLPqHrA,8999
+localstack/services/dynamodbstreams/dynamodbstreams_api.py,sha256=SKiuEer1F3yRc4Dtx8mtHzzqZkPYVOoGUHRYtMX2tgk,9234
+localstack/services/dynamodbstreams/models.py,sha256=keJOQlqp8U5y_TTjh6-wY6_UqFJ9-teDcmlNgVNFMFg,613
+localstack/services/dynamodbstreams/provider.py,sha256=K2zT46n8gnBtT7TVOrc5jjearsUpHiCLwHWUI3qw8io,9057
 localstack/services/dynamodbstreams/v2/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 localstack/services/dynamodbstreams/v2/provider.py,sha256=OpkpXoX-tTgkstI3AQaA9pglQ-P200fAcozNDTKGwlk,5969
 localstack/services/ec2/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -1274,7 +1274,7 @@ localstack/utils/analytics/logger.py,sha256=-sA_zjptY7o2kOXP2sDduQPzfNwULYIRnoqw
 localstack/utils/analytics/metadata.py,sha256=M_h0iXZz-nYs0gNuN-g-sgeYtCMKCQoVDC4vezN7nTY,9169
 localstack/utils/analytics/publisher.py,sha256=96DqGgLMgyvB5U7mD_UczJ0Wu9p5ti5hCqH_9UZx5_A,5020
 localstack/utils/analytics/service_providers.py,sha256=2FwjdfuEED-03Kqi49qrwR2QF7opWF7IsE5VEK90VC8,604
-localstack/utils/analytics/service_request_aggregator.py,sha256=XIukyhU68fAW96v2O10sHXWr1sepiRSbEnwN57BdNwA,3988
+localstack/utils/analytics/service_request_aggregator.py,sha256=FEM4gFLbCmK8HF5-PqqDjE63-VVjbSLQ82ESM-O1poE,3988
 localstack/utils/analytics/metrics/__init__.py,sha256=APJa7ulgGseR4dl4EPAhKOEuXxxNiP0f_MwLbNmgc4Y,233
 localstack/utils/analytics/metrics/api.py,sha256=5TfPqbvfgikMhQYSoT5hOaxTpNUIJo2iQYFOb0e0DgQ,1595
 localstack/utils/analytics/metrics/counter.py,sha256=AYXcJreYYOsdV6PuG-Muzjuu3N3SSa5z0sY8X9z3FT0,6830
@@ -1301,8 +1301,8 @@ localstack/utils/cloudwatch/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMp
 localstack/utils/cloudwatch/cloudwatch_util.py,sha256=urSfRKSoO_6B1J-pusjK30450k7jJLLBWzrU5CyqNWw,8453
 localstack/utils/container_utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 localstack/utils/container_utils/container_client.py,sha256=A-t7r-Sv6WEsZoG-MRp7kScyhOFG6-1uOoc2CMDh1-k,59597
-localstack/utils/container_utils/docker_cmd_client.py,sha256=kZlIk-kyH-L523PT5pU9SaYa4Yo_PE43ReV3-ywwNpc,40066
-localstack/utils/container_utils/docker_sdk_client.py,sha256=If7Hjln5uVk_AEB8W3yG1xoPF7EOU5EDN4UqmViqYR8,40432
+localstack/utils/container_utils/docker_cmd_client.py,sha256=cLYL3CopPqXLS4MnjdqvblNLGNk4n1HQpCEFSZIL-kg,40194
+localstack/utils/container_utils/docker_sdk_client.py,sha256=AhzGt2GlixqPvswSGSqDoGFY4IW89THZ_rWx-GbFtW4,40566
 localstack/utils/kinesis/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 localstack/utils/kinesis/kclipy_helper.py,sha256=D3GPG9eT21IXuS94iIrJSAEtxGbcoDOkuRyCnHGd4Ek,4022
 localstack/utils/kinesis/kinesis_connector.py,sha256=rtVAuKENjDQz6AjKXj9lULXGIAYobsABw7p-ffgdHNk,15085
@@ -1311,10 +1311,10 @@ localstack/utils/server/tcp_proxy.py,sha256=y2NJAmvftTiAYsLU_8qe4W5LGqwUw21i90Pu
 localstack/utils/xray/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 localstack/utils/xray/trace_header.py,sha256=ahXk9eonq7LpeENwlqUEPj3jDOCiVRixhntQuxNor-Q,6209
 localstack/utils/xray/traceid.py,sha256=GKO-R2sMMjlrH2UaLPXlQlZ6flbE7ZKb6IZMtMu_M5U,1110
-localstack_core-4.13.2.dev63.data/scripts/localstack-supervisor,sha256=nm1Il2d6ASyOB6Vo4CRHd90w7TK9FdRl9VPp0NN6hUk,6378
-localstack_core-4.13.2.dev63.dist-info/licenses/LICENSE.txt,sha256=3PC-9Z69UsNARuQ980gNR_JsLx8uvMjdG6C7cc4LBYs,606
-localstack_core-4.13.2.dev63.dist-info/METADATA,sha256=qbMkujIkEryvVlT1MvW3KvW5xBr0_DeRKLfAo2duBMM,5867
-localstack_core-4.13.2.dev63.dist-info/WHEEL,sha256=YCfwYGOYMi5Jhw2fU4yNgwErybb2IX5PEwBKV4ZbdBo,91
-localstack_core-4.13.2.dev63.dist-info/entry_points.txt,sha256=59aAnn8KVHWAHkMg2dOgmgYtRZ-xTX9T4UiIchWgK6k,20975
-localstack_core-4.13.2.dev63.dist-info/top_level.txt,sha256=3sqmK2lGac8nCy8nwsbS5SpIY_izmtWtgaTFKHYVHbI,11
-localstack_core-4.13.2.dev63.dist-info/RECORD,,
+localstack_core-4.13.2.dev67.data/scripts/localstack-supervisor,sha256=nm1Il2d6ASyOB6Vo4CRHd90w7TK9FdRl9VPp0NN6hUk,6378
+localstack_core-4.13.2.dev67.dist-info/licenses/LICENSE.txt,sha256=3PC-9Z69UsNARuQ980gNR_JsLx8uvMjdG6C7cc4LBYs,606
+localstack_core-4.13.2.dev67.dist-info/METADATA,sha256=Gyo4i-Z-6QY6NIJrJw8zZMPL1GoED0cgCSbyEYCUewk,5867
+localstack_core-4.13.2.dev67.dist-info/WHEEL,sha256=YCfwYGOYMi5Jhw2fU4yNgwErybb2IX5PEwBKV4ZbdBo,91
+localstack_core-4.13.2.dev67.dist-info/entry_points.txt,sha256=59aAnn8KVHWAHkMg2dOgmgYtRZ-xTX9T4UiIchWgK6k,20975
+localstack_core-4.13.2.dev67.dist-info/top_level.txt,sha256=3sqmK2lGac8nCy8nwsbS5SpIY_izmtWtgaTFKHYVHbI,11
+localstack_core-4.13.2.dev67.dist-info/RECORD,,