localstack-core 4.10.1.dev7__py3-none-any.whl → 4.10.1.dev42__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (77)
  1. localstack/aws/api/acm/__init__.py +122 -122
  2. localstack/aws/api/apigateway/__init__.py +560 -559
  3. localstack/aws/api/cloudcontrol/__init__.py +63 -63
  4. localstack/aws/api/cloudformation/__init__.py +1040 -969
  5. localstack/aws/api/cloudwatch/__init__.py +375 -375
  6. localstack/aws/api/config/__init__.py +784 -786
  7. localstack/aws/api/dynamodb/__init__.py +753 -759
  8. localstack/aws/api/dynamodbstreams/__init__.py +74 -74
  9. localstack/aws/api/ec2/__init__.py +8901 -8818
  10. localstack/aws/api/es/__init__.py +453 -453
  11. localstack/aws/api/events/__init__.py +552 -552
  12. localstack/aws/api/firehose/__init__.py +541 -543
  13. localstack/aws/api/iam/__init__.py +639 -572
  14. localstack/aws/api/kinesis/__init__.py +235 -147
  15. localstack/aws/api/kms/__init__.py +340 -336
  16. localstack/aws/api/lambda_/__init__.py +574 -573
  17. localstack/aws/api/logs/__init__.py +676 -675
  18. localstack/aws/api/opensearch/__init__.py +814 -785
  19. localstack/aws/api/pipes/__init__.py +336 -336
  20. localstack/aws/api/redshift/__init__.py +1188 -1166
  21. localstack/aws/api/resource_groups/__init__.py +175 -175
  22. localstack/aws/api/resourcegroupstaggingapi/__init__.py +67 -67
  23. localstack/aws/api/route53/__init__.py +254 -254
  24. localstack/aws/api/route53resolver/__init__.py +396 -396
  25. localstack/aws/api/s3/__init__.py +1350 -1349
  26. localstack/aws/api/s3control/__init__.py +594 -594
  27. localstack/aws/api/scheduler/__init__.py +118 -118
  28. localstack/aws/api/secretsmanager/__init__.py +193 -193
  29. localstack/aws/api/ses/__init__.py +227 -227
  30. localstack/aws/api/sns/__init__.py +115 -115
  31. localstack/aws/api/sqs/__init__.py +100 -100
  32. localstack/aws/api/ssm/__init__.py +1977 -1971
  33. localstack/aws/api/stepfunctions/__init__.py +323 -323
  34. localstack/aws/api/sts/__init__.py +90 -66
  35. localstack/aws/api/support/__init__.py +112 -112
  36. localstack/aws/api/swf/__init__.py +378 -386
  37. localstack/aws/api/transcribe/__init__.py +425 -425
  38. localstack/aws/handlers/service.py +11 -1
  39. localstack/aws/protocol/parser.py +1 -1
  40. localstack/aws/scaffold.py +15 -17
  41. localstack/cli/localstack.py +6 -1
  42. localstack/dev/kubernetes/__main__.py +38 -3
  43. localstack/services/apigateway/helpers.py +5 -9
  44. localstack/services/apigateway/legacy/provider.py +32 -9
  45. localstack/services/apigateway/patches.py +0 -9
  46. localstack/services/cloudformation/provider.py +2 -2
  47. localstack/services/cloudformation/v2/provider.py +6 -6
  48. localstack/services/kinesis/packages.py +1 -1
  49. localstack/services/kms/models.py +34 -4
  50. localstack/services/kms/provider.py +93 -16
  51. localstack/services/lambda_/api_utils.py +3 -1
  52. localstack/services/lambda_/packages.py +1 -1
  53. localstack/services/lambda_/provider.py +1 -1
  54. localstack/services/lambda_/runtimes.py +8 -3
  55. localstack/services/logs/provider.py +36 -19
  56. localstack/services/s3/provider.py +1 -1
  57. localstack/services/sns/v2/models.py +24 -1
  58. localstack/services/sns/v2/provider.py +144 -12
  59. localstack/services/sns/v2/utils.py +8 -0
  60. localstack/services/sqs/models.py +37 -10
  61. localstack/testing/snapshots/transformer_utility.py +2 -0
  62. localstack/testing/testselection/matching.py +0 -1
  63. localstack/utils/aws/client_types.py +0 -8
  64. localstack/utils/catalog/catalog_loader.py +111 -3
  65. localstack/utils/crypto.py +109 -0
  66. localstack/version.py +2 -2
  67. {localstack_core-4.10.1.dev7.dist-info → localstack_core-4.10.1.dev42.dist-info}/METADATA +6 -5
  68. {localstack_core-4.10.1.dev7.dist-info → localstack_core-4.10.1.dev42.dist-info}/RECORD +76 -76
  69. localstack_core-4.10.1.dev42.dist-info/plux.json +1 -0
  70. localstack_core-4.10.1.dev7.dist-info/plux.json +0 -1
  71. {localstack_core-4.10.1.dev7.data → localstack_core-4.10.1.dev42.data}/scripts/localstack +0 -0
  72. {localstack_core-4.10.1.dev7.data → localstack_core-4.10.1.dev42.data}/scripts/localstack-supervisor +0 -0
  73. {localstack_core-4.10.1.dev7.data → localstack_core-4.10.1.dev42.data}/scripts/localstack.bat +0 -0
  74. {localstack_core-4.10.1.dev7.dist-info → localstack_core-4.10.1.dev42.dist-info}/WHEEL +0 -0
  75. {localstack_core-4.10.1.dev7.dist-info → localstack_core-4.10.1.dev42.dist-info}/entry_points.txt +0 -0
  76. {localstack_core-4.10.1.dev7.dist-info → localstack_core-4.10.1.dev42.dist-info}/licenses/LICENSE.txt +0 -0
  77. {localstack_core-4.10.1.dev7.dist-info → localstack_core-4.10.1.dev42.dist-info}/top_level.txt +0 -0

localstack/services/sns/v2/provider.py

@@ -10,8 +10,11 @@ from localstack.aws.api import CommonServiceException, RequestContext
 from localstack.aws.api.sns import (
     AmazonResourceName,
     ConfirmSubscriptionResponse,
+    CreateEndpointResponse,
     CreatePlatformApplicationResponse,
     CreateTopicResponse,
+    Endpoint,
+    GetEndpointAttributesResponse,
     GetPlatformApplicationAttributesResponse,
     GetSMSAttributesResponse,
     GetSubscriptionAttributesResponse,
@@ -60,6 +63,9 @@ from localstack.services.sns.v2.models import (
     SMS_ATTRIBUTE_NAMES,
     SMS_DEFAULT_SENDER_REGEX,
     SMS_TYPES,
+    EndpointAttributeNames,
+    PlatformApplicationDetails,
+    PlatformEndpoint,
     SnsMessage,
     SnsMessageType,
     SnsStore,
@@ -68,6 +74,7 @@ from localstack.services.sns.v2.models import (
     sns_stores,
 )
 from localstack.services.sns.v2.utils import (
+    create_platform_endpoint_arn,
     create_subscription_arn,
     encode_subscription_token_with_region,
     get_next_page_token_from_arn,
@@ -237,10 +244,11 @@ class SnsProvider(SnsApi):
                 raise InvalidParameterException("Invalid parameter: SQS endpoint ARN")

         elif protocol == "application":
-            # TODO: This needs to be implemented once applications are ported from moto to the new provider
-            raise NotImplementedError(
-                "This functionality needs yet to be ported to the new SNS provider"
-            )
+            # TODO: Validate exact behaviour
+            try:
+                parse_arn(endpoint)
+            except InvalidArnException:
+                raise InvalidParameterException("Invalid parameter: ApplicationEndpoint ARN")

         if ".fifo" in endpoint and ".fifo" not in topic_arn:
             # TODO: move to sqs protocol block if possible
@@ -591,17 +599,24 @@ class SnsProvider(SnsApi):
             account_id=context.account_id,
             region_name=context.region,
         )
-        platform_application = PlatformApplication(
-            PlatformApplicationArn=application_arn, Attributes=_attributes
+        platform_application_details = PlatformApplicationDetails(
+            platform_application=PlatformApplication(
+                PlatformApplicationArn=application_arn,
+                Attributes=_attributes,
+            ),
+            platform_endpoints={},
         )
-        store.platform_applications[application_arn] = platform_application
-        return CreatePlatformApplicationResponse(**platform_application)
+        store.platform_applications[application_arn] = platform_application_details
+
+        return platform_application_details.platform_application

     def delete_platform_application(
         self, context: RequestContext, platform_application_arn: String, **kwargs
     ) -> None:
         store = self.get_store(context.account_id, context.region)
         store.platform_applications.pop(platform_application_arn, None)
+        # TODO: if the platform had endpoints, should we remove them from the store? There is no way to list
+        # endpoints without an application, so this is impossible to check the state of AWS here

     def list_platform_applications(
         self, context: RequestContext, next_token: String | None = None, **kwargs
@@ -615,7 +630,9 @@ class SnsProvider(SnsApi):
             next_token=next_token,
         )

-        response = ListPlatformApplicationsResponse(PlatformApplications=page)
+        response = ListPlatformApplicationsResponse(
+            PlatformApplications=[platform_app.platform_application for platform_app in page]
+        )
         if token:
             response["NextToken"] = token
         return response
@@ -644,6 +661,62 @@ class SnsProvider(SnsApi):
     # Platform Endpoints
     #

+    def create_platform_endpoint(
+        self,
+        context: RequestContext,
+        platform_application_arn: String,
+        token: String,
+        custom_user_data: String | None = None,
+        attributes: MapStringToString | None = None,
+        **kwargs,
+    ) -> CreateEndpointResponse:
+        store = self.get_store(context.account_id, context.region)
+        application = store.platform_applications.get(platform_application_arn)
+        if not application:
+            raise NotFoundException("PlatformApplication does not exist")
+        endpoint_arn = application.platform_endpoints.get(token, {})
+        attributes = attributes or {}
+        _validate_endpoint_attributes(attributes, allow_empty=True)
+        # CustomUserData can be specified both in attributes and as parameter. Attributes take precedence
+        attributes.setdefault(EndpointAttributeNames.CUSTOM_USER_DATA, custom_user_data)
+        _attributes = {"Enabled": "true", "Token": token, **attributes}
+        if endpoint_arn and (
+            platform_endpoint_details := store.platform_endpoints.get(endpoint_arn)
+        ):
+            # endpoint for that application with that particular token already exists
+            if not platform_endpoint_details.platform_endpoint["Attributes"] == _attributes:
+                raise InvalidParameterException(
+                    f"Invalid parameter: Token Reason: Endpoint {endpoint_arn} already exists with the same Token, but different attributes."
+                )
+            else:
+                return CreateEndpointResponse(EndpointArn=endpoint_arn)
+
+        endpoint_arn = create_platform_endpoint_arn(platform_application_arn)
+        platform_endpoint = PlatformEndpoint(
+            platform_application_arn=endpoint_arn,
+            platform_endpoint=Endpoint(
+                Attributes=_attributes,
+                EndpointArn=endpoint_arn,
+            ),
+        )
+        store.platform_endpoints[endpoint_arn] = platform_endpoint
+        application.platform_endpoints[token] = endpoint_arn
+
+        return CreateEndpointResponse(EndpointArn=endpoint_arn)
+
+    def delete_endpoint(self, context: RequestContext, endpoint_arn: String, **kwargs) -> None:
+        store = self.get_store(context.account_id, context.region)
+        platform_endpoint_details = store.platform_endpoints.pop(endpoint_arn, None)
+        if platform_endpoint_details:
+            platform_application = store.platform_applications.get(
+                platform_endpoint_details.platform_application_arn
+            )
+            if platform_application:
+                platform_endpoint = platform_endpoint_details.platform_endpoint
+                platform_application.platform_endpoints.pop(
+                    platform_endpoint["Attributes"]["Token"], None
+                )
+
     def list_endpoints_by_platform_application(
         self,
         context: RequestContext,
@@ -651,8 +724,49 @@ class SnsProvider(SnsApi):
         next_token: String | None = None,
         **kwargs,
     ) -> ListEndpointsByPlatformApplicationResponse:
-        # TODO: stub so cleanup fixture won't fail
-        return ListEndpointsByPlatformApplicationResponse(Endpoints=[])
+        store = self.get_store(context.account_id, context.region)
+        platform_application = store.platform_applications.get(platform_application_arn)
+        if not platform_application:
+            raise NotFoundException("PlatformApplication does not exist")
+        endpoint_arns = platform_application.platform_endpoints.values()
+        paginated_endpoint_arns = PaginatedList(endpoint_arns)
+        page, token = paginated_endpoint_arns.get_page(
+            token_generator=lambda x: get_next_page_token_from_arn(x),
+            page_size=100,
+            next_token=next_token,
+        )
+
+        response = ListEndpointsByPlatformApplicationResponse(
+            Endpoints=[
+                store.platform_endpoints[endpoint_arn].platform_endpoint
+                for endpoint_arn in page
+                if endpoint_arn in store.platform_endpoints
+            ]
+        )
+        if token:
+            response["NextToken"] = token
+        return response
+
+    def get_endpoint_attributes(
+        self, context: RequestContext, endpoint_arn: String, **kwargs
+    ) -> GetEndpointAttributesResponse:
+        store = self.get_store(context.account_id, context.region)
+        platform_endpoint_details = store.platform_endpoints.get(endpoint_arn)
+        if not platform_endpoint_details:
+            raise NotFoundException("Endpoint does not exist")
+        attributes = platform_endpoint_details.platform_endpoint["Attributes"]
+        return GetEndpointAttributesResponse(Attributes=attributes)
+
+    def set_endpoint_attributes(
+        self, context: RequestContext, endpoint_arn: String, attributes: MapStringToString, **kwargs
+    ) -> None:
+        store = self.get_store(context.account_id, context.region)
+        platform_endpoint_details = store.platform_endpoints.get(endpoint_arn)
+        if not platform_endpoint_details:
+            raise NotFoundException("Endpoint does not exist")
+        _validate_endpoint_attributes(attributes)
+        attributes = attributes or {}
+        platform_endpoint_details.platform_endpoint["Attributes"].update(attributes)

     #
     # Sms operations
@@ -736,7 +850,7 @@ class SnsProvider(SnsApi):
         parse_and_validate_platform_application_arn(platform_application_arn)
         try:
             store = SnsProvider.get_store(context.account_id, context.region)
-            return store.platform_applications[platform_application_arn]
+            return store.platform_applications[platform_application_arn].platform_application
         except KeyError:
             raise NotFoundException("PlatformApplication does not exist")

@@ -821,6 +935,10 @@ def _validate_platform_application_name(name: str) -> None:


 def _validate_platform_application_attributes(attributes: dict) -> None:
+    _check_empty_attributes(attributes)
+
+
+def _check_empty_attributes(attributes: dict) -> None:
     if not attributes:
         raise CommonServiceException(
             code="ValidationError",
@@ -829,6 +947,20 @@ def _validate_platform_application_attributes(attributes: dict) -> None:
         )


+def _validate_endpoint_attributes(attributes: dict, allow_empty: bool = False) -> None:
+    if not allow_empty:
+        _check_empty_attributes(attributes)
+    for key in attributes:
+        if key not in EndpointAttributeNames:
+            raise InvalidParameterException(
+                f"Invalid parameter: Attributes Reason: Invalid attribute name: {key}"
+            )
+    if len(attributes.get(EndpointAttributeNames.CUSTOM_USER_DATA, "")) > 2048:
+        raise InvalidParameterException(
+            "Invalid parameter: Attributes Reason: Invalid value for attribute: CustomUserData: must be at most 2048 bytes long in UTF-8 encoding"
+        )
+
+
 def _validate_sms_attributes(attributes: dict) -> None:
     for k, v in attributes.items():
         if k not in SMS_ATTRIBUTE_NAMES:

localstack/services/sns/v2/utils.py

@@ -103,6 +103,14 @@ def create_subscription_arn(topic_arn: str) -> str:
     return f"{topic_arn}:{uuid4()}"


+def create_platform_endpoint_arn(
+    platform_application_arn: str,
+) -> str:
+    # This is the format of an Endpoint Arn
+    # arn:aws:sns:us-west-2:1234567890:endpoint/GCM/MyApplication/12345678-abcd-9012-efgh-345678901234
+    return f"{platform_application_arn.replace('app', 'endpoint', 1)}/{uuid4()}"
+
+
 def encode_subscription_token_with_region(region: str) -> str:
     """
     Create a 64 characters Subscription Token with the region encoded
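
The newly ported platform endpoint operations above can be exercised end-to-end against a running LocalStack instance. A hedged usage sketch (endpoint URL, region, credentials, and attribute values are illustrative assumptions, not part of this diff):

    import boto3

    # Assumes LocalStack is reachable on the default edge port with dummy credentials.
    sns = boto3.client(
        "sns",
        endpoint_url="http://localhost:4566",
        region_name="us-east-1",
        aws_access_key_id="test",
        aws_secret_access_key="test",
    )

    app = sns.create_platform_application(
        Name="MyApplication", Platform="GCM", Attributes={"PlatformCredential": "api-key"}
    )
    endpoint = sns.create_platform_endpoint(
        PlatformApplicationArn=app["PlatformApplicationArn"],
        Token="device-token-123",
        CustomUserData="user-42",
    )

    # Attributes are stored with Enabled/Token defaults, per create_platform_endpoint above.
    attrs = sns.get_endpoint_attributes(EndpointArn=endpoint["EndpointArn"])
    assert attrs["Attributes"]["Token"] == "device-token-123"

    sns.delete_endpoint(EndpointArn=endpoint["EndpointArn"])
    sns.delete_platform_application(PlatformApplicationArn=app["PlatformApplicationArn"])

Per the provider code above, calling create_platform_endpoint again with the same token and attributes returns the existing EndpointArn, while reusing the token with different attributes raises InvalidParameterException.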

localstack/services/sqs/models.py

@@ -314,7 +314,8 @@ class SqsQueue:
     purge_timestamp: float | None

     delayed: set[SqsMessage]
-    inflight: set[SqsMessage]
+    # Simulating an ordered set in python. Only the keys are used and of interest.
+    inflight: dict[SqsMessage, None]
     receipts: dict[str, SqsMessage]

     def __init__(self, name: str, region: str, account_id: str, attributes=None, tags=None) -> None:
@@ -326,7 +327,7 @@ class SqsQueue:
         self.tags = tags or {}

         self.delayed = set()
-        self.inflight = set()
+        self.inflight = {}
         self.receipts = {}

         self.attributes = self.default_attributes()
@@ -513,7 +514,7 @@ class SqsQueue:
                 )
                 # Terminating the visibility timeout for a message
                 # https://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/sqs-visibility-timeout.html#terminating-message-visibility-timeout
-                self.inflight.remove(standard_message)
+                del self.inflight[standard_message]
                 self._put_message(standard_message)

     def remove(self, receipt_handle: str):
@@ -606,9 +607,17 @@ class SqsQueue:
                     standard_message,
                     self.arn,
                 )
-                self.inflight.remove(standard_message)
+                del self.inflight[standard_message]
                 self._put_message(standard_message)

+    def add_inflight_message(self, message: SqsMessage):
+        """
+        We are simulating an ordered set with a dict. When a value is added, it is added as key to the dict, which
+        is all we need. Hence all "values" in this ordered set are None
+        :param message: The message to put in flight
+        """
+        self.inflight[message] = None
+
     def enqueue_delayed_messages(self):
         if not self.delayed:
             return
@@ -779,7 +788,6 @@ class SqsQueue:

 class StandardQueue(SqsQueue):
     visible: InterruptiblePriorityQueue[SqsMessage]
-    inflight: set[SqsMessage]

     def __init__(self, name: str, region: str, account_id: str, attributes=None, tags=None) -> None:
         super().__init__(name, region, account_id, attributes, tags)
@@ -923,13 +931,13 @@ class StandardQueue(SqsQueue):
         if message.visibility_timeout == 0:
             self.visible.put_nowait(message)
         else:
-            self.inflight.add(message)
+            self.add_inflight_message(message)

         return result

     def _on_remove_message(self, message: SqsMessage):
         try:
-            self.inflight.remove(message)
+            del self.inflight[message]
         except KeyError:
             # this likely means the message was removed with an expired receipt handle unfortunately this
             # means we need to scan the queue for the element and remove it from there, and then re-heapify
@@ -1149,6 +1157,26 @@ class FifoQueue(SqsQueue):
         elif previously_empty:
             self.message_group_queue.put_nowait(message_group)

+    def requeue_inflight_messages(self):
+        if not self.inflight:
+            return
+
+        with self.mutex:
+            messages = list(self.inflight)
+            for standard_message in messages:
+                # in fifo, an invisible message blocks potentially visible messages afterwards
+                # this can happen for example if multiple message of the same group are received at once, then one
+                # message of this batch has its visibility timeout extended
+                if not standard_message.is_visible:
+                    return
+                LOG.debug(
+                    "re-queueing inflight messages %s into queue %s",
+                    standard_message,
+                    self.arn,
+                )
+                del self.inflight[standard_message]
+                self._put_message(standard_message)
+
     def remove_expired_messages(self):
         with self.mutex:
             retention_period = self.message_retention_period
@@ -1278,8 +1306,7 @@ class FifoQueue(SqsQueue):
         if message.visibility_timeout == 0:
             self._put_message(message)
         else:
-            self.inflight.add(message)
-
+            self.add_inflight_message(message)
         return result


@@ -1288,7 +1315,7 @@ class FifoQueue(SqsQueue):

         with self.mutex:
             try:
-                self.inflight.remove(message)
+                del self.inflight[message]
             except KeyError:
                 # in FIFO queues, this should not happen, as expired receipt handles cannot be used to
                 # delete a message.
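
The switch from set[SqsMessage] to dict[SqsMessage, None] relies on Python dicts preserving insertion order (guaranteed since Python 3.7), which gives the inflight collection ordered-set semantics with O(1) membership, insertion, and removal. A minimal standalone sketch of the pattern, using plain strings in place of SqsMessage objects:

    # dict used as an ordered set: only the keys matter, every value is None
    inflight: dict[str, None] = {}

    inflight["msg-1"] = None            # add_inflight_message equivalent
    inflight["msg-2"] = None
    assert "msg-1" in inflight          # O(1) membership test
    del inflight["msg-1"]               # raises KeyError if absent, like set.remove()
    assert list(inflight) == ["msg-2"]  # iteration follows insertion order

The ordering is what the new FifoQueue.requeue_inflight_messages above depends on: it walks messages in the order they went in flight and stops at the first one that is still invisible.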

localstack/testing/snapshots/transformer_utility.py

@@ -566,6 +566,8 @@ class TransformerUtility:
         """
         return [
             TransformerUtility.key_value("KeyId"),
+            TransformerUtility.key_value("KeyMaterialId"),
+            TransformerUtility.key_value("CurrentKeyMaterialId"),
             TransformerUtility.jsonpath(
                 jsonpath="$..Signature",
                 value_replacement="<signature>",

localstack/testing/testselection/matching.py

@@ -181,7 +181,6 @@ MATCHING_RULES: list[MatchingRule] = [
     ).passthrough(),  # changes in a test file should always at least test that file
     # CI
     Matchers.glob(".github/**").full_suite(),
-    Matchers.glob(".circleci/**").full_suite(),
     # dependencies / project setup
     Matchers.glob("requirements*.txt").full_suite(),
     Matchers.glob("setup.cfg").full_suite(),

localstack/utils/aws/client_types.py

@@ -65,7 +65,6 @@ if TYPE_CHECKING:
     from mypy_boto3_iotwireless import IoTWirelessClient
     from mypy_boto3_kafka import KafkaClient
     from mypy_boto3_kinesis import KinesisClient
-    from mypy_boto3_kinesisanalytics import KinesisAnalyticsClient
     from mypy_boto3_kinesisanalyticsv2 import KinesisAnalyticsV2Client
     from mypy_boto3_kms import KMSClient
     from mypy_boto3_lakeformation import LakeFormationClient
@@ -82,8 +81,6 @@ if TYPE_CHECKING:
     from mypy_boto3_pi import PIClient
     from mypy_boto3_pinpoint import PinpointClient
     from mypy_boto3_pipes import EventBridgePipesClient
-    from mypy_boto3_qldb import QLDBClient
-    from mypy_boto3_qldb_session import QLDBSessionClient
     from mypy_boto3_rds import RDSClient
     from mypy_boto3_rds_data import RDSDataServiceClient
     from mypy_boto3_redshift import RedshiftClient
@@ -191,9 +188,6 @@ class TypedServiceClientFactory(abc.ABC):
     iotwireless: Union["IoTWirelessClient", "MetadataRequestInjector[IoTWirelessClient]"]
     kafka: Union["KafkaClient", "MetadataRequestInjector[KafkaClient]"]
     kinesis: Union["KinesisClient", "MetadataRequestInjector[KinesisClient]"]
-    kinesisanalytics: Union[
-        "KinesisAnalyticsClient", "MetadataRequestInjector[KinesisAnalyticsClient]"
-    ]
     kinesisanalyticsv2: Union[
         "KinesisAnalyticsV2Client", "MetadataRequestInjector[KinesisAnalyticsV2Client]"
     ]
@@ -214,8 +208,6 @@ class TypedServiceClientFactory(abc.ABC):
     pi: Union["PIClient", "MetadataRequestInjector[PIClient]"]
     pinpoint: Union["PinpointClient", "MetadataRequestInjector[PinpointClient]"]
     pipes: Union["EventBridgePipesClient", "MetadataRequestInjector[EventBridgePipesClient]"]
-    qldb: Union["QLDBClient", "MetadataRequestInjector[QLDBClient]"]
-    qldb_session: Union["QLDBSessionClient", "MetadataRequestInjector[QLDBSessionClient]"]
     rds: Union["RDSClient", "MetadataRequestInjector[RDSClient]"]
     rds_data: Union["RDSDataServiceClient", "MetadataRequestInjector[RDSDataServiceClient]"]
     redshift: Union["RedshiftClient", "MetadataRequestInjector[RedshiftClient]"]

localstack/utils/catalog/catalog_loader.py

@@ -1,11 +1,119 @@
 import json
+import logging
+from json import JSONDecodeError
+from pathlib import Path

+import requests
+from pydantic import BaseModel
+
+from localstack import config, constants
 from localstack.utils.catalog.common import AwsRemoteCatalog
+from localstack.utils.http import get_proxies
+from localstack.utils.json import FileMappedDocument
+
+LOG = logging.getLogger(__name__)
+
+AWS_CATALOG_FILE_NAME = "aws_catalog.json"
+

-LICENSE_CATALOG_PATH = ""
+class RemoteCatalogVersionResponse(BaseModel):
+    emulator_type: str
+    version: str
+
+
+class AwsCatalogLoaderException(Exception):
+    def __init__(self, msg: str, *args):
+        super().__init__(msg, *args)


 class RemoteCatalogLoader:
+    supported_schema_version = "v1"
+    api_endpoint_catalog = f"{constants.API_ENDPOINT}/license/catalog"
+    catalog_file_path = Path(config.dirs.cache) / AWS_CATALOG_FILE_NAME
+
     def get_remote_catalog(self) -> AwsRemoteCatalog:
-        with open(LICENSE_CATALOG_PATH) as f:
-            return AwsRemoteCatalog(**json.load(f))
+        catalog_doc = FileMappedDocument(self.catalog_file_path)
+        cached_catalog = AwsRemoteCatalog(**catalog_doc) if catalog_doc else None
+        if cached_catalog:
+            cached_catalog_version = cached_catalog.localstack.version
+            if not self._should_update_cached_catalog(cached_catalog_version):
+                return cached_catalog
+        catalog = self._get_catalog_from_platform()
+        self._save_catalog_to_cache(catalog_doc, catalog)
+        return catalog
+
+    def _get_latest_localstack_version(self) -> str:
+        try:
+            proxies = get_proxies()
+            response = requests.get(
+                f"{self.api_endpoint_catalog}/aws/version",
+                verify=not config.is_env_true("SSL_NO_VERIFY"),
+                proxies=proxies,
+            )
+            if response.ok:
+                return RemoteCatalogVersionResponse.model_validate(response.content).version
+            self._raise_server_error(response)
+        except requests.exceptions.RequestException as e:
+            raise AwsCatalogLoaderException(
+                f"An unexpected network error occurred when trying to fetch latest localstack version: {e}"
+            ) from e
+
+    def _should_update_cached_catalog(self, current_catalog_version: str) -> bool:
+        try:
+            latest_version = self._get_latest_localstack_version()
+            return latest_version != current_catalog_version
+        except Exception as e:
+            LOG.warning(
+                "Failed to retrieve the latest catalog version, cached catalog update skipped: %s",
+                e,
+            )
+            return False
+
+    def _save_catalog_to_cache(self, catalog_doc: FileMappedDocument, catalog: AwsRemoteCatalog):
+        catalog_doc.clear()
+        catalog_doc.update(catalog.model_dump())
+        catalog_doc.save()
+
+    def _get_catalog_from_platform(self) -> AwsRemoteCatalog:
+        try:
+            proxies = get_proxies()
+            response = requests.post(
+                self.api_endpoint_catalog,
+                verify=not config.is_env_true("SSL_NO_VERIFY"),
+                proxies=proxies,
+            )
+
+            if response.ok:
+                return self._parse_catalog(response.content)
+            self._raise_server_error(response)
+        except requests.exceptions.RequestException as e:
+            raise AwsCatalogLoaderException(
+                f"An unexpected network error occurred when trying to fetch remote catalog: {e}"
+            ) from e
+
+    def _parse_catalog(self, document: bytes) -> AwsRemoteCatalog | None:
+        try:
+            catalog_json = json.loads(document)
+        except JSONDecodeError as e:
+            raise AwsCatalogLoaderException(f"Could not de-serialize json catalog: {e}") from e
+        remote_catalog = AwsRemoteCatalog.model_validate(catalog_json)
+        if remote_catalog.schema_version != self.supported_schema_version:
+            raise AwsCatalogLoaderException(
+                f"Unsupported schema version: '{remote_catalog.schema_version}'. Only '{self.supported_schema_version}' is supported"
+            )
+        return remote_catalog
+
+    def _raise_server_error(self, response: requests.Response):
+        try:
+            server_error = response.json()
+            if error_message := server_error.get("message"):
+                raise AwsCatalogLoaderException(
+                    f"Unexpected AWS catalog server error: {response.text}"
+                )
+            raise AwsCatalogLoaderException(
+                f"A server error occurred while calling remote catalog API (HTTP {response.status_code}): {error_message}"
+            )
+        except Exception:
+            raise AwsCatalogLoaderException(
+                f"An unexpected server error occurred while calling remote catalog API (HTTP {response.status_code}): {response.text}"
+            )
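
In short, RemoteCatalogLoader now caches the remote AWS catalog on disk and only re-downloads it when the platform reports a newer LocalStack version. A hedged usage sketch (assumes network access to the LocalStack platform API):

    from localstack.utils.catalog.catalog_loader import RemoteCatalogLoader

    loader = RemoteCatalogLoader()
    # First call downloads the catalog and writes it to <cache dir>/aws_catalog.json;
    # later calls return the cached copy unless a newer version is advertised upstream.
    catalog = loader.get_remote_catalog()
    print(catalog.localstack.version)

Per the code above, failures while checking the latest version are logged and the cached copy is kept, while network errors during the actual download surface as AwsCatalogLoaderException.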

localstack/utils/crypto.py

@@ -4,7 +4,13 @@ import os
 import re
 import threading

+from asn1crypto import algos, cms, core
+from asn1crypto import x509 as asn1_x509
 from cryptography.hazmat.backends import default_backend
+from cryptography.hazmat.primitives import hashes
+from cryptography.hazmat.primitives import padding as sym_padding
+from cryptography.hazmat.primitives.asymmetric import padding
+from cryptography.hazmat.primitives.asymmetric.rsa import RSAPublicKey
 from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes

 from .files import TMP_FILES, file_exists_not_empty, load_file, new_tmp_file, save_file
@@ -26,6 +32,11 @@ PEM_CERT_END = "-----END CERTIFICATE-----"
 PEM_KEY_START_REGEX = r"-----BEGIN(.*)PRIVATE KEY-----"
 PEM_KEY_END_REGEX = r"-----END(.*)PRIVATE KEY-----"

+OID_AES256_CBC = "2.16.840.1.101.3.4.1.42"
+OID_MGF1 = "1.2.840.113549.1.1.8"
+OID_RSAES_OAEP = "1.2.840.113549.1.1.7"
+OID_SHA256 = "2.16.840.1.101.3.4.2.1"
+

 @synchronized(lock=SSL_CERT_LOCK)
 def generate_ssl_cert(
@@ -183,3 +194,101 @@ def decrypt(
     decrypted = decryptor.update(encrypted) + decryptor.finalize()
     decrypted = unpad(decrypted)
     return decrypted
+
+
+def pkcs7_envelope_encrypt(plaintext: bytes, recipient_pubkey: RSAPublicKey) -> bytes:
+    """
+    Create a PKCS7 wrapper of some plaintext decryptable by recipient_pubkey. Uses RSA-OAEP with SHA-256
+    to encrypt the AES-256-CBC content key. Hazmat's PKCS7EnvelopeBuilder doesn't support RSA-OAEP with SHA-256,
+    so we need to build the pieces manually and then put them together in an envelope with asn1crypto.
+    """
+
+    # Encrypt the plaintext with an AES session key, then encrypt the session key to the recipient_pubkey
+    session_key = os.urandom(32)
+    iv = os.urandom(16)
+    encrypted_session_key = recipient_pubkey.encrypt(
+        session_key,
+        padding.OAEP(
+            mgf=padding.MGF1(algorithm=hashes.SHA256()), algorithm=hashes.SHA256(), label=None
+        ),
+    )
+    cipher = Cipher(algorithms.AES(session_key), modes.CBC(iv), backend=default_backend())
+    encryptor = cipher.encryptor()
+    padder = sym_padding.PKCS7(algorithms.AES.block_size).padder()
+    padded_plaintext = padder.update(plaintext) + padder.finalize()
+    encrypted_content = encryptor.update(padded_plaintext) + encryptor.finalize()
+
+    # Now put together the envelope.
+    # Add the recipient with their copy of the session key
+    recipient_identifier = cms.RecipientIdentifier(
+        name="issuer_and_serial_number",
+        value=cms.IssuerAndSerialNumber(
+            {
+                "issuer": asn1_x509.Name.build({"common_name": "recipient"}),
+                "serial_number": 1,
+            }
+        ),
+    )
+    key_enc_algorithm = cms.KeyEncryptionAlgorithm(
+        {
+            "algorithm": OID_RSAES_OAEP,
+            "parameters": algos.RSAESOAEPParams(
+                {
+                    "hash_algorithm": algos.DigestAlgorithm(
+                        {
+                            "algorithm": OID_SHA256,
+                        }
+                    ),
+                    "mask_gen_algorithm": algos.MaskGenAlgorithm(
+                        {
+                            "algorithm": OID_MGF1,
+                            "parameters": algos.DigestAlgorithm(
+                                {
+                                    "algorithm": OID_SHA256,
+                                }
+                            ),
+                        }
+                    ),
+                }
+            ),
+        }
+    )
+    recipient_info = cms.KeyTransRecipientInfo(
+        {
+            "version": "v0",
+            "rid": recipient_identifier,
+            "key_encryption_algorithm": key_enc_algorithm,
+            "encrypted_key": encrypted_session_key,
+        }
+    )
+
+    # Add the encrypted content
+    content_enc_algorithm = cms.EncryptionAlgorithm(
+        {
+            "algorithm": OID_AES256_CBC,
+            "parameters": core.OctetString(iv),
+        }
+    )
+    encrypted_content_info = cms.EncryptedContentInfo(
+        {
+            "content_type": "data",
+            "content_encryption_algorithm": content_enc_algorithm,
+            "encrypted_content": encrypted_content,
+        }
+    )
+    enveloped_data = cms.EnvelopedData(
+        {
+            "version": "v0",
+            "recipient_infos": [recipient_info],
+            "encrypted_content_info": encrypted_content_info,
+        }
+    )
+
+    # Finally add a wrapper and return its bytes
+    content_info = cms.ContentInfo(
+        {
+            "content_type": "enveloped_data",
+            "content": enveloped_data,
+        }
+    )
+    return content_info.dump()
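
pkcs7_envelope_encrypt returns a DER-encoded CMS ContentInfo wrapping EnvelopedData. A hedged round-trip sketch for local testing (not part of the package) that unwraps the envelope with the same asn1crypto/cryptography primitives used above:

    from asn1crypto import cms
    from cryptography.hazmat.primitives import hashes, padding as sym_padding
    from cryptography.hazmat.primitives.asymmetric import padding, rsa
    from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes

    from localstack.utils.crypto import pkcs7_envelope_encrypt

    private_key = rsa.generate_private_key(public_exponent=65537, key_size=2048)
    envelope = pkcs7_envelope_encrypt(b"hello", private_key.public_key())

    # Parse the ContentInfo wrapper and pull out the single recipient entry
    enveloped = cms.ContentInfo.load(envelope)["content"]
    recipient = enveloped["recipient_infos"][0].chosen

    # Recover the AES session key with RSA-OAEP/SHA-256, mirroring the encryption side
    session_key = private_key.decrypt(
        recipient["encrypted_key"].native,
        padding.OAEP(mgf=padding.MGF1(hashes.SHA256()), algorithm=hashes.SHA256(), label=None),
    )

    # Decrypt the AES-256-CBC payload and strip the PKCS7 block padding
    eci = enveloped["encrypted_content_info"]
    iv = eci["content_encryption_algorithm"]["parameters"].native
    decryptor = Cipher(algorithms.AES(session_key), modes.CBC(iv)).decryptor()
    padded = decryptor.update(eci["encrypted_content"].native) + decryptor.finalize()
    unpadder = sym_padding.PKCS7(algorithms.AES.block_size).unpadder()
    assert unpadder.update(padded) + unpadder.finalize() == b"hello"

Note that the RecipientIdentifier built above uses a placeholder issuer/serial ("recipient"/1) rather than a real certificate reference, so consumers that match recipients by certificate would have to select the recipient by position, as in this sketch.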