aws-cdk-lib 2.179.0__py3-none-any.whl → 2.180.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of aws-cdk-lib has been flagged as possibly problematic.

Files changed (62)
  1. aws_cdk/__init__.py +14 -6
  2. aws_cdk/_jsii/__init__.py +1 -1
  3. aws_cdk/_jsii/{aws-cdk-lib@2.179.0.jsii.tgz → aws-cdk-lib@2.180.0.jsii.tgz} +0 -0
  4. aws_cdk/aws_acmpca/__init__.py +47 -0
  5. aws_cdk/aws_apigateway/__init__.py +6 -6
  6. aws_cdk/aws_appsync/__init__.py +4 -3
  7. aws_cdk/aws_backup/__init__.py +89 -0
  8. aws_cdk/aws_batch/__init__.py +89 -50
  9. aws_cdk/aws_bedrock/__init__.py +506 -62
  10. aws_cdk/aws_cloudfront/__init__.py +1046 -155
  11. aws_cdk/aws_cloudfront_origins/__init__.py +1338 -144
  12. aws_cdk/aws_cloudtrail/__init__.py +4 -8
  13. aws_cdk/aws_cloudwatch/__init__.py +1 -1
  14. aws_cdk/aws_codebuild/__init__.py +2 -2
  15. aws_cdk/aws_codepipeline/__init__.py +24 -0
  16. aws_cdk/aws_codepipeline_actions/__init__.py +28 -1
  17. aws_cdk/aws_codestar/__init__.py +2 -1
  18. aws_cdk/aws_cognito/__init__.py +0 -9
  19. aws_cdk/aws_connect/__init__.py +257 -0
  20. aws_cdk/aws_datasync/__init__.py +279 -50
  21. aws_cdk/aws_deadline/__init__.py +683 -6
  22. aws_cdk/aws_directoryservice/__init__.py +9 -4
  23. aws_cdk/aws_dlm/__init__.py +2 -2
  24. aws_cdk/aws_dms/__init__.py +3 -3
  25. aws_cdk/aws_dynamodb/__init__.py +0 -54
  26. aws_cdk/aws_ec2/__init__.py +377 -121
  27. aws_cdk/aws_ecs/__init__.py +20 -35
  28. aws_cdk/aws_efs/__init__.py +1 -1
  29. aws_cdk/aws_eks/__init__.py +5 -3
  30. aws_cdk/aws_elasticloadbalancingv2/__init__.py +13 -27
  31. aws_cdk/aws_emrcontainers/__init__.py +44 -1
  32. aws_cdk/aws_events/__init__.py +8 -11
  33. aws_cdk/aws_fms/__init__.py +5 -5
  34. aws_cdk/aws_fsx/__init__.py +5 -4
  35. aws_cdk/aws_glue/__init__.py +161 -0
  36. aws_cdk/aws_groundstation/__init__.py +23 -1
  37. aws_cdk/aws_iam/__init__.py +12 -12
  38. aws_cdk/aws_iot/__init__.py +7 -0
  39. aws_cdk/aws_ivs/__init__.py +17 -8
  40. aws_cdk/aws_kinesis/__init__.py +689 -35
  41. aws_cdk/aws_lambda/__init__.py +10 -15
  42. aws_cdk/aws_lambda_event_sources/__init__.py +175 -2
  43. aws_cdk/aws_medialive/__init__.py +314 -4
  44. aws_cdk/aws_opensearchserverless/__init__.py +19 -0
  45. aws_cdk/aws_rds/__init__.py +78 -58
  46. aws_cdk/aws_s3/__init__.py +278 -0
  47. aws_cdk/aws_s3objectlambda/__init__.py +2 -2
  48. aws_cdk/aws_ses/__init__.py +228 -8
  49. aws_cdk/aws_ssm/__init__.py +4 -5
  50. aws_cdk/aws_stepfunctions/__init__.py +301 -70
  51. aws_cdk/aws_stepfunctions_tasks/__init__.py +142 -142
  52. aws_cdk/aws_supportapp/__init__.py +7 -7
  53. aws_cdk/aws_transfer/__init__.py +820 -2
  54. aws_cdk/aws_wafv2/__init__.py +17 -9
  55. aws_cdk/custom_resources/__init__.py +23 -26
  56. aws_cdk/cx_api/__init__.py +16 -0
  57. {aws_cdk_lib-2.179.0.dist-info → aws_cdk_lib-2.180.0.dist-info}/METADATA +1 -1
  58. {aws_cdk_lib-2.179.0.dist-info → aws_cdk_lib-2.180.0.dist-info}/RECORD +62 -62
  59. {aws_cdk_lib-2.179.0.dist-info → aws_cdk_lib-2.180.0.dist-info}/LICENSE +0 -0
  60. {aws_cdk_lib-2.179.0.dist-info → aws_cdk_lib-2.180.0.dist-info}/NOTICE +0 -0
  61. {aws_cdk_lib-2.179.0.dist-info → aws_cdk_lib-2.180.0.dist-info}/WHEEL +0 -0
  62. {aws_cdk_lib-2.179.0.dist-info → aws_cdk_lib-2.180.0.dist-info}/top_level.txt +0 -0
@@ -23047,25 +23047,20 @@ class StartingPosition(enum.Enum):
 
     Example::
 
-        from aws_cdk.aws_lambda_event_sources import ManagedKafkaEventSource
+        import aws_cdk.aws_kinesis as kinesis
+        from aws_cdk.aws_lambda_event_sources import KinesisConsumerEventSource
 
         # my_function: lambda.Function
 
 
-        # Your MSK cluster arn
-        cluster_arn = "arn:aws:kafka:us-east-1:0123456789019:cluster/SalesCluster/abcd1234-abcd-cafe-abab-9876543210ab-4"
-
-        # The Kafka topic you want to subscribe to
-        topic = "some-cool-topic"
-        my_function.add_event_source(ManagedKafkaEventSource(
-            cluster_arn=cluster_arn,
-            topic=topic,
-            starting_position=lambda_.StartingPosition.TRIM_HORIZON,
-            filters=[
-                lambda_.FilterCriteria.filter({
-                    "string_equals": lambda_.FilterRule.is_equal("test")
-                })
-            ]
+        stream = kinesis.Stream(self, "MyStream")
+        stream_consumer = kinesis.StreamConsumer(self, "MyStreamConsumer",
+            stream=stream,
+            stream_consumer_name="MyStreamConsumer"
+        )
+        my_function.add_event_source(KinesisConsumerEventSource(stream_consumer,
+            batch_size=100, # default
+            starting_position=lambda_.StartingPosition.TRIM_HORIZON
         ))
     '''
 
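The hunk above also drops the event-filtering portion of the old MSK example. The new `KinesisConsumerEventSource` accepts the same `filters` option (see its constructor later in this diff), so here is a minimal sketch of that pattern carried over; the construct names are illustrative, and `lambda_` is the usual `aws_cdk.aws_lambda` alias used throughout these examples:

```python
import aws_cdk.aws_kinesis as kinesis
from aws_cdk.aws_lambda_event_sources import KinesisConsumerEventSource

# my_function: lambda.Function

stream = kinesis.Stream(self, "MyStream")
stream_consumer = kinesis.StreamConsumer(self, "MyStreamConsumer",
    stream=stream,
    stream_consumer_name="MyStreamConsumer"
)
my_function.add_event_source(KinesisConsumerEventSource(stream_consumer,
    starting_position=lambda_.StartingPosition.TRIM_HORIZON,
    # Only deliver records whose "string_equals" field equals "test",
    # mirroring the filter in the removed MSK example.
    filters=[
        lambda_.FilterCriteria.filter({
            "string_equals": lambda_.FilterRule.is_equal("test")
        })
    ]
))
```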
@@ -244,7 +244,7 @@ behavior:
 * **startingPosition**: Will determine where to begin consumption. 'LATEST' will start at the most recent record and ignore all records that arrived prior to attaching the event source, 'TRIM_HORIZON' will start at the oldest record and ensure you process all available data, while 'AT_TIMESTAMP' will start reading records from a specified time stamp. Note that 'AT_TIMESTAMP' is only supported for Amazon Kinesis streams.
 * **startingPositionTimestamp**: The time stamp from which to start reading. Used in conjunction with **startingPosition** when set to 'AT_TIMESTAMP'.
 * **tumblingWindow**: The duration in seconds of a processing window when using streams.
-* **enabled**: If the DynamoDB Streams event source mapping should be enabled. The default is true.
+* **enabled**: If the event source mapping should be enabled. The default is true.
 
 ```python
 import aws_cdk.aws_kinesis as kinesis
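A note on the option list above: 'AT_TIMESTAMP' only takes effect together with **startingPositionTimestamp**. A minimal sketch, assuming a placeholder stream and a Unix timestamp in seconds (the unit given by the parameter docs later in this diff):

```python
import aws_cdk.aws_kinesis as kinesis
from aws_cdk.aws_lambda_event_sources import KinesisEventSource

# my_function: lambda.Function

stream = kinesis.Stream(self, "MyStream")
my_function.add_event_source(KinesisEventSource(stream,
    # AT_TIMESTAMP is only supported for Kinesis streams and requires
    # a starting_position_timestamp in Unix time seconds.
    starting_position=lambda_.StartingPosition.AT_TIMESTAMP,
    starting_position_timestamp=1704067200  # placeholder: 2024-01-01T00:00:00Z
))
```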
@@ -260,6 +260,26 @@ my_function.add_event_source(KinesisEventSource(stream,
 ))
 ```
 
+To use a dedicated-throughput consumer with enhanced fan-out
+
+```python
+import aws_cdk.aws_kinesis as kinesis
+from aws_cdk.aws_lambda_event_sources import KinesisConsumerEventSource
+
+# my_function: lambda.Function
+
+
+stream = kinesis.Stream(self, "MyStream")
+stream_consumer = kinesis.StreamConsumer(self, "MyStreamConsumer",
+    stream=stream,
+    stream_consumer_name="MyStreamConsumer"
+)
+my_function.add_event_source(KinesisConsumerEventSource(stream_consumer,
+    batch_size=100, # default
+    starting_position=lambda_.StartingPosition.TRIM_HORIZON
+))
+```
+
 ## Kafka
 
 You can write Lambda functions to process data either from [Amazon MSK](https://docs.aws.amazon.com/lambda/latest/dg/with-msk.html) or a [self managed Kafka](https://docs.aws.amazon.com/lambda/latest/dg/kafka-smaa.html) cluster.
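For comparison with the enhanced fan-out example added above, the MSK pattern that this release removes from the `StartingPosition` docstring remains a valid event source; here is a sketch reusing its placeholder cluster ARN and topic:

```python
from aws_cdk.aws_lambda_event_sources import ManagedKafkaEventSource

# my_function: lambda.Function

# Your MSK cluster arn (placeholder value)
cluster_arn = "arn:aws:kafka:us-east-1:0123456789019:cluster/SalesCluster/abcd1234-abcd-cafe-abab-9876543210ab-4"

# The Kafka topic you want to subscribe to
topic = "some-cool-topic"

my_function.add_event_source(ManagedKafkaEventSource(
    cluster_arn=cluster_arn,
    topic=topic,
    starting_position=lambda_.StartingPosition.TRIM_HORIZON
))
```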
@@ -486,7 +506,9 @@ from ..aws_ec2 import (
     IVpc as _IVpc_f30d5663,
     SubnetSelection as _SubnetSelection_e57d76df,
 )
-from ..aws_kinesis import IStream as _IStream_4e2457d2
+from ..aws_kinesis import (
+    IStream as _IStream_4e2457d2, IStreamConsumer as _IStreamConsumer_019d062e
+)
 from ..aws_kms import IKey as _IKey_5f11635f
 from ..aws_lambda import (
     DlqDestinationConfig as _DlqDestinationConfig_5fe54cfa,
@@ -3493,6 +3515,127 @@ class DynamoEventSourceProps(StreamEventSourceProps):
         )
 
 
+class KinesisConsumerEventSource(
+    StreamEventSource,
+    metaclass=jsii.JSIIMeta,
+    jsii_type="aws-cdk-lib.aws_lambda_event_sources.KinesisConsumerEventSource",
+):
+    '''Use an Amazon Kinesis stream consumer as an event source for AWS Lambda.
+
+    :exampleMetadata: infused
+
+    Example::
+
+        import aws_cdk.aws_kinesis as kinesis
+        from aws_cdk.aws_lambda_event_sources import KinesisConsumerEventSource
+
+        # my_function: lambda.Function
+
+
+        stream = kinesis.Stream(self, "MyStream")
+        stream_consumer = kinesis.StreamConsumer(self, "MyStreamConsumer",
+            stream=stream,
+            stream_consumer_name="MyStreamConsumer"
+        )
+        my_function.add_event_source(KinesisConsumerEventSource(stream_consumer,
+            batch_size=100, # default
+            starting_position=lambda_.StartingPosition.TRIM_HORIZON
+        ))
+    '''
+
+    def __init__(
+        self,
+        stream_consumer: _IStreamConsumer_019d062e,
+        *,
+        starting_position_timestamp: typing.Optional[jsii.Number] = None,
+        bisect_batch_on_error: typing.Optional[builtins.bool] = None,
+        filter_encryption: typing.Optional[_IKey_5f11635f] = None,
+        filters: typing.Optional[typing.Sequence[typing.Mapping[builtins.str, typing.Any]]] = None,
+        max_record_age: typing.Optional[_Duration_4839e8c3] = None,
+        metrics_config: typing.Optional[typing.Union[_MetricsConfig_48ab59c4, typing.Dict[builtins.str, typing.Any]]] = None,
+        on_failure: typing.Optional[_IEventSourceDlq_5e2c6ad9] = None,
+        parallelization_factor: typing.Optional[jsii.Number] = None,
+        report_batch_item_failures: typing.Optional[builtins.bool] = None,
+        retry_attempts: typing.Optional[jsii.Number] = None,
+        tumbling_window: typing.Optional[_Duration_4839e8c3] = None,
+        starting_position: _StartingPosition_c0a4852c,
+        batch_size: typing.Optional[jsii.Number] = None,
+        enabled: typing.Optional[builtins.bool] = None,
+        max_batching_window: typing.Optional[_Duration_4839e8c3] = None,
+        provisioned_poller_config: typing.Optional[typing.Union[ProvisionedPollerConfig, typing.Dict[builtins.str, typing.Any]]] = None,
+    ) -> None:
+        '''
+        :param stream_consumer: -
+        :param starting_position_timestamp: The time from which to start reading, in Unix time seconds. Default: - no timestamp
+        :param bisect_batch_on_error: If the function returns an error, split the batch in two and retry. Default: false
+        :param filter_encryption: Add Customer managed KMS key to encrypt Filter Criteria. Default: - none
+        :param filters: Add filter criteria option. Default: - None
+        :param max_record_age: The maximum age of a record that Lambda sends to a function for processing. Valid Range: - Minimum value of 60 seconds - Maximum value of 7 days The default value is -1, which sets the maximum age to infinite. When the value is set to infinite, Lambda never discards old records. Record are valid until it expires in the event source. Default: -1
+        :param metrics_config: Configuration for enhanced monitoring metrics collection When specified, enables collection of additional metrics for the stream event source. Default: - Enhanced monitoring is disabled
+        :param on_failure: An Amazon SQS queue or Amazon SNS topic destination for discarded records. Default: - discarded records are ignored
+        :param parallelization_factor: The number of batches to process from each shard concurrently. Valid Range: - Minimum value of 1 - Maximum value of 10 Default: 1
+        :param report_batch_item_failures: Allow functions to return partially successful responses for a batch of records. Default: false
+        :param retry_attempts: Maximum number of retry attempts Valid Range: * Minimum value of 0 * Maximum value of 10000. The default value is -1, which sets the maximum number of retries to infinite. When MaximumRetryAttempts is infinite, Lambda retries failed records until the record expires in the event source. Default: -1
+        :param tumbling_window: The size of the tumbling windows to group records sent to DynamoDB or Kinesis Valid Range: 0 - 15 minutes. Default: - None
+        :param starting_position: Where to begin consuming the stream.
+        :param batch_size: The largest number of records that AWS Lambda will retrieve from your event source at the time of invoking your function. Your function receives an event with all the retrieved records. Valid Range: - Minimum value of 1 - Maximum value of: - 1000 for ``DynamoEventSource`` - 10000 for ``KinesisEventSource``, ``ManagedKafkaEventSource`` and ``SelfManagedKafkaEventSource`` Default: 100
+        :param enabled: If the stream event source mapping should be enabled. Default: true
+        :param max_batching_window: The maximum amount of time to gather records before invoking the function. Maximum of Duration.minutes(5). Default: - Duration.seconds(0) for Kinesis, DynamoDB, and SQS event sources, Duration.millis(500) for MSK, self-managed Kafka, and Amazon MQ.
+        :param provisioned_poller_config: Configuration for provisioned pollers that read from the event source. When specified, allows control over the minimum and maximum number of pollers that can be provisioned to process events from the source. Default: - no provisioned pollers
+        '''
+        if __debug__:
+            type_hints = typing.get_type_hints(_typecheckingstub__5d4436c01738e680b66bbfda741235beea5eb51d1e287ec05ae0c58abfa41f1a)
+            check_type(argname="argument stream_consumer", value=stream_consumer, expected_type=type_hints["stream_consumer"])
+        props = KinesisEventSourceProps(
+            starting_position_timestamp=starting_position_timestamp,
+            bisect_batch_on_error=bisect_batch_on_error,
+            filter_encryption=filter_encryption,
+            filters=filters,
+            max_record_age=max_record_age,
+            metrics_config=metrics_config,
+            on_failure=on_failure,
+            parallelization_factor=parallelization_factor,
+            report_batch_item_failures=report_batch_item_failures,
+            retry_attempts=retry_attempts,
+            tumbling_window=tumbling_window,
+            starting_position=starting_position,
+            batch_size=batch_size,
+            enabled=enabled,
+            max_batching_window=max_batching_window,
+            provisioned_poller_config=provisioned_poller_config,
+        )
+
+        jsii.create(self.__class__, self, [stream_consumer, props])
+
+    @jsii.member(jsii_name="bind")
+    def bind(self, target: _IFunction_6adb0ab8) -> None:
+        '''Called by ``lambda.addEventSource`` to allow the event source to bind to this function.
+
+        :param target: -
+        '''
+        if __debug__:
+            type_hints = typing.get_type_hints(_typecheckingstub__7cd6beafb45fd5092320f505d4c975a198986b37261f8d9700231eee9d820412)
+            check_type(argname="argument target", value=target, expected_type=type_hints["target"])
+        return typing.cast(None, jsii.invoke(self, "bind", [target]))
+
+    @builtins.property
+    @jsii.member(jsii_name="eventSourceMappingArn")
+    def event_source_mapping_arn(self) -> builtins.str:
+        '''The ARN for this EventSourceMapping.'''
+        return typing.cast(builtins.str, jsii.get(self, "eventSourceMappingArn"))
+
+    @builtins.property
+    @jsii.member(jsii_name="eventSourceMappingId")
+    def event_source_mapping_id(self) -> builtins.str:
+        '''The identifier for this EventSourceMapping.'''
+        return typing.cast(builtins.str, jsii.get(self, "eventSourceMappingId"))
+
+    @builtins.property
+    @jsii.member(jsii_name="streamConsumer")
+    def stream_consumer(self) -> _IStreamConsumer_019d062e:
+        return typing.cast(_IStreamConsumer_019d062e, jsii.get(self, "streamConsumer"))
+
+
 class KinesisEventSource(
     StreamEventSource,
     metaclass=jsii.JSIIMeta,
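The parameter docs in the new class above cover several failure-handling knobs (`bisect_batch_on_error`, `retry_attempts`, `max_record_age`, `on_failure`). A minimal sketch combining them with the `SqsDlq` destination already exported by this module; the queue and consumer names are illustrative:

```python
import aws_cdk.aws_kinesis as kinesis
import aws_cdk.aws_sqs as sqs
from aws_cdk import Duration
from aws_cdk.aws_lambda_event_sources import KinesisConsumerEventSource, SqsDlq

# my_function: lambda.Function

stream = kinesis.Stream(self, "MyStream")
stream_consumer = kinesis.StreamConsumer(self, "MyStreamConsumer",
    stream=stream,
    stream_consumer_name="MyStreamConsumer"
)
dead_letter_queue = sqs.Queue(self, "DeadLetterQueue")
my_function.add_event_source(KinesisConsumerEventSource(stream_consumer,
    starting_position=lambda_.StartingPosition.TRIM_HORIZON,
    # Split a failing batch in two and retry each half independently.
    bisect_batch_on_error=True,
    # Give up on a record after 3 retries or once it is older than an hour...
    retry_attempts=3,
    max_record_age=Duration.hours(1),
    # ...and send its metadata to the dead-letter queue.
    on_failure=SqsDlq(dead_letter_queue)
))
```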
@@ -4164,6 +4307,7 @@ __all__ = [
     "DynamoEventSource",
     "DynamoEventSourceProps",
     "KafkaEventSourceProps",
+    "KinesisConsumerEventSource",
     "KinesisEventSource",
     "KinesisEventSourceProps",
     "ManagedKafkaEventSource",
@@ -4501,6 +4645,35 @@ def _typecheckingstub__ec371d5e4612e8923bbdcc024d90e26915d64be2dc40151f22fc41139
     """Type checking stubs"""
     pass
 
+def _typecheckingstub__5d4436c01738e680b66bbfda741235beea5eb51d1e287ec05ae0c58abfa41f1a(
+    stream_consumer: _IStreamConsumer_019d062e,
+    *,
+    starting_position_timestamp: typing.Optional[jsii.Number] = None,
+    bisect_batch_on_error: typing.Optional[builtins.bool] = None,
+    filter_encryption: typing.Optional[_IKey_5f11635f] = None,
+    filters: typing.Optional[typing.Sequence[typing.Mapping[builtins.str, typing.Any]]] = None,
+    max_record_age: typing.Optional[_Duration_4839e8c3] = None,
+    metrics_config: typing.Optional[typing.Union[_MetricsConfig_48ab59c4, typing.Dict[builtins.str, typing.Any]]] = None,
+    on_failure: typing.Optional[_IEventSourceDlq_5e2c6ad9] = None,
+    parallelization_factor: typing.Optional[jsii.Number] = None,
+    report_batch_item_failures: typing.Optional[builtins.bool] = None,
+    retry_attempts: typing.Optional[jsii.Number] = None,
+    tumbling_window: typing.Optional[_Duration_4839e8c3] = None,
+    starting_position: _StartingPosition_c0a4852c,
+    batch_size: typing.Optional[jsii.Number] = None,
+    enabled: typing.Optional[builtins.bool] = None,
+    max_batching_window: typing.Optional[_Duration_4839e8c3] = None,
+    provisioned_poller_config: typing.Optional[typing.Union[ProvisionedPollerConfig, typing.Dict[builtins.str, typing.Any]]] = None,
+) -> None:
+    """Type checking stubs"""
+    pass
+
+def _typecheckingstub__7cd6beafb45fd5092320f505d4c975a198986b37261f8d9700231eee9d820412(
+    target: _IFunction_6adb0ab8,
+) -> None:
+    """Type checking stubs"""
+    pass
+
 def _typecheckingstub__9f81acc98c12b4967363bdd43130a7e674a566679a7b200f5ccd6a0ae313ad2e(
     stream: _IStream_4e2457d2,
     *,