aws-cdk-lib 2.185.0__py3-none-any.whl → 2.187.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of aws-cdk-lib might be problematic.

Files changed (87)
  1. aws_cdk/__init__.py +383 -145
  2. aws_cdk/_jsii/__init__.py +1 -1
  3. aws_cdk/_jsii/{aws-cdk-lib@2.185.0.jsii.tgz → aws-cdk-lib@2.187.0.jsii.tgz} +0 -0
  4. aws_cdk/aws_amazonmq/__init__.py +3 -2
  5. aws_cdk/aws_amplify/__init__.py +124 -0
  6. aws_cdk/aws_apigateway/__init__.py +48 -2
  7. aws_cdk/aws_apigatewayv2/__init__.py +9 -0
  8. aws_cdk/aws_appconfig/__init__.py +3 -3
  9. aws_cdk/aws_applicationsignals/__init__.py +363 -3
  10. aws_cdk/aws_appsync/__init__.py +74 -3
  11. aws_cdk/aws_bedrock/__init__.py +395 -14
  12. aws_cdk/aws_cassandra/__init__.py +2 -2
  13. aws_cdk/aws_cleanrooms/__init__.py +21 -9
  14. aws_cdk/aws_cloudformation/__init__.py +1 -5
  15. aws_cdk/aws_cloudfront/__init__.py +15 -1
  16. aws_cdk/aws_cloudfront_origins/__init__.py +4 -2
  17. aws_cdk/aws_cloudtrail/__init__.py +4 -18
  18. aws_cdk/aws_cloudwatch/__init__.py +50 -50
  19. aws_cdk/aws_codeartifact/__init__.py +20 -33
  20. aws_cdk/aws_codebuild/__init__.py +9 -0
  21. aws_cdk/aws_codepipeline/__init__.py +1328 -120
  22. aws_cdk/aws_cognito/__init__.py +1 -1
  23. aws_cdk/aws_cognito_identitypool/__init__.py +2303 -0
  24. aws_cdk/aws_config/__init__.py +2 -5
  25. aws_cdk/aws_connect/__init__.py +3 -7
  26. aws_cdk/aws_controltower/__init__.py +18 -26
  27. aws_cdk/aws_datazone/__init__.py +3540 -10
  28. aws_cdk/aws_detective/__init__.py +3 -3
  29. aws_cdk/aws_dynamodb/__init__.py +37 -0
  30. aws_cdk/aws_ec2/__init__.py +714 -37
  31. aws_cdk/aws_ecr/__init__.py +143 -0
  32. aws_cdk/aws_ecr_assets/__init__.py +115 -4
  33. aws_cdk/aws_ecs/__init__.py +66 -20
  34. aws_cdk/aws_eks/__init__.py +114 -0
  35. aws_cdk/aws_events/__init__.py +26 -6
  36. aws_cdk/aws_forecast/__init__.py +1 -1
  37. aws_cdk/aws_fsx/__init__.py +2 -2
  38. aws_cdk/aws_gamelift/__init__.py +11 -11
  39. aws_cdk/aws_iam/__init__.py +264 -0
  40. aws_cdk/aws_identitystore/__init__.py +16 -16
  41. aws_cdk/aws_imagebuilder/__init__.py +3 -27
  42. aws_cdk/aws_iotsitewise/__init__.py +623 -0
  43. aws_cdk/aws_kinesisfirehose/__init__.py +2 -3
  44. aws_cdk/aws_kms/__init__.py +10 -11
  45. aws_cdk/aws_lakeformation/__init__.py +3 -3
  46. aws_cdk/aws_lambda/__init__.py +112 -5
  47. aws_cdk/aws_lambda_event_sources/__init__.py +65 -3
  48. aws_cdk/aws_lambda_nodejs/__init__.py +5 -24
  49. aws_cdk/aws_lex/__init__.py +981 -5
  50. aws_cdk/aws_location/__init__.py +24 -7
  51. aws_cdk/aws_mediaconnect/__init__.py +714 -290
  52. aws_cdk/aws_msk/__init__.py +8 -2
  53. aws_cdk/aws_mwaa/__init__.py +9 -9
  54. aws_cdk/aws_networkfirewall/__init__.py +60 -12
  55. aws_cdk/aws_oam/__init__.py +8 -37
  56. aws_cdk/aws_omics/__init__.py +216 -0
  57. aws_cdk/aws_quicksight/__init__.py +250 -108
  58. aws_cdk/aws_rds/__init__.py +102 -10
  59. aws_cdk/aws_redshiftserverless/__init__.py +192 -15
  60. aws_cdk/aws_route53/__init__.py +2 -2
  61. aws_cdk/aws_route53recoverycontrol/__init__.py +43 -2
  62. aws_cdk/aws_rum/__init__.py +315 -52
  63. aws_cdk/aws_s3_assets/__init__.py +70 -1
  64. aws_cdk/aws_s3_deployment/__init__.py +4 -0
  65. aws_cdk/aws_sagemaker/__init__.py +6 -4
  66. aws_cdk/aws_scheduler/__init__.py +3944 -121
  67. aws_cdk/aws_scheduler_targets/__init__.py +4460 -0
  68. aws_cdk/aws_securitylake/__init__.py +2 -2
  69. aws_cdk/aws_servicecatalog/__init__.py +4 -0
  70. aws_cdk/aws_sns/__init__.py +1 -1
  71. aws_cdk/aws_ssmquicksetup/__init__.py +5 -3
  72. aws_cdk/aws_stepfunctions/__init__.py +8 -0
  73. aws_cdk/aws_stepfunctions_tasks/__init__.py +4 -0
  74. aws_cdk/aws_synthetics/__init__.py +9 -0
  75. aws_cdk/aws_systemsmanagersap/__init__.py +150 -0
  76. aws_cdk/aws_timestream/__init__.py +4 -4
  77. aws_cdk/aws_wafv2/__init__.py +1117 -1446
  78. aws_cdk/aws_workspacesthinclient/__init__.py +4 -4
  79. aws_cdk/cloud_assembly_schema/__init__.py +60 -10
  80. aws_cdk/cx_api/__init__.py +38 -0
  81. aws_cdk/pipelines/__init__.py +20 -2
  82. {aws_cdk_lib-2.185.0.dist-info → aws_cdk_lib-2.187.0.dist-info}/METADATA +4 -4
  83. {aws_cdk_lib-2.185.0.dist-info → aws_cdk_lib-2.187.0.dist-info}/RECORD +87 -85
  84. {aws_cdk_lib-2.185.0.dist-info → aws_cdk_lib-2.187.0.dist-info}/WHEEL +1 -1
  85. {aws_cdk_lib-2.185.0.dist-info → aws_cdk_lib-2.187.0.dist-info}/LICENSE +0 -0
  86. {aws_cdk_lib-2.185.0.dist-info → aws_cdk_lib-2.187.0.dist-info}/NOTICE +0 -0
  87. {aws_cdk_lib-2.185.0.dist-info → aws_cdk_lib-2.187.0.dist-info}/top_level.txt +0 -0
@@ -241,7 +241,7 @@ behavior:
 * **onFailure**: In the event a record fails and consumes all retries, the record will be sent to S3 bucket, SQS queue or SNS topic that is specified here
 * **parallelizationFactor**: The number of batches to concurrently process on each shard.
 * **retryAttempts**: The maximum number of times a record should be retried in the event of failure.
-* **startingPosition**: Will determine where to begin consumption. 'LATEST' will start at the most recent record and ignore all records that arrived prior to attaching the event source, 'TRIM_HORIZON' will start at the oldest record and ensure you process all available data, while 'AT_TIMESTAMP' will start reading records from a specified time stamp. Note that 'AT_TIMESTAMP' is only supported for Amazon Kinesis streams.
+* **startingPosition**: Will determine where to begin consumption. 'LATEST' will start at the most recent record and ignore all records that arrived prior to attaching the event source, 'TRIM_HORIZON' will start at the oldest record and ensure you process all available data, while 'AT_TIMESTAMP' will start reading records from a specified time stamp.
 * **startingPositionTimestamp**: The time stamp from which to start reading. Used in conjunction with **startingPosition** when set to 'AT_TIMESTAMP'.
 * **tumblingWindow**: The duration in seconds of a processing window when using streams.
 * **enabled**: If the event source mapping should be enabled. The default is true.
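A minimal sketch of these parameters on a Kinesis event source, assuming the construct names from `aws_cdk.aws_lambda_event_sources` and an existing `lambda_.Function` named `my_function` as in the surrounding README examples; the stream, timestamp and sizes are illustrative only:

```python
import aws_cdk.aws_kinesis as kinesis
import aws_cdk.aws_lambda as lambda_
from aws_cdk.aws_lambda_event_sources import KinesisEventSource

stream = kinesis.Stream(self, "MyStream")

my_function.add_event_source(KinesisEventSource(stream,
    starting_position=lambda_.StartingPosition.AT_TIMESTAMP,
    starting_position_timestamp=1704067200,  # Unix time seconds; pairs with AT_TIMESTAMP
    batch_size=100,     # records buffered per invocation
    retry_attempts=3    # give up on a record after 3 retries
))
```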
@@ -282,7 +282,14 @@ my_function.add_event_source(KinesisConsumerEventSource(stream_consumer,

 ## Kafka

-You can write Lambda functions to process data either from [Amazon MSK](https://docs.aws.amazon.com/lambda/latest/dg/with-msk.html) or a [self managed Kafka](https://docs.aws.amazon.com/lambda/latest/dg/kafka-smaa.html) cluster.
+You can write Lambda functions to process data either from [Amazon MSK](https://docs.aws.amazon.com/lambda/latest/dg/with-msk.html) or a [self-managed Kafka](https://docs.aws.amazon.com/lambda/latest/dg/kafka-smaa.html) cluster. The following parameters will impact the polling behavior:
+
+* **startingPosition**: Will determine where to begin consumption. 'LATEST' will start at the most recent record and ignore all records that arrived prior to attaching the event source, 'TRIM_HORIZON' will start at the oldest record and ensure you process all available data, while 'AT_TIMESTAMP' will start reading records from a specified time stamp.
+* **startingPositionTimestamp**: The time stamp from which to start reading. Used in conjunction with **startingPosition** when set to 'AT_TIMESTAMP'.
+* **batchSize**: Determines how many records are buffered before invoking your lambda function - could impact your function's memory usage (if too high) and ability to keep up with incoming data velocity (if too low).
+* **maxBatchingWindow**: The maximum amount of time to gather records before invoking the lambda. This increases the likelihood of a full batch at the cost of possibly delaying processing.
+* **onFailure**: In the event a record fails and consumes all retries, the record will be sent to SQS queue or SNS topic that is specified here
+* **enabled**: If the Kafka event source mapping should be enabled. The default is true.

 The following code sets up Amazon MSK as an event source for a lambda function. Credentials will need to be configured to access the
 MSK cluster, as described in [Username/Password authentication](https://docs.aws.amazon.com/msk/latest/developerguide/msk-password.html).
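The MSK example itself falls outside the diff context shown here; a hedged sketch of what that setup typically looks like with the construct names above, exercising the `starting_position_timestamp` parameter this release adds (the cluster ARN, topic, secret name and timestamp are placeholders):

```python
import aws_cdk.aws_lambda as lambda_
from aws_cdk.aws_lambda_event_sources import ManagedKafkaEventSource
from aws_cdk.aws_secretsmanager import Secret

# The secret holding the SASL/SCRAM credentials for the MSK cluster
secret = Secret(self, "Secret", secret_name="AmazonMSK_KafkaSecret")

my_function.add_event_source(ManagedKafkaEventSource(
    cluster_arn="arn:aws:kafka:us-east-1:111122223333:cluster/MyCluster/placeholder",
    topic="some-cool-topic",
    secret=secret,
    batch_size=100,
    starting_position=lambda_.StartingPosition.AT_TIMESTAMP,
    starting_position_timestamp=1704067200  # Unix time seconds
))
```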
@@ -827,6 +834,7 @@ class BaseStreamEventSourceProps:
         "filters": "filters",
         "on_failure": "onFailure",
         "secret": "secret",
+        "starting_position_timestamp": "startingPositionTimestamp",
     },
 )
 class KafkaEventSourceProps(BaseStreamEventSourceProps):
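The `name_mapping` above is the table jsii uses to translate each Python snake_case keyword into the camelCase property of the underlying TypeScript struct. A toy illustration of that translation; `to_jsii` is hypothetical, not a real aws-cdk-lib helper:

```python
name_mapping = {"starting_position_timestamp": "startingPositionTimestamp"}

def to_jsii(values: dict) -> dict:
    """Rename Python-style keys to their jsii (camelCase) equivalents."""
    return {name_mapping.get(key, key): value for key, value in values.items()}

assert to_jsii({"starting_position_timestamp": 123}) == {"startingPositionTimestamp": 123}
```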
@@ -844,6 +852,7 @@ class KafkaEventSourceProps(BaseStreamEventSourceProps):
         filters: typing.Optional[typing.Sequence[typing.Mapping[builtins.str, typing.Any]]] = None,
         on_failure: typing.Optional[_IEventSourceDlq_5e2c6ad9] = None,
         secret: typing.Optional[_ISecret_6e020e6a] = None,
+        starting_position_timestamp: typing.Optional[jsii.Number] = None,
     ) -> None:
         '''Properties for a Kafka event source.

@@ -858,6 +867,7 @@ class KafkaEventSourceProps(BaseStreamEventSourceProps):
         :param filters: Add filter criteria to Event Source. Default: - none
         :param on_failure: Add an on Failure Destination for this Kafka event. SNS/SQS/S3 are supported Default: - discarded records are ignored
         :param secret: The secret with the Kafka credentials, see https://docs.aws.amazon.com/msk/latest/developerguide/msk-password.html for details This field is required if your Kafka brokers are accessed over the Internet. Default: none
+        :param starting_position_timestamp: The time from which to start reading, in Unix time seconds. Default: - no timestamp

         :exampleMetadata: fixture=_generated

@@ -894,7 +904,8 @@ class KafkaEventSourceProps(BaseStreamEventSourceProps):
                     maximum_pollers=123,
                     minimum_pollers=123
                 ),
-                secret=secret
+                secret=secret,
+                starting_position_timestamp=123
             )
         '''
         if isinstance(provisioned_poller_config, dict):
@@ -912,6 +923,7 @@ class KafkaEventSourceProps(BaseStreamEventSourceProps):
             check_type(argname="argument filters", value=filters, expected_type=type_hints["filters"])
             check_type(argname="argument on_failure", value=on_failure, expected_type=type_hints["on_failure"])
             check_type(argname="argument secret", value=secret, expected_type=type_hints["secret"])
+            check_type(argname="argument starting_position_timestamp", value=starting_position_timestamp, expected_type=type_hints["starting_position_timestamp"])
         self._values: typing.Dict[builtins.str, typing.Any] = {
             "starting_position": starting_position,
             "topic": topic,
@@ -934,6 +946,8 @@ class KafkaEventSourceProps(BaseStreamEventSourceProps):
             self._values["on_failure"] = on_failure
         if secret is not None:
             self._values["secret"] = secret
+        if starting_position_timestamp is not None:
+            self._values["starting_position_timestamp"] = starting_position_timestamp

     @builtins.property
     def starting_position(self) -> _StartingPosition_c0a4852c:
@@ -1060,6 +1074,15 @@ class KafkaEventSourceProps(BaseStreamEventSourceProps):
         result = self._values.get("secret")
         return typing.cast(typing.Optional[_ISecret_6e020e6a], result)

+    @builtins.property
+    def starting_position_timestamp(self) -> typing.Optional[jsii.Number]:
+        '''The time from which to start reading, in Unix time seconds.
+
+        :default: - no timestamp
+        '''
+        result = self._values.get("starting_position_timestamp")
+        return typing.cast(typing.Optional[jsii.Number], result)
+
     def __eq__(self, rhs: typing.Any) -> builtins.bool:
         return isinstance(rhs, self.__class__) and rhs._values == self._values

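The added accessor follows the usual jsii struct pattern: optional keys are stored in `self._values` only when provided, and `.get()` returns `None` otherwise, which is what the `:default: - no timestamp` line describes. A minimal stand-alone sketch of that behavior; the class name is illustrative, not part of the package:

```python
import typing

class _StructSketch:
    """Illustrative stand-in for a jsii-generated props struct."""

    def __init__(self, *, starting_position_timestamp: typing.Optional[float] = None) -> None:
        self._values: typing.Dict[str, typing.Any] = {}
        if starting_position_timestamp is not None:
            self._values["starting_position_timestamp"] = starting_position_timestamp

    @property
    def starting_position_timestamp(self) -> typing.Optional[float]:
        # Returns None when the key was never stored, i.e. the documented default
        return self._values.get("starting_position_timestamp")

assert _StructSketch().starting_position_timestamp is None
assert _StructSketch(starting_position_timestamp=123).starting_position_timestamp == 123
```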
@@ -1087,6 +1110,7 @@ class KafkaEventSourceProps(BaseStreamEventSourceProps):
         "filters": "filters",
         "on_failure": "onFailure",
         "secret": "secret",
+        "starting_position_timestamp": "startingPositionTimestamp",
         "cluster_arn": "clusterArn",
     },
 )
@@ -1105,6 +1129,7 @@ class ManagedKafkaEventSourceProps(KafkaEventSourceProps):
         filters: typing.Optional[typing.Sequence[typing.Mapping[builtins.str, typing.Any]]] = None,
         on_failure: typing.Optional[_IEventSourceDlq_5e2c6ad9] = None,
         secret: typing.Optional[_ISecret_6e020e6a] = None,
+        starting_position_timestamp: typing.Optional[jsii.Number] = None,
         cluster_arn: builtins.str,
     ) -> None:
         '''Properties for a MSK event source.
@@ -1120,6 +1145,7 @@ class ManagedKafkaEventSourceProps(KafkaEventSourceProps):
         :param filters: Add filter criteria to Event Source. Default: - none
         :param on_failure: Add an on Failure Destination for this Kafka event. SNS/SQS/S3 are supported Default: - discarded records are ignored
         :param secret: The secret with the Kafka credentials, see https://docs.aws.amazon.com/msk/latest/developerguide/msk-password.html for details This field is required if your Kafka brokers are accessed over the Internet. Default: none
+        :param starting_position_timestamp: The time from which to start reading, in Unix time seconds. Default: - no timestamp
         :param cluster_arn: An MSK cluster construct.

         :exampleMetadata: infused
@@ -1164,6 +1190,7 @@ class ManagedKafkaEventSourceProps(KafkaEventSourceProps):
             check_type(argname="argument filters", value=filters, expected_type=type_hints["filters"])
             check_type(argname="argument on_failure", value=on_failure, expected_type=type_hints["on_failure"])
             check_type(argname="argument secret", value=secret, expected_type=type_hints["secret"])
+            check_type(argname="argument starting_position_timestamp", value=starting_position_timestamp, expected_type=type_hints["starting_position_timestamp"])
             check_type(argname="argument cluster_arn", value=cluster_arn, expected_type=type_hints["cluster_arn"])
         self._values: typing.Dict[builtins.str, typing.Any] = {
             "starting_position": starting_position,
@@ -1188,6 +1215,8 @@ class ManagedKafkaEventSourceProps(KafkaEventSourceProps):
             self._values["on_failure"] = on_failure
         if secret is not None:
             self._values["secret"] = secret
+        if starting_position_timestamp is not None:
+            self._values["starting_position_timestamp"] = starting_position_timestamp

     @builtins.property
     def starting_position(self) -> _StartingPosition_c0a4852c:
@@ -1314,6 +1343,15 @@ class ManagedKafkaEventSourceProps(KafkaEventSourceProps):
         result = self._values.get("secret")
         return typing.cast(typing.Optional[_ISecret_6e020e6a], result)

+    @builtins.property
+    def starting_position_timestamp(self) -> typing.Optional[jsii.Number]:
+        '''The time from which to start reading, in Unix time seconds.
+
+        :default: - no timestamp
+        '''
+        result = self._values.get("starting_position_timestamp")
+        return typing.cast(typing.Optional[jsii.Number], result)
+
     @builtins.property
     def cluster_arn(self) -> builtins.str:
         '''An MSK cluster construct.'''
@@ -1680,6 +1718,7 @@ class S3OnFailureDestination(
         "filters": "filters",
         "on_failure": "onFailure",
         "secret": "secret",
+        "starting_position_timestamp": "startingPositionTimestamp",
         "bootstrap_servers": "bootstrapServers",
         "authentication_method": "authenticationMethod",
         "root_ca_certificate": "rootCACertificate",
@@ -1703,6 +1742,7 @@ class SelfManagedKafkaEventSourceProps(KafkaEventSourceProps):
         filters: typing.Optional[typing.Sequence[typing.Mapping[builtins.str, typing.Any]]] = None,
         on_failure: typing.Optional[_IEventSourceDlq_5e2c6ad9] = None,
         secret: typing.Optional[_ISecret_6e020e6a] = None,
+        starting_position_timestamp: typing.Optional[jsii.Number] = None,
         bootstrap_servers: typing.Sequence[builtins.str],
         authentication_method: typing.Optional[AuthenticationMethod] = None,
         root_ca_certificate: typing.Optional[_ISecret_6e020e6a] = None,
@@ -1725,6 +1765,7 @@ class SelfManagedKafkaEventSourceProps(KafkaEventSourceProps):
         :param filters: Add filter criteria to Event Source. Default: - none
         :param on_failure: Add an on Failure Destination for this Kafka event. SNS/SQS/S3 are supported Default: - discarded records are ignored
         :param secret: The secret with the Kafka credentials, see https://docs.aws.amazon.com/msk/latest/developerguide/msk-password.html for details This field is required if your Kafka brokers are accessed over the Internet. Default: none
+        :param starting_position_timestamp: The time from which to start reading, in Unix time seconds. Default: - no timestamp
         :param bootstrap_servers: The list of host and port pairs that are the addresses of the Kafka brokers in a "bootstrap" Kafka cluster that a Kafka client connects to initially to bootstrap itself. They are in the format ``abc.xyz.com:xxxx``.
         :param authentication_method: The authentication method for your Kafka cluster. Default: AuthenticationMethod.SASL_SCRAM_512_AUTH
         :param root_ca_certificate: The secret with the root CA certificate used by your Kafka brokers for TLS encryption This field is required if your Kafka brokers use certificates signed by a private CA. Default: - none
@@ -1779,6 +1820,7 @@ class SelfManagedKafkaEventSourceProps(KafkaEventSourceProps):
             check_type(argname="argument filters", value=filters, expected_type=type_hints["filters"])
             check_type(argname="argument on_failure", value=on_failure, expected_type=type_hints["on_failure"])
             check_type(argname="argument secret", value=secret, expected_type=type_hints["secret"])
+            check_type(argname="argument starting_position_timestamp", value=starting_position_timestamp, expected_type=type_hints["starting_position_timestamp"])
             check_type(argname="argument bootstrap_servers", value=bootstrap_servers, expected_type=type_hints["bootstrap_servers"])
             check_type(argname="argument authentication_method", value=authentication_method, expected_type=type_hints["authentication_method"])
             check_type(argname="argument root_ca_certificate", value=root_ca_certificate, expected_type=type_hints["root_ca_certificate"])
@@ -1808,6 +1850,8 @@ class SelfManagedKafkaEventSourceProps(KafkaEventSourceProps):
             self._values["on_failure"] = on_failure
         if secret is not None:
             self._values["secret"] = secret
+        if starting_position_timestamp is not None:
+            self._values["starting_position_timestamp"] = starting_position_timestamp
         if authentication_method is not None:
             self._values["authentication_method"] = authentication_method
         if root_ca_certificate is not None:
@@ -1944,6 +1988,15 @@ class SelfManagedKafkaEventSourceProps(KafkaEventSourceProps):
         result = self._values.get("secret")
         return typing.cast(typing.Optional[_ISecret_6e020e6a], result)

+    @builtins.property
+    def starting_position_timestamp(self) -> typing.Optional[jsii.Number]:
+        '''The time from which to start reading, in Unix time seconds.
+
+        :default: - no timestamp
+        '''
+        result = self._values.get("starting_position_timestamp")
+        return typing.cast(typing.Optional[jsii.Number], result)
+
     @builtins.property
     def bootstrap_servers(self) -> typing.List[builtins.str]:
         '''The list of host and port pairs that are the addresses of the Kafka brokers in a "bootstrap" Kafka cluster that a Kafka client connects to initially to bootstrap itself.
@@ -4127,6 +4180,7 @@ class ManagedKafkaEventSource(
         filters: typing.Optional[typing.Sequence[typing.Mapping[builtins.str, typing.Any]]] = None,
         on_failure: typing.Optional[_IEventSourceDlq_5e2c6ad9] = None,
         secret: typing.Optional[_ISecret_6e020e6a] = None,
+        starting_position_timestamp: typing.Optional[jsii.Number] = None,
         starting_position: _StartingPosition_c0a4852c,
         batch_size: typing.Optional[jsii.Number] = None,
         enabled: typing.Optional[builtins.bool] = None,
@@ -4141,6 +4195,7 @@ class ManagedKafkaEventSource(
         :param filters: Add filter criteria to Event Source. Default: - none
         :param on_failure: Add an on Failure Destination for this Kafka event. SNS/SQS/S3 are supported Default: - discarded records are ignored
         :param secret: The secret with the Kafka credentials, see https://docs.aws.amazon.com/msk/latest/developerguide/msk-password.html for details This field is required if your Kafka brokers are accessed over the Internet. Default: none
+        :param starting_position_timestamp: The time from which to start reading, in Unix time seconds. Default: - no timestamp
         :param starting_position: Where to begin consuming the stream.
         :param batch_size: The largest number of records that AWS Lambda will retrieve from your event source at the time of invoking your function. Your function receives an event with all the retrieved records. Valid Range: - Minimum value of 1 - Maximum value of: - 1000 for ``DynamoEventSource`` - 10000 for ``KinesisEventSource``, ``ManagedKafkaEventSource`` and ``SelfManagedKafkaEventSource`` Default: 100
         :param enabled: If the stream event source mapping should be enabled. Default: true
@@ -4155,6 +4210,7 @@ class ManagedKafkaEventSource(
             filters=filters,
             on_failure=on_failure,
             secret=secret,
+            starting_position_timestamp=starting_position_timestamp,
             starting_position=starting_position,
             batch_size=batch_size,
             enabled=enabled,
@@ -4241,6 +4297,7 @@ class SelfManagedKafkaEventSource(
         filters: typing.Optional[typing.Sequence[typing.Mapping[builtins.str, typing.Any]]] = None,
         on_failure: typing.Optional[_IEventSourceDlq_5e2c6ad9] = None,
         secret: typing.Optional[_ISecret_6e020e6a] = None,
+        starting_position_timestamp: typing.Optional[jsii.Number] = None,
         starting_position: _StartingPosition_c0a4852c,
         batch_size: typing.Optional[jsii.Number] = None,
         enabled: typing.Optional[builtins.bool] = None,
@@ -4260,6 +4317,7 @@ class SelfManagedKafkaEventSource(
         :param filters: Add filter criteria to Event Source. Default: - none
         :param on_failure: Add an on Failure Destination for this Kafka event. SNS/SQS/S3 are supported Default: - discarded records are ignored
         :param secret: The secret with the Kafka credentials, see https://docs.aws.amazon.com/msk/latest/developerguide/msk-password.html for details This field is required if your Kafka brokers are accessed over the Internet. Default: none
+        :param starting_position_timestamp: The time from which to start reading, in Unix time seconds. Default: - no timestamp
         :param starting_position: Where to begin consuming the stream.
         :param batch_size: The largest number of records that AWS Lambda will retrieve from your event source at the time of invoking your function. Your function receives an event with all the retrieved records. Valid Range: - Minimum value of 1 - Maximum value of: - 1000 for ``DynamoEventSource`` - 10000 for ``KinesisEventSource``, ``ManagedKafkaEventSource`` and ``SelfManagedKafkaEventSource`` Default: 100
         :param enabled: If the stream event source mapping should be enabled. Default: true
@@ -4279,6 +4337,7 @@ class SelfManagedKafkaEventSource(
             filters=filters,
             on_failure=on_failure,
             secret=secret,
+            starting_position_timestamp=starting_position_timestamp,
             starting_position=starting_position,
             batch_size=batch_size,
             enabled=enabled,
@@ -4379,6 +4438,7 @@ def _typecheckingstub__980041697091a50415a7444df02a046d910ddd83f1229789d80780bf7
     filters: typing.Optional[typing.Sequence[typing.Mapping[builtins.str, typing.Any]]] = None,
     on_failure: typing.Optional[_IEventSourceDlq_5e2c6ad9] = None,
     secret: typing.Optional[_ISecret_6e020e6a] = None,
+    starting_position_timestamp: typing.Optional[jsii.Number] = None,
 ) -> None:
     """Type checking stubs"""
     pass
@@ -4396,6 +4456,7 @@ def _typecheckingstub__e930f585c1bae37174885c54f0f224909bfb0a75d9f1b652bbcf33461
     filters: typing.Optional[typing.Sequence[typing.Mapping[builtins.str, typing.Any]]] = None,
     on_failure: typing.Optional[_IEventSourceDlq_5e2c6ad9] = None,
     secret: typing.Optional[_ISecret_6e020e6a] = None,
+    starting_position_timestamp: typing.Optional[jsii.Number] = None,
     cluster_arn: builtins.str,
 ) -> None:
     """Type checking stubs"""
@@ -4473,6 +4534,7 @@ def _typecheckingstub__0100a45aa91b9c2103378e2ba54dd41b054f1d6a50733797256d6971b
     filters: typing.Optional[typing.Sequence[typing.Mapping[builtins.str, typing.Any]]] = None,
     on_failure: typing.Optional[_IEventSourceDlq_5e2c6ad9] = None,
     secret: typing.Optional[_ISecret_6e020e6a] = None,
+    starting_position_timestamp: typing.Optional[jsii.Number] = None,
     bootstrap_servers: typing.Sequence[builtins.str],
     authentication_method: typing.Optional[AuthenticationMethod] = None,
     root_ca_certificate: typing.Optional[_ISecret_6e020e6a] = None,
@@ -1938,30 +1938,11 @@ class NodejsFunctionProps(_FunctionOptions_328f4d39):

     nodejs.NodejsFunction(self, "my-handler",
         bundling=nodejs.BundlingOptions(
-            minify=True, # minify code, defaults to false
-            source_map=True, # include source map, defaults to false
-            source_map_mode=nodejs.SourceMapMode.INLINE, # defaults to SourceMapMode.DEFAULT
-            sources_content=False, # do not include original source into source map, defaults to true
-            target="es2020", # target environment for the generated JavaScript code
-            loader={ # Use the 'dataurl' loader for '.png' files
-                ".png": "dataurl"},
-            define={ # Replace strings during build time
-                "process.env.API_KEY": JSON.stringify("xxx-xxxx-xxx"),
-                "process.env.PRODUCTION": JSON.stringify(True),
-                "process.env.NUMBER": JSON.stringify(123)},
-            log_level=nodejs.LogLevel.ERROR, # defaults to LogLevel.WARNING
-            keep_names=True, # defaults to false
-            tsconfig="custom-tsconfig.json", # use custom-tsconfig.json instead of default,
-            metafile=True, # include meta file, defaults to false
-            banner="/* comments */", # requires esbuild >= 0.9.0, defaults to none
-            footer="/* comments */", # requires esbuild >= 0.9.0, defaults to none
-            charset=nodejs.Charset.UTF8, # do not escape non-ASCII characters, defaults to Charset.ASCII
-            format=nodejs.OutputFormat.ESM, # ECMAScript module output format, defaults to OutputFormat.CJS (OutputFormat.ESM requires Node.js >= 14)
-            main_fields=["module", "main"], # prefer ECMAScript versions of dependencies
-            inject=["./my-shim.js", "./other-shim.js"], # allows to automatically replace a global variable with an import from another file
-            esbuild_args={ # Pass additional arguments to esbuild
-                "--log-limit": "0",
-                "--splitting": True}
+            network="host",
+            security_opt="no-new-privileges",
+            user="user:group",
+            volumes_from=["777f7dc92da7"],
+            volumes=[DockerVolume(host_path="/host-path", container_path="/container-path")]
         )
     )
     '''
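The docstring example now exercises the Docker-related bundling options rather than the esbuild ones; these options correspond to the equivalent `docker run` flags (`--network`, `--security-opt`, `--user`, `--volumes-from`, `-v`) when bundling runs inside a container. A self-contained, hedged sketch of the same usage in a stack; the `entry` path and all option values are placeholders:

```python
import aws_cdk.aws_lambda_nodejs as nodejs
from aws_cdk import DockerVolume

nodejs.NodejsFunction(self, "my-handler",
    entry="lambda/handler.ts",  # placeholder entry point
    bundling=nodejs.BundlingOptions(
        network="host",                       # docker run --network
        security_opt="no-new-privileges",     # docker run --security-opt
        user="user:group",                    # docker run --user
        volumes_from=["777f7dc92da7"],        # docker run --volumes-from
        volumes=[DockerVolume(host_path="/host-path", container_path="/container-path")]  # -v
    )
)
```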