aws-cdk-lib 2.178.2__py3-none-any.whl → 2.180.0__py3-none-any.whl

Files changed (70)
  1. aws_cdk/__init__.py +83 -41
  2. aws_cdk/_jsii/__init__.py +1 -2
  3. aws_cdk/_jsii/{aws-cdk-lib@2.178.2.jsii.tgz → aws-cdk-lib@2.180.0.jsii.tgz} +0 -0
  4. aws_cdk/aws_acmpca/__init__.py +47 -0
  5. aws_cdk/aws_apigateway/__init__.py +176 -35
  6. aws_cdk/aws_apigatewayv2/__init__.py +151 -32
  7. aws_cdk/aws_apigatewayv2_integrations/__init__.py +348 -0
  8. aws_cdk/aws_applicationautoscaling/__init__.py +8 -8
  9. aws_cdk/aws_appsync/__init__.py +10 -7
  10. aws_cdk/aws_backup/__init__.py +89 -0
  11. aws_cdk/aws_batch/__init__.py +89 -50
  12. aws_cdk/aws_bedrock/__init__.py +506 -62
  13. aws_cdk/aws_cloudfront/__init__.py +1037 -146
  14. aws_cdk/aws_cloudfront_origins/__init__.py +1338 -144
  15. aws_cdk/aws_cloudtrail/__init__.py +4 -8
  16. aws_cdk/aws_cloudwatch/__init__.py +1 -1
  17. aws_cdk/aws_codebuild/__init__.py +218 -2
  18. aws_cdk/aws_codepipeline/__init__.py +113 -28
  19. aws_cdk/aws_codepipeline_actions/__init__.py +554 -63
  20. aws_cdk/aws_codestar/__init__.py +2 -1
  21. aws_cdk/aws_cognito/__init__.py +676 -29
  22. aws_cdk/aws_connect/__init__.py +257 -0
  23. aws_cdk/aws_datasync/__init__.py +279 -50
  24. aws_cdk/aws_deadline/__init__.py +683 -6
  25. aws_cdk/aws_directoryservice/__init__.py +9 -4
  26. aws_cdk/aws_dlm/__init__.py +2 -2
  27. aws_cdk/aws_dms/__init__.py +3 -3
  28. aws_cdk/aws_dynamodb/__init__.py +0 -54
  29. aws_cdk/aws_ec2/__init__.py +402 -130
  30. aws_cdk/aws_ecs/__init__.py +28 -43
  31. aws_cdk/aws_efs/__init__.py +1 -1
  32. aws_cdk/aws_eks/__init__.py +560 -182
  33. aws_cdk/aws_elasticloadbalancingv2/__init__.py +112 -27
  34. aws_cdk/aws_emrcontainers/__init__.py +44 -1
  35. aws_cdk/aws_events/__init__.py +17 -26
  36. aws_cdk/aws_events_targets/__init__.py +303 -16
  37. aws_cdk/aws_fms/__init__.py +5 -5
  38. aws_cdk/aws_fsx/__init__.py +5 -4
  39. aws_cdk/aws_glue/__init__.py +161 -0
  40. aws_cdk/aws_groundstation/__init__.py +23 -1
  41. aws_cdk/aws_iam/__init__.py +15 -15
  42. aws_cdk/aws_iot/__init__.py +7 -0
  43. aws_cdk/aws_ivs/__init__.py +254 -77
  44. aws_cdk/aws_kinesis/__init__.py +689 -35
  45. aws_cdk/aws_lambda/__init__.py +10 -15
  46. aws_cdk/aws_lambda_event_sources/__init__.py +175 -2
  47. aws_cdk/aws_logs/__init__.py +62 -13
  48. aws_cdk/aws_medialive/__init__.py +314 -4
  49. aws_cdk/aws_opensearchserverless/__init__.py +19 -0
  50. aws_cdk/aws_pinpoint/__init__.py +14 -9
  51. aws_cdk/aws_rds/__init__.py +246 -82
  52. aws_cdk/aws_s3/__init__.py +287 -9
  53. aws_cdk/aws_s3objectlambda/__init__.py +2 -2
  54. aws_cdk/aws_ses/__init__.py +228 -8
  55. aws_cdk/aws_ssm/__init__.py +4 -5
  56. aws_cdk/aws_stepfunctions/__init__.py +301 -70
  57. aws_cdk/aws_stepfunctions_tasks/__init__.py +269 -163
  58. aws_cdk/aws_supportapp/__init__.py +7 -7
  59. aws_cdk/aws_transfer/__init__.py +820 -2
  60. aws_cdk/aws_wafv2/__init__.py +17 -9
  61. aws_cdk/custom_resources/__init__.py +23 -26
  62. aws_cdk/cx_api/__init__.py +16 -0
  63. aws_cdk/pipelines/__init__.py +2 -2
  64. {aws_cdk_lib-2.178.2.dist-info → aws_cdk_lib-2.180.0.dist-info}/METADATA +1 -2
  65. {aws_cdk_lib-2.178.2.dist-info → aws_cdk_lib-2.180.0.dist-info}/RECORD +69 -70
  66. aws_cdk/lambda_layer_kubectl/__init__.py +0 -107
  67. {aws_cdk_lib-2.178.2.dist-info → aws_cdk_lib-2.180.0.dist-info}/LICENSE +0 -0
  68. {aws_cdk_lib-2.178.2.dist-info → aws_cdk_lib-2.180.0.dist-info}/NOTICE +0 -0
  69. {aws_cdk_lib-2.178.2.dist-info → aws_cdk_lib-2.180.0.dist-info}/WHEEL +0 -0
  70. {aws_cdk_lib-2.178.2.dist-info → aws_cdk_lib-2.180.0.dist-info}/top_level.txt +0 -0
aws_cdk/aws_lambda/__init__.py

@@ -23047,25 +23047,20 @@ class StartingPosition(enum.Enum):
 
     Example::
 
-        from aws_cdk.aws_lambda_event_sources import ManagedKafkaEventSource
+        import aws_cdk.aws_kinesis as kinesis
+        from aws_cdk.aws_lambda_event_sources import KinesisConsumerEventSource
 
         # my_function: lambda.Function
 
 
-        # Your MSK cluster arn
-        cluster_arn = "arn:aws:kafka:us-east-1:0123456789019:cluster/SalesCluster/abcd1234-abcd-cafe-abab-9876543210ab-4"
-
-        # The Kafka topic you want to subscribe to
-        topic = "some-cool-topic"
-        my_function.add_event_source(ManagedKafkaEventSource(
-            cluster_arn=cluster_arn,
-            topic=topic,
-            starting_position=lambda_.StartingPosition.TRIM_HORIZON,
-            filters=[
-                lambda_.FilterCriteria.filter({
-                    "string_equals": lambda_.FilterRule.is_equal("test")
-                })
-            ]
+        stream = kinesis.Stream(self, "MyStream")
+        stream_consumer = kinesis.StreamConsumer(self, "MyStreamConsumer",
+            stream=stream,
+            stream_consumer_name="MyStreamConsumer"
+        )
+        my_function.add_event_source(KinesisConsumerEventSource(stream_consumer,
+            batch_size=100, # default
+            starting_position=lambda_.StartingPosition.TRIM_HORIZON
         ))
     '''
 
aws_cdk/aws_lambda_event_sources/__init__.py

@@ -244,7 +244,7 @@ behavior:
 * **startingPosition**: Will determine where to begin consumption. 'LATEST' will start at the most recent record and ignore all records that arrived prior to attaching the event source, 'TRIM_HORIZON' will start at the oldest record and ensure you process all available data, while 'AT_TIMESTAMP' will start reading records from a specified time stamp. Note that 'AT_TIMESTAMP' is only supported for Amazon Kinesis streams.
 * **startingPositionTimestamp**: The time stamp from which to start reading. Used in conjunction with **startingPosition** when set to 'AT_TIMESTAMP'.
 * **tumblingWindow**: The duration in seconds of a processing window when using streams.
-* **enabled**: If the DynamoDB Streams event source mapping should be enabled. The default is true.
+* **enabled**: If the event source mapping should be enabled. The default is true.
 
 ```python
 import aws_cdk.aws_kinesis as kinesis
@@ -260,6 +260,26 @@ my_function.add_event_source(KinesisEventSource(stream,
 ))
 ```
 
+To use a dedicated-throughput consumer with enhanced fan-out
+
+```python
+import aws_cdk.aws_kinesis as kinesis
+from aws_cdk.aws_lambda_event_sources import KinesisConsumerEventSource
+
+# my_function: lambda.Function
+
+
+stream = kinesis.Stream(self, "MyStream")
+stream_consumer = kinesis.StreamConsumer(self, "MyStreamConsumer",
+    stream=stream,
+    stream_consumer_name="MyStreamConsumer"
+)
+my_function.add_event_source(KinesisConsumerEventSource(stream_consumer,
+    batch_size=100, # default
+    starting_position=lambda_.StartingPosition.TRIM_HORIZON
+))
+```
+
 ## Kafka
 
 You can write Lambda functions to process data either from [Amazon MSK](https://docs.aws.amazon.com/lambda/latest/dg/with-msk.html) or a [self managed Kafka](https://docs.aws.amazon.com/lambda/latest/dg/kafka-smaa.html) cluster.
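A note on the Kinesis hunks above: the README bullets mention 'AT_TIMESTAMP', but neither shipped example exercises it. A minimal hedged sketch under the same stream and function as those examples — the timestamp value is illustrative, and per the parameter docs later in this diff, `starting_position_timestamp` takes Unix time in seconds:

```python
import aws_cdk.aws_kinesis as kinesis
import aws_cdk.aws_lambda as lambda_
from aws_cdk.aws_lambda_event_sources import KinesisEventSource

# my_function: lambda.Function

stream = kinesis.Stream(self, "MyStream")
my_function.add_event_source(KinesisEventSource(stream,
    # AT_TIMESTAMP is only supported for Amazon Kinesis streams
    starting_position=lambda_.StartingPosition.AT_TIMESTAMP,
    # Unix time in seconds; 2024-01-01T00:00:00Z, chosen for illustration
    starting_position_timestamp=1704067200
))
```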
@@ -486,7 +506,9 @@ from ..aws_ec2 import (
     IVpc as _IVpc_f30d5663,
     SubnetSelection as _SubnetSelection_e57d76df,
 )
-from ..aws_kinesis import IStream as _IStream_4e2457d2
+from ..aws_kinesis import (
+    IStream as _IStream_4e2457d2, IStreamConsumer as _IStreamConsumer_019d062e
+)
 from ..aws_kms import IKey as _IKey_5f11635f
 from ..aws_lambda import (
     DlqDestinationConfig as _DlqDestinationConfig_5fe54cfa,
@@ -3493,6 +3515,127 @@ class DynamoEventSourceProps(StreamEventSourceProps):
 )
 
 
+class KinesisConsumerEventSource(
+    StreamEventSource,
+    metaclass=jsii.JSIIMeta,
+    jsii_type="aws-cdk-lib.aws_lambda_event_sources.KinesisConsumerEventSource",
+):
+    '''Use an Amazon Kinesis stream consumer as an event source for AWS Lambda.
+
+    :exampleMetadata: infused
+
+    Example::
+
+        import aws_cdk.aws_kinesis as kinesis
+        from aws_cdk.aws_lambda_event_sources import KinesisConsumerEventSource
+
+        # my_function: lambda.Function
+
+
+        stream = kinesis.Stream(self, "MyStream")
+        stream_consumer = kinesis.StreamConsumer(self, "MyStreamConsumer",
+            stream=stream,
+            stream_consumer_name="MyStreamConsumer"
+        )
+        my_function.add_event_source(KinesisConsumerEventSource(stream_consumer,
+            batch_size=100, # default
+            starting_position=lambda_.StartingPosition.TRIM_HORIZON
+        ))
+    '''
+
+    def __init__(
+        self,
+        stream_consumer: _IStreamConsumer_019d062e,
+        *,
+        starting_position_timestamp: typing.Optional[jsii.Number] = None,
+        bisect_batch_on_error: typing.Optional[builtins.bool] = None,
+        filter_encryption: typing.Optional[_IKey_5f11635f] = None,
+        filters: typing.Optional[typing.Sequence[typing.Mapping[builtins.str, typing.Any]]] = None,
+        max_record_age: typing.Optional[_Duration_4839e8c3] = None,
+        metrics_config: typing.Optional[typing.Union[_MetricsConfig_48ab59c4, typing.Dict[builtins.str, typing.Any]]] = None,
+        on_failure: typing.Optional[_IEventSourceDlq_5e2c6ad9] = None,
+        parallelization_factor: typing.Optional[jsii.Number] = None,
+        report_batch_item_failures: typing.Optional[builtins.bool] = None,
+        retry_attempts: typing.Optional[jsii.Number] = None,
+        tumbling_window: typing.Optional[_Duration_4839e8c3] = None,
+        starting_position: _StartingPosition_c0a4852c,
+        batch_size: typing.Optional[jsii.Number] = None,
+        enabled: typing.Optional[builtins.bool] = None,
+        max_batching_window: typing.Optional[_Duration_4839e8c3] = None,
+        provisioned_poller_config: typing.Optional[typing.Union[ProvisionedPollerConfig, typing.Dict[builtins.str, typing.Any]]] = None,
+    ) -> None:
+        '''
+        :param stream_consumer: -
+        :param starting_position_timestamp: The time from which to start reading, in Unix time seconds. Default: - no timestamp
+        :param bisect_batch_on_error: If the function returns an error, split the batch in two and retry. Default: false
+        :param filter_encryption: Add Customer managed KMS key to encrypt Filter Criteria. Default: - none
+        :param filters: Add filter criteria option. Default: - None
+        :param max_record_age: The maximum age of a record that Lambda sends to a function for processing. Valid Range: - Minimum value of 60 seconds - Maximum value of 7 days The default value is -1, which sets the maximum age to infinite. When the value is set to infinite, Lambda never discards old records. Record are valid until it expires in the event source. Default: -1
+        :param metrics_config: Configuration for enhanced monitoring metrics collection When specified, enables collection of additional metrics for the stream event source. Default: - Enhanced monitoring is disabled
+        :param on_failure: An Amazon SQS queue or Amazon SNS topic destination for discarded records. Default: - discarded records are ignored
+        :param parallelization_factor: The number of batches to process from each shard concurrently. Valid Range: - Minimum value of 1 - Maximum value of 10 Default: 1
+        :param report_batch_item_failures: Allow functions to return partially successful responses for a batch of records. Default: false
+        :param retry_attempts: Maximum number of retry attempts Valid Range: * Minimum value of 0 * Maximum value of 10000. The default value is -1, which sets the maximum number of retries to infinite. When MaximumRetryAttempts is infinite, Lambda retries failed records until the record expires in the event source. Default: -1
+        :param tumbling_window: The size of the tumbling windows to group records sent to DynamoDB or Kinesis Valid Range: 0 - 15 minutes. Default: - None
+        :param starting_position: Where to begin consuming the stream.
+        :param batch_size: The largest number of records that AWS Lambda will retrieve from your event source at the time of invoking your function. Your function receives an event with all the retrieved records. Valid Range: - Minimum value of 1 - Maximum value of: - 1000 for ``DynamoEventSource`` - 10000 for ``KinesisEventSource``, ``ManagedKafkaEventSource`` and ``SelfManagedKafkaEventSource`` Default: 100
+        :param enabled: If the stream event source mapping should be enabled. Default: true
+        :param max_batching_window: The maximum amount of time to gather records before invoking the function. Maximum of Duration.minutes(5). Default: - Duration.seconds(0) for Kinesis, DynamoDB, and SQS event sources, Duration.millis(500) for MSK, self-managed Kafka, and Amazon MQ.
+        :param provisioned_poller_config: Configuration for provisioned pollers that read from the event source. When specified, allows control over the minimum and maximum number of pollers that can be provisioned to process events from the source. Default: - no provisioned pollers
+        '''
+        if __debug__:
+            type_hints = typing.get_type_hints(_typecheckingstub__5d4436c01738e680b66bbfda741235beea5eb51d1e287ec05ae0c58abfa41f1a)
+            check_type(argname="argument stream_consumer", value=stream_consumer, expected_type=type_hints["stream_consumer"])
+        props = KinesisEventSourceProps(
+            starting_position_timestamp=starting_position_timestamp,
+            bisect_batch_on_error=bisect_batch_on_error,
+            filter_encryption=filter_encryption,
+            filters=filters,
+            max_record_age=max_record_age,
+            metrics_config=metrics_config,
+            on_failure=on_failure,
+            parallelization_factor=parallelization_factor,
+            report_batch_item_failures=report_batch_item_failures,
+            retry_attempts=retry_attempts,
+            tumbling_window=tumbling_window,
+            starting_position=starting_position,
+            batch_size=batch_size,
+            enabled=enabled,
+            max_batching_window=max_batching_window,
+            provisioned_poller_config=provisioned_poller_config,
+        )
+
+        jsii.create(self.__class__, self, [stream_consumer, props])
+
+    @jsii.member(jsii_name="bind")
+    def bind(self, target: _IFunction_6adb0ab8) -> None:
+        '''Called by ``lambda.addEventSource`` to allow the event source to bind to this function.
+
+        :param target: -
+        '''
+        if __debug__:
+            type_hints = typing.get_type_hints(_typecheckingstub__7cd6beafb45fd5092320f505d4c975a198986b37261f8d9700231eee9d820412)
+            check_type(argname="argument target", value=target, expected_type=type_hints["target"])
+        return typing.cast(None, jsii.invoke(self, "bind", [target]))
+
+    @builtins.property
+    @jsii.member(jsii_name="eventSourceMappingArn")
+    def event_source_mapping_arn(self) -> builtins.str:
+        '''The ARN for this EventSourceMapping.'''
+        return typing.cast(builtins.str, jsii.get(self, "eventSourceMappingArn"))
+
+    @builtins.property
+    @jsii.member(jsii_name="eventSourceMappingId")
+    def event_source_mapping_id(self) -> builtins.str:
+        '''The identifier for this EventSourceMapping.'''
+        return typing.cast(builtins.str, jsii.get(self, "eventSourceMappingId"))
+
+    @builtins.property
+    @jsii.member(jsii_name="streamConsumer")
+    def stream_consumer(self) -> _IStreamConsumer_019d062e:
+        return typing.cast(_IStreamConsumer_019d062e, jsii.get(self, "streamConsumer"))
+
+
 class KinesisEventSource(
     StreamEventSource,
     metaclass=jsii.JSIIMeta,
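The class above exposes `event_source_mapping_arn` and `event_source_mapping_id` once `bind()` has run. A minimal sketch of how a stack might consume them, assuming the `stream_consumer` and `my_function` from the infused example (the output name is illustrative):

```python
from aws_cdk import CfnOutput
import aws_cdk.aws_lambda as lambda_
from aws_cdk.aws_lambda_event_sources import KinesisConsumerEventSource

# stream_consumer: kinesis.StreamConsumer
# my_function: lambda.Function

source = KinesisConsumerEventSource(stream_consumer,
    starting_position=lambda_.StartingPosition.LATEST
)
my_function.add_event_source(source)  # invokes source.bind(my_function)

# After binding, the mapping identifiers resolve as deploy-time tokens
CfnOutput(self, "ConsumerMappingArn", value=source.event_source_mapping_arn)
```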
@@ -4164,6 +4307,7 @@ __all__ = [
     "DynamoEventSource",
     "DynamoEventSourceProps",
     "KafkaEventSourceProps",
+    "KinesisConsumerEventSource",
     "KinesisEventSource",
     "KinesisEventSourceProps",
     "ManagedKafkaEventSource",
@@ -4501,6 +4645,35 @@ def _typecheckingstub__ec371d5e4612e8923bbdcc024d90e26915d64be2dc40151f22fc41139
     """Type checking stubs"""
     pass
 
+def _typecheckingstub__5d4436c01738e680b66bbfda741235beea5eb51d1e287ec05ae0c58abfa41f1a(
+    stream_consumer: _IStreamConsumer_019d062e,
+    *,
+    starting_position_timestamp: typing.Optional[jsii.Number] = None,
+    bisect_batch_on_error: typing.Optional[builtins.bool] = None,
+    filter_encryption: typing.Optional[_IKey_5f11635f] = None,
+    filters: typing.Optional[typing.Sequence[typing.Mapping[builtins.str, typing.Any]]] = None,
+    max_record_age: typing.Optional[_Duration_4839e8c3] = None,
+    metrics_config: typing.Optional[typing.Union[_MetricsConfig_48ab59c4, typing.Dict[builtins.str, typing.Any]]] = None,
+    on_failure: typing.Optional[_IEventSourceDlq_5e2c6ad9] = None,
+    parallelization_factor: typing.Optional[jsii.Number] = None,
+    report_batch_item_failures: typing.Optional[builtins.bool] = None,
+    retry_attempts: typing.Optional[jsii.Number] = None,
+    tumbling_window: typing.Optional[_Duration_4839e8c3] = None,
+    starting_position: _StartingPosition_c0a4852c,
+    batch_size: typing.Optional[jsii.Number] = None,
+    enabled: typing.Optional[builtins.bool] = None,
+    max_batching_window: typing.Optional[_Duration_4839e8c3] = None,
+    provisioned_poller_config: typing.Optional[typing.Union[ProvisionedPollerConfig, typing.Dict[builtins.str, typing.Any]]] = None,
+) -> None:
+    """Type checking stubs"""
+    pass
+
+def _typecheckingstub__7cd6beafb45fd5092320f505d4c975a198986b37261f8d9700231eee9d820412(
+    target: _IFunction_6adb0ab8,
+) -> None:
+    """Type checking stubs"""
+    pass
+
 def _typecheckingstub__9f81acc98c12b4967363bdd43130a7e674a566679a7b200f5ccd6a0ae313ad2e(
     stream: _IStream_4e2457d2,
     *,
aws_cdk/aws_logs/__init__.py

@@ -176,7 +176,9 @@ MetricFilter(self, "MetricFilter",
     log_group=log_group,
     metric_namespace="MyApp",
     metric_name="Latency",
-    filter_pattern=FilterPattern.exists("$.latency"),
+    filter_pattern=FilterPattern.all(
+        FilterPattern.exists("$.latency"),
+        FilterPattern.regex_value("$.message", "=", "bind: address already in use")),
     metric_value="$.latency"
 )
 ```
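The updated README example emits the `$.latency` value only when both patterns match. As a hedged follow-on (alarm name, statistic, and threshold are illustrative), the filter can feed a CloudWatch alarm through `MetricFilter.metric()`:

```python
import aws_cdk.aws_cloudwatch as cloudwatch

# metric_filter: logs.MetricFilter  (the construct from the example above)

# metric() returns a cloudwatch.Metric for the extracted value
latency = metric_filter.metric(statistic="p90")
cloudwatch.Alarm(self, "LatencyAlarm",
    metric=latency,
    threshold=1000,        # illustrative
    evaluation_periods=3
)
```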
@@ -315,6 +317,8 @@ and then descending into it, such as `$.field` or `$.list[0].field`.
 
 * `FilterPattern.stringValue(field, comparison, string)`: matches if the given
   field compares as indicated with the given string value.
+* `FilterPattern.regexValue(field, comparison, string)`: matches if the given
+  field compares as indicated with the given regex pattern.
 * `FilterPattern.numberValue(field, comparison, number)`: matches if the given
   field compares as indicated with the given numerical value.
 * `FilterPattern.isNull(field)`: matches if the given field exists and has the
@@ -342,7 +346,8 @@ pattern = logs.FilterPattern.all(
     logs.FilterPattern.string_value("$.component", "=", "HttpServer"),
     logs.FilterPattern.any(
         logs.FilterPattern.boolean_value("$.error", True),
-        logs.FilterPattern.number_value("$.latency", ">", 1000)))
+        logs.FilterPattern.number_value("$.latency", ">", 1000)),
+    logs.FilterPattern.regex_value("$.message", "=", "bind address already in use"))
 ```
 
 ## Space-delimited table patterns
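The `regexValue` docstring added later in this diff limits the comparison operator to `=` or `!=`. A standalone hedged sketch (field name and pattern are illustrative) showing the negated form, which the shipped examples do not cover:

```python
import aws_cdk.aws_logs as logs

# Per the regexValue docs in this diff, only "=" and "!=" are valid comparisons.
# Match JSON events whose eventType field does NOT match the regex:
not_deletes = logs.FilterPattern.regex_value("$.eventType", "!=", "Delete.*")

# Regex patterns compose with the other JSON patterns:
pattern = logs.FilterPattern.all(
    logs.FilterPattern.exists("$.eventType"),
    not_deletes
)
```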
@@ -9754,6 +9759,37 @@ class FilterPattern(
             check_type(argname="argument value", value=value, expected_type=type_hints["value"])
         return typing.cast("JsonPattern", jsii.sinvoke(cls, "numberValue", [json_field, comparison, value]))
 
+    @jsii.member(jsii_name="regexValue")
+    @builtins.classmethod
+    def regex_value(
+        cls,
+        json_field: builtins.str,
+        comparison: builtins.str,
+        value: builtins.str,
+    ) -> "JsonPattern":
+        '''A JSON log pattern that compares against a Regex values.
+
+        This pattern only matches if the event is a JSON event, and the indicated field inside
+        compares with the regex value.
+
+        Use '$' to indicate the root of the JSON structure. The comparison operator can only
+        compare equality or inequality.
+
+        For more information, see:
+
+        https://docs.aws.amazon.com/AmazonCloudWatch/latest/logs/FilterAndPatternSyntax.html
+
+        :param json_field: Field inside JSON. Example: "$.myField"
+        :param comparison: Comparison to carry out. Either = or !=.
+        :param value: The regex value to compare to.
+        '''
+        if __debug__:
+            type_hints = typing.get_type_hints(_typecheckingstub__2a7a4c0a4a33b651a6a0a1fe2ce451f20e4fa36e60b8a3b9bd496de7c0f04c0f)
+            check_type(argname="argument json_field", value=json_field, expected_type=type_hints["json_field"])
+            check_type(argname="argument comparison", value=comparison, expected_type=type_hints["comparison"])
+            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
+        return typing.cast("JsonPattern", jsii.sinvoke(cls, "regexValue", [json_field, comparison, value]))
+
     @jsii.member(jsii_name="spaceDelimited")
     @builtins.classmethod
     def space_delimited(cls, *columns: builtins.str) -> "SpaceDelimitedTextPattern":
@@ -10494,18 +10530,19 @@ class JsonPattern(
 ):
     '''Base class for patterns that only match JSON log events.
 
-    :exampleMetadata: infused
+    :exampleMetadata: lit=aws-logs/test/integ.metricfilter.lit.ts infused
 
     Example::
 
-        # Search for all events where the component field is equal to
-        # "HttpServer" and either error is true or the latency is higher
-        # than 1000.
-        pattern = logs.FilterPattern.all(
-            logs.FilterPattern.string_value("$.component", "=", "HttpServer"),
-            logs.FilterPattern.any(
-                logs.FilterPattern.boolean_value("$.error", True),
-                logs.FilterPattern.number_value("$.latency", ">", 1000)))
+        MetricFilter(self, "MetricFilter",
+            log_group=log_group,
+            metric_namespace="MyApp",
+            metric_name="Latency",
+            filter_pattern=FilterPattern.all(
+                FilterPattern.exists("$.latency"),
+                FilterPattern.regex_value("$.message", "=", "bind: address already in use")),
+            metric_value="$.latency"
+        )
     '''
 
     def __init__(self, json_pattern_string: builtins.str) -> None:
@@ -11780,7 +11817,9 @@ class MetricFilter(
         log_group=log_group,
         metric_namespace="MyApp",
         metric_name="Latency",
-        filter_pattern=FilterPattern.exists("$.latency"),
+        filter_pattern=FilterPattern.all(
+            FilterPattern.exists("$.latency"),
+            FilterPattern.regex_value("$.message", "=", "bind: address already in use")),
         metric_value="$.latency"
     )
     '''
@@ -12110,7 +12149,9 @@ class MetricFilterProps(MetricFilterOptions):
         log_group=log_group,
         metric_namespace="MyApp",
         metric_name="Latency",
-        filter_pattern=FilterPattern.exists("$.latency"),
+        filter_pattern=FilterPattern.all(
+            FilterPattern.exists("$.latency"),
+            FilterPattern.regex_value("$.message", "=", "bind: address already in use")),
         metric_value="$.latency"
     )
     '''
@@ -14891,6 +14932,14 @@ def _typecheckingstub__b1cb7dce1caa0866199f67de4ab23972e5d6dc3cd90ca77ce9a5f09f7
     """Type checking stubs"""
     pass
 
+def _typecheckingstub__2a7a4c0a4a33b651a6a0a1fe2ce451f20e4fa36e60b8a3b9bd496de7c0f04c0f(
+    json_field: builtins.str,
+    comparison: builtins.str,
+    value: builtins.str,
+) -> None:
+    """Type checking stubs"""
+    pass
+
 def _typecheckingstub__3f5f56f60ccfd9dae1e3e3f54e54d87c6fb3e287c5bd2ad7924a4578ee4f8121(
     *columns: builtins.str,
 ) -> None: