robhan-cdk-lib.aws-mwaa 0.0.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,3282 @@
1
+ r'''
2
+ # @robhan-cdk-lib/aws_mwaa
3
+
4
+ AWS Cloud Development Kit (CDK) constructs for Amazon Managed Workflows for Apache Airflow (MWAA).
5
+
6
+ In [aws-cdk-lib.aws_mwaa](https://docs.aws.amazon.com/cdk/api/v2/docs/aws-cdk-lib.aws_mwaa-readme.html), there currently only exist L1 constructs for Amazon Managed Workflows for Apache Airflow (MWAA).
7
+
8
+ While helpful, they miss convenience like:
9
+
10
+ * advanced parameter checking (min/max number values, string lengths, array lengths...) before CloudFormation deployment
11
+ * proper parameter typing, e.g. enum values instead of strings
12
+ * simply referencing other constructs instead of e.g. ARN strings
13
+
14
+ Those features are implemented here.
15
+
16
+ The CDK maintainers explain that [publishing your own package](https://github.com/aws/aws-cdk/blob/main/CONTRIBUTING.md#publishing-your-own-package) is "by far the strongest signal you can give to the CDK team that a feature should be included within the core aws-cdk packages".
17
+
18
+ This project aims to develop aws_mwaa constructs to a maturity that can potentially be accepted to the CDK core.
19
+
20
+ It is not supported by AWS and is not endorsed by them. Please file issues in the [GitHub repository](https://github.com/robert-hanuschke/cdk-aws_mwaa/issues) if you find any.
21
+
22
+ ## Example use
23
+
24
+ ```typescript
25
+ import * as cdk from "aws-cdk-lib";
26
+ import { Construct } from "constructs";
27
+ import {
28
+ AirflowVersion,
29
+ Environment,
30
+ EnvironmentClass,
31
+ } from "@robhan-cdk-lib/aws_mwaa";
32
+
33
+ export class AwsMwaaCdkStack extends cdk.Stack {
34
+ constructor(scope: Construct, id: string, props?: cdk.StackProps) {
35
+ super(scope, id, props);
36
+
37
+ const environment = new Environment(this, "Environment", {
38
+ airflowConfigurationOptions: {
39
+ key: "value",
40
+ },
41
+ name: "myEnvironment",
42
+ airflowVersion: AirflowVersion.V3_0_6,
43
+ environmentClass: EnvironmentClass.MW1_MEDIUM,
44
+ minWebservers: 2,
45
+ maxWebservers: 4,
46
+ minWorkers: 2,
47
+ maxWorkers: 4,
48
+ });
49
+ }
50
+ }
51
+ ```
52
+
53
+ ## License
54
+
55
+ MIT
56
+ '''
57
+ from pkgutil import extend_path
58
+ __path__ = extend_path(__path__, __name__)
59
+
60
+ import abc
61
+ import builtins
62
+ import datetime
63
+ import enum
64
+ import typing
65
+
66
+ import jsii
67
+ import publication
68
+ import typing_extensions
69
+
70
+ import typeguard
71
+ from importlib.metadata import version as _metadata_package_version
72
+ TYPEGUARD_MAJOR_VERSION = int(_metadata_package_version('typeguard').split('.')[0])
73
+
74
def check_type(argname: str, value: object, expected_type: typing.Any) -> typing.Any:
    '''Validate *value* against *expected_type*, papering over typeguard API drift.

    The keyword signature of ``typeguard.check_type`` changed between major
    versions, so this shim dispatches on ``TYPEGUARD_MAJOR_VERSION``:

    * typeguard <= 2 accepts ``argname`` and returns the checked value;
    * typeguard 3 configures the collection-check strategy via the global
      ``typeguard.config``;
    * typeguard >= 4 takes ``collection_check_strategy`` as a keyword argument.

    jsii dynamic interface proxies are skipped entirely — they cannot be
    structurally checked. In all branches other than the typeguard 2 one the
    function implicitly returns ``None``; a failed check raises typeguard's
    ``TypeCheckError``.
    '''
    if TYPEGUARD_MAJOR_VERSION <= 2:
        # Legacy API: argname keyword is supported and the value is returned.
        return typeguard.check_type(argname=argname, value=value, expected_type=expected_type)  # type:ignore
    if isinstance(value, jsii._reference_map.InterfaceDynamicProxy):  # pyright: ignore [reportAttributeAccessIssue]
        # Dynamic proxies satisfy interfaces only at the jsii layer; skip checking.
        return None
    if TYPEGUARD_MAJOR_VERSION == 3:
        # typeguard 3.x: strategy is a process-global configuration setting.
        typeguard.config.collection_check_strategy = typeguard.CollectionCheckStrategy.ALL_ITEMS  # type:ignore
        typeguard.check_type(value=value, expected_type=expected_type)  # type:ignore
    else:
        # typeguard >= 4: strategy is passed per call.
        typeguard.check_type(value=value, expected_type=expected_type, collection_check_strategy=typeguard.CollectionCheckStrategy.ALL_ITEMS)  # type:ignore
86
+
87
+ from ._jsii import *
88
+
89
+ import aws_cdk as _aws_cdk_ceddda9d
90
+ import aws_cdk.aws_ec2 as _aws_cdk_aws_ec2_ceddda9d
91
+ import aws_cdk.aws_iam as _aws_cdk_aws_iam_ceddda9d
92
+ import aws_cdk.aws_kms as _aws_cdk_aws_kms_ceddda9d
93
+ import aws_cdk.aws_logs as _aws_cdk_aws_logs_ceddda9d
94
+ import aws_cdk.aws_s3 as _aws_cdk_aws_s3_ceddda9d
95
+ import constructs as _constructs_77d1e7e8
96
+
97
+
98
@jsii.enum(jsii_type="@robhan-cdk-lib/aws_mwaa.AirflowVersion")
class AirflowVersion(enum.Enum):
    '''Apache Airflow versions selectable for an Amazon MWAA environment.

    Each member's string value doubles as its jsii wire name, so names and
    values must stay in sync with the TypeScript source of this package.
    '''

    V2_7_2 = "V2_7_2"  # Apache Airflow 2.7.2
    V2_8_1 = "V2_8_1"  # Apache Airflow 2.8.1
    V2_9_2 = "V2_9_2"  # Apache Airflow 2.9.2
    V2_10_1 = "V2_10_1"  # Apache Airflow 2.10.1
    V2_10_3 = "V2_10_3"  # Apache Airflow 2.10.3
    V3_0_6 = "V3_0_6"  # Apache Airflow 3.0.6
106
+
107
+
108
@jsii.enum(jsii_type="@robhan-cdk-lib/aws_mwaa.EndpointManagement")
class EndpointManagement(enum.Enum):
    '''Who creates and manages the VPC endpoints of an MWAA environment.

    Member string values double as jsii wire names and must not change.
    '''

    # The customer must create, and manage, the required VPC endpoints in their VPC.
    CUSTOMER = "CUSTOMER"
    # Amazon MWAA creates and manages the required VPC endpoints in the VPC.
    SERVICE = "SERVICE"
112
+
113
+
114
+ @jsii.data_type(
115
+ jsii_type="@robhan-cdk-lib/aws_mwaa.EnvironmentAttributes",
116
+ jsii_struct_bases=[],
117
+ name_mapping={
118
+ "airflow_configuration_options": "airflowConfigurationOptions",
119
+ "environment_arn": "environmentArn",
120
+ "name": "name",
121
+ "airflow_version": "airflowVersion",
122
+ "celery_executor_queue": "celeryExecutorQueue",
123
+ "dag_s3_path": "dagS3Path",
124
+ "database_vpc_endpoint_service": "databaseVpcEndpointService",
125
+ "endpoint_management": "endpointManagement",
126
+ "environment_class": "environmentClass",
127
+ "execution_role": "executionRole",
128
+ "kms_key": "kmsKey",
129
+ "logging_configuration": "loggingConfiguration",
130
+ "logging_configuration_dag_processing_logs_cloud_watch_log_group_arn": "loggingConfigurationDagProcessingLogsCloudWatchLogGroupArn",
131
+ "logging_configuration_scheduler_logs_cloud_watch_log_group_arn": "loggingConfigurationSchedulerLogsCloudWatchLogGroupArn",
132
+ "logging_configuration_task_logs_cloud_watch_log_group_arn": "loggingConfigurationTaskLogsCloudWatchLogGroupArn",
133
+ "logging_configuration_webserver_logs_cloud_watch_log_group_arn": "loggingConfigurationWebserverLogsCloudWatchLogGroupArn",
134
+ "logging_configuration_worker_logs_cloud_watch_log_group_arn": "loggingConfigurationWorkerLogsCloudWatchLogGroupArn",
135
+ "max_webservers": "maxWebservers",
136
+ "max_workers": "maxWorkers",
137
+ "min_webservers": "minWebservers",
138
+ "min_workers": "minWorkers",
139
+ "network_configuration": "networkConfiguration",
140
+ "plugins_s3_object_version": "pluginsS3ObjectVersion",
141
+ "plugins_s3_path": "pluginsS3Path",
142
+ "requirements_s3_object_version": "requirementsS3ObjectVersion",
143
+ "requirements_s3_path": "requirementsS3Path",
144
+ "schedulers": "schedulers",
145
+ "source_bucket": "sourceBucket",
146
+ "startup_script_s3_object_version": "startupScriptS3ObjectVersion",
147
+ "startup_script_s3_path": "startupScriptS3Path",
148
+ "webserver_access_mode": "webserverAccessMode",
149
+ "webserver_url": "webserverUrl",
150
+ "webserver_vpc_endpoint_service": "webserverVpcEndpointService",
151
+ "weekly_maintenance_window_start": "weeklyMaintenanceWindowStart",
152
+ },
153
+ )
154
+ class EnvironmentAttributes:
155
+ def __init__(
156
+ self,
157
+ *,
158
+ airflow_configuration_options: typing.Mapping[builtins.str, builtins.str],
159
+ environment_arn: builtins.str,
160
+ name: builtins.str,
161
+ airflow_version: typing.Optional[AirflowVersion] = None,
162
+ celery_executor_queue: typing.Optional[builtins.str] = None,
163
+ dag_s3_path: typing.Optional[builtins.str] = None,
164
+ database_vpc_endpoint_service: typing.Optional[builtins.str] = None,
165
+ endpoint_management: typing.Optional[EndpointManagement] = None,
166
+ environment_class: typing.Optional["EnvironmentClass"] = None,
167
+ execution_role: typing.Optional[_aws_cdk_aws_iam_ceddda9d.IRole] = None,
168
+ kms_key: typing.Optional[_aws_cdk_aws_kms_ceddda9d.IKey] = None,
169
+ logging_configuration: typing.Optional[typing.Union["LoggingConfiguration", typing.Dict[builtins.str, typing.Any]]] = None,
170
+ logging_configuration_dag_processing_logs_cloud_watch_log_group_arn: typing.Optional[builtins.str] = None,
171
+ logging_configuration_scheduler_logs_cloud_watch_log_group_arn: typing.Optional[builtins.str] = None,
172
+ logging_configuration_task_logs_cloud_watch_log_group_arn: typing.Optional[builtins.str] = None,
173
+ logging_configuration_webserver_logs_cloud_watch_log_group_arn: typing.Optional[builtins.str] = None,
174
+ logging_configuration_worker_logs_cloud_watch_log_group_arn: typing.Optional[builtins.str] = None,
175
+ max_webservers: typing.Optional[jsii.Number] = None,
176
+ max_workers: typing.Optional[jsii.Number] = None,
177
+ min_webservers: typing.Optional[jsii.Number] = None,
178
+ min_workers: typing.Optional[jsii.Number] = None,
179
+ network_configuration: typing.Optional[typing.Union["NetworkConfiguration", typing.Dict[builtins.str, typing.Any]]] = None,
180
+ plugins_s3_object_version: typing.Optional[builtins.str] = None,
181
+ plugins_s3_path: typing.Optional[builtins.str] = None,
182
+ requirements_s3_object_version: typing.Optional[builtins.str] = None,
183
+ requirements_s3_path: typing.Optional[builtins.str] = None,
184
+ schedulers: typing.Optional[jsii.Number] = None,
185
+ source_bucket: typing.Optional[_aws_cdk_aws_s3_ceddda9d.IBucket] = None,
186
+ startup_script_s3_object_version: typing.Optional[builtins.str] = None,
187
+ startup_script_s3_path: typing.Optional[builtins.str] = None,
188
+ webserver_access_mode: typing.Optional["WebserverAccessMode"] = None,
189
+ webserver_url: typing.Optional[builtins.str] = None,
190
+ webserver_vpc_endpoint_service: typing.Optional[builtins.str] = None,
191
+ weekly_maintenance_window_start: typing.Optional[builtins.str] = None,
192
+ ) -> None:
193
+ '''Properties for importing an Amazon Managed Workflows for Apache Airflow Environment.
194
+
195
+ :param airflow_configuration_options: A list of key-value pairs containing the Airflow configuration options for your environment. For example, core.default_timezone: utc.
196
+ :param environment_arn: The ARN for the Amazon MWAA environment.
197
+ :param name: The name of your Amazon MWAA environment.
198
+ :param airflow_version: The version of Apache Airflow to use for the environment. If no value is specified, defaults to the latest version. If you specify a newer version number for an existing environment, the version update requires some service interruption before taking effect.
199
+ :param celery_executor_queue: The queue ARN for the environment's Celery Executor. Amazon MWAA uses a Celery Executor to distribute tasks across multiple workers. When you create an environment in a shared VPC, you must provide access to the Celery Executor queue from your VPC.
200
+ :param dag_s3_path: The relative path to the DAGs folder on your Amazon S3 bucket. For example, dags.
201
+ :param database_vpc_endpoint_service: The VPC endpoint for the environment's Amazon RDS database.
202
+ :param endpoint_management: Defines whether the VPC endpoints configured for the environment are created, and managed, by the customer or by Amazon MWAA. If set to SERVICE, Amazon MWAA will create and manage the required VPC endpoints in your VPC. If set to CUSTOMER, you must create, and manage, the VPC endpoints in your VPC.
203
+ :param environment_class: The environment class type.
204
+ :param execution_role: The execution role in IAM that allows MWAA to access AWS resources in your environment.
205
+ :param kms_key: The AWS Key Management Service (KMS) key to encrypt and decrypt the data in your environment. You can use an AWS KMS key managed by MWAA, or a customer-managed KMS key (advanced).
206
+ :param logging_configuration: The Apache Airflow logs being sent to CloudWatch Logs.
207
+ :param logging_configuration_dag_processing_logs_cloud_watch_log_group_arn: The ARN for the CloudWatch Logs group where the Apache Airflow DAG processing logs are published.
208
+ :param logging_configuration_scheduler_logs_cloud_watch_log_group_arn: The ARN for the CloudWatch Logs group where the Apache Airflow Scheduler logs are published.
209
+ :param logging_configuration_task_logs_cloud_watch_log_group_arn: The ARN for the CloudWatch Logs group where the Apache Airflow task logs are published.
210
+ :param logging_configuration_webserver_logs_cloud_watch_log_group_arn: The ARN for the CloudWatch Logs group where the Apache Airflow Web server logs are published.
211
+ :param logging_configuration_worker_logs_cloud_watch_log_group_arn: The ARN for the CloudWatch Logs group where the Apache Airflow Worker logs are published.
212
+ :param max_webservers: The maximum number of web servers that you want to run in your environment. Amazon MWAA scales the number of Apache Airflow web servers up to the number you specify for MaxWebservers when you interact with your Apache Airflow environment using Apache Airflow REST API, or the Apache Airflow CLI. For example, in scenarios where your workload requires network calls to the Apache Airflow REST API with a high transaction-per-second (TPS) rate, Amazon MWAA will increase the number of web servers up to the number set in MaxWebservers. As TPS rates decrease Amazon MWAA disposes of the additional web servers, and scales down to the number set in MinWebservers. Valid values: For environments larger than mw1.micro, accepts values from 2 to 5. Defaults to 2 for all environment sizes except mw1.micro, which defaults to 1.
213
+ :param max_workers: The maximum number of workers that you want to run in your environment. MWAA scales the number of Apache Airflow workers up to the number you specify in the MaxWorkers field. For example, 20. When there are no more tasks running, and no more in the queue, MWAA disposes of the extra workers leaving the one worker that is included with your environment, or the number you specify in MinWorkers.
214
+ :param min_webservers: The minimum number of web servers that you want to run in your environment. Amazon MWAA scales the number of Apache Airflow web servers up to the number you specify for MaxWebservers when you interact with your Apache Airflow environment using Apache Airflow REST API, or the Apache Airflow CLI. As the transaction-per-second rate, and the network load, decrease, Amazon MWAA disposes of the additional web servers, and scales down to the number set in MinWebservers. Valid values: For environments larger than mw1.micro, accepts values from 2 to 5. Defaults to 2 for all environment sizes except mw1.micro, which defaults to 1.
215
+ :param min_workers: The minimum number of workers that you want to run in your environment. MWAA scales the number of Apache Airflow workers up to the number you specify in the MaxWorkers field. When there are no more tasks running, and no more in the queue, MWAA disposes of the extra workers leaving the worker count you specify in the MinWorkers field. For example, 2.
216
+ :param network_configuration: The VPC networking components used to secure and enable network traffic between the AWS resources for your environment.
217
+ :param plugins_s3_object_version: The version of the plugins.zip file on your Amazon S3 bucket.
218
+ :param plugins_s3_path: The relative path to the plugins.zip file on your Amazon S3 bucket. For example, plugins.zip.
219
+ :param requirements_s3_object_version: The version of the requirements.txt file on your Amazon S3 bucket.
220
+ :param requirements_s3_path: The relative path to the requirements.txt file on your Amazon S3 bucket. For example, requirements.txt.
221
+ :param schedulers: The number of schedulers that you want to run in your environment. Valid values: v2 - For environments larger than mw1.micro, accepts values from 2 to 5. Defaults to 2 for all environment sizes except mw1.micro, which defaults to 1. v1 - Accepts 1.
222
+ :param source_bucket: The Amazon S3 bucket where your DAG code and supporting files are stored.
223
+ :param startup_script_s3_object_version: The version of the startup shell script in your Amazon S3 bucket. You must specify the version ID that Amazon S3 assigns to the file every time you update the script. Version IDs are Unicode, UTF-8 encoded, URL-ready, opaque strings that are no more than 1,024 bytes long. The following is an example: 3sL4kqtJlcpXroDTDmJ+rmSpXd3dIbrHY+MTRCxf3vjVBH40Nr8X8gdRQBpUMLUo
224
+ :param startup_script_s3_path: The relative path to the startup shell script in your Amazon S3 bucket. For example, s3://mwaa-environment/startup.sh. Amazon MWAA runs the script as your environment starts, and before running the Apache Airflow process. You can use this script to install dependencies, modify Apache Airflow configuration options, and set environment variables.
225
+ :param webserver_access_mode: The Apache Airflow Web server access mode.
226
+ :param webserver_url: The URL of your Apache Airflow UI.
227
+ :param webserver_vpc_endpoint_service: The VPC endpoint for the environment's web server.
228
+ :param weekly_maintenance_window_start: The day and time of the week to start weekly maintenance updates of your environment in the following format: DAY:HH:MM. For example: TUE:03:30. You can specify a start time in 30 minute increments only. Supported input includes the following: MON|TUE|WED|THU|FRI|SAT|SUN:([01]\\d|2[0-3]):(00|30)
229
+ '''
230
+ if isinstance(logging_configuration, dict):
231
+ logging_configuration = LoggingConfiguration(**logging_configuration)
232
+ if isinstance(network_configuration, dict):
233
+ network_configuration = NetworkConfiguration(**network_configuration)
234
+ if __debug__:
235
+ type_hints = typing.get_type_hints(_typecheckingstub__d75f091b184b8fb2d88550b01b5b1291a3af0d350440b3c1dadc6631ec062c57)
236
+ check_type(argname="argument airflow_configuration_options", value=airflow_configuration_options, expected_type=type_hints["airflow_configuration_options"])
237
+ check_type(argname="argument environment_arn", value=environment_arn, expected_type=type_hints["environment_arn"])
238
+ check_type(argname="argument name", value=name, expected_type=type_hints["name"])
239
+ check_type(argname="argument airflow_version", value=airflow_version, expected_type=type_hints["airflow_version"])
240
+ check_type(argname="argument celery_executor_queue", value=celery_executor_queue, expected_type=type_hints["celery_executor_queue"])
241
+ check_type(argname="argument dag_s3_path", value=dag_s3_path, expected_type=type_hints["dag_s3_path"])
242
+ check_type(argname="argument database_vpc_endpoint_service", value=database_vpc_endpoint_service, expected_type=type_hints["database_vpc_endpoint_service"])
243
+ check_type(argname="argument endpoint_management", value=endpoint_management, expected_type=type_hints["endpoint_management"])
244
+ check_type(argname="argument environment_class", value=environment_class, expected_type=type_hints["environment_class"])
245
+ check_type(argname="argument execution_role", value=execution_role, expected_type=type_hints["execution_role"])
246
+ check_type(argname="argument kms_key", value=kms_key, expected_type=type_hints["kms_key"])
247
+ check_type(argname="argument logging_configuration", value=logging_configuration, expected_type=type_hints["logging_configuration"])
248
+ check_type(argname="argument logging_configuration_dag_processing_logs_cloud_watch_log_group_arn", value=logging_configuration_dag_processing_logs_cloud_watch_log_group_arn, expected_type=type_hints["logging_configuration_dag_processing_logs_cloud_watch_log_group_arn"])
249
+ check_type(argname="argument logging_configuration_scheduler_logs_cloud_watch_log_group_arn", value=logging_configuration_scheduler_logs_cloud_watch_log_group_arn, expected_type=type_hints["logging_configuration_scheduler_logs_cloud_watch_log_group_arn"])
250
+ check_type(argname="argument logging_configuration_task_logs_cloud_watch_log_group_arn", value=logging_configuration_task_logs_cloud_watch_log_group_arn, expected_type=type_hints["logging_configuration_task_logs_cloud_watch_log_group_arn"])
251
+ check_type(argname="argument logging_configuration_webserver_logs_cloud_watch_log_group_arn", value=logging_configuration_webserver_logs_cloud_watch_log_group_arn, expected_type=type_hints["logging_configuration_webserver_logs_cloud_watch_log_group_arn"])
252
+ check_type(argname="argument logging_configuration_worker_logs_cloud_watch_log_group_arn", value=logging_configuration_worker_logs_cloud_watch_log_group_arn, expected_type=type_hints["logging_configuration_worker_logs_cloud_watch_log_group_arn"])
253
+ check_type(argname="argument max_webservers", value=max_webservers, expected_type=type_hints["max_webservers"])
254
+ check_type(argname="argument max_workers", value=max_workers, expected_type=type_hints["max_workers"])
255
+ check_type(argname="argument min_webservers", value=min_webservers, expected_type=type_hints["min_webservers"])
256
+ check_type(argname="argument min_workers", value=min_workers, expected_type=type_hints["min_workers"])
257
+ check_type(argname="argument network_configuration", value=network_configuration, expected_type=type_hints["network_configuration"])
258
+ check_type(argname="argument plugins_s3_object_version", value=plugins_s3_object_version, expected_type=type_hints["plugins_s3_object_version"])
259
+ check_type(argname="argument plugins_s3_path", value=plugins_s3_path, expected_type=type_hints["plugins_s3_path"])
260
+ check_type(argname="argument requirements_s3_object_version", value=requirements_s3_object_version, expected_type=type_hints["requirements_s3_object_version"])
261
+ check_type(argname="argument requirements_s3_path", value=requirements_s3_path, expected_type=type_hints["requirements_s3_path"])
262
+ check_type(argname="argument schedulers", value=schedulers, expected_type=type_hints["schedulers"])
263
+ check_type(argname="argument source_bucket", value=source_bucket, expected_type=type_hints["source_bucket"])
264
+ check_type(argname="argument startup_script_s3_object_version", value=startup_script_s3_object_version, expected_type=type_hints["startup_script_s3_object_version"])
265
+ check_type(argname="argument startup_script_s3_path", value=startup_script_s3_path, expected_type=type_hints["startup_script_s3_path"])
266
+ check_type(argname="argument webserver_access_mode", value=webserver_access_mode, expected_type=type_hints["webserver_access_mode"])
267
+ check_type(argname="argument webserver_url", value=webserver_url, expected_type=type_hints["webserver_url"])
268
+ check_type(argname="argument webserver_vpc_endpoint_service", value=webserver_vpc_endpoint_service, expected_type=type_hints["webserver_vpc_endpoint_service"])
269
+ check_type(argname="argument weekly_maintenance_window_start", value=weekly_maintenance_window_start, expected_type=type_hints["weekly_maintenance_window_start"])
270
+ self._values: typing.Dict[builtins.str, typing.Any] = {
271
+ "airflow_configuration_options": airflow_configuration_options,
272
+ "environment_arn": environment_arn,
273
+ "name": name,
274
+ }
275
+ if airflow_version is not None:
276
+ self._values["airflow_version"] = airflow_version
277
+ if celery_executor_queue is not None:
278
+ self._values["celery_executor_queue"] = celery_executor_queue
279
+ if dag_s3_path is not None:
280
+ self._values["dag_s3_path"] = dag_s3_path
281
+ if database_vpc_endpoint_service is not None:
282
+ self._values["database_vpc_endpoint_service"] = database_vpc_endpoint_service
283
+ if endpoint_management is not None:
284
+ self._values["endpoint_management"] = endpoint_management
285
+ if environment_class is not None:
286
+ self._values["environment_class"] = environment_class
287
+ if execution_role is not None:
288
+ self._values["execution_role"] = execution_role
289
+ if kms_key is not None:
290
+ self._values["kms_key"] = kms_key
291
+ if logging_configuration is not None:
292
+ self._values["logging_configuration"] = logging_configuration
293
+ if logging_configuration_dag_processing_logs_cloud_watch_log_group_arn is not None:
294
+ self._values["logging_configuration_dag_processing_logs_cloud_watch_log_group_arn"] = logging_configuration_dag_processing_logs_cloud_watch_log_group_arn
295
+ if logging_configuration_scheduler_logs_cloud_watch_log_group_arn is not None:
296
+ self._values["logging_configuration_scheduler_logs_cloud_watch_log_group_arn"] = logging_configuration_scheduler_logs_cloud_watch_log_group_arn
297
+ if logging_configuration_task_logs_cloud_watch_log_group_arn is not None:
298
+ self._values["logging_configuration_task_logs_cloud_watch_log_group_arn"] = logging_configuration_task_logs_cloud_watch_log_group_arn
299
+ if logging_configuration_webserver_logs_cloud_watch_log_group_arn is not None:
300
+ self._values["logging_configuration_webserver_logs_cloud_watch_log_group_arn"] = logging_configuration_webserver_logs_cloud_watch_log_group_arn
301
+ if logging_configuration_worker_logs_cloud_watch_log_group_arn is not None:
302
+ self._values["logging_configuration_worker_logs_cloud_watch_log_group_arn"] = logging_configuration_worker_logs_cloud_watch_log_group_arn
303
+ if max_webservers is not None:
304
+ self._values["max_webservers"] = max_webservers
305
+ if max_workers is not None:
306
+ self._values["max_workers"] = max_workers
307
+ if min_webservers is not None:
308
+ self._values["min_webservers"] = min_webservers
309
+ if min_workers is not None:
310
+ self._values["min_workers"] = min_workers
311
+ if network_configuration is not None:
312
+ self._values["network_configuration"] = network_configuration
313
+ if plugins_s3_object_version is not None:
314
+ self._values["plugins_s3_object_version"] = plugins_s3_object_version
315
+ if plugins_s3_path is not None:
316
+ self._values["plugins_s3_path"] = plugins_s3_path
317
+ if requirements_s3_object_version is not None:
318
+ self._values["requirements_s3_object_version"] = requirements_s3_object_version
319
+ if requirements_s3_path is not None:
320
+ self._values["requirements_s3_path"] = requirements_s3_path
321
+ if schedulers is not None:
322
+ self._values["schedulers"] = schedulers
323
+ if source_bucket is not None:
324
+ self._values["source_bucket"] = source_bucket
325
+ if startup_script_s3_object_version is not None:
326
+ self._values["startup_script_s3_object_version"] = startup_script_s3_object_version
327
+ if startup_script_s3_path is not None:
328
+ self._values["startup_script_s3_path"] = startup_script_s3_path
329
+ if webserver_access_mode is not None:
330
+ self._values["webserver_access_mode"] = webserver_access_mode
331
+ if webserver_url is not None:
332
+ self._values["webserver_url"] = webserver_url
333
+ if webserver_vpc_endpoint_service is not None:
334
+ self._values["webserver_vpc_endpoint_service"] = webserver_vpc_endpoint_service
335
+ if weekly_maintenance_window_start is not None:
336
+ self._values["weekly_maintenance_window_start"] = weekly_maintenance_window_start
337
+
338
+ @builtins.property
339
+ def airflow_configuration_options(
340
+ self,
341
+ ) -> typing.Mapping[builtins.str, builtins.str]:
342
+ '''A list of key-value pairs containing the Airflow configuration options for your environment.
343
+
344
+ For example, core.default_timezone: utc.
345
+ '''
346
+ result = self._values.get("airflow_configuration_options")
347
+ assert result is not None, "Required property 'airflow_configuration_options' is missing"
348
+ return typing.cast(typing.Mapping[builtins.str, builtins.str], result)
349
+
350
+ @builtins.property
351
+ def environment_arn(self) -> builtins.str:
352
+ '''The ARN for the Amazon MWAA environment.'''
353
+ result = self._values.get("environment_arn")
354
+ assert result is not None, "Required property 'environment_arn' is missing"
355
+ return typing.cast(builtins.str, result)
356
+
357
+ @builtins.property
358
+ def name(self) -> builtins.str:
359
+ '''The name of your Amazon MWAA environment.'''
360
+ result = self._values.get("name")
361
+ assert result is not None, "Required property 'name' is missing"
362
+ return typing.cast(builtins.str, result)
363
+
364
+ @builtins.property
365
+ def airflow_version(self) -> typing.Optional[AirflowVersion]:
366
+ '''The version of Apache Airflow to use for the environment.
367
+
368
+ If no value is specified, defaults to the latest version.
369
+
370
+ If you specify a newer version number for an existing environment, the version update requires some service interruption before taking effect.
371
+ '''
372
+ result = self._values.get("airflow_version")
373
+ return typing.cast(typing.Optional[AirflowVersion], result)
374
+
375
+ @builtins.property
376
+ def celery_executor_queue(self) -> typing.Optional[builtins.str]:
377
+ '''The queue ARN for the environment's Celery Executor.
378
+
379
+ Amazon MWAA uses a Celery Executor to distribute tasks across multiple workers.
380
+ When you create an environment in a shared VPC, you must provide access to the Celery Executor queue from your VPC.
381
+ '''
382
+ result = self._values.get("celery_executor_queue")
383
+ return typing.cast(typing.Optional[builtins.str], result)
384
+
385
+ @builtins.property
386
+ def dag_s3_path(self) -> typing.Optional[builtins.str]:
387
+ '''The relative path to the DAGs folder on your Amazon S3 bucket.
388
+
389
+ For example, dags.
390
+ '''
391
+ result = self._values.get("dag_s3_path")
392
+ return typing.cast(typing.Optional[builtins.str], result)
393
+
394
+ @builtins.property
395
+ def database_vpc_endpoint_service(self) -> typing.Optional[builtins.str]:
396
+ '''The VPC endpoint for the environment's Amazon RDS database.'''
397
+ result = self._values.get("database_vpc_endpoint_service")
398
+ return typing.cast(typing.Optional[builtins.str], result)
399
+
400
+ @builtins.property
401
+ def endpoint_management(self) -> typing.Optional[EndpointManagement]:
402
+ '''Defines whether the VPC endpoints configured for the environment are created, and managed, by the customer or by Amazon MWAA.
403
+
404
+ If set to SERVICE, Amazon MWAA will create and manage the required VPC endpoints in your VPC.
405
+ If set to CUSTOMER, you must create, and manage, the VPC endpoints in your VPC.
406
+ '''
407
+ result = self._values.get("endpoint_management")
408
+ return typing.cast(typing.Optional[EndpointManagement], result)
409
+
410
+ @builtins.property
411
+ def environment_class(self) -> typing.Optional["EnvironmentClass"]:
412
+ '''The environment class type.'''
413
+ result = self._values.get("environment_class")
414
+ return typing.cast(typing.Optional["EnvironmentClass"], result)
415
+
416
+ @builtins.property
417
+ def execution_role(self) -> typing.Optional[_aws_cdk_aws_iam_ceddda9d.IRole]:
418
+ '''The execution role in IAM that allows MWAA to access AWS resources in your environment.'''
419
+ result = self._values.get("execution_role")
420
+ return typing.cast(typing.Optional[_aws_cdk_aws_iam_ceddda9d.IRole], result)
421
+
422
+ @builtins.property
423
+ def kms_key(self) -> typing.Optional[_aws_cdk_aws_kms_ceddda9d.IKey]:
424
+ '''The AWS Key Management Service (KMS) key to encrypt and decrypt the data in your environment.
425
+
426
+ You can use an AWS KMS key managed by MWAA, or a customer-managed KMS key (advanced).
427
+ '''
428
+ result = self._values.get("kms_key")
429
+ return typing.cast(typing.Optional[_aws_cdk_aws_kms_ceddda9d.IKey], result)
430
+
431
+ @builtins.property
432
+ def logging_configuration(self) -> typing.Optional["LoggingConfiguration"]:
433
+ '''The Apache Airflow logs being sent to CloudWatch Logs.'''
434
+ result = self._values.get("logging_configuration")
435
+ return typing.cast(typing.Optional["LoggingConfiguration"], result)
436
+
437
+ @builtins.property
438
+ def logging_configuration_dag_processing_logs_cloud_watch_log_group_arn(
439
+ self,
440
+ ) -> typing.Optional[builtins.str]:
441
+ '''The ARN for the CloudWatch Logs group where the Apache Airflow DAG processing logs are published.'''
442
+ result = self._values.get("logging_configuration_dag_processing_logs_cloud_watch_log_group_arn")
443
+ return typing.cast(typing.Optional[builtins.str], result)
444
+
445
+ @builtins.property
446
+ def logging_configuration_scheduler_logs_cloud_watch_log_group_arn(
447
+ self,
448
+ ) -> typing.Optional[builtins.str]:
449
+ '''The ARN for the CloudWatch Logs group where the Apache Airflow Scheduler logs are published.'''
450
+ result = self._values.get("logging_configuration_scheduler_logs_cloud_watch_log_group_arn")
451
+ return typing.cast(typing.Optional[builtins.str], result)
452
+
453
+ @builtins.property
454
+ def logging_configuration_task_logs_cloud_watch_log_group_arn(
455
+ self,
456
+ ) -> typing.Optional[builtins.str]:
457
+ '''The ARN for the CloudWatch Logs group where the Apache Airflow task logs are published.'''
458
+ result = self._values.get("logging_configuration_task_logs_cloud_watch_log_group_arn")
459
+ return typing.cast(typing.Optional[builtins.str], result)
460
+
461
+ @builtins.property
462
+ def logging_configuration_webserver_logs_cloud_watch_log_group_arn(
463
+ self,
464
+ ) -> typing.Optional[builtins.str]:
465
+ '''The ARN for the CloudWatch Logs group where the Apache Airflow Web server logs are published.'''
466
+ result = self._values.get("logging_configuration_webserver_logs_cloud_watch_log_group_arn")
467
+ return typing.cast(typing.Optional[builtins.str], result)
468
+
469
+ @builtins.property
470
+ def logging_configuration_worker_logs_cloud_watch_log_group_arn(
471
+ self,
472
+ ) -> typing.Optional[builtins.str]:
473
+ '''The ARN for the CloudWatch Logs group where the Apache Airflow Worker logs are published.'''
474
+ result = self._values.get("logging_configuration_worker_logs_cloud_watch_log_group_arn")
475
+ return typing.cast(typing.Optional[builtins.str], result)
476
+
477
+ @builtins.property
478
+ def max_webservers(self) -> typing.Optional[jsii.Number]:
479
+ '''The maximum number of web servers that you want to run in your environment.
480
+
481
+ Amazon MWAA scales the number of Apache Airflow web servers up to the number you specify for MaxWebservers when you interact with your Apache
482
+ Airflow environment using Apache Airflow REST API, or the Apache Airflow CLI. For example, in scenarios where your workload requires network
483
+ calls to the Apache Airflow REST API with a high transaction-per-second (TPS) rate, Amazon MWAA will increase the number of web servers up to
484
+ the number set in MaxWebserers. As TPS rates decrease Amazon MWAA disposes of the additional web servers, and scales down to the number set in
485
+ MinxWebserers.
486
+
487
+ Valid values: For environments larger than mw1.micro, accepts values from 2 to 5. Defaults to 2 for all environment sizes except mw1.micro,
488
+ which defaults to 1.
489
+ '''
490
+ result = self._values.get("max_webservers")
491
+ return typing.cast(typing.Optional[jsii.Number], result)
492
+
493
+ @builtins.property
494
+ def max_workers(self) -> typing.Optional[jsii.Number]:
495
+ '''The maximum number of workers that you want to run in your environment.
496
+
497
+ MWAA scales the number of Apache Airflow workers up to the number you specify in the MaxWorkers field. For example, 20. When there are no more
498
+ tasks running, and no more in the queue, MWAA disposes of the extra workers leaving the one worker that is included with your environment, or
499
+ the number you specify in MinWorkers.
500
+ '''
501
+ result = self._values.get("max_workers")
502
+ return typing.cast(typing.Optional[jsii.Number], result)
503
+
504
+ @builtins.property
505
+ def min_webservers(self) -> typing.Optional[jsii.Number]:
506
+ '''The minimum number of web servers that you want to run in your environment.
507
+
508
+ Amazon MWAA scales the number of Apache Airflow web servers up to the number you specify for MaxWebservers when you interact with your Apache
509
+ Airflow environment using Apache Airflow REST API, or the Apache Airflow CLI. As the transaction-per-second rate, and the network load,
510
+ decrease, Amazon MWAA disposes of the additional web servers, and scales down to the number set in MinxWebserers.
511
+
512
+ Valid values: For environments larger than mw1.micro, accepts values from 2 to 5. Defaults to 2 for all environment sizes except mw1.micro,
513
+ which defaults to 1.
514
+ '''
515
+ result = self._values.get("min_webservers")
516
+ return typing.cast(typing.Optional[jsii.Number], result)
517
+
518
+ @builtins.property
519
+ def min_workers(self) -> typing.Optional[jsii.Number]:
520
+ '''The minimum number of workers that you want to run in your environment.
521
+
522
+ MWAA scales the number of Apache Airflow workers up to the number you
523
+ specify in the MaxWorkers field. When there are no more tasks running, and no more in the queue, MWAA disposes of the extra workers leaving
524
+ the worker count you specify in the MinWorkers field. For example, 2.
525
+ '''
526
+ result = self._values.get("min_workers")
527
+ return typing.cast(typing.Optional[jsii.Number], result)
528
+
529
+ @builtins.property
530
+ def network_configuration(self) -> typing.Optional["NetworkConfiguration"]:
531
+ '''The VPC networking components used to secure and enable network traffic between the AWS resources for your environment.'''
532
+ result = self._values.get("network_configuration")
533
+ return typing.cast(typing.Optional["NetworkConfiguration"], result)
534
+
535
+ @builtins.property
536
+ def plugins_s3_object_version(self) -> typing.Optional[builtins.str]:
537
+ '''The version of the plugins.zip file on your Amazon S3 bucket.'''
538
+ result = self._values.get("plugins_s3_object_version")
539
+ return typing.cast(typing.Optional[builtins.str], result)
540
+
541
+ @builtins.property
542
+ def plugins_s3_path(self) -> typing.Optional[builtins.str]:
543
+ '''The relative path to the plugins.zip file on your Amazon S3 bucket. For example, plugins.zip.'''
544
+ result = self._values.get("plugins_s3_path")
545
+ return typing.cast(typing.Optional[builtins.str], result)
546
+
547
+ @builtins.property
548
+ def requirements_s3_object_version(self) -> typing.Optional[builtins.str]:
549
+ '''The version of the requirements.txt file on your Amazon S3 bucket.'''
550
+ result = self._values.get("requirements_s3_object_version")
551
+ return typing.cast(typing.Optional[builtins.str], result)
552
+
553
+ @builtins.property
554
+ def requirements_s3_path(self) -> typing.Optional[builtins.str]:
555
+ '''The relative path to the requirements.txt file on your Amazon S3 bucket. For example, requirements.txt.'''
556
+ result = self._values.get("requirements_s3_path")
557
+ return typing.cast(typing.Optional[builtins.str], result)
558
+
559
+ @builtins.property
560
+ def schedulers(self) -> typing.Optional[jsii.Number]:
561
+ '''The number of schedulers that you want to run in your environment.
562
+
563
+ Valid values:
564
+ v2 - For environments larger than mw1.micro, accepts values from 2 to 5.
565
+ Defaults to 2 for all environment sizes except mw1.micro, which defaults to 1.
566
+ v1 - Accepts 1.
567
+ '''
568
+ result = self._values.get("schedulers")
569
+ return typing.cast(typing.Optional[jsii.Number], result)
570
+
571
+ @builtins.property
572
+ def source_bucket(self) -> typing.Optional[_aws_cdk_aws_s3_ceddda9d.IBucket]:
573
+ '''The Amazon S3 bucket where your DAG code and supporting files are stored.'''
574
+ result = self._values.get("source_bucket")
575
+ return typing.cast(typing.Optional[_aws_cdk_aws_s3_ceddda9d.IBucket], result)
576
+
577
+ @builtins.property
578
+ def startup_script_s3_object_version(self) -> typing.Optional[builtins.str]:
579
+ '''The version of the startup shell script in your Amazon S3 bucket.
580
+
581
+ You must specify the version ID that Amazon S3 assigns to the file every time you update the script.
582
+ Version IDs are Unicode, UTF-8 encoded, URL-ready, opaque strings that are no more than 1,024 bytes long.
583
+
584
+ The following is an example:
585
+ 3sL4kqtJlcpXroDTDmJ+rmSpXd3dIbrHY+MTRCxf3vjVBH40Nr8X8gdRQBpUMLUo
586
+ '''
587
+ result = self._values.get("startup_script_s3_object_version")
588
+ return typing.cast(typing.Optional[builtins.str], result)
589
+
590
+ @builtins.property
591
+ def startup_script_s3_path(self) -> typing.Optional[builtins.str]:
592
+ '''The relative path to the startup shell script in your Amazon S3 bucket.
593
+
594
+ For example, s3://mwaa-environment/startup.sh.
595
+ Amazon MWAA runs the script as your environment starts, and before running the Apache Airflow process.
596
+ You can use this script to install dependencies, modify Apache Airflow configuration options, and set environment variables.
597
+ '''
598
+ result = self._values.get("startup_script_s3_path")
599
+ return typing.cast(typing.Optional[builtins.str], result)
600
+
601
+ @builtins.property
602
+ def webserver_access_mode(self) -> typing.Optional["WebserverAccessMode"]:
603
+ '''The Apache Airflow Web server access mode.'''
604
+ result = self._values.get("webserver_access_mode")
605
+ return typing.cast(typing.Optional["WebserverAccessMode"], result)
606
+
607
+ @builtins.property
608
+ def webserver_url(self) -> typing.Optional[builtins.str]:
609
+ '''The URL of your Apache Airflow UI.'''
610
+ result = self._values.get("webserver_url")
611
+ return typing.cast(typing.Optional[builtins.str], result)
612
+
613
+ @builtins.property
614
+ def webserver_vpc_endpoint_service(self) -> typing.Optional[builtins.str]:
615
+ '''The VPC endpoint for the environment's web server.'''
616
+ result = self._values.get("webserver_vpc_endpoint_service")
617
+ return typing.cast(typing.Optional[builtins.str], result)
618
+
619
+ @builtins.property
620
+ def weekly_maintenance_window_start(self) -> typing.Optional[builtins.str]:
621
+ '''The day and time of the week to start weekly maintenance updates of your environment in the following format: DAY:HH:MM.
622
+
623
+ For example: TUE:03:30. You can specify a start time in 30 minute increments only.
624
+
625
+ Supported input includes the following:
626
+ MON|TUE|WED|THU|FRI|SAT|SUN:([01]\\d|2[0-3]):(00|30)
627
+ '''
628
+ result = self._values.get("weekly_maintenance_window_start")
629
+ return typing.cast(typing.Optional[builtins.str], result)
630
+
631
+ def __eq__(self, rhs: typing.Any) -> builtins.bool:
632
+ return isinstance(rhs, self.__class__) and rhs._values == self._values
633
+
634
+ def __ne__(self, rhs: typing.Any) -> builtins.bool:
635
+ return not (rhs == self)
636
+
637
+ def __repr__(self) -> str:
638
+ return "EnvironmentAttributes(%s)" % ", ".join(
639
+ k + "=" + repr(v) for k, v in self._values.items()
640
+ )
641
+
642
+
643
@jsii.enum(jsii_type="@robhan-cdk-lib/aws_mwaa.EnvironmentClass")
class EnvironmentClass(enum.Enum):
    '''Environment class (size) options for an Amazon MWAA environment.

    NOTE(review): member values presumably map to the CloudFormation
    ``AWS::MWAA::Environment`` ``EnvironmentClass`` sizes (e.g. ``MW1_MICRO``
    for mw1.micro) — confirm against the AWS documentation.
    '''

    MW1_MICRO = "MW1_MICRO"
    MW1_SMALL = "MW1_SMALL"
    MW1_MEDIUM = "MW1_MEDIUM"
    MW1_LARGE = "MW1_LARGE"
    MW1_1LARGE = "MW1_1LARGE"
    MW1_2LARGE = "MW1_2LARGE"
651
+
652
+
653
+ @jsii.data_type(
654
+ jsii_type="@robhan-cdk-lib/aws_mwaa.EnvironmentProps",
655
+ jsii_struct_bases=[],
656
+ name_mapping={
657
+ "airflow_configuration_options": "airflowConfigurationOptions",
658
+ "name": "name",
659
+ "airflow_version": "airflowVersion",
660
+ "dag_s3_path": "dagS3Path",
661
+ "endpoint_management": "endpointManagement",
662
+ "environment_class": "environmentClass",
663
+ "execution_role": "executionRole",
664
+ "kms_key": "kmsKey",
665
+ "logging_configuration": "loggingConfiguration",
666
+ "max_webservers": "maxWebservers",
667
+ "max_workers": "maxWorkers",
668
+ "min_webservers": "minWebservers",
669
+ "min_workers": "minWorkers",
670
+ "network_configuration": "networkConfiguration",
671
+ "plugins_s3_object_version": "pluginsS3ObjectVersion",
672
+ "plugins_s3_path": "pluginsS3Path",
673
+ "requirements_s3_object_version": "requirementsS3ObjectVersion",
674
+ "requirements_s3_path": "requirementsS3Path",
675
+ "schedulers": "schedulers",
676
+ "source_bucket": "sourceBucket",
677
+ "startup_script_s3_object_version": "startupScriptS3ObjectVersion",
678
+ "startup_script_s3_path": "startupScriptS3Path",
679
+ "webserver_access_mode": "webserverAccessMode",
680
+ "weekly_maintenance_window_start": "weeklyMaintenanceWindowStart",
681
+ },
682
+ )
683
+ class EnvironmentProps:
684
+ def __init__(
685
+ self,
686
+ *,
687
+ airflow_configuration_options: typing.Mapping[builtins.str, builtins.str],
688
+ name: builtins.str,
689
+ airflow_version: typing.Optional[AirflowVersion] = None,
690
+ dag_s3_path: typing.Optional[builtins.str] = None,
691
+ endpoint_management: typing.Optional[EndpointManagement] = None,
692
+ environment_class: typing.Optional[EnvironmentClass] = None,
693
+ execution_role: typing.Optional[_aws_cdk_aws_iam_ceddda9d.IRole] = None,
694
+ kms_key: typing.Optional[_aws_cdk_aws_kms_ceddda9d.IKey] = None,
695
+ logging_configuration: typing.Optional[typing.Union["LoggingConfiguration", typing.Dict[builtins.str, typing.Any]]] = None,
696
+ max_webservers: typing.Optional[jsii.Number] = None,
697
+ max_workers: typing.Optional[jsii.Number] = None,
698
+ min_webservers: typing.Optional[jsii.Number] = None,
699
+ min_workers: typing.Optional[jsii.Number] = None,
700
+ network_configuration: typing.Optional[typing.Union["NetworkConfiguration", typing.Dict[builtins.str, typing.Any]]] = None,
701
+ plugins_s3_object_version: typing.Optional[builtins.str] = None,
702
+ plugins_s3_path: typing.Optional[builtins.str] = None,
703
+ requirements_s3_object_version: typing.Optional[builtins.str] = None,
704
+ requirements_s3_path: typing.Optional[builtins.str] = None,
705
+ schedulers: typing.Optional[jsii.Number] = None,
706
+ source_bucket: typing.Optional[_aws_cdk_aws_s3_ceddda9d.IBucket] = None,
707
+ startup_script_s3_object_version: typing.Optional[builtins.str] = None,
708
+ startup_script_s3_path: typing.Optional[builtins.str] = None,
709
+ webserver_access_mode: typing.Optional["WebserverAccessMode"] = None,
710
+ weekly_maintenance_window_start: typing.Optional[builtins.str] = None,
711
+ ) -> None:
712
+ '''Properties for creating an Amazon Managed Workflows for Apache Airflow Environment.
713
+
714
+ :param airflow_configuration_options: A list of key-value pairs containing the Airflow configuration options for your environment. For example, core.default_timezone: utc.
715
+ :param name: The name of your Amazon MWAA environment.
716
+ :param airflow_version: The version of Apache Airflow to use for the environment. If no value is specified, defaults to the latest version. If you specify a newer version number for an existing environment, the version update requires some service interruption before taking effect.
717
+ :param dag_s3_path: The relative path to the DAGs folder on your Amazon S3 bucket. For example, dags.
718
+ :param endpoint_management: Defines whether the VPC endpoints configured for the environment are created, and managed, by the customer or by Amazon MWAA. If set to SERVICE, Amazon MWAA will create and manage the required VPC endpoints in your VPC. If set to CUSTOMER, you must create, and manage, the VPC endpoints in your VPC.
719
+ :param environment_class: The environment class type.
720
+ :param execution_role: The execution role in IAM that allows MWAA to access AWS resources in your environment.
721
+ :param kms_key: The AWS Key Management Service (KMS) key to encrypt and decrypt the data in your environment. You can use an AWS KMS key managed by MWAA, or a customer-managed KMS key (advanced).
722
+ :param logging_configuration: The Apache Airflow logs being sent to CloudWatch Logs.
723
+ :param max_webservers: The maximum number of web servers that you want to run in your environment. Amazon MWAA scales the number of Apache Airflow web servers up to the number you specify for MaxWebservers when you interact with your Apache Airflow environment using Apache Airflow REST API, or the Apache Airflow CLI. For example, in scenarios where your workload requires network calls to the Apache Airflow REST API with a high transaction-per-second (TPS) rate, Amazon MWAA will increase the number of web servers up to the number set in MaxWebserers. As TPS rates decrease Amazon MWAA disposes of the additional web servers, and scales down to the number set in MinxWebserers. Valid values: For environments larger than mw1.micro, accepts values from 2 to 5. Defaults to 2 for all environment sizes except mw1.micro, which defaults to 1.
724
+ :param max_workers: The maximum number of workers that you want to run in your environment. MWAA scales the number of Apache Airflow workers up to the number you specify in the MaxWorkers field. For example, 20. When there are no more tasks running, and no more in the queue, MWAA disposes of the extra workers leaving the one worker that is included with your environment, or the number you specify in MinWorkers.
725
+ :param min_webservers: The minimum number of web servers that you want to run in your environment. Amazon MWAA scales the number of Apache Airflow web servers up to the number you specify for MaxWebservers when you interact with your Apache Airflow environment using Apache Airflow REST API, or the Apache Airflow CLI. As the transaction-per-second rate, and the network load, decrease, Amazon MWAA disposes of the additional web servers, and scales down to the number set in MinxWebserers. Valid values: For environments larger than mw1.micro, accepts values from 2 to 5. Defaults to 2 for all environment sizes except mw1.micro, which defaults to 1.
726
+ :param min_workers: The minimum number of workers that you want to run in your environment. MWAA scales the number of Apache Airflow workers up to the number you specify in the MaxWorkers field. When there are no more tasks running, and no more in the queue, MWAA disposes of the extra workers leaving the worker count you specify in the MinWorkers field. For example, 2.
727
+ :param network_configuration: The VPC networking components used to secure and enable network traffic between the AWS resources for your environment.
728
+ :param plugins_s3_object_version: The version of the plugins.zip file on your Amazon S3 bucket.
729
+ :param plugins_s3_path: The relative path to the plugins.zip file on your Amazon S3 bucket. For example, plugins.zip.
730
+ :param requirements_s3_object_version: The version of the requirements.txt file on your Amazon S3 bucket.
731
+ :param requirements_s3_path: The relative path to the requirements.txt file on your Amazon S3 bucket. For example, requirements.txt.
732
+ :param schedulers: The number of schedulers that you want to run in your environment. Valid values: v2 - For environments larger than mw1.micro, accepts values from 2 to 5. Defaults to 2 for all environment sizes except mw1.micro, which defaults to 1. v1 - Accepts 1.
733
+ :param source_bucket: The Amazon S3 bucket where your DAG code and supporting files are stored.
734
+ :param startup_script_s3_object_version: The version of the startup shell script in your Amazon S3 bucket. You must specify the version ID that Amazon S3 assigns to the file every time you update the script. Version IDs are Unicode, UTF-8 encoded, URL-ready, opaque strings that are no more than 1,024 bytes long. The following is an example: 3sL4kqtJlcpXroDTDmJ+rmSpXd3dIbrHY+MTRCxf3vjVBH40Nr8X8gdRQBpUMLUo
735
+ :param startup_script_s3_path: The relative path to the startup shell script in your Amazon S3 bucket. For example, s3://mwaa-environment/startup.sh. Amazon MWAA runs the script as your environment starts, and before running the Apache Airflow process. You can use this script to install dependencies, modify Apache Airflow configuration options, and set environment variables.
736
+ :param webserver_access_mode: The Apache Airflow Web server access mode.
737
+ :param weekly_maintenance_window_start: The day and time of the week to start weekly maintenance updates of your environment in the following format: DAY:HH:MM. For example: TUE:03:30. You can specify a start time in 30 minute increments only. Supported input includes the following: MON|TUE|WED|THU|FRI|SAT|SUN:([01]\\d|2[0-3]):(00|30)
738
+ '''
739
+ if isinstance(logging_configuration, dict):
740
+ logging_configuration = LoggingConfiguration(**logging_configuration)
741
+ if isinstance(network_configuration, dict):
742
+ network_configuration = NetworkConfiguration(**network_configuration)
743
+ if __debug__:
744
+ type_hints = typing.get_type_hints(_typecheckingstub__adae2e01428b106a0a65893828e0f65d1e96376eb6556581f26b272553f74e81)
745
+ check_type(argname="argument airflow_configuration_options", value=airflow_configuration_options, expected_type=type_hints["airflow_configuration_options"])
746
+ check_type(argname="argument name", value=name, expected_type=type_hints["name"])
747
+ check_type(argname="argument airflow_version", value=airflow_version, expected_type=type_hints["airflow_version"])
748
+ check_type(argname="argument dag_s3_path", value=dag_s3_path, expected_type=type_hints["dag_s3_path"])
749
+ check_type(argname="argument endpoint_management", value=endpoint_management, expected_type=type_hints["endpoint_management"])
750
+ check_type(argname="argument environment_class", value=environment_class, expected_type=type_hints["environment_class"])
751
+ check_type(argname="argument execution_role", value=execution_role, expected_type=type_hints["execution_role"])
752
+ check_type(argname="argument kms_key", value=kms_key, expected_type=type_hints["kms_key"])
753
+ check_type(argname="argument logging_configuration", value=logging_configuration, expected_type=type_hints["logging_configuration"])
754
+ check_type(argname="argument max_webservers", value=max_webservers, expected_type=type_hints["max_webservers"])
755
+ check_type(argname="argument max_workers", value=max_workers, expected_type=type_hints["max_workers"])
756
+ check_type(argname="argument min_webservers", value=min_webservers, expected_type=type_hints["min_webservers"])
757
+ check_type(argname="argument min_workers", value=min_workers, expected_type=type_hints["min_workers"])
758
+ check_type(argname="argument network_configuration", value=network_configuration, expected_type=type_hints["network_configuration"])
759
+ check_type(argname="argument plugins_s3_object_version", value=plugins_s3_object_version, expected_type=type_hints["plugins_s3_object_version"])
760
+ check_type(argname="argument plugins_s3_path", value=plugins_s3_path, expected_type=type_hints["plugins_s3_path"])
761
+ check_type(argname="argument requirements_s3_object_version", value=requirements_s3_object_version, expected_type=type_hints["requirements_s3_object_version"])
762
+ check_type(argname="argument requirements_s3_path", value=requirements_s3_path, expected_type=type_hints["requirements_s3_path"])
763
+ check_type(argname="argument schedulers", value=schedulers, expected_type=type_hints["schedulers"])
764
+ check_type(argname="argument source_bucket", value=source_bucket, expected_type=type_hints["source_bucket"])
765
+ check_type(argname="argument startup_script_s3_object_version", value=startup_script_s3_object_version, expected_type=type_hints["startup_script_s3_object_version"])
766
+ check_type(argname="argument startup_script_s3_path", value=startup_script_s3_path, expected_type=type_hints["startup_script_s3_path"])
767
+ check_type(argname="argument webserver_access_mode", value=webserver_access_mode, expected_type=type_hints["webserver_access_mode"])
768
+ check_type(argname="argument weekly_maintenance_window_start", value=weekly_maintenance_window_start, expected_type=type_hints["weekly_maintenance_window_start"])
769
+ self._values: typing.Dict[builtins.str, typing.Any] = {
770
+ "airflow_configuration_options": airflow_configuration_options,
771
+ "name": name,
772
+ }
773
+ if airflow_version is not None:
774
+ self._values["airflow_version"] = airflow_version
775
+ if dag_s3_path is not None:
776
+ self._values["dag_s3_path"] = dag_s3_path
777
+ if endpoint_management is not None:
778
+ self._values["endpoint_management"] = endpoint_management
779
+ if environment_class is not None:
780
+ self._values["environment_class"] = environment_class
781
+ if execution_role is not None:
782
+ self._values["execution_role"] = execution_role
783
+ if kms_key is not None:
784
+ self._values["kms_key"] = kms_key
785
+ if logging_configuration is not None:
786
+ self._values["logging_configuration"] = logging_configuration
787
+ if max_webservers is not None:
788
+ self._values["max_webservers"] = max_webservers
789
+ if max_workers is not None:
790
+ self._values["max_workers"] = max_workers
791
+ if min_webservers is not None:
792
+ self._values["min_webservers"] = min_webservers
793
+ if min_workers is not None:
794
+ self._values["min_workers"] = min_workers
795
+ if network_configuration is not None:
796
+ self._values["network_configuration"] = network_configuration
797
+ if plugins_s3_object_version is not None:
798
+ self._values["plugins_s3_object_version"] = plugins_s3_object_version
799
+ if plugins_s3_path is not None:
800
+ self._values["plugins_s3_path"] = plugins_s3_path
801
+ if requirements_s3_object_version is not None:
802
+ self._values["requirements_s3_object_version"] = requirements_s3_object_version
803
+ if requirements_s3_path is not None:
804
+ self._values["requirements_s3_path"] = requirements_s3_path
805
+ if schedulers is not None:
806
+ self._values["schedulers"] = schedulers
807
+ if source_bucket is not None:
808
+ self._values["source_bucket"] = source_bucket
809
+ if startup_script_s3_object_version is not None:
810
+ self._values["startup_script_s3_object_version"] = startup_script_s3_object_version
811
+ if startup_script_s3_path is not None:
812
+ self._values["startup_script_s3_path"] = startup_script_s3_path
813
+ if webserver_access_mode is not None:
814
+ self._values["webserver_access_mode"] = webserver_access_mode
815
+ if weekly_maintenance_window_start is not None:
816
+ self._values["weekly_maintenance_window_start"] = weekly_maintenance_window_start
817
+
818
+ @builtins.property
819
+ def airflow_configuration_options(
820
+ self,
821
+ ) -> typing.Mapping[builtins.str, builtins.str]:
822
+ '''A list of key-value pairs containing the Airflow configuration options for your environment.
823
+
824
+ For example, core.default_timezone: utc.
825
+ '''
826
+ result = self._values.get("airflow_configuration_options")
827
+ assert result is not None, "Required property 'airflow_configuration_options' is missing"
828
+ return typing.cast(typing.Mapping[builtins.str, builtins.str], result)
829
+
830
+ @builtins.property
831
+ def name(self) -> builtins.str:
832
+ '''The name of your Amazon MWAA environment.'''
833
+ result = self._values.get("name")
834
+ assert result is not None, "Required property 'name' is missing"
835
+ return typing.cast(builtins.str, result)
836
+
837
+ @builtins.property
838
+ def airflow_version(self) -> typing.Optional[AirflowVersion]:
839
+ '''The version of Apache Airflow to use for the environment.
840
+
841
+ If no value is specified, defaults to the latest version.
842
+
843
+ If you specify a newer version number for an existing environment, the version update requires some service interruption before taking effect.
844
+ '''
845
+ result = self._values.get("airflow_version")
846
+ return typing.cast(typing.Optional[AirflowVersion], result)
847
+
848
+ @builtins.property
849
+ def dag_s3_path(self) -> typing.Optional[builtins.str]:
850
+ '''The relative path to the DAGs folder on your Amazon S3 bucket.
851
+
852
+ For example, dags.
853
+ '''
854
+ result = self._values.get("dag_s3_path")
855
+ return typing.cast(typing.Optional[builtins.str], result)
856
+
857
+ @builtins.property
858
+ def endpoint_management(self) -> typing.Optional[EndpointManagement]:
859
+ '''Defines whether the VPC endpoints configured for the environment are created, and managed, by the customer or by Amazon MWAA.
860
+
861
+ If set to SERVICE, Amazon MWAA will create and manage the required VPC endpoints in your VPC.
862
+ If set to CUSTOMER, you must create, and manage, the VPC endpoints in your VPC.
863
+ '''
864
+ result = self._values.get("endpoint_management")
865
+ return typing.cast(typing.Optional[EndpointManagement], result)
866
+
867
+ @builtins.property
868
+ def environment_class(self) -> typing.Optional[EnvironmentClass]:
869
+ '''The environment class type.'''
870
+ result = self._values.get("environment_class")
871
+ return typing.cast(typing.Optional[EnvironmentClass], result)
872
+
873
+ @builtins.property
874
+ def execution_role(self) -> typing.Optional[_aws_cdk_aws_iam_ceddda9d.IRole]:
875
+ '''The execution role in IAM that allows MWAA to access AWS resources in your environment.'''
876
+ result = self._values.get("execution_role")
877
+ return typing.cast(typing.Optional[_aws_cdk_aws_iam_ceddda9d.IRole], result)
878
+
879
+ @builtins.property
880
+ def kms_key(self) -> typing.Optional[_aws_cdk_aws_kms_ceddda9d.IKey]:
881
+ '''The AWS Key Management Service (KMS) key to encrypt and decrypt the data in your environment.
882
+
883
+ You can use an AWS KMS key managed by MWAA, or a customer-managed KMS key (advanced).
884
+ '''
885
+ result = self._values.get("kms_key")
886
+ return typing.cast(typing.Optional[_aws_cdk_aws_kms_ceddda9d.IKey], result)
887
+
888
+ @builtins.property
889
+ def logging_configuration(self) -> typing.Optional["LoggingConfiguration"]:
890
+ '''The Apache Airflow logs being sent to CloudWatch Logs.'''
891
+ result = self._values.get("logging_configuration")
892
+ return typing.cast(typing.Optional["LoggingConfiguration"], result)
893
+
894
+ @builtins.property
895
+ def max_webservers(self) -> typing.Optional[jsii.Number]:
896
+ '''The maximum number of web servers that you want to run in your environment.
897
+
898
+ Amazon MWAA scales the number of Apache Airflow web servers up to the number you specify for MaxWebservers when you interact with your Apache
899
+ Airflow environment using Apache Airflow REST API, or the Apache Airflow CLI. For example, in scenarios where your workload requires network
900
+ calls to the Apache Airflow REST API with a high transaction-per-second (TPS) rate, Amazon MWAA will increase the number of web servers up to
901
+ the number set in MaxWebserers. As TPS rates decrease Amazon MWAA disposes of the additional web servers, and scales down to the number set in
902
+ MinxWebserers.
903
+
904
+ Valid values: For environments larger than mw1.micro, accepts values from 2 to 5. Defaults to 2 for all environment sizes except mw1.micro,
905
+ which defaults to 1.
906
+ '''
907
+ result = self._values.get("max_webservers")
908
+ return typing.cast(typing.Optional[jsii.Number], result)
909
+
910
+ @builtins.property
911
+ def max_workers(self) -> typing.Optional[jsii.Number]:
912
+ '''The maximum number of workers that you want to run in your environment.
913
+
914
+ MWAA scales the number of Apache Airflow workers up to the number you specify in the MaxWorkers field. For example, 20. When there are no more
915
+ tasks running, and no more in the queue, MWAA disposes of the extra workers leaving the one worker that is included with your environment, or
916
+ the number you specify in MinWorkers.
917
+ '''
918
+ result = self._values.get("max_workers")
919
+ return typing.cast(typing.Optional[jsii.Number], result)
920
+
921
+ @builtins.property
922
+ def min_webservers(self) -> typing.Optional[jsii.Number]:
923
+ '''The minimum number of web servers that you want to run in your environment.
924
+
925
+ Amazon MWAA scales the number of Apache Airflow web servers up to the number you specify for MaxWebservers when you interact with your Apache
926
+ Airflow environment using Apache Airflow REST API, or the Apache Airflow CLI. As the transaction-per-second rate, and the network load,
927
+ decrease, Amazon MWAA disposes of the additional web servers, and scales down to the number set in MinxWebserers.
928
+
929
+ Valid values: For environments larger than mw1.micro, accepts values from 2 to 5. Defaults to 2 for all environment sizes except mw1.micro,
930
+ which defaults to 1.
931
+ '''
932
+ result = self._values.get("min_webservers")
933
+ return typing.cast(typing.Optional[jsii.Number], result)
934
+
935
+ @builtins.property
936
+ def min_workers(self) -> typing.Optional[jsii.Number]:
937
+ '''The minimum number of workers that you want to run in your environment.
938
+
939
+ MWAA scales the number of Apache Airflow workers up to the number you
940
+ specify in the MaxWorkers field. When there are no more tasks running, and no more in the queue, MWAA disposes of the extra workers leaving
941
+ the worker count you specify in the MinWorkers field. For example, 2.
942
+ '''
943
+ result = self._values.get("min_workers")
944
+ return typing.cast(typing.Optional[jsii.Number], result)
945
+
946
+ @builtins.property
947
+ def network_configuration(self) -> typing.Optional["NetworkConfiguration"]:
948
+ '''The VPC networking components used to secure and enable network traffic between the AWS resources for your environment.'''
949
+ result = self._values.get("network_configuration")
950
+ return typing.cast(typing.Optional["NetworkConfiguration"], result)
951
+
952
+ @builtins.property
953
+ def plugins_s3_object_version(self) -> typing.Optional[builtins.str]:
954
+ '''The version of the plugins.zip file on your Amazon S3 bucket.'''
955
+ result = self._values.get("plugins_s3_object_version")
956
+ return typing.cast(typing.Optional[builtins.str], result)
957
+
958
+ @builtins.property
959
+ def plugins_s3_path(self) -> typing.Optional[builtins.str]:
960
+ '''The relative path to the plugins.zip file on your Amazon S3 bucket. For example, plugins.zip.'''
961
+ result = self._values.get("plugins_s3_path")
962
+ return typing.cast(typing.Optional[builtins.str], result)
963
+
964
+ @builtins.property
965
+ def requirements_s3_object_version(self) -> typing.Optional[builtins.str]:
966
+ '''The version of the requirements.txt file on your Amazon S3 bucket.'''
967
+ result = self._values.get("requirements_s3_object_version")
968
+ return typing.cast(typing.Optional[builtins.str], result)
969
+
970
+ @builtins.property
971
+ def requirements_s3_path(self) -> typing.Optional[builtins.str]:
972
+ '''The relative path to the requirements.txt file on your Amazon S3 bucket. For example, requirements.txt.'''
973
+ result = self._values.get("requirements_s3_path")
974
+ return typing.cast(typing.Optional[builtins.str], result)
975
+
976
+ @builtins.property
977
+ def schedulers(self) -> typing.Optional[jsii.Number]:
978
+ '''The number of schedulers that you want to run in your environment.
979
+
980
+ Valid values:
981
+ v2 - For environments larger than mw1.micro, accepts values from 2 to 5.
982
+ Defaults to 2 for all environment sizes except mw1.micro, which defaults to 1.
983
+ v1 - Accepts 1.
984
+ '''
985
+ result = self._values.get("schedulers")
986
+ return typing.cast(typing.Optional[jsii.Number], result)
987
+
988
+ @builtins.property
989
+ def source_bucket(self) -> typing.Optional[_aws_cdk_aws_s3_ceddda9d.IBucket]:
990
+ '''The Amazon S3 bucket where your DAG code and supporting files are stored.'''
991
+ result = self._values.get("source_bucket")
992
+ return typing.cast(typing.Optional[_aws_cdk_aws_s3_ceddda9d.IBucket], result)
993
+
994
+ @builtins.property
995
+ def startup_script_s3_object_version(self) -> typing.Optional[builtins.str]:
996
+ '''The version of the startup shell script in your Amazon S3 bucket.
997
+
998
+ You must specify the version ID that Amazon S3 assigns to the file every time you update the script.
999
+ Version IDs are Unicode, UTF-8 encoded, URL-ready, opaque strings that are no more than 1,024 bytes long.
1000
+
1001
+ The following is an example:
1002
+ 3sL4kqtJlcpXroDTDmJ+rmSpXd3dIbrHY+MTRCxf3vjVBH40Nr8X8gdRQBpUMLUo
1003
+ '''
1004
+ result = self._values.get("startup_script_s3_object_version")
1005
+ return typing.cast(typing.Optional[builtins.str], result)
1006
+
1007
+ @builtins.property
1008
+ def startup_script_s3_path(self) -> typing.Optional[builtins.str]:
1009
+ '''The relative path to the startup shell script in your Amazon S3 bucket.
1010
+
1011
+ For example, s3://mwaa-environment/startup.sh.
1012
+ Amazon MWAA runs the script as your environment starts, and before running the Apache Airflow process.
1013
+ You can use this script to install dependencies, modify Apache Airflow configuration options, and set environment variables.
1014
+ '''
1015
+ result = self._values.get("startup_script_s3_path")
1016
+ return typing.cast(typing.Optional[builtins.str], result)
1017
+
1018
+ @builtins.property
1019
+ def webserver_access_mode(self) -> typing.Optional["WebserverAccessMode"]:
1020
+ '''The Apache Airflow Web server access mode.'''
1021
+ result = self._values.get("webserver_access_mode")
1022
+ return typing.cast(typing.Optional["WebserverAccessMode"], result)
1023
+
1024
+ @builtins.property
1025
+ def weekly_maintenance_window_start(self) -> typing.Optional[builtins.str]:
1026
+ '''The day and time of the week to start weekly maintenance updates of your environment in the following format: DAY:HH:MM.
1027
+
1028
+ For example: TUE:03:30. You can specify a start time in 30 minute increments only.
1029
+
1030
+ Supported input includes the following:
1031
+ MON|TUE|WED|THU|FRI|SAT|SUN:([01]\\d|2[0-3]):(00|30)
1032
+ '''
1033
+ result = self._values.get("weekly_maintenance_window_start")
1034
+ return typing.cast(typing.Optional[builtins.str], result)
1035
+
1036
+ def __eq__(self, rhs: typing.Any) -> builtins.bool:
1037
+ return isinstance(rhs, self.__class__) and rhs._values == self._values
1038
+
1039
+ def __ne__(self, rhs: typing.Any) -> builtins.bool:
1040
+ return not (rhs == self)
1041
+
1042
+ def __repr__(self) -> str:
1043
+ return "EnvironmentProps(%s)" % ", ".join(
1044
+ k + "=" + repr(v) for k, v in self._values.items()
1045
+ )
1046
+
1047
+
1048
@jsii.interface(jsii_type="@robhan-cdk-lib/aws_mwaa.IEnvironment")
class IEnvironment(_aws_cdk_ceddda9d.IResource, typing_extensions.Protocol):
    '''Structural (jsii) interface describing an Amazon MWAA environment resource.

    All members are read-only properties; required attributes come first,
    followed by the optional configuration attributes.
    '''

    @builtins.property
    @jsii.member(jsii_name="airflowConfigurationOptions")
    def airflow_configuration_options(
        self,
    ) -> typing.Mapping[builtins.str, builtins.str]:
        '''A list of key-value pairs containing the Airflow configuration options for your environment.

        For example, core.default_timezone: utc.
        '''
        ...

    @builtins.property
    @jsii.member(jsii_name="environmentArn")
    def environment_arn(self) -> builtins.str:
        '''The ARN for the Amazon MWAA environment.'''
        ...

    @builtins.property
    @jsii.member(jsii_name="name")
    def name(self) -> builtins.str:
        '''The name of your Amazon MWAA environment.'''
        ...

    @builtins.property
    @jsii.member(jsii_name="airflowVersion")
    def airflow_version(self) -> typing.Optional[AirflowVersion]:
        '''The version of Apache Airflow to use for the environment.

        If no value is specified, defaults to the latest version.

        If you specify a newer version number for an existing environment, the version update requires some service interruption before taking effect.
        '''
        ...

    @builtins.property
    @jsii.member(jsii_name="celeryExecutorQueue")
    def celery_executor_queue(self) -> typing.Optional[builtins.str]:
        '''The queue ARN for the environment's Celery Executor.

        Amazon MWAA uses a Celery Executor to distribute tasks across multiple workers.
        When you create an environment in a shared VPC, you must provide access to the Celery Executor queue from your VPC.
        '''
        ...

    @builtins.property
    @jsii.member(jsii_name="dagS3Path")
    def dag_s3_path(self) -> typing.Optional[builtins.str]:
        '''The relative path to the DAGs folder on your Amazon S3 bucket.

        For example, dags.
        '''
        ...

    @builtins.property
    @jsii.member(jsii_name="databaseVpcEndpointService")
    def database_vpc_endpoint_service(self) -> typing.Optional[builtins.str]:
        '''The VPC endpoint for the environment's Amazon RDS database.'''
        ...

    @builtins.property
    @jsii.member(jsii_name="endpointManagement")
    def endpoint_management(self) -> typing.Optional[EndpointManagement]:
        '''Defines whether the VPC endpoints configured for the environment are created, and managed, by the customer or by Amazon MWAA.

        If set to SERVICE, Amazon MWAA will create and manage the required VPC endpoints in your VPC.
        If set to CUSTOMER, you must create, and manage, the VPC endpoints in your VPC.
        '''
        ...

    @builtins.property
    @jsii.member(jsii_name="environmentClass")
    def environment_class(self) -> typing.Optional[EnvironmentClass]:
        '''The environment class type.'''
        ...

    @builtins.property
    @jsii.member(jsii_name="executionRole")
    def execution_role(self) -> typing.Optional[_aws_cdk_aws_iam_ceddda9d.IRole]:
        '''The execution role in IAM that allows MWAA to access AWS resources in your environment.'''
        ...

    @builtins.property
    @jsii.member(jsii_name="kmsKey")
    def kms_key(self) -> typing.Optional[_aws_cdk_aws_kms_ceddda9d.IKey]:
        '''The AWS Key Management Service (KMS) key to encrypt and decrypt the data in your environment.

        You can use an AWS KMS key managed by MWAA, or a customer-managed KMS key (advanced).
        '''
        ...

    @builtins.property
    @jsii.member(jsii_name="loggingConfiguration")
    def logging_configuration(self) -> typing.Optional["LoggingConfiguration"]:
        '''The Apache Airflow logs being sent to CloudWatch Logs.'''
        ...

    @builtins.property
    @jsii.member(jsii_name="loggingConfigurationDagProcessingLogsCloudWatchLogGroupArn")
    def logging_configuration_dag_processing_logs_cloud_watch_log_group_arn(
        self,
    ) -> typing.Optional[builtins.str]:
        '''The ARN for the CloudWatch Logs group where the Apache Airflow DAG processing logs are published.'''
        ...

    @builtins.property
    @jsii.member(jsii_name="loggingConfigurationSchedulerLogsCloudWatchLogGroupArn")
    def logging_configuration_scheduler_logs_cloud_watch_log_group_arn(
        self,
    ) -> typing.Optional[builtins.str]:
        '''The ARN for the CloudWatch Logs group where the Apache Airflow Scheduler logs are published.'''
        ...

    @builtins.property
    @jsii.member(jsii_name="loggingConfigurationTaskLogsCloudWatchLogGroupArn")
    def logging_configuration_task_logs_cloud_watch_log_group_arn(
        self,
    ) -> typing.Optional[builtins.str]:
        '''The ARN for the CloudWatch Logs group where the Apache Airflow task logs are published.'''
        ...

    @builtins.property
    @jsii.member(jsii_name="loggingConfigurationWebserverLogsCloudWatchLogGroupArn")
    def logging_configuration_webserver_logs_cloud_watch_log_group_arn(
        self,
    ) -> typing.Optional[builtins.str]:
        '''The ARN for the CloudWatch Logs group where the Apache Airflow Web server logs are published.'''
        ...

    @builtins.property
    @jsii.member(jsii_name="loggingConfigurationWorkerLogsCloudWatchLogGroupArn")
    def logging_configuration_worker_logs_cloud_watch_log_group_arn(
        self,
    ) -> typing.Optional[builtins.str]:
        '''The ARN for the CloudWatch Logs group where the Apache Airflow Worker logs are published.'''
        ...

    @builtins.property
    @jsii.member(jsii_name="maxWebservers")
    def max_webservers(self) -> typing.Optional[jsii.Number]:
        '''The maximum number of web servers that you want to run in your environment.

        Amazon MWAA scales the number of Apache Airflow web servers up to the number you specify for MaxWebservers when you interact with your Apache
        Airflow environment using Apache Airflow REST API, or the Apache Airflow CLI. For example, in scenarios where your workload requires network
        calls to the Apache Airflow REST API with a high transaction-per-second (TPS) rate, Amazon MWAA will increase the number of web servers up to
        the number set in MaxWebservers. As TPS rates decrease Amazon MWAA disposes of the additional web servers, and scales down to the number set in
        MinWebservers.

        Valid values: For environments larger than mw1.micro, accepts values from 2 to 5. Defaults to 2 for all environment sizes except mw1.micro,
        which defaults to 1.
        '''
        ...

    @builtins.property
    @jsii.member(jsii_name="maxWorkers")
    def max_workers(self) -> typing.Optional[jsii.Number]:
        '''The maximum number of workers that you want to run in your environment.

        MWAA scales the number of Apache Airflow workers up to the number you specify in the MaxWorkers field. For example, 20. When there are no more
        tasks running, and no more in the queue, MWAA disposes of the extra workers leaving the one worker that is included with your environment, or
        the number you specify in MinWorkers.
        '''
        ...

    @builtins.property
    @jsii.member(jsii_name="minWebservers")
    def min_webservers(self) -> typing.Optional[jsii.Number]:
        '''The minimum number of web servers that you want to run in your environment.

        Amazon MWAA scales the number of Apache Airflow web servers up to the number you specify for MaxWebservers when you interact with your Apache
        Airflow environment using Apache Airflow REST API, or the Apache Airflow CLI. As the transaction-per-second rate, and the network load,
        decrease, Amazon MWAA disposes of the additional web servers, and scales down to the number set in MinWebservers.

        Valid values: For environments larger than mw1.micro, accepts values from 2 to 5. Defaults to 2 for all environment sizes except mw1.micro,
        which defaults to 1.
        '''
        ...

    @builtins.property
    @jsii.member(jsii_name="minWorkers")
    def min_workers(self) -> typing.Optional[jsii.Number]:
        '''The minimum number of workers that you want to run in your environment.

        MWAA scales the number of Apache Airflow workers up to the number you
        specify in the MaxWorkers field. When there are no more tasks running, and no more in the queue, MWAA disposes of the extra workers leaving
        the worker count you specify in the MinWorkers field. For example, 2.
        '''
        ...

    @builtins.property
    @jsii.member(jsii_name="networkConfiguration")
    def network_configuration(self) -> typing.Optional["NetworkConfiguration"]:
        '''The VPC networking components used to secure and enable network traffic between the AWS resources for your environment.'''
        ...

    @builtins.property
    @jsii.member(jsii_name="pluginsS3ObjectVersion")
    def plugins_s3_object_version(self) -> typing.Optional[builtins.str]:
        '''The version of the plugins.zip file on your Amazon S3 bucket.'''
        ...

    @builtins.property
    @jsii.member(jsii_name="pluginsS3Path")
    def plugins_s3_path(self) -> typing.Optional[builtins.str]:
        '''The relative path to the plugins.zip file on your Amazon S3 bucket. For example, plugins.zip.'''
        ...

    @builtins.property
    @jsii.member(jsii_name="requirementsS3ObjectVersion")
    def requirements_s3_object_version(self) -> typing.Optional[builtins.str]:
        '''The version of the requirements.txt file on your Amazon S3 bucket.'''
        ...

    @builtins.property
    @jsii.member(jsii_name="requirementsS3Path")
    def requirements_s3_path(self) -> typing.Optional[builtins.str]:
        '''The relative path to the requirements.txt file on your Amazon S3 bucket. For example, requirements.txt.'''
        ...

    @builtins.property
    @jsii.member(jsii_name="schedulers")
    def schedulers(self) -> typing.Optional[jsii.Number]:
        '''The number of schedulers that you want to run in your environment.

        Valid values:
        v2 - For environments larger than mw1.micro, accepts values from 2 to 5.
        Defaults to 2 for all environment sizes except mw1.micro, which defaults to 1.
        v1 - Accepts 1.
        '''
        ...

    @builtins.property
    @jsii.member(jsii_name="sourceBucket")
    def source_bucket(self) -> typing.Optional[_aws_cdk_aws_s3_ceddda9d.IBucket]:
        '''The Amazon S3 bucket where your DAG code and supporting files are stored.'''
        ...

    @builtins.property
    @jsii.member(jsii_name="startupScriptS3ObjectVersion")
    def startup_script_s3_object_version(self) -> typing.Optional[builtins.str]:
        '''The version of the startup shell script in your Amazon S3 bucket.

        You must specify the version ID that Amazon S3 assigns to the file every time you update the script.
        Version IDs are Unicode, UTF-8 encoded, URL-ready, opaque strings that are no more than 1,024 bytes long.

        The following is an example:
        3sL4kqtJlcpXroDTDmJ+rmSpXd3dIbrHY+MTRCxf3vjVBH40Nr8X8gdRQBpUMLUo
        '''
        ...

    @builtins.property
    @jsii.member(jsii_name="startupScriptS3Path")
    def startup_script_s3_path(self) -> typing.Optional[builtins.str]:
        '''The relative path to the startup shell script in your Amazon S3 bucket.

        For example, s3://mwaa-environment/startup.sh.
        Amazon MWAA runs the script as your environment starts, and before running the Apache Airflow process.
        You can use this script to install dependencies, modify Apache Airflow configuration options, and set environment variables.
        '''
        ...

    @builtins.property
    @jsii.member(jsii_name="webserverAccessMode")
    def webserver_access_mode(self) -> typing.Optional["WebserverAccessMode"]:
        '''The Apache Airflow Web server access mode.'''
        ...

    @builtins.property
    @jsii.member(jsii_name="webserverUrl")
    def webserver_url(self) -> typing.Optional[builtins.str]:
        '''The URL of your Apache Airflow UI.'''
        ...

    @builtins.property
    @jsii.member(jsii_name="webserverVpcEndpointService")
    def webserver_vpc_endpoint_service(self) -> typing.Optional[builtins.str]:
        '''The VPC endpoint for the environment's web server.'''
        ...

    @builtins.property
    @jsii.member(jsii_name="weeklyMaintenanceWindowStart")
    def weekly_maintenance_window_start(self) -> typing.Optional[builtins.str]:
        '''The day and time of the week to start weekly maintenance updates of your environment in the following format: DAY:HH:MM.

        For example: TUE:03:30. You can specify a start time in 30 minute increments only.

        Supported input includes the following:
        MON|TUE|WED|THU|FRI|SAT|SUN:([01]\\d|2[0-3]):(00|30)
        '''
        ...
1339
+
1340
+
1341
+ class _IEnvironmentProxy(
1342
+ jsii.proxy_for(_aws_cdk_ceddda9d.IResource), # type: ignore[misc]
1343
+ ):
1344
+ __jsii_type__: typing.ClassVar[str] = "@robhan-cdk-lib/aws_mwaa.IEnvironment"
1345
+
1346
+ @builtins.property
1347
+ @jsii.member(jsii_name="airflowConfigurationOptions")
1348
+ def airflow_configuration_options(
1349
+ self,
1350
+ ) -> typing.Mapping[builtins.str, builtins.str]:
1351
+ '''A list of key-value pairs containing the Airflow configuration options for your environment.
1352
+
1353
+ For example, core.default_timezone: utc.
1354
+ '''
1355
+ return typing.cast(typing.Mapping[builtins.str, builtins.str], jsii.get(self, "airflowConfigurationOptions"))
1356
+
1357
+ @builtins.property
1358
+ @jsii.member(jsii_name="environmentArn")
1359
+ def environment_arn(self) -> builtins.str:
1360
+ '''The ARN for the Amazon MWAA environment.'''
1361
+ return typing.cast(builtins.str, jsii.get(self, "environmentArn"))
1362
+
1363
+ @builtins.property
1364
+ @jsii.member(jsii_name="name")
1365
+ def name(self) -> builtins.str:
1366
+ '''The name of your Amazon MWAA environment.'''
1367
+ return typing.cast(builtins.str, jsii.get(self, "name"))
1368
+
1369
+ @builtins.property
1370
+ @jsii.member(jsii_name="airflowVersion")
1371
+ def airflow_version(self) -> typing.Optional[AirflowVersion]:
1372
+ '''The version of Apache Airflow to use for the environment.
1373
+
1374
+ If no value is specified, defaults to the latest version.
1375
+
1376
+ If you specify a newer version number for an existing environment, the version update requires some service interruption before taking effect.
1377
+ '''
1378
+ return typing.cast(typing.Optional[AirflowVersion], jsii.get(self, "airflowVersion"))
1379
+
1380
+ @builtins.property
1381
+ @jsii.member(jsii_name="celeryExecutorQueue")
1382
+ def celery_executor_queue(self) -> typing.Optional[builtins.str]:
1383
+ '''The queue ARN for the environment's Celery Executor.
1384
+
1385
+ Amazon MWAA uses a Celery Executor to distribute tasks across multiple workers.
1386
+ When you create an environment in a shared VPC, you must provide access to the Celery Executor queue from your VPC.
1387
+ '''
1388
+ return typing.cast(typing.Optional[builtins.str], jsii.get(self, "celeryExecutorQueue"))
1389
+
1390
+ @builtins.property
1391
+ @jsii.member(jsii_name="dagS3Path")
1392
+ def dag_s3_path(self) -> typing.Optional[builtins.str]:
1393
+ '''The relative path to the DAGs folder on your Amazon S3 bucket.
1394
+
1395
+ For example, dags.
1396
+ '''
1397
+ return typing.cast(typing.Optional[builtins.str], jsii.get(self, "dagS3Path"))
1398
+
1399
+ @builtins.property
1400
+ @jsii.member(jsii_name="databaseVpcEndpointService")
1401
+ def database_vpc_endpoint_service(self) -> typing.Optional[builtins.str]:
1402
+ '''The VPC endpoint for the environment's Amazon RDS database.'''
1403
+ return typing.cast(typing.Optional[builtins.str], jsii.get(self, "databaseVpcEndpointService"))
1404
+
1405
+ @builtins.property
1406
+ @jsii.member(jsii_name="endpointManagement")
1407
+ def endpoint_management(self) -> typing.Optional[EndpointManagement]:
1408
+ '''Defines whether the VPC endpoints configured for the environment are created, and managed, by the customer or by Amazon MWAA.
1409
+
1410
+ If set to SERVICE, Amazon MWAA will create and manage the required VPC endpoints in your VPC.
1411
+ If set to CUSTOMER, you must create, and manage, the VPC endpoints in your VPC.
1412
+ '''
1413
+ return typing.cast(typing.Optional[EndpointManagement], jsii.get(self, "endpointManagement"))
1414
+
1415
+ @builtins.property
1416
+ @jsii.member(jsii_name="environmentClass")
1417
+ def environment_class(self) -> typing.Optional[EnvironmentClass]:
1418
+ '''The environment class type.'''
1419
+ return typing.cast(typing.Optional[EnvironmentClass], jsii.get(self, "environmentClass"))
1420
+
1421
+ @builtins.property
1422
+ @jsii.member(jsii_name="executionRole")
1423
+ def execution_role(self) -> typing.Optional[_aws_cdk_aws_iam_ceddda9d.IRole]:
1424
+ '''The execution role in IAM that allows MWAA to access AWS resources in your environment.'''
1425
+ return typing.cast(typing.Optional[_aws_cdk_aws_iam_ceddda9d.IRole], jsii.get(self, "executionRole"))
1426
+
1427
+ @builtins.property
1428
+ @jsii.member(jsii_name="kmsKey")
1429
+ def kms_key(self) -> typing.Optional[_aws_cdk_aws_kms_ceddda9d.IKey]:
1430
+ '''The AWS Key Management Service (KMS) key to encrypt and decrypt the data in your environment.
1431
+
1432
+ You can use an AWS KMS key managed by MWAA, or a customer-managed KMS key (advanced).
1433
+ '''
1434
+ return typing.cast(typing.Optional[_aws_cdk_aws_kms_ceddda9d.IKey], jsii.get(self, "kmsKey"))
1435
+
1436
+ @builtins.property
1437
+ @jsii.member(jsii_name="loggingConfiguration")
1438
+ def logging_configuration(self) -> typing.Optional["LoggingConfiguration"]:
1439
+ '''The Apache Airflow logs being sent to CloudWatch Logs.'''
1440
+ return typing.cast(typing.Optional["LoggingConfiguration"], jsii.get(self, "loggingConfiguration"))
1441
+
1442
+ @builtins.property
1443
+ @jsii.member(jsii_name="loggingConfigurationDagProcessingLogsCloudWatchLogGroupArn")
1444
+ def logging_configuration_dag_processing_logs_cloud_watch_log_group_arn(
1445
+ self,
1446
+ ) -> typing.Optional[builtins.str]:
1447
+ '''The ARN for the CloudWatch Logs group where the Apache Airflow DAG processing logs are published.'''
1448
+ return typing.cast(typing.Optional[builtins.str], jsii.get(self, "loggingConfigurationDagProcessingLogsCloudWatchLogGroupArn"))
1449
+
1450
+ @builtins.property
1451
+ @jsii.member(jsii_name="loggingConfigurationSchedulerLogsCloudWatchLogGroupArn")
1452
+ def logging_configuration_scheduler_logs_cloud_watch_log_group_arn(
1453
+ self,
1454
+ ) -> typing.Optional[builtins.str]:
1455
+ '''The ARN for the CloudWatch Logs group where the Apache Airflow Scheduler logs are published.'''
1456
+ return typing.cast(typing.Optional[builtins.str], jsii.get(self, "loggingConfigurationSchedulerLogsCloudWatchLogGroupArn"))
1457
+
1458
+ @builtins.property
1459
+ @jsii.member(jsii_name="loggingConfigurationTaskLogsCloudWatchLogGroupArn")
1460
+ def logging_configuration_task_logs_cloud_watch_log_group_arn(
1461
+ self,
1462
+ ) -> typing.Optional[builtins.str]:
1463
+ '''The ARN for the CloudWatch Logs group where the Apache Airflow task logs are published.'''
1464
+ return typing.cast(typing.Optional[builtins.str], jsii.get(self, "loggingConfigurationTaskLogsCloudWatchLogGroupArn"))
1465
+
1466
+ @builtins.property
1467
+ @jsii.member(jsii_name="loggingConfigurationWebserverLogsCloudWatchLogGroupArn")
1468
+ def logging_configuration_webserver_logs_cloud_watch_log_group_arn(
1469
+ self,
1470
+ ) -> typing.Optional[builtins.str]:
1471
+ '''The ARN for the CloudWatch Logs group where the Apache Airflow Web server logs are published.'''
1472
+ return typing.cast(typing.Optional[builtins.str], jsii.get(self, "loggingConfigurationWebserverLogsCloudWatchLogGroupArn"))
1473
+
1474
+ @builtins.property
1475
+ @jsii.member(jsii_name="loggingConfigurationWorkerLogsCloudWatchLogGroupArn")
1476
+ def logging_configuration_worker_logs_cloud_watch_log_group_arn(
1477
+ self,
1478
+ ) -> typing.Optional[builtins.str]:
1479
+ '''The ARN for the CloudWatch Logs group where the Apache Airflow Worker logs are published.'''
1480
+ return typing.cast(typing.Optional[builtins.str], jsii.get(self, "loggingConfigurationWorkerLogsCloudWatchLogGroupArn"))
1481
+
1482
+ @builtins.property
1483
+ @jsii.member(jsii_name="maxWebservers")
1484
+ def max_webservers(self) -> typing.Optional[jsii.Number]:
1485
+ '''The maximum number of web servers that you want to run in your environment.
1486
+
1487
+ Amazon MWAA scales the number of Apache Airflow web servers up to the number you specify for MaxWebservers when you interact with your Apache
1488
+ Airflow environment using Apache Airflow REST API, or the Apache Airflow CLI. For example, in scenarios where your workload requires network
1489
+ calls to the Apache Airflow REST API with a high transaction-per-second (TPS) rate, Amazon MWAA will increase the number of web servers up to
1490
+ the number set in MaxWebserers. As TPS rates decrease Amazon MWAA disposes of the additional web servers, and scales down to the number set in
1491
+ MinxWebserers.
1492
+
1493
+ Valid values: For environments larger than mw1.micro, accepts values from 2 to 5. Defaults to 2 for all environment sizes except mw1.micro,
1494
+ which defaults to 1.
1495
+ '''
1496
+ return typing.cast(typing.Optional[jsii.Number], jsii.get(self, "maxWebservers"))
1497
+
1498
+ @builtins.property
1499
+ @jsii.member(jsii_name="maxWorkers")
1500
+ def max_workers(self) -> typing.Optional[jsii.Number]:
1501
+ '''The maximum number of workers that you want to run in your environment.
1502
+
1503
+ MWAA scales the number of Apache Airflow workers up to the number you specify in the MaxWorkers field. For example, 20. When there are no more
1504
+ tasks running, and no more in the queue, MWAA disposes of the extra workers leaving the one worker that is included with your environment, or
1505
+ the number you specify in MinWorkers.
1506
+ '''
1507
+ return typing.cast(typing.Optional[jsii.Number], jsii.get(self, "maxWorkers"))
1508
+
1509
+ @builtins.property
1510
+ @jsii.member(jsii_name="minWebservers")
1511
+ def min_webservers(self) -> typing.Optional[jsii.Number]:
1512
+ '''The minimum number of web servers that you want to run in your environment.
1513
+
1514
+ Amazon MWAA scales the number of Apache Airflow web servers up to the number you specify for MaxWebservers when you interact with your Apache
1515
+ Airflow environment using Apache Airflow REST API, or the Apache Airflow CLI. As the transaction-per-second rate, and the network load,
1516
+ decrease, Amazon MWAA disposes of the additional web servers, and scales down to the number set in MinxWebserers.
1517
+
1518
+ Valid values: For environments larger than mw1.micro, accepts values from 2 to 5. Defaults to 2 for all environment sizes except mw1.micro,
1519
+ which defaults to 1.
1520
+ '''
1521
+ return typing.cast(typing.Optional[jsii.Number], jsii.get(self, "minWebservers"))
1522
+
1523
+ @builtins.property
1524
+ @jsii.member(jsii_name="minWorkers")
1525
+ def min_workers(self) -> typing.Optional[jsii.Number]:
1526
+ '''The minimum number of workers that you want to run in your environment.
1527
+
1528
+ MWAA scales the number of Apache Airflow workers up to the number you
1529
+ specify in the MaxWorkers field. When there are no more tasks running, and no more in the queue, MWAA disposes of the extra workers leaving
1530
+ the worker count you specify in the MinWorkers field. For example, 2.
1531
+ '''
1532
+ return typing.cast(typing.Optional[jsii.Number], jsii.get(self, "minWorkers"))
1533
+
1534
+ @builtins.property
1535
+ @jsii.member(jsii_name="networkConfiguration")
1536
+ def network_configuration(self) -> typing.Optional["NetworkConfiguration"]:
1537
+ '''The VPC networking components used to secure and enable network traffic between the AWS resources for your environment.'''
1538
+ return typing.cast(typing.Optional["NetworkConfiguration"], jsii.get(self, "networkConfiguration"))
1539
+
1540
+ @builtins.property
1541
+ @jsii.member(jsii_name="pluginsS3ObjectVersion")
1542
+ def plugins_s3_object_version(self) -> typing.Optional[builtins.str]:
1543
+ '''The version of the plugins.zip file on your Amazon S3 bucket.'''
1544
+ return typing.cast(typing.Optional[builtins.str], jsii.get(self, "pluginsS3ObjectVersion"))
1545
+
1546
+ @builtins.property
1547
+ @jsii.member(jsii_name="pluginsS3Path")
1548
+ def plugins_s3_path(self) -> typing.Optional[builtins.str]:
1549
+ '''The relative path to the plugins.zip file on your Amazon S3 bucket. For example, plugins.zip.'''
1550
+ return typing.cast(typing.Optional[builtins.str], jsii.get(self, "pluginsS3Path"))
1551
+
1552
+ @builtins.property
1553
+ @jsii.member(jsii_name="requirementsS3ObjectVersion")
1554
+ def requirements_s3_object_version(self) -> typing.Optional[builtins.str]:
1555
+ '''The version of the requirements.txt file on your Amazon S3 bucket.'''
1556
+ return typing.cast(typing.Optional[builtins.str], jsii.get(self, "requirementsS3ObjectVersion"))
1557
+
1558
+ @builtins.property
1559
+ @jsii.member(jsii_name="requirementsS3Path")
1560
+ def requirements_s3_path(self) -> typing.Optional[builtins.str]:
1561
+ '''The relative path to the requirements.txt file on your Amazon S3 bucket. For example, requirements.txt.'''
1562
+ return typing.cast(typing.Optional[builtins.str], jsii.get(self, "requirementsS3Path"))
1563
+
1564
+ @builtins.property
1565
+ @jsii.member(jsii_name="schedulers")
1566
+ def schedulers(self) -> typing.Optional[jsii.Number]:
1567
+ '''The number of schedulers that you want to run in your environment.
1568
+
1569
+ Valid values:
1570
+ v2 - For environments larger than mw1.micro, accepts values from 2 to 5.
1571
+ Defaults to 2 for all environment sizes except mw1.micro, which defaults to 1.
1572
+ v1 - Accepts 1.
1573
+ '''
1574
+ return typing.cast(typing.Optional[jsii.Number], jsii.get(self, "schedulers"))
1575
+
1576
+ @builtins.property
1577
+ @jsii.member(jsii_name="sourceBucket")
1578
+ def source_bucket(self) -> typing.Optional[_aws_cdk_aws_s3_ceddda9d.IBucket]:
1579
+ '''The Amazon S3 bucket where your DAG code and supporting files are stored.'''
1580
+ return typing.cast(typing.Optional[_aws_cdk_aws_s3_ceddda9d.IBucket], jsii.get(self, "sourceBucket"))
1581
+
1582
+ @builtins.property
1583
+ @jsii.member(jsii_name="startupScriptS3ObjectVersion")
1584
+ def startup_script_s3_object_version(self) -> typing.Optional[builtins.str]:
1585
+ '''The version of the startup shell script in your Amazon S3 bucket.
1586
+
1587
+ You must specify the version ID that Amazon S3 assigns to the file every time you update the script.
1588
+ Version IDs are Unicode, UTF-8 encoded, URL-ready, opaque strings that are no more than 1,024 bytes long.
1589
+
1590
+ The following is an example:
1591
+ 3sL4kqtJlcpXroDTDmJ+rmSpXd3dIbrHY+MTRCxf3vjVBH40Nr8X8gdRQBpUMLUo
1592
+ '''
1593
+ return typing.cast(typing.Optional[builtins.str], jsii.get(self, "startupScriptS3ObjectVersion"))
1594
+
1595
+ @builtins.property
1596
+ @jsii.member(jsii_name="startupScriptS3Path")
1597
+ def startup_script_s3_path(self) -> typing.Optional[builtins.str]:
1598
+ '''The relative path to the startup shell script in your Amazon S3 bucket.
1599
+
1600
+ For example, s3://mwaa-environment/startup.sh.
1601
+ Amazon MWAA runs the script as your environment starts, and before running the Apache Airflow process.
1602
+ You can use this script to install dependencies, modify Apache Airflow configuration options, and set environment variables.
1603
+ '''
1604
+ return typing.cast(typing.Optional[builtins.str], jsii.get(self, "startupScriptS3Path"))
1605
+
1606
+ @builtins.property
1607
+ @jsii.member(jsii_name="webserverAccessMode")
1608
+ def webserver_access_mode(self) -> typing.Optional["WebserverAccessMode"]:
1609
+ '''The Apache Airflow Web server access mode.'''
1610
+ return typing.cast(typing.Optional["WebserverAccessMode"], jsii.get(self, "webserverAccessMode"))
1611
+
1612
+ @builtins.property
1613
+ @jsii.member(jsii_name="webserverUrl")
1614
+ def webserver_url(self) -> typing.Optional[builtins.str]:
1615
+ '''The URL of your Apache Airflow UI.'''
1616
+ return typing.cast(typing.Optional[builtins.str], jsii.get(self, "webserverUrl"))
1617
+
1618
+ @builtins.property
1619
+ @jsii.member(jsii_name="webserverVpcEndpointService")
1620
+ def webserver_vpc_endpoint_service(self) -> typing.Optional[builtins.str]:
1621
+ '''The VPC endpoint for the environment's web server.'''
1622
+ return typing.cast(typing.Optional[builtins.str], jsii.get(self, "webserverVpcEndpointService"))
1623
+
1624
+ @builtins.property
1625
+ @jsii.member(jsii_name="weeklyMaintenanceWindowStart")
1626
+ def weekly_maintenance_window_start(self) -> typing.Optional[builtins.str]:
1627
+ '''The day and time of the week to start weekly maintenance updates of your environment in the following format: DAY:HH:MM.
1628
+
1629
+ For example: TUE:03:30. You can specify a start time in 30 minute increments only.
1630
+
1631
+ Supported input includes the following:
1632
+ MON|TUE|WED|THU|FRI|SAT|SUN:([01]\\d|2[0-3]):(00|30)
1633
+ '''
1634
+ return typing.cast(typing.Optional[builtins.str], jsii.get(self, "weeklyMaintenanceWindowStart"))
1635
+
1636
+ # Adding a "__jsii_proxy_class__(): typing.Type" function to the interface
1637
+ typing.cast(typing.Any, IEnvironment).__jsii_proxy_class__ = lambda : _IEnvironmentProxy
1638
+
1639
+
1640
@jsii.enum(jsii_type="@robhan-cdk-lib/aws_mwaa.LogLevel")
class LogLevel(enum.Enum):
    '''The Apache Airflow logging level to send to CloudWatch Logs for an enabled log type.

    Used by ``ModuleLoggingConfiguration.log_level``.
    '''

    DEBUG = "DEBUG"
    INFO = "INFO"
    WARNING = "WARNING"
    ERROR = "ERROR"
    CRITICAL = "CRITICAL"
1647
+
1648
+
1649
@jsii.data_type(
    jsii_type="@robhan-cdk-lib/aws_mwaa.LoggingConfiguration",
    jsii_struct_bases=[],
    name_mapping={
        "dag_processing_logs": "dagProcessingLogs",
        "scheduler_logs": "schedulerLogs",
        "task_logs": "taskLogs",
        "web_server_logs": "webServerLogs",
        "worker_logs": "workerLogs",
    },
)
class LoggingConfiguration:
    def __init__(
        self,
        *,
        dag_processing_logs: typing.Optional[typing.Union["ModuleLoggingConfiguration", typing.Dict[builtins.str, typing.Any]]] = None,
        scheduler_logs: typing.Optional[typing.Union["ModuleLoggingConfiguration", typing.Dict[builtins.str, typing.Any]]] = None,
        task_logs: typing.Optional[typing.Union["ModuleLoggingConfiguration", typing.Dict[builtins.str, typing.Any]]] = None,
        web_server_logs: typing.Optional[typing.Union["ModuleLoggingConfiguration", typing.Dict[builtins.str, typing.Any]]] = None,
        worker_logs: typing.Optional[typing.Union["ModuleLoggingConfiguration", typing.Dict[builtins.str, typing.Any]]] = None,
    ) -> None:
        '''The type of Apache Airflow logs to send to CloudWatch Logs.

        :param dag_processing_logs: Defines the processing logs sent to CloudWatch Logs and the logging level to send.
        :param scheduler_logs: Defines the scheduler logs sent to CloudWatch Logs and the logging level to send.
        :param task_logs: Defines the task logs sent to CloudWatch Logs and the logging level to send.
        :param web_server_logs: Defines the web server logs sent to CloudWatch Logs and the logging level to send.
        :param worker_logs: Defines the worker logs sent to CloudWatch Logs and the logging level to send.
        '''
        # Gather the keyword arguments once; callers may pass plain dicts,
        # which are coerced to ModuleLoggingConfiguration structs first.
        supplied = {
            "dag_processing_logs": dag_processing_logs,
            "scheduler_logs": scheduler_logs,
            "task_logs": task_logs,
            "web_server_logs": web_server_logs,
            "worker_logs": worker_logs,
        }
        supplied = {
            key: ModuleLoggingConfiguration(**value) if isinstance(value, dict) else value
            for key, value in supplied.items()
        }
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__587e90c0429a944bc98095249fe5cd300a90dcf33089932ed503e117deb58614)
            for key, value in supplied.items():
                check_type(argname="argument " + key, value=value, expected_type=type_hints[key])
        # Only values the caller actually provided are stored.
        self._values: typing.Dict[builtins.str, typing.Any] = {
            key: value for key, value in supplied.items() if value is not None
        }

    @builtins.property
    def dag_processing_logs(self) -> typing.Optional["ModuleLoggingConfiguration"]:
        '''Defines the processing logs sent to CloudWatch Logs and the logging level to send.'''
        return typing.cast(typing.Optional["ModuleLoggingConfiguration"], self._values.get("dag_processing_logs"))

    @builtins.property
    def scheduler_logs(self) -> typing.Optional["ModuleLoggingConfiguration"]:
        '''Defines the scheduler logs sent to CloudWatch Logs and the logging level to send.'''
        return typing.cast(typing.Optional["ModuleLoggingConfiguration"], self._values.get("scheduler_logs"))

    @builtins.property
    def task_logs(self) -> typing.Optional["ModuleLoggingConfiguration"]:
        '''Defines the task logs sent to CloudWatch Logs and the logging level to send.'''
        return typing.cast(typing.Optional["ModuleLoggingConfiguration"], self._values.get("task_logs"))

    @builtins.property
    def web_server_logs(self) -> typing.Optional["ModuleLoggingConfiguration"]:
        '''Defines the web server logs sent to CloudWatch Logs and the logging level to send.'''
        return typing.cast(typing.Optional["ModuleLoggingConfiguration"], self._values.get("web_server_logs"))

    @builtins.property
    def worker_logs(self) -> typing.Optional["ModuleLoggingConfiguration"]:
        '''Defines the worker logs sent to CloudWatch Logs and the logging level to send.'''
        return typing.cast(typing.Optional["ModuleLoggingConfiguration"], self._values.get("worker_logs"))

    def __eq__(self, rhs: typing.Any) -> builtins.bool:
        if not isinstance(rhs, self.__class__):
            return False
        return self._values == rhs._values

    def __ne__(self, rhs: typing.Any) -> builtins.bool:
        return not (rhs == self)

    def __repr__(self) -> str:
        fields = ", ".join(f"{key}={value!r}" for key, value in self._values.items())
        return f"LoggingConfiguration({fields})"
1747
+
1748
+
1749
@jsii.data_type(
    jsii_type="@robhan-cdk-lib/aws_mwaa.ModuleLoggingConfiguration",
    jsii_struct_bases=[],
    name_mapping={
        "cloud_watch_log_group": "cloudWatchLogGroup",
        "enabled": "enabled",
        "log_level": "logLevel",
    },
)
class ModuleLoggingConfiguration:
    def __init__(
        self,
        *,
        cloud_watch_log_group: typing.Optional[_aws_cdk_aws_logs_ceddda9d.ILogGroup] = None,
        enabled: typing.Optional[builtins.bool] = None,
        log_level: typing.Optional[LogLevel] = None,
    ) -> None:
        '''Defines the type of logs to send for the Apache Airflow log type (e.g. DagProcessingLogs).

        :param cloud_watch_log_group: The CloudWatch Logs log group for each type of Apache Airflow log type that you have enabled.
        :param enabled: Indicates whether to enable the Apache Airflow log type (e.g. DagProcessingLogs) in CloudWatch Logs.
        :param log_level: Defines the Apache Airflow logs to send for the log type (e.g. DagProcessingLogs) to CloudWatch Logs.
        '''
        # Pair each argument with its name once so the debug-time type check
        # and the value store below iterate the same sequence.
        supplied = (
            ("cloud_watch_log_group", cloud_watch_log_group),
            ("enabled", enabled),
            ("log_level", log_level),
        )
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__7b6b5c584242899ae7800864118420958302bf2568da5e2d6f5a683e345399aa)
            for name, value in supplied:
                check_type(argname="argument " + name, value=value, expected_type=type_hints[name])
        # Omitted (None) arguments are not stored at all.
        self._values: typing.Dict[builtins.str, typing.Any] = {
            name: value for name, value in supplied if value is not None
        }

    @builtins.property
    def cloud_watch_log_group(
        self,
    ) -> typing.Optional[_aws_cdk_aws_logs_ceddda9d.ILogGroup]:
        '''The CloudWatch Logs log group for each type of Apache Airflow log type that you have enabled.'''
        return typing.cast(typing.Optional[_aws_cdk_aws_logs_ceddda9d.ILogGroup], self._values.get("cloud_watch_log_group"))

    @builtins.property
    def enabled(self) -> typing.Optional[builtins.bool]:
        '''Indicates whether to enable the Apache Airflow log type (e.g. DagProcessingLogs) in CloudWatch Logs.'''
        return typing.cast(typing.Optional[builtins.bool], self._values.get("enabled"))

    @builtins.property
    def log_level(self) -> typing.Optional[LogLevel]:
        '''Defines the Apache Airflow logs to send for the log type (e.g. DagProcessingLogs) to CloudWatch Logs.'''
        return typing.cast(typing.Optional[LogLevel], self._values.get("log_level"))

    def __eq__(self, rhs: typing.Any) -> builtins.bool:
        if not isinstance(rhs, self.__class__):
            return False
        return self._values == rhs._values

    def __ne__(self, rhs: typing.Any) -> builtins.bool:
        return not (rhs == self)

    def __repr__(self) -> str:
        fields = [f"{name}={value!r}" for name, value in self._values.items()]
        return "ModuleLoggingConfiguration(" + ", ".join(fields) + ")"
1815
+
1816
+
1817
@jsii.data_type(
    jsii_type="@robhan-cdk-lib/aws_mwaa.NetworkConfiguration",
    jsii_struct_bases=[],
    name_mapping={"security_groups": "securityGroups", "subnets": "subnets"},
)
class NetworkConfiguration:
    def __init__(
        self,
        *,
        security_groups: typing.Optional[typing.Sequence[_aws_cdk_aws_ec2_ceddda9d.ISecurityGroup]] = None,
        subnets: typing.Optional[typing.Sequence[_aws_cdk_aws_ec2_ceddda9d.ISubnet]] = None,
    ) -> None:
        '''The VPC networking components used to secure and enable network traffic between the AWS resources for your environment.

        :param security_groups: A list of one or more security groups. Accepts up to 5 security groups. A security group must be attached to the same VPC as the subnets.
        :param subnets: A list of subnets. Required to create an environment. Must be private subnets in two different availability zones. A subnet must be attached to the same VPC as the security group.
        '''
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__66a8db1008fda8f8cf9a9e9d41de07bde3dc8b894d4a91cd243e4e3057ff04ae)
            check_type(argname="argument security_groups", value=security_groups, expected_type=type_hints["security_groups"])
            check_type(argname="argument subnets", value=subnets, expected_type=type_hints["subnets"])
        # Store only the arguments that were actually supplied.
        values: typing.Dict[builtins.str, typing.Any] = {}
        if security_groups is not None:
            values["security_groups"] = security_groups
        if subnets is not None:
            values["subnets"] = subnets
        self._values = values

    @builtins.property
    def security_groups(
        self,
    ) -> typing.Optional[typing.List[_aws_cdk_aws_ec2_ceddda9d.ISecurityGroup]]:
        '''A list of one or more security groups.

        Accepts up to 5 security groups. A security group must be attached to the same VPC as the subnets.
        '''
        return typing.cast(typing.Optional[typing.List[_aws_cdk_aws_ec2_ceddda9d.ISecurityGroup]], self._values.get("security_groups"))

    @builtins.property
    def subnets(
        self,
    ) -> typing.Optional[typing.List[_aws_cdk_aws_ec2_ceddda9d.ISubnet]]:
        '''A list of subnets.

        Required to create an environment. Must be private subnets in two different availability zones.
        A subnet must be attached to the same VPC as the security group.
        '''
        return typing.cast(typing.Optional[typing.List[_aws_cdk_aws_ec2_ceddda9d.ISubnet]], self._values.get("subnets"))

    def __eq__(self, rhs: typing.Any) -> builtins.bool:
        if not isinstance(rhs, self.__class__):
            return False
        return self._values == rhs._values

    def __ne__(self, rhs: typing.Any) -> builtins.bool:
        return not (rhs == self)

    def __repr__(self) -> str:
        fields = ", ".join(f"{name}={value!r}" for name, value in self._values.items())
        return f"NetworkConfiguration({fields})"
1877
+
1878
+
1879
@jsii.enum(jsii_type="@robhan-cdk-lib/aws_mwaa.WebserverAccessMode")
class WebserverAccessMode(enum.Enum):
    '''The Apache Airflow Web server access mode.'''

    PRIVATE_ONLY = "PRIVATE_ONLY"
    PUBLIC_ONLY = "PUBLIC_ONLY"
1883
+
1884
+
1885
@jsii.enum(jsii_type="@robhan-cdk-lib/aws_mwaa.WorkerReplacementStrategy")
class WorkerReplacementStrategy(enum.Enum):
    '''Worker replacement strategy.

    NOTE(review): semantics not shown in this module — presumably controls
    whether MWAA replaces workers forcibly or gracefully during updates;
    confirm against the AWS MWAA documentation.
    '''

    FORCED = "FORCED"
    GRACEFUL = "GRACEFUL"
1889
+
1890
+
1891
@jsii.implements(IEnvironment)
class EnvironmentBase(
    _aws_cdk_ceddda9d.Resource,
    metaclass=jsii.JSIIAbstractClass,
    jsii_type="@robhan-cdk-lib/aws_mwaa.EnvironmentBase",
):
    '''Abstract base class for Amazon MWAA environment constructs.

    Extends the CDK ``Resource`` class and declares every ``IEnvironment``
    attribute as an abstract property for concrete subclasses to implement.
    '''

    def __init__(
        self,
        scope: _constructs_77d1e7e8.Construct,
        id: builtins.str,
        *,
        account: typing.Optional[builtins.str] = None,
        environment_from_arn: typing.Optional[builtins.str] = None,
        physical_name: typing.Optional[builtins.str] = None,
        region: typing.Optional[builtins.str] = None,
    ) -> None:
        '''
        :param scope: -
        :param id: -
        :param account: The AWS account ID this resource belongs to. Default: - the resource is in the same account as the stack it belongs to
        :param environment_from_arn: ARN to deduce region and account from. The ARN is parsed and the account and region are taken from the ARN. This should be used for imported resources. Cannot be supplied together with either ``account`` or ``region``. Default: - take environment from ``account``, ``region`` parameters, or use Stack environment.
        :param physical_name: The value passed in by users to the physical name prop of the resource. - ``undefined`` implies that a physical name will be allocated by CloudFormation during deployment. - a concrete value implies a specific physical name - ``PhysicalName.GENERATE_IF_NEEDED`` is a marker that indicates that a physical will only be generated by the CDK if it is needed for cross-environment references. Otherwise, it will be allocated by CloudFormation. Default: - The physical name will be allocated by CloudFormation at deployment time
        :param region: The AWS region this resource belongs to. Default: - the resource is in the same region as the stack it belongs to
        '''
        # Debug-only runtime type check of the construct-scope arguments.
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__ad5873da45b6688f4c218055f06ff0d6a531da884f4fbcf05c463ed354d7521f)
            check_type(argname="argument scope", value=scope, expected_type=type_hints["scope"])
            check_type(argname="argument id", value=id, expected_type=type_hints["id"])
        props = _aws_cdk_ceddda9d.ResourceProps(
            account=account,
            environment_from_arn=environment_from_arn,
            physical_name=physical_name,
            region=region,
        )

        # Instantiation is delegated to the jsii kernel.
        jsii.create(self.__class__, self, [scope, id, props])

    @builtins.property
    @jsii.member(jsii_name="airflowConfigurationOptions")
    @abc.abstractmethod
    def airflow_configuration_options(
        self,
    ) -> typing.Mapping[builtins.str, builtins.str]:
        '''A list of key-value pairs containing the Airflow configuration options for your environment.

        For example, core.default_timezone: utc.
        '''
        ...

    @builtins.property
    @jsii.member(jsii_name="environmentArn")
    @abc.abstractmethod
    def environment_arn(self) -> builtins.str:
        '''The ARN for the Amazon MWAA environment.'''
        ...

    @builtins.property
    @jsii.member(jsii_name="name")
    @abc.abstractmethod
    def name(self) -> builtins.str:
        '''The name of your Amazon MWAA environment.'''
        ...

    @builtins.property
    @jsii.member(jsii_name="airflowVersion")
    @abc.abstractmethod
    def airflow_version(self) -> typing.Optional[AirflowVersion]:
        '''The version of Apache Airflow to use for the environment.

        If no value is specified, defaults to the latest version.

        If you specify a newer version number for an existing environment, the version update requires some service interruption before taking effect.
        '''
        ...

    @builtins.property
    @jsii.member(jsii_name="celeryExecutorQueue")
    @abc.abstractmethod
    def celery_executor_queue(self) -> typing.Optional[builtins.str]:
        '''The queue ARN for the environment's Celery Executor.

        Amazon MWAA uses a Celery Executor to distribute tasks across multiple workers.
        When you create an environment in a shared VPC, you must provide access to the Celery Executor queue from your VPC.
        '''
        ...

    @builtins.property
    @jsii.member(jsii_name="dagS3Path")
    @abc.abstractmethod
    def dag_s3_path(self) -> typing.Optional[builtins.str]:
        '''The relative path to the DAGs folder on your Amazon S3 bucket.

        For example, dags.
        '''
        ...

    @builtins.property
    @jsii.member(jsii_name="databaseVpcEndpointService")
    @abc.abstractmethod
    def database_vpc_endpoint_service(self) -> typing.Optional[builtins.str]:
        '''The VPC endpoint for the environment's Amazon RDS database.'''
        ...

    @builtins.property
    @jsii.member(jsii_name="endpointManagement")
    @abc.abstractmethod
    def endpoint_management(self) -> typing.Optional[EndpointManagement]:
        '''Defines whether the VPC endpoints configured for the environment are created, and managed, by the customer or by Amazon MWAA.

        If set to SERVICE, Amazon MWAA will create and manage the required VPC endpoints in your VPC.
        If set to CUSTOMER, you must create, and manage, the VPC endpoints in your VPC.
        '''
        ...

    @builtins.property
    @jsii.member(jsii_name="environmentClass")
    @abc.abstractmethod
    def environment_class(self) -> typing.Optional[EnvironmentClass]:
        '''The environment class type.'''
        ...

    @builtins.property
    @jsii.member(jsii_name="executionRole")
    @abc.abstractmethod
    def execution_role(self) -> typing.Optional[_aws_cdk_aws_iam_ceddda9d.IRole]:
        '''The execution role in IAM that allows MWAA to access AWS resources in your environment.'''
        ...

    @builtins.property
    @jsii.member(jsii_name="kmsKey")
    @abc.abstractmethod
    def kms_key(self) -> typing.Optional[_aws_cdk_aws_kms_ceddda9d.IKey]:
        '''The AWS Key Management Service (KMS) key to encrypt and decrypt the data in your environment.

        You can use an AWS KMS key managed by MWAA, or a customer-managed KMS key (advanced).
        '''
        ...

    @builtins.property
    @jsii.member(jsii_name="loggingConfiguration")
    @abc.abstractmethod
    def logging_configuration(self) -> typing.Optional[LoggingConfiguration]:
        '''The Apache Airflow logs being sent to CloudWatch Logs.'''
        ...

    @builtins.property
    @jsii.member(jsii_name="loggingConfigurationDagProcessingLogsCloudWatchLogGroupArn")
    @abc.abstractmethod
    def logging_configuration_dag_processing_logs_cloud_watch_log_group_arn(
        self,
    ) -> typing.Optional[builtins.str]:
        '''The ARN for the CloudWatch Logs group where the Apache Airflow DAG processing logs are published.'''
        ...

    @builtins.property
    @jsii.member(jsii_name="loggingConfigurationSchedulerLogsCloudWatchLogGroupArn")
    @abc.abstractmethod
    def logging_configuration_scheduler_logs_cloud_watch_log_group_arn(
        self,
    ) -> typing.Optional[builtins.str]:
        '''The ARN for the CloudWatch Logs group where the Apache Airflow Scheduler logs are published.'''
        ...

    @builtins.property
    @jsii.member(jsii_name="loggingConfigurationTaskLogsCloudWatchLogGroupArn")
    @abc.abstractmethod
    def logging_configuration_task_logs_cloud_watch_log_group_arn(
        self,
    ) -> typing.Optional[builtins.str]:
        '''The ARN for the CloudWatch Logs group where the Apache Airflow task logs are published.'''
        ...

    @builtins.property
    @jsii.member(jsii_name="loggingConfigurationWebserverLogsCloudWatchLogGroupArn")
    @abc.abstractmethod
    def logging_configuration_webserver_logs_cloud_watch_log_group_arn(
        self,
    ) -> typing.Optional[builtins.str]:
        '''The ARN for the CloudWatch Logs group where the Apache Airflow Web server logs are published.'''
        ...

    @builtins.property
    @jsii.member(jsii_name="loggingConfigurationWorkerLogsCloudWatchLogGroupArn")
    @abc.abstractmethod
    def logging_configuration_worker_logs_cloud_watch_log_group_arn(
        self,
    ) -> typing.Optional[builtins.str]:
        '''The ARN for the CloudWatch Logs group where the Apache Airflow Worker logs are published.'''
        ...

    @builtins.property
    @jsii.member(jsii_name="maxWebservers")
    @abc.abstractmethod
    def max_webservers(self) -> typing.Optional[jsii.Number]:
        '''The maximum number of web servers that you want to run in your environment.

        Amazon MWAA scales the number of Apache Airflow web servers up to the number you specify for MaxWebservers when you interact with your Apache
        Airflow environment using Apache Airflow REST API, or the Apache Airflow CLI. For example, in scenarios where your workload requires network
        calls to the Apache Airflow REST API with a high transaction-per-second (TPS) rate, Amazon MWAA will increase the number of web servers up to
        the number set in MaxWebservers. As TPS rates decrease Amazon MWAA disposes of the additional web servers, and scales down to the number set in
        MinWebservers.

        Valid values: For environments larger than mw1.micro, accepts values from 2 to 5. Defaults to 2 for all environment sizes except mw1.micro,
        which defaults to 1.
        '''
        ...

    @builtins.property
    @jsii.member(jsii_name="maxWorkers")
    @abc.abstractmethod
    def max_workers(self) -> typing.Optional[jsii.Number]:
        '''The maximum number of workers that you want to run in your environment.

        MWAA scales the number of Apache Airflow workers up to the number you specify in the MaxWorkers field. For example, 20. When there are no more
        tasks running, and no more in the queue, MWAA disposes of the extra workers leaving the one worker that is included with your environment, or
        the number you specify in MinWorkers.
        '''
        ...

    @builtins.property
    @jsii.member(jsii_name="minWebservers")
    @abc.abstractmethod
    def min_webservers(self) -> typing.Optional[jsii.Number]:
        '''The minimum number of web servers that you want to run in your environment.

        Amazon MWAA scales the number of Apache Airflow web servers up to the number you specify for MaxWebservers when you interact with your Apache
        Airflow environment using Apache Airflow REST API, or the Apache Airflow CLI. As the transaction-per-second rate, and the network load,
        decrease, Amazon MWAA disposes of the additional web servers, and scales down to the number set in MinWebservers.

        Valid values: For environments larger than mw1.micro, accepts values from 2 to 5. Defaults to 2 for all environment sizes except mw1.micro,
        which defaults to 1.
        '''
        ...

    @builtins.property
    @jsii.member(jsii_name="minWorkers")
    @abc.abstractmethod
    def min_workers(self) -> typing.Optional[jsii.Number]:
        '''The minimum number of workers that you want to run in your environment.

        MWAA scales the number of Apache Airflow workers up to the number you
        specify in the MaxWorkers field. When there are no more tasks running, and no more in the queue, MWAA disposes of the extra workers leaving
        the worker count you specify in the MinWorkers field. For example, 2.
        '''
        ...

    @builtins.property
    @jsii.member(jsii_name="networkConfiguration")
    @abc.abstractmethod
    def network_configuration(self) -> typing.Optional[NetworkConfiguration]:
        '''The VPC networking components used to secure and enable network traffic between the AWS resources for your environment.'''
        ...

    @builtins.property
    @jsii.member(jsii_name="pluginsS3ObjectVersion")
    @abc.abstractmethod
    def plugins_s3_object_version(self) -> typing.Optional[builtins.str]:
        '''The version of the plugins.zip file on your Amazon S3 bucket.'''
        ...

    @builtins.property
    @jsii.member(jsii_name="pluginsS3Path")
    @abc.abstractmethod
    def plugins_s3_path(self) -> typing.Optional[builtins.str]:
        '''The relative path to the plugins.zip file on your Amazon S3 bucket. For example, plugins.zip.'''
        ...

    @builtins.property
    @jsii.member(jsii_name="requirementsS3ObjectVersion")
    @abc.abstractmethod
    def requirements_s3_object_version(self) -> typing.Optional[builtins.str]:
        '''The version of the requirements.txt file on your Amazon S3 bucket.'''
        ...

    @builtins.property
    @jsii.member(jsii_name="requirementsS3Path")
    @abc.abstractmethod
    def requirements_s3_path(self) -> typing.Optional[builtins.str]:
        '''The relative path to the requirements.txt file on your Amazon S3 bucket. For example, requirements.txt.'''
        ...

    @builtins.property
    @jsii.member(jsii_name="schedulers")
    @abc.abstractmethod
    def schedulers(self) -> typing.Optional[jsii.Number]:
        '''The number of schedulers that you want to run in your environment.

        Valid values:
        v2 - For environments larger than mw1.micro, accepts values from 2 to 5.
        Defaults to 2 for all environment sizes except mw1.micro, which defaults to 1.
        v1 - Accepts 1.
        '''
        ...

    @builtins.property
    @jsii.member(jsii_name="sourceBucket")
    @abc.abstractmethod
    def source_bucket(self) -> typing.Optional[_aws_cdk_aws_s3_ceddda9d.IBucket]:
        '''The Amazon S3 bucket where your DAG code and supporting files are stored.'''
        ...

    @builtins.property
    @jsii.member(jsii_name="startupScriptS3ObjectVersion")
    @abc.abstractmethod
    def startup_script_s3_object_version(self) -> typing.Optional[builtins.str]:
        '''The version of the startup shell script in your Amazon S3 bucket.

        You must specify the version ID that Amazon S3 assigns to the file every time you update the script.
        Version IDs are Unicode, UTF-8 encoded, URL-ready, opaque strings that are no more than 1,024 bytes long.

        The following is an example:
        3sL4kqtJlcpXroDTDmJ+rmSpXd3dIbrHY+MTRCxf3vjVBH40Nr8X8gdRQBpUMLUo
        '''
        ...

    @builtins.property
    @jsii.member(jsii_name="startupScriptS3Path")
    @abc.abstractmethod
    def startup_script_s3_path(self) -> typing.Optional[builtins.str]:
        '''The relative path to the startup shell script in your Amazon S3 bucket.

        For example, s3://mwaa-environment/startup.sh.
        Amazon MWAA runs the script as your environment starts, and before running the Apache Airflow process.
        You can use this script to install dependencies, modify Apache Airflow configuration options, and set environment variables.
        '''
        ...

    @builtins.property
    @jsii.member(jsii_name="webserverAccessMode")
    @abc.abstractmethod
    def webserver_access_mode(self) -> typing.Optional[WebserverAccessMode]:
        '''The Apache Airflow Web server access mode.'''
        ...

    @builtins.property
    @jsii.member(jsii_name="webserverUrl")
    @abc.abstractmethod
    def webserver_url(self) -> typing.Optional[builtins.str]:
        '''The URL of your Apache Airflow UI.'''
        ...

    @builtins.property
    @jsii.member(jsii_name="webserverVpcEndpointService")
    @abc.abstractmethod
    def webserver_vpc_endpoint_service(self) -> typing.Optional[builtins.str]:
        '''The VPC endpoint for the environment's web server.'''
        ...

    @builtins.property
    @jsii.member(jsii_name="weeklyMaintenanceWindowStart")
    @abc.abstractmethod
    def weekly_maintenance_window_start(self) -> typing.Optional[builtins.str]:
        '''The day and time of the week to start weekly maintenance updates of your environment in the following format: DAY:HH:MM.

        For example: TUE:03:30. You can specify a start time in 30 minute increments only.

        Supported input includes the following:
        MON|TUE|WED|THU|FRI|SAT|SUN:([01]\\d|2[0-3]):(00|30)
        '''
        ...
2251
+
2252
+
2253
class _EnvironmentBaseProxy(
    EnvironmentBase,
    jsii.proxy_for(_aws_cdk_ceddda9d.Resource), # type: ignore[misc]
):
    '''Concrete jsii proxy for the abstract class ``EnvironmentBase``.

    Auto-generated by jsii-pacmak: each property simply forwards to the
    underlying JavaScript object via ``jsii.get`` using the camelCase
    ``jsii_name``. Do not hand-edit; regeneration will overwrite changes.
    '''

    @builtins.property
    @jsii.member(jsii_name="airflowConfigurationOptions")
    def airflow_configuration_options(
        self,
    ) -> typing.Mapping[builtins.str, builtins.str]:
        '''A list of key-value pairs containing the Airflow configuration options for your environment.

        For example, core.default_timezone: utc.
        '''
        return typing.cast(typing.Mapping[builtins.str, builtins.str], jsii.get(self, "airflowConfigurationOptions"))

    @builtins.property
    @jsii.member(jsii_name="environmentArn")
    def environment_arn(self) -> builtins.str:
        '''The ARN for the Amazon MWAA environment.'''
        return typing.cast(builtins.str, jsii.get(self, "environmentArn"))

    @builtins.property
    @jsii.member(jsii_name="name")
    def name(self) -> builtins.str:
        '''The name of your Amazon MWAA environment.'''
        return typing.cast(builtins.str, jsii.get(self, "name"))

    @builtins.property
    @jsii.member(jsii_name="airflowVersion")
    def airflow_version(self) -> typing.Optional[AirflowVersion]:
        '''The version of Apache Airflow to use for the environment.

        If no value is specified, defaults to the latest version.

        If you specify a newer version number for an existing environment, the version update requires some service interruption before taking effect.
        '''
        return typing.cast(typing.Optional[AirflowVersion], jsii.get(self, "airflowVersion"))

    @builtins.property
    @jsii.member(jsii_name="celeryExecutorQueue")
    def celery_executor_queue(self) -> typing.Optional[builtins.str]:
        '''The queue ARN for the environment's Celery Executor.

        Amazon MWAA uses a Celery Executor to distribute tasks across multiple workers.
        When you create an environment in a shared VPC, you must provide access to the Celery Executor queue from your VPC.
        '''
        return typing.cast(typing.Optional[builtins.str], jsii.get(self, "celeryExecutorQueue"))

    @builtins.property
    @jsii.member(jsii_name="dagS3Path")
    def dag_s3_path(self) -> typing.Optional[builtins.str]:
        '''The relative path to the DAGs folder on your Amazon S3 bucket.

        For example, dags.
        '''
        return typing.cast(typing.Optional[builtins.str], jsii.get(self, "dagS3Path"))

    @builtins.property
    @jsii.member(jsii_name="databaseVpcEndpointService")
    def database_vpc_endpoint_service(self) -> typing.Optional[builtins.str]:
        '''The VPC endpoint for the environment's Amazon RDS database.'''
        return typing.cast(typing.Optional[builtins.str], jsii.get(self, "databaseVpcEndpointService"))

    @builtins.property
    @jsii.member(jsii_name="endpointManagement")
    def endpoint_management(self) -> typing.Optional[EndpointManagement]:
        '''Defines whether the VPC endpoints configured for the environment are created, and managed, by the customer or by Amazon MWAA.

        If set to SERVICE, Amazon MWAA will create and manage the required VPC endpoints in your VPC.
        If set to CUSTOMER, you must create, and manage, the VPC endpoints in your VPC.
        '''
        return typing.cast(typing.Optional[EndpointManagement], jsii.get(self, "endpointManagement"))

    @builtins.property
    @jsii.member(jsii_name="environmentClass")
    def environment_class(self) -> typing.Optional[EnvironmentClass]:
        '''The environment class type.'''
        return typing.cast(typing.Optional[EnvironmentClass], jsii.get(self, "environmentClass"))

    @builtins.property
    @jsii.member(jsii_name="executionRole")
    def execution_role(self) -> typing.Optional[_aws_cdk_aws_iam_ceddda9d.IRole]:
        '''The execution role in IAM that allows MWAA to access AWS resources in your environment.'''
        return typing.cast(typing.Optional[_aws_cdk_aws_iam_ceddda9d.IRole], jsii.get(self, "executionRole"))

    @builtins.property
    @jsii.member(jsii_name="kmsKey")
    def kms_key(self) -> typing.Optional[_aws_cdk_aws_kms_ceddda9d.IKey]:
        '''The AWS Key Management Service (KMS) key to encrypt and decrypt the data in your environment.

        You can use an AWS KMS key managed by MWAA, or a customer-managed KMS key (advanced).
        '''
        return typing.cast(typing.Optional[_aws_cdk_aws_kms_ceddda9d.IKey], jsii.get(self, "kmsKey"))

    @builtins.property
    @jsii.member(jsii_name="loggingConfiguration")
    def logging_configuration(self) -> typing.Optional[LoggingConfiguration]:
        '''The Apache Airflow logs being sent to CloudWatch Logs.'''
        return typing.cast(typing.Optional[LoggingConfiguration], jsii.get(self, "loggingConfiguration"))

    @builtins.property
    @jsii.member(jsii_name="loggingConfigurationDagProcessingLogsCloudWatchLogGroupArn")
    def logging_configuration_dag_processing_logs_cloud_watch_log_group_arn(
        self,
    ) -> typing.Optional[builtins.str]:
        '''The ARN for the CloudWatch Logs group where the Apache Airflow DAG processing logs are published.'''
        return typing.cast(typing.Optional[builtins.str], jsii.get(self, "loggingConfigurationDagProcessingLogsCloudWatchLogGroupArn"))

    @builtins.property
    @jsii.member(jsii_name="loggingConfigurationSchedulerLogsCloudWatchLogGroupArn")
    def logging_configuration_scheduler_logs_cloud_watch_log_group_arn(
        self,
    ) -> typing.Optional[builtins.str]:
        '''The ARN for the CloudWatch Logs group where the Apache Airflow Scheduler logs are published.'''
        return typing.cast(typing.Optional[builtins.str], jsii.get(self, "loggingConfigurationSchedulerLogsCloudWatchLogGroupArn"))

    @builtins.property
    @jsii.member(jsii_name="loggingConfigurationTaskLogsCloudWatchLogGroupArn")
    def logging_configuration_task_logs_cloud_watch_log_group_arn(
        self,
    ) -> typing.Optional[builtins.str]:
        '''The ARN for the CloudWatch Logs group where the Apache Airflow task logs are published.'''
        return typing.cast(typing.Optional[builtins.str], jsii.get(self, "loggingConfigurationTaskLogsCloudWatchLogGroupArn"))

    @builtins.property
    @jsii.member(jsii_name="loggingConfigurationWebserverLogsCloudWatchLogGroupArn")
    def logging_configuration_webserver_logs_cloud_watch_log_group_arn(
        self,
    ) -> typing.Optional[builtins.str]:
        '''The ARN for the CloudWatch Logs group where the Apache Airflow Web server logs are published.'''
        return typing.cast(typing.Optional[builtins.str], jsii.get(self, "loggingConfigurationWebserverLogsCloudWatchLogGroupArn"))

    @builtins.property
    @jsii.member(jsii_name="loggingConfigurationWorkerLogsCloudWatchLogGroupArn")
    def logging_configuration_worker_logs_cloud_watch_log_group_arn(
        self,
    ) -> typing.Optional[builtins.str]:
        '''The ARN for the CloudWatch Logs group where the Apache Airflow Worker logs are published.'''
        return typing.cast(typing.Optional[builtins.str], jsii.get(self, "loggingConfigurationWorkerLogsCloudWatchLogGroupArn"))

    @builtins.property
    @jsii.member(jsii_name="maxWebservers")
    def max_webservers(self) -> typing.Optional[jsii.Number]:
        '''The maximum number of web servers that you want to run in your environment.

        Amazon MWAA scales the number of Apache Airflow web servers up to the number you specify for MaxWebservers when you interact with your Apache
        Airflow environment using Apache Airflow REST API, or the Apache Airflow CLI. For example, in scenarios where your workload requires network
        calls to the Apache Airflow REST API with a high transaction-per-second (TPS) rate, Amazon MWAA will increase the number of web servers up to
        the number set in MaxWebservers. As TPS rates decrease Amazon MWAA disposes of the additional web servers, and scales down to the number set in
        MinWebservers.

        Valid values: For environments larger than mw1.micro, accepts values from 2 to 5. Defaults to 2 for all environment sizes except mw1.micro,
        which defaults to 1.
        '''
        return typing.cast(typing.Optional[jsii.Number], jsii.get(self, "maxWebservers"))

    @builtins.property
    @jsii.member(jsii_name="maxWorkers")
    def max_workers(self) -> typing.Optional[jsii.Number]:
        '''The maximum number of workers that you want to run in your environment.

        MWAA scales the number of Apache Airflow workers up to the number you specify in the MaxWorkers field. For example, 20. When there are no more
        tasks running, and no more in the queue, MWAA disposes of the extra workers leaving the one worker that is included with your environment, or
        the number you specify in MinWorkers.
        '''
        return typing.cast(typing.Optional[jsii.Number], jsii.get(self, "maxWorkers"))

    @builtins.property
    @jsii.member(jsii_name="minWebservers")
    def min_webservers(self) -> typing.Optional[jsii.Number]:
        '''The minimum number of web servers that you want to run in your environment.

        Amazon MWAA scales the number of Apache Airflow web servers up to the number you specify for MaxWebservers when you interact with your Apache
        Airflow environment using Apache Airflow REST API, or the Apache Airflow CLI. As the transaction-per-second rate, and the network load,
        decrease, Amazon MWAA disposes of the additional web servers, and scales down to the number set in MinWebservers.

        Valid values: For environments larger than mw1.micro, accepts values from 2 to 5. Defaults to 2 for all environment sizes except mw1.micro,
        which defaults to 1.
        '''
        return typing.cast(typing.Optional[jsii.Number], jsii.get(self, "minWebservers"))

    @builtins.property
    @jsii.member(jsii_name="minWorkers")
    def min_workers(self) -> typing.Optional[jsii.Number]:
        '''The minimum number of workers that you want to run in your environment.

        MWAA scales the number of Apache Airflow workers up to the number you
        specify in the MaxWorkers field. When there are no more tasks running, and no more in the queue, MWAA disposes of the extra workers leaving
        the worker count you specify in the MinWorkers field. For example, 2.
        '''
        return typing.cast(typing.Optional[jsii.Number], jsii.get(self, "minWorkers"))

    @builtins.property
    @jsii.member(jsii_name="networkConfiguration")
    def network_configuration(self) -> typing.Optional[NetworkConfiguration]:
        '''The VPC networking components used to secure and enable network traffic between the AWS resources for your environment.'''
        return typing.cast(typing.Optional[NetworkConfiguration], jsii.get(self, "networkConfiguration"))

    @builtins.property
    @jsii.member(jsii_name="pluginsS3ObjectVersion")
    def plugins_s3_object_version(self) -> typing.Optional[builtins.str]:
        '''The version of the plugins.zip file on your Amazon S3 bucket.'''
        return typing.cast(typing.Optional[builtins.str], jsii.get(self, "pluginsS3ObjectVersion"))

    @builtins.property
    @jsii.member(jsii_name="pluginsS3Path")
    def plugins_s3_path(self) -> typing.Optional[builtins.str]:
        '''The relative path to the plugins.zip file on your Amazon S3 bucket. For example, plugins.zip.'''
        return typing.cast(typing.Optional[builtins.str], jsii.get(self, "pluginsS3Path"))

    @builtins.property
    @jsii.member(jsii_name="requirementsS3ObjectVersion")
    def requirements_s3_object_version(self) -> typing.Optional[builtins.str]:
        '''The version of the requirements.txt file on your Amazon S3 bucket.'''
        return typing.cast(typing.Optional[builtins.str], jsii.get(self, "requirementsS3ObjectVersion"))

    @builtins.property
    @jsii.member(jsii_name="requirementsS3Path")
    def requirements_s3_path(self) -> typing.Optional[builtins.str]:
        '''The relative path to the requirements.txt file on your Amazon S3 bucket. For example, requirements.txt.'''
        return typing.cast(typing.Optional[builtins.str], jsii.get(self, "requirementsS3Path"))

    @builtins.property
    @jsii.member(jsii_name="schedulers")
    def schedulers(self) -> typing.Optional[jsii.Number]:
        '''The number of schedulers that you want to run in your environment.

        Valid values:
        v2 - For environments larger than mw1.micro, accepts values from 2 to 5.
        Defaults to 2 for all environment sizes except mw1.micro, which defaults to 1.
        v1 - Accepts 1.
        '''
        return typing.cast(typing.Optional[jsii.Number], jsii.get(self, "schedulers"))

    @builtins.property
    @jsii.member(jsii_name="sourceBucket")
    def source_bucket(self) -> typing.Optional[_aws_cdk_aws_s3_ceddda9d.IBucket]:
        '''The Amazon S3 bucket where your DAG code and supporting files are stored.'''
        return typing.cast(typing.Optional[_aws_cdk_aws_s3_ceddda9d.IBucket], jsii.get(self, "sourceBucket"))

    @builtins.property
    @jsii.member(jsii_name="startupScriptS3ObjectVersion")
    def startup_script_s3_object_version(self) -> typing.Optional[builtins.str]:
        '''The version of the startup shell script in your Amazon S3 bucket.

        You must specify the version ID that Amazon S3 assigns to the file every time you update the script.
        Version IDs are Unicode, UTF-8 encoded, URL-ready, opaque strings that are no more than 1,024 bytes long.

        The following is an example:
        3sL4kqtJlcpXroDTDmJ+rmSpXd3dIbrHY+MTRCxf3vjVBH40Nr8X8gdRQBpUMLUo
        '''
        return typing.cast(typing.Optional[builtins.str], jsii.get(self, "startupScriptS3ObjectVersion"))

    @builtins.property
    @jsii.member(jsii_name="startupScriptS3Path")
    def startup_script_s3_path(self) -> typing.Optional[builtins.str]:
        '''The relative path to the startup shell script in your Amazon S3 bucket.

        For example, s3://mwaa-environment/startup.sh.
        Amazon MWAA runs the script as your environment starts, and before running the Apache Airflow process.
        You can use this script to install dependencies, modify Apache Airflow configuration options, and set environment variables.
        '''
        return typing.cast(typing.Optional[builtins.str], jsii.get(self, "startupScriptS3Path"))

    @builtins.property
    @jsii.member(jsii_name="webserverAccessMode")
    def webserver_access_mode(self) -> typing.Optional[WebserverAccessMode]:
        '''The Apache Airflow Web server access mode.'''
        return typing.cast(typing.Optional[WebserverAccessMode], jsii.get(self, "webserverAccessMode"))

    @builtins.property
    @jsii.member(jsii_name="webserverUrl")
    def webserver_url(self) -> typing.Optional[builtins.str]:
        '''The URL of your Apache Airflow UI.'''
        return typing.cast(typing.Optional[builtins.str], jsii.get(self, "webserverUrl"))

    @builtins.property
    @jsii.member(jsii_name="webserverVpcEndpointService")
    def webserver_vpc_endpoint_service(self) -> typing.Optional[builtins.str]:
        '''The VPC endpoint for the environment's web server.'''
        return typing.cast(typing.Optional[builtins.str], jsii.get(self, "webserverVpcEndpointService"))

    @builtins.property
    @jsii.member(jsii_name="weeklyMaintenanceWindowStart")
    def weekly_maintenance_window_start(self) -> typing.Optional[builtins.str]:
        '''The day and time of the week to start weekly maintenance updates of your environment in the following format: DAY:HH:MM.

        For example: TUE:03:30. You can specify a start time in 30 minute increments only.

        Supported input includes the following:
        MON|TUE|WED|THU|FRI|SAT|SUN:([01]\\d|2[0-3]):(00|30)
        '''
        return typing.cast(typing.Optional[builtins.str], jsii.get(self, "weeklyMaintenanceWindowStart"))

# Adding a "__jsii_proxy_class__(): typing.Type" function to the abstract class
# (jsii instantiates this proxy whenever the abstract class is returned across the jsii boundary)
typing.cast(typing.Any, EnvironmentBase).__jsii_proxy_class__ = lambda : _EnvironmentBaseProxy
2549
+
2550
+
2551
+ class Environment(
2552
+ EnvironmentBase,
2553
+ metaclass=jsii.JSIIMeta,
2554
+ jsii_type="@robhan-cdk-lib/aws_mwaa.Environment",
2555
+ ):
2556
    def __init__(
        self,
        scope: _constructs_77d1e7e8.Construct,
        id: builtins.str,
        *,
        airflow_configuration_options: typing.Mapping[builtins.str, builtins.str],
        name: builtins.str,
        airflow_version: typing.Optional[AirflowVersion] = None,
        dag_s3_path: typing.Optional[builtins.str] = None,
        endpoint_management: typing.Optional[EndpointManagement] = None,
        environment_class: typing.Optional[EnvironmentClass] = None,
        execution_role: typing.Optional[_aws_cdk_aws_iam_ceddda9d.IRole] = None,
        kms_key: typing.Optional[_aws_cdk_aws_kms_ceddda9d.IKey] = None,
        logging_configuration: typing.Optional[typing.Union[LoggingConfiguration, typing.Dict[builtins.str, typing.Any]]] = None,
        max_webservers: typing.Optional[jsii.Number] = None,
        max_workers: typing.Optional[jsii.Number] = None,
        min_webservers: typing.Optional[jsii.Number] = None,
        min_workers: typing.Optional[jsii.Number] = None,
        network_configuration: typing.Optional[typing.Union[NetworkConfiguration, typing.Dict[builtins.str, typing.Any]]] = None,
        plugins_s3_object_version: typing.Optional[builtins.str] = None,
        plugins_s3_path: typing.Optional[builtins.str] = None,
        requirements_s3_object_version: typing.Optional[builtins.str] = None,
        requirements_s3_path: typing.Optional[builtins.str] = None,
        schedulers: typing.Optional[jsii.Number] = None,
        source_bucket: typing.Optional[_aws_cdk_aws_s3_ceddda9d.IBucket] = None,
        startup_script_s3_object_version: typing.Optional[builtins.str] = None,
        startup_script_s3_path: typing.Optional[builtins.str] = None,
        webserver_access_mode: typing.Optional[WebserverAccessMode] = None,
        weekly_maintenance_window_start: typing.Optional[builtins.str] = None,
    ) -> None:
        '''Create a new Amazon MWAA Environment construct.

        :param scope: -
        :param id: -
        :param airflow_configuration_options: A list of key-value pairs containing the Airflow configuration options for your environment. For example, core.default_timezone: utc.
        :param name: The name of your Amazon MWAA environment.
        :param airflow_version: The version of Apache Airflow to use for the environment. If no value is specified, defaults to the latest version. If you specify a newer version number for an existing environment, the version update requires some service interruption before taking effect.
        :param dag_s3_path: The relative path to the DAGs folder on your Amazon S3 bucket. For example, dags.
        :param endpoint_management: Defines whether the VPC endpoints configured for the environment are created, and managed, by the customer or by Amazon MWAA. If set to SERVICE, Amazon MWAA will create and manage the required VPC endpoints in your VPC. If set to CUSTOMER, you must create, and manage, the VPC endpoints in your VPC.
        :param environment_class: The environment class type.
        :param execution_role: The execution role in IAM that allows MWAA to access AWS resources in your environment.
        :param kms_key: The AWS Key Management Service (KMS) key to encrypt and decrypt the data in your environment. You can use an AWS KMS key managed by MWAA, or a customer-managed KMS key (advanced).
        :param logging_configuration: The Apache Airflow logs being sent to CloudWatch Logs.
        :param max_webservers: The maximum number of web servers that you want to run in your environment. Amazon MWAA scales the number of Apache Airflow web servers up to the number you specify for MaxWebservers when you interact with your Apache Airflow environment using Apache Airflow REST API, or the Apache Airflow CLI. For example, in scenarios where your workload requires network calls to the Apache Airflow REST API with a high transaction-per-second (TPS) rate, Amazon MWAA will increase the number of web servers up to the number set in MaxWebservers. As TPS rates decrease Amazon MWAA disposes of the additional web servers, and scales down to the number set in MinWebservers. Valid values: For environments larger than mw1.micro, accepts values from 2 to 5. Defaults to 2 for all environment sizes except mw1.micro, which defaults to 1.
        :param max_workers: The maximum number of workers that you want to run in your environment. MWAA scales the number of Apache Airflow workers up to the number you specify in the MaxWorkers field. For example, 20. When there are no more tasks running, and no more in the queue, MWAA disposes of the extra workers leaving the one worker that is included with your environment, or the number you specify in MinWorkers.
        :param min_webservers: The minimum number of web servers that you want to run in your environment. Amazon MWAA scales the number of Apache Airflow web servers up to the number you specify for MaxWebservers when you interact with your Apache Airflow environment using Apache Airflow REST API, or the Apache Airflow CLI. As the transaction-per-second rate, and the network load, decrease, Amazon MWAA disposes of the additional web servers, and scales down to the number set in MinWebservers. Valid values: For environments larger than mw1.micro, accepts values from 2 to 5. Defaults to 2 for all environment sizes except mw1.micro, which defaults to 1.
        :param min_workers: The minimum number of workers that you want to run in your environment. MWAA scales the number of Apache Airflow workers up to the number you specify in the MaxWorkers field. When there are no more tasks running, and no more in the queue, MWAA disposes of the extra workers leaving the worker count you specify in the MinWorkers field. For example, 2.
        :param network_configuration: The VPC networking components used to secure and enable network traffic between the AWS resources for your environment.
        :param plugins_s3_object_version: The version of the plugins.zip file on your Amazon S3 bucket.
        :param plugins_s3_path: The relative path to the plugins.zip file on your Amazon S3 bucket. For example, plugins.zip.
        :param requirements_s3_object_version: The version of the requirements.txt file on your Amazon S3 bucket.
        :param requirements_s3_path: The relative path to the requirements.txt file on your Amazon S3 bucket. For example, requirements.txt.
        :param schedulers: The number of schedulers that you want to run in your environment. Valid values: v2 - For environments larger than mw1.micro, accepts values from 2 to 5. Defaults to 2 for all environment sizes except mw1.micro, which defaults to 1. v1 - Accepts 1.
        :param source_bucket: The Amazon S3 bucket where your DAG code and supporting files are stored.
        :param startup_script_s3_object_version: The version of the startup shell script in your Amazon S3 bucket. You must specify the version ID that Amazon S3 assigns to the file every time you update the script. Version IDs are Unicode, UTF-8 encoded, URL-ready, opaque strings that are no more than 1,024 bytes long. The following is an example: 3sL4kqtJlcpXroDTDmJ+rmSpXd3dIbrHY+MTRCxf3vjVBH40Nr8X8gdRQBpUMLUo
        :param startup_script_s3_path: The relative path to the startup shell script in your Amazon S3 bucket. For example, s3://mwaa-environment/startup.sh. Amazon MWAA runs the script as your environment starts, and before running the Apache Airflow process. You can use this script to install dependencies, modify Apache Airflow configuration options, and set environment variables.
        :param webserver_access_mode: The Apache Airflow Web server access mode.
        :param weekly_maintenance_window_start: The day and time of the week to start weekly maintenance updates of your environment in the following format: DAY:HH:MM. For example: TUE:03:30. You can specify a start time in 30 minute increments only. Supported input includes the following: MON|TUE|WED|THU|FRI|SAT|SUN:([01]\\d|2[0-3]):(00|30)
        '''
        # Runtime type-check guard generated by jsii-pacmak; the stub holds the
        # expected annotations and is skipped entirely when Python runs with -O.
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__5caedcd0e4f79944eef6be911818e685afe29161ed637d59813b0c34497c9a53)
            check_type(argname="argument scope", value=scope, expected_type=type_hints["scope"])
            check_type(argname="argument id", value=id, expected_type=type_hints["id"])
        # Bundle all keyword arguments into the jsii struct expected by the
        # JavaScript-side constructor.
        props = EnvironmentProps(
            airflow_configuration_options=airflow_configuration_options,
            name=name,
            airflow_version=airflow_version,
            dag_s3_path=dag_s3_path,
            endpoint_management=endpoint_management,
            environment_class=environment_class,
            execution_role=execution_role,
            kms_key=kms_key,
            logging_configuration=logging_configuration,
            max_webservers=max_webservers,
            max_workers=max_workers,
            min_webservers=min_webservers,
            min_workers=min_workers,
            network_configuration=network_configuration,
            plugins_s3_object_version=plugins_s3_object_version,
            plugins_s3_path=plugins_s3_path,
            requirements_s3_object_version=requirements_s3_object_version,
            requirements_s3_path=requirements_s3_path,
            schedulers=schedulers,
            source_bucket=source_bucket,
            startup_script_s3_object_version=startup_script_s3_object_version,
            startup_script_s3_path=startup_script_s3_path,
            webserver_access_mode=webserver_access_mode,
            weekly_maintenance_window_start=weekly_maintenance_window_start,
        )

        # Delegate actual construction to the jsii kernel (JavaScript runtime).
        jsii.create(self.__class__, self, [scope, id, props])
2646
+
2647
+ @jsii.member(jsii_name="fromEnvironmentAttributes")
2648
+ @builtins.classmethod
2649
+ def from_environment_attributes(
2650
+ cls,
2651
+ scope: _constructs_77d1e7e8.Construct,
2652
+ id: builtins.str,
2653
+ *,
2654
+ airflow_configuration_options: typing.Mapping[builtins.str, builtins.str],
2655
+ environment_arn: builtins.str,
2656
+ name: builtins.str,
2657
+ airflow_version: typing.Optional[AirflowVersion] = None,
2658
+ celery_executor_queue: typing.Optional[builtins.str] = None,
2659
+ dag_s3_path: typing.Optional[builtins.str] = None,
2660
+ database_vpc_endpoint_service: typing.Optional[builtins.str] = None,
2661
+ endpoint_management: typing.Optional[EndpointManagement] = None,
2662
+ environment_class: typing.Optional[EnvironmentClass] = None,
2663
+ execution_role: typing.Optional[_aws_cdk_aws_iam_ceddda9d.IRole] = None,
2664
+ kms_key: typing.Optional[_aws_cdk_aws_kms_ceddda9d.IKey] = None,
2665
+ logging_configuration: typing.Optional[typing.Union[LoggingConfiguration, typing.Dict[builtins.str, typing.Any]]] = None,
2666
+ logging_configuration_dag_processing_logs_cloud_watch_log_group_arn: typing.Optional[builtins.str] = None,
2667
+ logging_configuration_scheduler_logs_cloud_watch_log_group_arn: typing.Optional[builtins.str] = None,
2668
+ logging_configuration_task_logs_cloud_watch_log_group_arn: typing.Optional[builtins.str] = None,
2669
+ logging_configuration_webserver_logs_cloud_watch_log_group_arn: typing.Optional[builtins.str] = None,
2670
+ logging_configuration_worker_logs_cloud_watch_log_group_arn: typing.Optional[builtins.str] = None,
2671
+ max_webservers: typing.Optional[jsii.Number] = None,
2672
+ max_workers: typing.Optional[jsii.Number] = None,
2673
+ min_webservers: typing.Optional[jsii.Number] = None,
2674
+ min_workers: typing.Optional[jsii.Number] = None,
2675
+ network_configuration: typing.Optional[typing.Union[NetworkConfiguration, typing.Dict[builtins.str, typing.Any]]] = None,
2676
+ plugins_s3_object_version: typing.Optional[builtins.str] = None,
2677
+ plugins_s3_path: typing.Optional[builtins.str] = None,
2678
+ requirements_s3_object_version: typing.Optional[builtins.str] = None,
2679
+ requirements_s3_path: typing.Optional[builtins.str] = None,
2680
+ schedulers: typing.Optional[jsii.Number] = None,
2681
+ source_bucket: typing.Optional[_aws_cdk_aws_s3_ceddda9d.IBucket] = None,
2682
+ startup_script_s3_object_version: typing.Optional[builtins.str] = None,
2683
+ startup_script_s3_path: typing.Optional[builtins.str] = None,
2684
+ webserver_access_mode: typing.Optional[WebserverAccessMode] = None,
2685
+ webserver_url: typing.Optional[builtins.str] = None,
2686
+ webserver_vpc_endpoint_service: typing.Optional[builtins.str] = None,
2687
+ weekly_maintenance_window_start: typing.Optional[builtins.str] = None,
2688
+ ) -> IEnvironment:
2689
+ '''
2690
+ :param scope: -
2691
+ :param id: -
2692
+ :param airflow_configuration_options: A list of key-value pairs containing the Airflow configuration options for your environment. For example, core.default_timezone: utc.
2693
+ :param environment_arn: The ARN for the Amazon MWAA environment.
2694
+ :param name: The name of your Amazon MWAA environment.
2695
+ :param airflow_version: The version of Apache Airflow to use for the environment. If no value is specified, defaults to the latest version. If you specify a newer version number for an existing environment, the version update requires some service interruption before taking effect.
2696
+ :param celery_executor_queue: The queue ARN for the environment's Celery Executor. Amazon MWAA uses a Celery Executor to distribute tasks across multiple workers. When you create an environment in a shared VPC, you must provide access to the Celery Executor queue from your VPC.
2697
+ :param dag_s3_path: The relative path to the DAGs folder on your Amazon S3 bucket. For example, dags.
2698
+ :param database_vpc_endpoint_service: The VPC endpoint for the environment's Amazon RDS database.
2699
+ :param endpoint_management: Defines whether the VPC endpoints configured for the environment are created, and managed, by the customer or by Amazon MWAA. If set to SERVICE, Amazon MWAA will create and manage the required VPC endpoints in your VPC. If set to CUSTOMER, you must create, and manage, the VPC endpoints in your VPC.
2700
+ :param environment_class: The environment class type.
2701
+ :param execution_role: The execution role in IAM that allows MWAA to access AWS resources in your environment.
2702
+ :param kms_key: The AWS Key Management Service (KMS) key to encrypt and decrypt the data in your environment. You can use an AWS KMS key managed by MWAA, or a customer-managed KMS key (advanced).
2703
+ :param logging_configuration: The Apache Airflow logs being sent to CloudWatch Logs.
2704
+ :param logging_configuration_dag_processing_logs_cloud_watch_log_group_arn: The ARN for the CloudWatch Logs group where the Apache Airflow DAG processing logs are published.
2705
+ :param logging_configuration_scheduler_logs_cloud_watch_log_group_arn: The ARN for the CloudWatch Logs group where the Apache Airflow Scheduler logs are published.
2706
+ :param logging_configuration_task_logs_cloud_watch_log_group_arn: The ARN for the CloudWatch Logs group where the Apache Airflow task logs are published.
2707
+ :param logging_configuration_webserver_logs_cloud_watch_log_group_arn: The ARN for the CloudWatch Logs group where the Apache Airflow Web server logs are published.
2708
+ :param logging_configuration_worker_logs_cloud_watch_log_group_arn: The ARN for the CloudWatch Logs group where the Apache Airflow Worker logs are published.
2709
+ :param max_webservers: The maximum number of web servers that you want to run in your environment. Amazon MWAA scales the number of Apache Airflow web servers up to the number you specify for MaxWebservers when you interact with your Apache Airflow environment using Apache Airflow REST API, or the Apache Airflow CLI. For example, in scenarios where your workload requires network calls to the Apache Airflow REST API with a high transaction-per-second (TPS) rate, Amazon MWAA will increase the number of web servers up to the number set in MaxWebserers. As TPS rates decrease Amazon MWAA disposes of the additional web servers, and scales down to the number set in MinxWebserers. Valid values: For environments larger than mw1.micro, accepts values from 2 to 5. Defaults to 2 for all environment sizes except mw1.micro, which defaults to 1.
2710
+ :param max_workers: The maximum number of workers that you want to run in your environment. MWAA scales the number of Apache Airflow workers up to the number you specify in the MaxWorkers field. For example, 20. When there are no more tasks running, and no more in the queue, MWAA disposes of the extra workers leaving the one worker that is included with your environment, or the number you specify in MinWorkers.
2711
+ :param min_webservers: The minimum number of web servers that you want to run in your environment. Amazon MWAA scales the number of Apache Airflow web servers up to the number you specify for MaxWebservers when you interact with your Apache Airflow environment using Apache Airflow REST API, or the Apache Airflow CLI. As the transaction-per-second rate, and the network load, decrease, Amazon MWAA disposes of the additional web servers, and scales down to the number set in MinxWebserers. Valid values: For environments larger than mw1.micro, accepts values from 2 to 5. Defaults to 2 for all environment sizes except mw1.micro, which defaults to 1.
2712
+ :param min_workers: The minimum number of workers that you want to run in your environment. MWAA scales the number of Apache Airflow workers up to the number you specify in the MaxWorkers field. When there are no more tasks running, and no more in the queue, MWAA disposes of the extra workers leaving the worker count you specify in the MinWorkers field. For example, 2.
2713
+ :param network_configuration: The VPC networking components used to secure and enable network traffic between the AWS resources for your environment.
2714
+ :param plugins_s3_object_version: The version of the plugins.zip file on your Amazon S3 bucket.
2715
+ :param plugins_s3_path: The relative path to the plugins.zip file on your Amazon S3 bucket. For example, plugins.zip.
2716
+ :param requirements_s3_object_version: The version of the requirements.txt file on your Amazon S3 bucket.
2717
+ :param requirements_s3_path: The relative path to the requirements.txt file on your Amazon S3 bucket. For example, requirements.txt.
2718
+ :param schedulers: The number of schedulers that you want to run in your environment. Valid values: v2 - For environments larger than mw1.micro, accepts values from 2 to 5. Defaults to 2 for all environment sizes except mw1.micro, which defaults to 1. v1 - Accepts 1.
2719
+ :param source_bucket: The Amazon S3 bucket where your DAG code and supporting files are stored.
2720
+ :param startup_script_s3_object_version: The version of the startup shell script in your Amazon S3 bucket. You must specify the version ID that Amazon S3 assigns to the file every time you update the script. Version IDs are Unicode, UTF-8 encoded, URL-ready, opaque strings that are no more than 1,024 bytes long. The following is an example: 3sL4kqtJlcpXroDTDmJ+rmSpXd3dIbrHY+MTRCxf3vjVBH40Nr8X8gdRQBpUMLUo
2721
+ :param startup_script_s3_path: The relative path to the startup shell script in your Amazon S3 bucket. For example, s3://mwaa-environment/startup.sh. Amazon MWAA runs the script as your environment starts, and before running the Apache Airflow process. You can use this script to install dependencies, modify Apache Airflow configuration options, and set environment variables.
2722
+ :param webserver_access_mode: The Apache Airflow Web server access mode.
2723
+ :param webserver_url: The URL of your Apache Airflow UI.
2724
+ :param webserver_vpc_endpoint_service: The VPC endpoint for the environment's web server.
2725
+ :param weekly_maintenance_window_start: The day and time of the week to start weekly maintenance updates of your environment in the following format: DAY:HH:MM. For example: TUE:03:30. You can specify a start time in 30 minute increments only. Supported input includes the following: MON|TUE|WED|THU|FRI|SAT|SUN:([01]\\d|2[0-3]):(00|30)
2726
+ '''
2727
+ if __debug__:
2728
+ type_hints = typing.get_type_hints(_typecheckingstub__95cc4f4257f86486302fcb2e763d242a3040b6f4ce19c4cd9698055844e13d0c)
2729
+ check_type(argname="argument scope", value=scope, expected_type=type_hints["scope"])
2730
+ check_type(argname="argument id", value=id, expected_type=type_hints["id"])
2731
+ attrs = EnvironmentAttributes(
2732
+ airflow_configuration_options=airflow_configuration_options,
2733
+ environment_arn=environment_arn,
2734
+ name=name,
2735
+ airflow_version=airflow_version,
2736
+ celery_executor_queue=celery_executor_queue,
2737
+ dag_s3_path=dag_s3_path,
2738
+ database_vpc_endpoint_service=database_vpc_endpoint_service,
2739
+ endpoint_management=endpoint_management,
2740
+ environment_class=environment_class,
2741
+ execution_role=execution_role,
2742
+ kms_key=kms_key,
2743
+ logging_configuration=logging_configuration,
2744
+ logging_configuration_dag_processing_logs_cloud_watch_log_group_arn=logging_configuration_dag_processing_logs_cloud_watch_log_group_arn,
2745
+ logging_configuration_scheduler_logs_cloud_watch_log_group_arn=logging_configuration_scheduler_logs_cloud_watch_log_group_arn,
2746
+ logging_configuration_task_logs_cloud_watch_log_group_arn=logging_configuration_task_logs_cloud_watch_log_group_arn,
2747
+ logging_configuration_webserver_logs_cloud_watch_log_group_arn=logging_configuration_webserver_logs_cloud_watch_log_group_arn,
2748
+ logging_configuration_worker_logs_cloud_watch_log_group_arn=logging_configuration_worker_logs_cloud_watch_log_group_arn,
2749
+ max_webservers=max_webservers,
2750
+ max_workers=max_workers,
2751
+ min_webservers=min_webservers,
2752
+ min_workers=min_workers,
2753
+ network_configuration=network_configuration,
2754
+ plugins_s3_object_version=plugins_s3_object_version,
2755
+ plugins_s3_path=plugins_s3_path,
2756
+ requirements_s3_object_version=requirements_s3_object_version,
2757
+ requirements_s3_path=requirements_s3_path,
2758
+ schedulers=schedulers,
2759
+ source_bucket=source_bucket,
2760
+ startup_script_s3_object_version=startup_script_s3_object_version,
2761
+ startup_script_s3_path=startup_script_s3_path,
2762
+ webserver_access_mode=webserver_access_mode,
2763
+ webserver_url=webserver_url,
2764
+ webserver_vpc_endpoint_service=webserver_vpc_endpoint_service,
2765
+ weekly_maintenance_window_start=weekly_maintenance_window_start,
2766
+ )
2767
+
2768
+ return typing.cast(IEnvironment, jsii.sinvoke(cls, "fromEnvironmentAttributes", [scope, id, attrs]))
2769
+
2770
+ @jsii.member(jsii_name="isEnvironment")
2771
+ @builtins.classmethod
2772
+ def is_environment(cls, x: typing.Any) -> builtins.bool:
2773
+ '''
2774
+ :param x: -
2775
+ '''
2776
+ if __debug__:
2777
+ type_hints = typing.get_type_hints(_typecheckingstub__7959ab616fb8602cff9e37f75ee4b0f7b75f4963daad74b8e571e648d8ece8a5)
2778
+ check_type(argname="argument x", value=x, expected_type=type_hints["x"])
2779
+ return typing.cast(builtins.bool, jsii.sinvoke(cls, "isEnvironment", [x]))
2780
+
2781
+ @builtins.property
2782
+ @jsii.member(jsii_name="airflowConfigurationOptions")
2783
+ def airflow_configuration_options(
2784
+ self,
2785
+ ) -> typing.Mapping[builtins.str, builtins.str]:
2786
+ '''A list of key-value pairs containing the Airflow configuration options for your environment.
2787
+
2788
+ For example, core.default_timezone: utc.
2789
+ '''
2790
+ return typing.cast(typing.Mapping[builtins.str, builtins.str], jsii.get(self, "airflowConfigurationOptions"))
2791
+
2792
+ @builtins.property
2793
+ @jsii.member(jsii_name="environmentArn")
2794
+ def environment_arn(self) -> builtins.str:
2795
+ '''The ARN for the Amazon MWAA environment.'''
2796
+ return typing.cast(builtins.str, jsii.get(self, "environmentArn"))
2797
+
2798
+ @builtins.property
2799
+ @jsii.member(jsii_name="name")
2800
+ def name(self) -> builtins.str:
2801
+ '''The name of your Amazon MWAA environment.'''
2802
+ return typing.cast(builtins.str, jsii.get(self, "name"))
2803
+
2804
+ @builtins.property
2805
+ @jsii.member(jsii_name="airflowVersion")
2806
+ def airflow_version(self) -> typing.Optional[AirflowVersion]:
2807
+ '''The version of Apache Airflow to use for the environment.
2808
+
2809
+ If no value is specified, defaults to the latest version.
2810
+
2811
+ If you specify a newer version number for an existing environment, the version update requires some service interruption before taking effect.
2812
+ '''
2813
+ return typing.cast(typing.Optional[AirflowVersion], jsii.get(self, "airflowVersion"))
2814
+
2815
+ @builtins.property
2816
+ @jsii.member(jsii_name="celeryExecutorQueue")
2817
+ def celery_executor_queue(self) -> typing.Optional[builtins.str]:
2818
+ '''The queue ARN for the environment's Celery Executor.
2819
+
2820
+ Amazon MWAA uses a Celery Executor to distribute tasks across multiple workers.
2821
+ When you create an environment in a shared VPC, you must provide access to the Celery Executor queue from your VPC.
2822
+ '''
2823
+ return typing.cast(typing.Optional[builtins.str], jsii.get(self, "celeryExecutorQueue"))
2824
+
2825
+ @builtins.property
2826
+ @jsii.member(jsii_name="dagS3Path")
2827
+ def dag_s3_path(self) -> typing.Optional[builtins.str]:
2828
+ '''The relative path to the DAGs folder on your Amazon S3 bucket.
2829
+
2830
+ For example, dags.
2831
+ '''
2832
+ return typing.cast(typing.Optional[builtins.str], jsii.get(self, "dagS3Path"))
2833
+
2834
+ @builtins.property
2835
+ @jsii.member(jsii_name="databaseVpcEndpointService")
2836
+ def database_vpc_endpoint_service(self) -> typing.Optional[builtins.str]:
2837
+ '''The VPC endpoint for the environment's Amazon RDS database.'''
2838
+ return typing.cast(typing.Optional[builtins.str], jsii.get(self, "databaseVpcEndpointService"))
2839
+
2840
+ @builtins.property
2841
+ @jsii.member(jsii_name="endpointManagement")
2842
+ def endpoint_management(self) -> typing.Optional[EndpointManagement]:
2843
+ '''Defines whether the VPC endpoints configured for the environment are created, and managed, by the customer or by Amazon MWAA.
2844
+
2845
+ If set to SERVICE, Amazon MWAA will create and manage the required VPC endpoints in your VPC.
2846
+ If set to CUSTOMER, you must create, and manage, the VPC endpoints in your VPC.
2847
+ '''
2848
+ return typing.cast(typing.Optional[EndpointManagement], jsii.get(self, "endpointManagement"))
2849
+
2850
+ @builtins.property
2851
+ @jsii.member(jsii_name="environmentClass")
2852
+ def environment_class(self) -> typing.Optional[EnvironmentClass]:
2853
+ '''The environment class type.'''
2854
+ return typing.cast(typing.Optional[EnvironmentClass], jsii.get(self, "environmentClass"))
2855
+
2856
+ @builtins.property
2857
+ @jsii.member(jsii_name="executionRole")
2858
+ def execution_role(self) -> typing.Optional[_aws_cdk_aws_iam_ceddda9d.IRole]:
2859
+ '''The execution role in IAM that allows MWAA to access AWS resources in your environment.'''
2860
+ return typing.cast(typing.Optional[_aws_cdk_aws_iam_ceddda9d.IRole], jsii.get(self, "executionRole"))
2861
+
2862
+ @builtins.property
2863
+ @jsii.member(jsii_name="kmsKey")
2864
+ def kms_key(self) -> typing.Optional[_aws_cdk_aws_kms_ceddda9d.IKey]:
2865
+ '''The AWS Key Management Service (KMS) key to encrypt and decrypt the data in your environment.
2866
+
2867
+ You can use an AWS KMS key managed by MWAA, or a customer-managed KMS key (advanced).
2868
+ '''
2869
+ return typing.cast(typing.Optional[_aws_cdk_aws_kms_ceddda9d.IKey], jsii.get(self, "kmsKey"))
2870
+
2871
+ @builtins.property
2872
+ @jsii.member(jsii_name="loggingConfiguration")
2873
+ def logging_configuration(self) -> typing.Optional[LoggingConfiguration]:
2874
+ '''The Apache Airflow logs being sent to CloudWatch Logs.'''
2875
+ return typing.cast(typing.Optional[LoggingConfiguration], jsii.get(self, "loggingConfiguration"))
2876
+
2877
+ @builtins.property
2878
+ @jsii.member(jsii_name="loggingConfigurationDagProcessingLogsCloudWatchLogGroupArn")
2879
+ def logging_configuration_dag_processing_logs_cloud_watch_log_group_arn(
2880
+ self,
2881
+ ) -> typing.Optional[builtins.str]:
2882
+ '''The ARN for the CloudWatch Logs group where the Apache Airflow DAG processing logs are published.'''
2883
+ return typing.cast(typing.Optional[builtins.str], jsii.get(self, "loggingConfigurationDagProcessingLogsCloudWatchLogGroupArn"))
2884
+
2885
+ @builtins.property
2886
+ @jsii.member(jsii_name="loggingConfigurationSchedulerLogsCloudWatchLogGroupArn")
2887
+ def logging_configuration_scheduler_logs_cloud_watch_log_group_arn(
2888
+ self,
2889
+ ) -> typing.Optional[builtins.str]:
2890
+ '''The ARN for the CloudWatch Logs group where the Apache Airflow Scheduler logs are published.'''
2891
+ return typing.cast(typing.Optional[builtins.str], jsii.get(self, "loggingConfigurationSchedulerLogsCloudWatchLogGroupArn"))
2892
+
2893
+ @builtins.property
2894
+ @jsii.member(jsii_name="loggingConfigurationTaskLogsCloudWatchLogGroupArn")
2895
+ def logging_configuration_task_logs_cloud_watch_log_group_arn(
2896
+ self,
2897
+ ) -> typing.Optional[builtins.str]:
2898
+ '''The ARN for the CloudWatch Logs group where the Apache Airflow task logs are published.'''
2899
+ return typing.cast(typing.Optional[builtins.str], jsii.get(self, "loggingConfigurationTaskLogsCloudWatchLogGroupArn"))
2900
+
2901
+ @builtins.property
2902
+ @jsii.member(jsii_name="loggingConfigurationWebserverLogsCloudWatchLogGroupArn")
2903
+ def logging_configuration_webserver_logs_cloud_watch_log_group_arn(
2904
+ self,
2905
+ ) -> typing.Optional[builtins.str]:
2906
+ '''The ARN for the CloudWatch Logs group where the Apache Airflow Web server logs are published.'''
2907
+ return typing.cast(typing.Optional[builtins.str], jsii.get(self, "loggingConfigurationWebserverLogsCloudWatchLogGroupArn"))
2908
+
2909
+ @builtins.property
2910
+ @jsii.member(jsii_name="loggingConfigurationWorkerLogsCloudWatchLogGroupArn")
2911
+ def logging_configuration_worker_logs_cloud_watch_log_group_arn(
2912
+ self,
2913
+ ) -> typing.Optional[builtins.str]:
2914
+ '''The ARN for the CloudWatch Logs group where the Apache Airflow Worker logs are published.'''
2915
+ return typing.cast(typing.Optional[builtins.str], jsii.get(self, "loggingConfigurationWorkerLogsCloudWatchLogGroupArn"))
2916
+
2917
+ @builtins.property
2918
+ @jsii.member(jsii_name="maxWebservers")
2919
+ def max_webservers(self) -> typing.Optional[jsii.Number]:
2920
+ '''The maximum number of web servers that you want to run in your environment.
2921
+
2922
+ Amazon MWAA scales the number of Apache Airflow web servers up to the number you specify for MaxWebservers when you interact with your Apache
2923
+ Airflow environment using Apache Airflow REST API, or the Apache Airflow CLI. For example, in scenarios where your workload requires network
2924
+ calls to the Apache Airflow REST API with a high transaction-per-second (TPS) rate, Amazon MWAA will increase the number of web servers up to
2925
+ the number set in MaxWebserers. As TPS rates decrease Amazon MWAA disposes of the additional web servers, and scales down to the number set in
2926
+ MinxWebserers.
2927
+
2928
+ Valid values: For environments larger than mw1.micro, accepts values from 2 to 5. Defaults to 2 for all environment sizes except mw1.micro,
2929
+ which defaults to 1.
2930
+ '''
2931
+ return typing.cast(typing.Optional[jsii.Number], jsii.get(self, "maxWebservers"))
2932
+
2933
+ @builtins.property
2934
+ @jsii.member(jsii_name="maxWorkers")
2935
+ def max_workers(self) -> typing.Optional[jsii.Number]:
2936
+ '''The maximum number of workers that you want to run in your environment.
2937
+
2938
+ MWAA scales the number of Apache Airflow workers up to the number you specify in the MaxWorkers field. For example, 20. When there are no more
2939
+ tasks running, and no more in the queue, MWAA disposes of the extra workers leaving the one worker that is included with your environment, or
2940
+ the number you specify in MinWorkers.
2941
+ '''
2942
+ return typing.cast(typing.Optional[jsii.Number], jsii.get(self, "maxWorkers"))
2943
+
2944
+ @builtins.property
2945
+ @jsii.member(jsii_name="minWebservers")
2946
+ def min_webservers(self) -> typing.Optional[jsii.Number]:
2947
+ '''The minimum number of web servers that you want to run in your environment.
2948
+
2949
+ Amazon MWAA scales the number of Apache Airflow web servers up to the number you specify for MaxWebservers when you interact with your Apache
2950
+ Airflow environment using Apache Airflow REST API, or the Apache Airflow CLI. As the transaction-per-second rate, and the network load,
2951
+ decrease, Amazon MWAA disposes of the additional web servers, and scales down to the number set in MinxWebserers.
2952
+
2953
+ Valid values: For environments larger than mw1.micro, accepts values from 2 to 5. Defaults to 2 for all environment sizes except mw1.micro,
2954
+ which defaults to 1.
2955
+ '''
2956
+ return typing.cast(typing.Optional[jsii.Number], jsii.get(self, "minWebservers"))
2957
+
2958
+ @builtins.property
2959
+ @jsii.member(jsii_name="minWorkers")
2960
+ def min_workers(self) -> typing.Optional[jsii.Number]:
2961
+ '''The minimum number of workers that you want to run in your environment.
2962
+
2963
+ MWAA scales the number of Apache Airflow workers up to the number you
2964
+ specify in the MaxWorkers field. When there are no more tasks running, and no more in the queue, MWAA disposes of the extra workers leaving
2965
+ the worker count you specify in the MinWorkers field. For example, 2.
2966
+ '''
2967
+ return typing.cast(typing.Optional[jsii.Number], jsii.get(self, "minWorkers"))
2968
+
2969
+ @builtins.property
2970
+ @jsii.member(jsii_name="networkConfiguration")
2971
+ def network_configuration(self) -> typing.Optional[NetworkConfiguration]:
2972
+ '''The VPC networking components used to secure and enable network traffic between the AWS resources for your environment.'''
2973
+ return typing.cast(typing.Optional[NetworkConfiguration], jsii.get(self, "networkConfiguration"))
2974
+
2975
+ @builtins.property
2976
+ @jsii.member(jsii_name="pluginsS3ObjectVersion")
2977
+ def plugins_s3_object_version(self) -> typing.Optional[builtins.str]:
2978
+ '''The version of the plugins.zip file on your Amazon S3 bucket.'''
2979
+ return typing.cast(typing.Optional[builtins.str], jsii.get(self, "pluginsS3ObjectVersion"))
2980
+
2981
+ @builtins.property
2982
+ @jsii.member(jsii_name="pluginsS3Path")
2983
+ def plugins_s3_path(self) -> typing.Optional[builtins.str]:
2984
+ '''The relative path to the plugins.zip file on your Amazon S3 bucket. For example, plugins.zip.'''
2985
+ return typing.cast(typing.Optional[builtins.str], jsii.get(self, "pluginsS3Path"))
2986
+
2987
+ @builtins.property
2988
+ @jsii.member(jsii_name="requirementsS3ObjectVersion")
2989
+ def requirements_s3_object_version(self) -> typing.Optional[builtins.str]:
2990
+ '''The version of the requirements.txt file on your Amazon S3 bucket.'''
2991
+ return typing.cast(typing.Optional[builtins.str], jsii.get(self, "requirementsS3ObjectVersion"))
2992
+
2993
+ @builtins.property
2994
+ @jsii.member(jsii_name="requirementsS3Path")
2995
+ def requirements_s3_path(self) -> typing.Optional[builtins.str]:
2996
+ '''The relative path to the requirements.txt file on your Amazon S3 bucket. For example, requirements.txt.'''
2997
+ return typing.cast(typing.Optional[builtins.str], jsii.get(self, "requirementsS3Path"))
2998
+
2999
+ @builtins.property
3000
+ @jsii.member(jsii_name="schedulers")
3001
+ def schedulers(self) -> typing.Optional[jsii.Number]:
3002
+ '''The number of schedulers that you want to run in your environment.
3003
+
3004
+ Valid values:
3005
+ v2 - For environments larger than mw1.micro, accepts values from 2 to 5.
3006
+ Defaults to 2 for all environment sizes except mw1.micro, which defaults to 1.
3007
+ v1 - Accepts 1.
3008
+ '''
3009
+ return typing.cast(typing.Optional[jsii.Number], jsii.get(self, "schedulers"))
3010
+
3011
+ @builtins.property
3012
+ @jsii.member(jsii_name="sourceBucket")
3013
+ def source_bucket(self) -> typing.Optional[_aws_cdk_aws_s3_ceddda9d.IBucket]:
3014
+ '''The Amazon S3 bucket where your DAG code and supporting files are stored.'''
3015
+ return typing.cast(typing.Optional[_aws_cdk_aws_s3_ceddda9d.IBucket], jsii.get(self, "sourceBucket"))
3016
+
3017
+ @builtins.property
3018
+ @jsii.member(jsii_name="startupScriptS3ObjectVersion")
3019
+ def startup_script_s3_object_version(self) -> typing.Optional[builtins.str]:
3020
+ '''The version of the startup shell script in your Amazon S3 bucket.
3021
+
3022
+ You must specify the version ID that Amazon S3 assigns to the file every time you update the script.
3023
+ Version IDs are Unicode, UTF-8 encoded, URL-ready, opaque strings that are no more than 1,024 bytes long.
3024
+
3025
+ The following is an example:
3026
+ 3sL4kqtJlcpXroDTDmJ+rmSpXd3dIbrHY+MTRCxf3vjVBH40Nr8X8gdRQBpUMLUo
3027
+ '''
3028
+ return typing.cast(typing.Optional[builtins.str], jsii.get(self, "startupScriptS3ObjectVersion"))
3029
+
3030
+ @builtins.property
3031
+ @jsii.member(jsii_name="startupScriptS3Path")
3032
+ def startup_script_s3_path(self) -> typing.Optional[builtins.str]:
3033
+ '''The relative path to the startup shell script in your Amazon S3 bucket.
3034
+
3035
+ For example, s3://mwaa-environment/startup.sh.
3036
+ Amazon MWAA runs the script as your environment starts, and before running the Apache Airflow process.
3037
+ You can use this script to install dependencies, modify Apache Airflow configuration options, and set environment variables.
3038
+ '''
3039
+ return typing.cast(typing.Optional[builtins.str], jsii.get(self, "startupScriptS3Path"))
3040
+
3041
+ @builtins.property
3042
+ @jsii.member(jsii_name="webserverAccessMode")
3043
+ def webserver_access_mode(self) -> typing.Optional[WebserverAccessMode]:
3044
+ '''The Apache Airflow Web server access mode.'''
3045
+ return typing.cast(typing.Optional[WebserverAccessMode], jsii.get(self, "webserverAccessMode"))
3046
+
3047
+ @builtins.property
3048
+ @jsii.member(jsii_name="webserverUrl")
3049
+ def webserver_url(self) -> typing.Optional[builtins.str]:
3050
+ '''The URL of your Apache Airflow UI.'''
3051
+ return typing.cast(typing.Optional[builtins.str], jsii.get(self, "webserverUrl"))
3052
+
3053
+ @builtins.property
3054
+ @jsii.member(jsii_name="webserverVpcEndpointService")
3055
+ def webserver_vpc_endpoint_service(self) -> typing.Optional[builtins.str]:
3056
+ '''The VPC endpoint for the environment's web server.'''
3057
+ return typing.cast(typing.Optional[builtins.str], jsii.get(self, "webserverVpcEndpointService"))
3058
+
3059
+ @builtins.property
3060
+ @jsii.member(jsii_name="weeklyMaintenanceWindowStart")
3061
+ def weekly_maintenance_window_start(self) -> typing.Optional[builtins.str]:
3062
+ '''The day and time of the week to start weekly maintenance updates of your environment in the following format: DAY:HH:MM.
3063
+
3064
+ For example: TUE:03:30. You can specify a start time in 30 minute increments only.
3065
+
3066
+ Supported input includes the following:
3067
+ MON|TUE|WED|THU|FRI|SAT|SUN:([01]\\d|2[0-3]):(00|30)
3068
+ '''
3069
+ return typing.cast(typing.Optional[builtins.str], jsii.get(self, "weeklyMaintenanceWindowStart"))
3070
+
3071
+
3072
# Public API of this jsii-generated module. The order is preserved as
# emitted by the code generator; entries mirror the types defined above.
__all__ = [
    "AirflowVersion",
    "EndpointManagement",
    "Environment",
    "EnvironmentAttributes",
    "EnvironmentBase",
    "EnvironmentClass",
    "EnvironmentProps",
    "IEnvironment",
    "LogLevel",
    "LoggingConfiguration",
    "ModuleLoggingConfiguration",
    "NetworkConfiguration",
    "WebserverAccessMode",
    "WorkerReplacementStrategy",
]

# jsii-generated boilerplate that finalizes publication of the names above.
publication.publish()
3090
+
3091
def _typecheckingstub__d75f091b184b8fb2d88550b01b5b1291a3af0d350440b3c1dadc6631ec062c57(
    *,
    airflow_configuration_options: typing.Mapping[builtins.str, builtins.str],
    environment_arn: builtins.str,
    name: builtins.str,
    airflow_version: typing.Optional[AirflowVersion] = None,
    celery_executor_queue: typing.Optional[builtins.str] = None,
    dag_s3_path: typing.Optional[builtins.str] = None,
    database_vpc_endpoint_service: typing.Optional[builtins.str] = None,
    endpoint_management: typing.Optional[EndpointManagement] = None,
    environment_class: typing.Optional[EnvironmentClass] = None,
    execution_role: typing.Optional[_aws_cdk_aws_iam_ceddda9d.IRole] = None,
    kms_key: typing.Optional[_aws_cdk_aws_kms_ceddda9d.IKey] = None,
    logging_configuration: typing.Optional[typing.Union[LoggingConfiguration, typing.Dict[builtins.str, typing.Any]]] = None,
    logging_configuration_dag_processing_logs_cloud_watch_log_group_arn: typing.Optional[builtins.str] = None,
    logging_configuration_scheduler_logs_cloud_watch_log_group_arn: typing.Optional[builtins.str] = None,
    logging_configuration_task_logs_cloud_watch_log_group_arn: typing.Optional[builtins.str] = None,
    logging_configuration_webserver_logs_cloud_watch_log_group_arn: typing.Optional[builtins.str] = None,
    logging_configuration_worker_logs_cloud_watch_log_group_arn: typing.Optional[builtins.str] = None,
    max_webservers: typing.Optional[jsii.Number] = None,
    max_workers: typing.Optional[jsii.Number] = None,
    min_webservers: typing.Optional[jsii.Number] = None,
    min_workers: typing.Optional[jsii.Number] = None,
    network_configuration: typing.Optional[typing.Union[NetworkConfiguration, typing.Dict[builtins.str, typing.Any]]] = None,
    plugins_s3_object_version: typing.Optional[builtins.str] = None,
    plugins_s3_path: typing.Optional[builtins.str] = None,
    requirements_s3_object_version: typing.Optional[builtins.str] = None,
    requirements_s3_path: typing.Optional[builtins.str] = None,
    schedulers: typing.Optional[jsii.Number] = None,
    source_bucket: typing.Optional[_aws_cdk_aws_s3_ceddda9d.IBucket] = None,
    startup_script_s3_object_version: typing.Optional[builtins.str] = None,
    startup_script_s3_path: typing.Optional[builtins.str] = None,
    webserver_access_mode: typing.Optional[WebserverAccessMode] = None,
    webserver_url: typing.Optional[builtins.str] = None,
    webserver_vpc_endpoint_service: typing.Optional[builtins.str] = None,
    weekly_maintenance_window_start: typing.Optional[builtins.str] = None,
) -> None:
    """Type checking stub: never invoked; its annotations are read via typing.get_type_hints() to drive runtime check_type() validation."""
    pass
3130
+
3131
def _typecheckingstub__adae2e01428b106a0a65893828e0f65d1e96376eb6556581f26b272553f74e81(
    *,
    airflow_configuration_options: typing.Mapping[builtins.str, builtins.str],
    name: builtins.str,
    airflow_version: typing.Optional[AirflowVersion] = None,
    dag_s3_path: typing.Optional[builtins.str] = None,
    endpoint_management: typing.Optional[EndpointManagement] = None,
    environment_class: typing.Optional[EnvironmentClass] = None,
    execution_role: typing.Optional[_aws_cdk_aws_iam_ceddda9d.IRole] = None,
    kms_key: typing.Optional[_aws_cdk_aws_kms_ceddda9d.IKey] = None,
    logging_configuration: typing.Optional[typing.Union[LoggingConfiguration, typing.Dict[builtins.str, typing.Any]]] = None,
    max_webservers: typing.Optional[jsii.Number] = None,
    max_workers: typing.Optional[jsii.Number] = None,
    min_webservers: typing.Optional[jsii.Number] = None,
    min_workers: typing.Optional[jsii.Number] = None,
    network_configuration: typing.Optional[typing.Union[NetworkConfiguration, typing.Dict[builtins.str, typing.Any]]] = None,
    plugins_s3_object_version: typing.Optional[builtins.str] = None,
    plugins_s3_path: typing.Optional[builtins.str] = None,
    requirements_s3_object_version: typing.Optional[builtins.str] = None,
    requirements_s3_path: typing.Optional[builtins.str] = None,
    schedulers: typing.Optional[jsii.Number] = None,
    source_bucket: typing.Optional[_aws_cdk_aws_s3_ceddda9d.IBucket] = None,
    startup_script_s3_object_version: typing.Optional[builtins.str] = None,
    startup_script_s3_path: typing.Optional[builtins.str] = None,
    webserver_access_mode: typing.Optional[WebserverAccessMode] = None,
    weekly_maintenance_window_start: typing.Optional[builtins.str] = None,
) -> None:
    """Type checking stub: never invoked; its annotations are read via typing.get_type_hints() to drive runtime check_type() validation."""
    pass
3160
+
3161
def _typecheckingstub__587e90c0429a944bc98095249fe5cd300a90dcf33089932ed503e117deb58614(
    *,
    dag_processing_logs: typing.Optional[typing.Union[ModuleLoggingConfiguration, typing.Dict[builtins.str, typing.Any]]] = None,
    scheduler_logs: typing.Optional[typing.Union[ModuleLoggingConfiguration, typing.Dict[builtins.str, typing.Any]]] = None,
    task_logs: typing.Optional[typing.Union[ModuleLoggingConfiguration, typing.Dict[builtins.str, typing.Any]]] = None,
    web_server_logs: typing.Optional[typing.Union[ModuleLoggingConfiguration, typing.Dict[builtins.str, typing.Any]]] = None,
    worker_logs: typing.Optional[typing.Union[ModuleLoggingConfiguration, typing.Dict[builtins.str, typing.Any]]] = None,
) -> None:
    """Type checking stub: never invoked; its annotations are read via typing.get_type_hints() to drive runtime check_type() validation."""
    pass
3171
+
3172
def _typecheckingstub__7b6b5c584242899ae7800864118420958302bf2568da5e2d6f5a683e345399aa(
    *,
    cloud_watch_log_group: typing.Optional[_aws_cdk_aws_logs_ceddda9d.ILogGroup] = None,
    enabled: typing.Optional[builtins.bool] = None,
    log_level: typing.Optional[LogLevel] = None,
) -> None:
    """Type checking stub: never invoked; its annotations are read via typing.get_type_hints() to drive runtime check_type() validation."""
    pass
3180
+
3181
def _typecheckingstub__66a8db1008fda8f8cf9a9e9d41de07bde3dc8b894d4a91cd243e4e3057ff04ae(
    *,
    security_groups: typing.Optional[typing.Sequence[_aws_cdk_aws_ec2_ceddda9d.ISecurityGroup]] = None,
    subnets: typing.Optional[typing.Sequence[_aws_cdk_aws_ec2_ceddda9d.ISubnet]] = None,
) -> None:
    """Type checking stub: never invoked; its annotations are read via typing.get_type_hints() to drive runtime check_type() validation."""
    pass
3188
+
3189
def _typecheckingstub__ad5873da45b6688f4c218055f06ff0d6a531da884f4fbcf05c463ed354d7521f(
    scope: _constructs_77d1e7e8.Construct,
    id: builtins.str,
    *,
    account: typing.Optional[builtins.str] = None,
    environment_from_arn: typing.Optional[builtins.str] = None,
    physical_name: typing.Optional[builtins.str] = None,
    region: typing.Optional[builtins.str] = None,
) -> None:
    """Type checking stub: never invoked; its annotations are read via typing.get_type_hints() to drive runtime check_type() validation."""
    pass
3200
+
3201
def _typecheckingstub__5caedcd0e4f79944eef6be911818e685afe29161ed637d59813b0c34497c9a53(
    scope: _constructs_77d1e7e8.Construct,
    id: builtins.str,
    *,
    airflow_configuration_options: typing.Mapping[builtins.str, builtins.str],
    name: builtins.str,
    airflow_version: typing.Optional[AirflowVersion] = None,
    dag_s3_path: typing.Optional[builtins.str] = None,
    endpoint_management: typing.Optional[EndpointManagement] = None,
    environment_class: typing.Optional[EnvironmentClass] = None,
    execution_role: typing.Optional[_aws_cdk_aws_iam_ceddda9d.IRole] = None,
    kms_key: typing.Optional[_aws_cdk_aws_kms_ceddda9d.IKey] = None,
    logging_configuration: typing.Optional[typing.Union[LoggingConfiguration, typing.Dict[builtins.str, typing.Any]]] = None,
    max_webservers: typing.Optional[jsii.Number] = None,
    max_workers: typing.Optional[jsii.Number] = None,
    min_webservers: typing.Optional[jsii.Number] = None,
    min_workers: typing.Optional[jsii.Number] = None,
    network_configuration: typing.Optional[typing.Union[NetworkConfiguration, typing.Dict[builtins.str, typing.Any]]] = None,
    plugins_s3_object_version: typing.Optional[builtins.str] = None,
    plugins_s3_path: typing.Optional[builtins.str] = None,
    requirements_s3_object_version: typing.Optional[builtins.str] = None,
    requirements_s3_path: typing.Optional[builtins.str] = None,
    schedulers: typing.Optional[jsii.Number] = None,
    source_bucket: typing.Optional[_aws_cdk_aws_s3_ceddda9d.IBucket] = None,
    startup_script_s3_object_version: typing.Optional[builtins.str] = None,
    startup_script_s3_path: typing.Optional[builtins.str] = None,
    webserver_access_mode: typing.Optional[WebserverAccessMode] = None,
    weekly_maintenance_window_start: typing.Optional[builtins.str] = None,
) -> None:
    """Signature-only stub used by jsii's runtime type checking.

    Keyword set mirrors the MWAA environment construct properties
    (scope/id plus Airflow configuration, S3 paths, scaling bounds,
    logging and networking options); the body is intentionally empty.
    """
3232
+
3233
def _typecheckingstub__95cc4f4257f86486302fcb2e763d242a3040b6f4ce19c4cd9698055844e13d0c(
    scope: _constructs_77d1e7e8.Construct,
    id: builtins.str,
    *,
    airflow_configuration_options: typing.Mapping[builtins.str, builtins.str],
    environment_arn: builtins.str,
    name: builtins.str,
    airflow_version: typing.Optional[AirflowVersion] = None,
    celery_executor_queue: typing.Optional[builtins.str] = None,
    dag_s3_path: typing.Optional[builtins.str] = None,
    database_vpc_endpoint_service: typing.Optional[builtins.str] = None,
    endpoint_management: typing.Optional[EndpointManagement] = None,
    environment_class: typing.Optional[EnvironmentClass] = None,
    execution_role: typing.Optional[_aws_cdk_aws_iam_ceddda9d.IRole] = None,
    kms_key: typing.Optional[_aws_cdk_aws_kms_ceddda9d.IKey] = None,
    logging_configuration: typing.Optional[typing.Union[LoggingConfiguration, typing.Dict[builtins.str, typing.Any]]] = None,
    logging_configuration_dag_processing_logs_cloud_watch_log_group_arn: typing.Optional[builtins.str] = None,
    logging_configuration_scheduler_logs_cloud_watch_log_group_arn: typing.Optional[builtins.str] = None,
    logging_configuration_task_logs_cloud_watch_log_group_arn: typing.Optional[builtins.str] = None,
    logging_configuration_webserver_logs_cloud_watch_log_group_arn: typing.Optional[builtins.str] = None,
    logging_configuration_worker_logs_cloud_watch_log_group_arn: typing.Optional[builtins.str] = None,
    max_webservers: typing.Optional[jsii.Number] = None,
    max_workers: typing.Optional[jsii.Number] = None,
    min_webservers: typing.Optional[jsii.Number] = None,
    min_workers: typing.Optional[jsii.Number] = None,
    network_configuration: typing.Optional[typing.Union[NetworkConfiguration, typing.Dict[builtins.str, typing.Any]]] = None,
    plugins_s3_object_version: typing.Optional[builtins.str] = None,
    plugins_s3_path: typing.Optional[builtins.str] = None,
    requirements_s3_object_version: typing.Optional[builtins.str] = None,
    requirements_s3_path: typing.Optional[builtins.str] = None,
    schedulers: typing.Optional[jsii.Number] = None,
    source_bucket: typing.Optional[_aws_cdk_aws_s3_ceddda9d.IBucket] = None,
    startup_script_s3_object_version: typing.Optional[builtins.str] = None,
    startup_script_s3_path: typing.Optional[builtins.str] = None,
    webserver_access_mode: typing.Optional[WebserverAccessMode] = None,
    webserver_url: typing.Optional[builtins.str] = None,
    webserver_vpc_endpoint_service: typing.Optional[builtins.str] = None,
    weekly_maintenance_window_start: typing.Optional[builtins.str] = None,
) -> None:
    """Signature-only stub used by jsii's runtime type checking.

    Superset of the environment-props stub plus read-only attributes
    (environment_arn, webserver_url, VPC endpoint services, per-module
    CloudWatch log-group ARNs) — presumably the "from attributes" import
    path; the body is intentionally empty.
    """
3274
+
3275
+ def _typecheckingstub__7959ab616fb8602cff9e37f75ee4b0f7b75f4963daad74b8e571e648d8ece8a5(
3276
+ x: typing.Any,
3277
+ ) -> None:
3278
+ """Type checking stubs"""
3279
+ pass
3280
+
3281
# Module-level fixup: remove jsii bookkeeping attributes from each Protocol's
# attribute set so that runtime structural checks do not require them on
# implementing objects. The typing.cast(..., Any) silences static checkers,
# since __protocol_attrs__ is a typing-internal attribute.
for cls in [IEnvironment]:
    typing.cast(typing.Any, cls).__protocol_attrs__ = typing.cast(typing.Any, cls).__protocol_attrs__ - set(['__jsii_proxy_class__', '__jsii_type__'])