apache-airflow-providers-amazon 8.19.0rc1__py3-none-any.whl → 8.20.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- airflow/providers/amazon/__init__.py +1 -1
- airflow/providers/amazon/aws/auth_manager/avp/entities.py +4 -2
- airflow/providers/amazon/aws/auth_manager/avp/facade.py +22 -7
- airflow/providers/amazon/aws/auth_manager/{cli → avp}/schema.json +34 -2
- airflow/providers/amazon/aws/auth_manager/aws_auth_manager.py +91 -170
- airflow/providers/amazon/aws/auth_manager/cli/avp_commands.py +7 -32
- airflow/providers/amazon/aws/auth_manager/cli/definition.py +1 -1
- airflow/providers/amazon/aws/auth_manager/cli/idc_commands.py +1 -0
- airflow/providers/amazon/aws/auth_manager/views/auth.py +1 -1
- airflow/providers/amazon/aws/executors/batch/__init__.py +16 -0
- airflow/providers/amazon/aws/executors/batch/batch_executor.py +420 -0
- airflow/providers/amazon/aws/executors/batch/batch_executor_config.py +87 -0
- airflow/providers/amazon/aws/executors/batch/boto_schema.py +67 -0
- airflow/providers/amazon/aws/executors/batch/utils.py +160 -0
- airflow/providers/amazon/aws/executors/ecs/ecs_executor.py +61 -18
- airflow/providers/amazon/aws/executors/ecs/utils.py +8 -13
- airflow/providers/amazon/aws/executors/utils/base_config_keys.py +25 -0
- airflow/providers/amazon/aws/hooks/athena.py +1 -0
- airflow/providers/amazon/aws/hooks/base_aws.py +1 -0
- airflow/providers/amazon/aws/hooks/batch_client.py +4 -3
- airflow/providers/amazon/aws/hooks/batch_waiters.py +1 -0
- airflow/providers/amazon/aws/hooks/bedrock.py +59 -0
- airflow/providers/amazon/aws/hooks/chime.py +1 -0
- airflow/providers/amazon/aws/hooks/cloud_formation.py +1 -0
- airflow/providers/amazon/aws/hooks/datasync.py +1 -0
- airflow/providers/amazon/aws/hooks/dynamodb.py +1 -0
- airflow/providers/amazon/aws/hooks/eks.py +1 -0
- airflow/providers/amazon/aws/hooks/glue.py +13 -5
- airflow/providers/amazon/aws/hooks/glue_catalog.py +1 -0
- airflow/providers/amazon/aws/hooks/kinesis.py +1 -0
- airflow/providers/amazon/aws/hooks/lambda_function.py +1 -0
- airflow/providers/amazon/aws/hooks/rds.py +1 -0
- airflow/providers/amazon/aws/hooks/s3.py +24 -30
- airflow/providers/amazon/aws/hooks/ses.py +1 -0
- airflow/providers/amazon/aws/hooks/sns.py +1 -0
- airflow/providers/amazon/aws/hooks/sqs.py +1 -0
- airflow/providers/amazon/aws/operators/athena.py +2 -2
- airflow/providers/amazon/aws/operators/base_aws.py +4 -1
- airflow/providers/amazon/aws/operators/batch.py +4 -2
- airflow/providers/amazon/aws/operators/bedrock.py +252 -0
- airflow/providers/amazon/aws/operators/cloud_formation.py +1 -0
- airflow/providers/amazon/aws/operators/datasync.py +1 -0
- airflow/providers/amazon/aws/operators/ecs.py +9 -10
- airflow/providers/amazon/aws/operators/eks.py +1 -0
- airflow/providers/amazon/aws/operators/emr.py +57 -7
- airflow/providers/amazon/aws/operators/s3.py +1 -0
- airflow/providers/amazon/aws/operators/sns.py +1 -0
- airflow/providers/amazon/aws/operators/sqs.py +1 -0
- airflow/providers/amazon/aws/secrets/secrets_manager.py +1 -0
- airflow/providers/amazon/aws/secrets/systems_manager.py +1 -0
- airflow/providers/amazon/aws/sensors/base_aws.py +4 -1
- airflow/providers/amazon/aws/sensors/bedrock.py +110 -0
- airflow/providers/amazon/aws/sensors/cloud_formation.py +1 -0
- airflow/providers/amazon/aws/sensors/eks.py +3 -4
- airflow/providers/amazon/aws/sensors/sqs.py +2 -1
- airflow/providers/amazon/aws/transfers/azure_blob_to_s3.py +4 -2
- airflow/providers/amazon/aws/transfers/base.py +1 -0
- airflow/providers/amazon/aws/transfers/exasol_to_s3.py +1 -0
- airflow/providers/amazon/aws/transfers/gcs_to_s3.py +1 -0
- airflow/providers/amazon/aws/transfers/google_api_to_s3.py +1 -0
- airflow/providers/amazon/aws/transfers/hive_to_dynamodb.py +1 -0
- airflow/providers/amazon/aws/transfers/http_to_s3.py +1 -0
- airflow/providers/amazon/aws/transfers/imap_attachment_to_s3.py +1 -0
- airflow/providers/amazon/aws/transfers/redshift_to_s3.py +21 -19
- airflow/providers/amazon/aws/triggers/bedrock.py +61 -0
- airflow/providers/amazon/aws/triggers/eks.py +1 -1
- airflow/providers/amazon/aws/triggers/redshift_cluster.py +1 -0
- airflow/providers/amazon/aws/triggers/s3.py +4 -2
- airflow/providers/amazon/aws/triggers/sagemaker.py +6 -4
- airflow/providers/amazon/aws/utils/emailer.py +1 -0
- airflow/providers/amazon/aws/waiters/bedrock.json +42 -0
- airflow/providers/amazon/get_provider_info.py +86 -1
- {apache_airflow_providers_amazon-8.19.0rc1.dist-info → apache_airflow_providers_amazon-8.20.0.dist-info}/METADATA +10 -9
- {apache_airflow_providers_amazon-8.19.0rc1.dist-info → apache_airflow_providers_amazon-8.20.0.dist-info}/RECORD +77 -66
- /airflow/providers/amazon/aws/executors/{ecs/Dockerfile → Dockerfile} +0 -0
- {apache_airflow_providers_amazon-8.19.0rc1.dist-info → apache_airflow_providers_amazon-8.20.0.dist-info}/WHEEL +0 -0
- {apache_airflow_providers_amazon-8.19.0rc1.dist-info → apache_airflow_providers_amazon-8.20.0.dist-info}/entry_points.txt +0 -0
```diff
--- a/airflow/providers/amazon/aws/hooks/s3.py
+++ b/airflow/providers/amazon/aws/hooks/s3.py
@@ -16,6 +16,7 @@
 # specific language governing permissions and limitations
 # under the License.
 """Interact with AWS S3, using the boto3 library."""
+
 from __future__ import annotations
 
 import asyncio
@@ -30,16 +31,18 @@ import warnings
 from contextlib import suppress
 from copy import deepcopy
 from datetime import datetime
-from functools import wraps
+from functools import cached_property, wraps
 from inspect import signature
 from io import BytesIO
 from pathlib import Path
 from tempfile import NamedTemporaryFile, gettempdir
-from typing import TYPE_CHECKING, Any, Callable, TypeVar, cast
+from typing import TYPE_CHECKING, Any, Callable
 from urllib.parse import urlsplit
 from uuid import uuid4
 
 if TYPE_CHECKING:
+    from mypy_boto3_s3.service_resource import Bucket as S3Bucket, Object as S3ResourceObject
+
     from airflow.utils.types import ArgNotSet
 
 with suppress(ImportError):
@@ -55,22 +58,17 @@ from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
 from airflow.providers.amazon.aws.utils.tags import format_tags
 from airflow.utils.helpers import chunks
 
-if TYPE_CHECKING:
-    from mypy_boto3_s3.service_resource import Bucket as S3Bucket, Object as S3ResourceObject
-
-T = TypeVar("T", bound=Callable)
-
 logger = logging.getLogger(__name__)
 
 
-def provide_bucket_name(func: T) -> T:
+def provide_bucket_name(func: Callable) -> Callable:
     """Provide a bucket name taken from the connection if no bucket name has been passed to the function."""
     if hasattr(func, "_unify_bucket_name_and_key_wrapped"):
         logger.warning("`unify_bucket_name_and_key` should wrap `provide_bucket_name`.")
     function_signature = signature(func)
 
     @wraps(func)
-    def wrapper(*args, **kwargs) -> T:
+    def wrapper(*args, **kwargs) -> Callable:
         bound_args = function_signature.bind(*args, **kwargs)
 
         if "bucket_name" not in bound_args.arguments:
@@ -90,10 +88,10 @@ def provide_bucket_name(func: T) -> T:
 
         return func(*bound_args.args, **bound_args.kwargs)
 
-    return cast(T, wrapper)
+    return wrapper
 
 
-def provide_bucket_name_async(func: T) -> T:
+def provide_bucket_name_async(func: Callable) -> Callable:
     """Provide a bucket name taken from the connection if no bucket name has been passed to the function."""
     function_signature = signature(func)
 
@@ -110,15 +108,15 @@ def provide_bucket_name_async(func: T) -> T:
 
         return await func(*bound_args.args, **bound_args.kwargs)
 
-    return cast(T, wrapper)
+    return wrapper
 
 
-def unify_bucket_name_and_key(func: T) -> T:
+def unify_bucket_name_and_key(func: Callable) -> Callable:
     """Unify bucket name and key in case no bucket name and at least a key has been passed to the function."""
     function_signature = signature(func)
 
     @wraps(func)
-    def wrapper(*args, **kwargs) -> T:
+    def wrapper(*args, **kwargs) -> Callable:
         bound_args = function_signature.bind(*args, **kwargs)
 
         if "wildcard_key" in bound_args.arguments:
@@ -141,7 +139,7 @@ def unify_bucket_name_and_key(func: T) -> T:
     # if provide_bucket_name is applied first, and there's a bucket defined in conn
     # then if user supplies full key, bucket in key is not respected
     wrapper._unify_bucket_name_and_key_wrapped = True  # type: ignore[attr-defined]
-    return cast(T, wrapper)
+    return wrapper
 
 
 class S3Hook(AwsBaseHook):
```
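The decorator hunks above swap the `TypeVar`-based annotations for plain `Callable` ones and drop the `cast(...)` plumbing; runtime behaviour is unchanged. For readers unfamiliar with these helpers, here is a minimal standalone sketch of the signature-binding pattern they rely on — this is illustrative, not the provider code, and `DEFAULT_BUCKET` stands in for the bucket name the real decorator resolves from the hook's AWS connection:

```python
from __future__ import annotations

from functools import wraps
from inspect import signature
from typing import Callable

DEFAULT_BUCKET = "bucket-from-connection"  # stand-in for the connection-derived bucket


def provide_bucket_name_sketch(func: Callable) -> Callable:
    """Fill in `bucket_name` when the caller omitted it, as the S3Hook decorator does."""
    function_signature = signature(func)

    @wraps(func)
    def wrapper(*args, **kwargs):
        # bind() resolves positional/keyword args to parameter names,
        # so the decorator can check for `bucket_name` by name.
        bound_args = function_signature.bind(*args, **kwargs)
        if "bucket_name" not in bound_args.arguments:
            bound_args.arguments["bucket_name"] = DEFAULT_BUCKET
        return func(*bound_args.args, **bound_args.kwargs)

    return wrapper


@provide_bucket_name_sketch
def head_object(key: str, bucket_name: str | None = None) -> str:
    return f"s3://{bucket_name}/{key}"


print(head_object("data.csv"))                      # s3://bucket-from-connection/data.csv
print(head_object("data.csv", bucket_name="mine"))  # s3://mine/data.csv
```

Binding through `inspect.signature` lets the decorator see the argument by name whether the caller passed it positionally or as a keyword.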
```diff
--- a/airflow/providers/amazon/aws/hooks/s3.py
+++ b/airflow/providers/amazon/aws/hooks/s3.py
@@ -188,6 +186,15 @@ class S3Hook(AwsBaseHook):
 
         super().__init__(*args, **kwargs)
 
+    @cached_property
+    def resource(self):
+        return self.get_session().resource(
+            self.service_name,
+            endpoint_url=self.conn_config.get_service_endpoint_url(service_name=self.service_name),
+            config=self.config,
+            verify=self.verify,
+        )
+
     @property
     def extra_args(self):
         """Return hook's extra arguments (immutable)."""
@@ -307,13 +314,7 @@
         :param bucket_name: the name of the bucket
         :return: the bucket object to the bucket name.
         """
-        s3_resource = self.get_session().resource(
-            "s3",
-            endpoint_url=self.conn_config.endpoint_url,
-            config=self.config,
-            verify=self.verify,
-        )
-        return s3_resource.Bucket(bucket_name)
+        return self.resource.Bucket(bucket_name)
 
     @provide_bucket_name
     def create_bucket(self, bucket_name: str | None = None, region_name: str | None = None) -> None:
@@ -943,14 +944,7 @@
             if arg_name in S3Transfer.ALLOWED_DOWNLOAD_ARGS
         }
 
-        s3_resource = self.get_session().resource(
-            "s3",
-            endpoint_url=self.conn_config.endpoint_url,
-            config=self.config,
-            verify=self.verify,
-        )
-        obj = s3_resource.Object(bucket_name, key)
-
+        obj = self.resource.Object(bucket_name, key)
         obj.load(**sanitize_extra_args())
         return obj
 
```
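These hunks replace the two duplicated inline `get_session().resource(...)` constructions with a single cached `resource` property (which also switches the endpoint lookup to the per-service `get_service_endpoint_url(...)`). A short sketch of what `functools.cached_property` buys here — illustrative only; it assumes boto3 is installed, and no AWS call is made:

```python
from functools import cached_property

import boto3


class ResourceHolder:
    """Mimics the new S3Hook.resource: built on first access, reused afterwards."""

    @cached_property
    def resource(self):
        print("building boto3 S3 resource (runs once per instance)")
        return boto3.session.Session(region_name="us-east-1").resource("s3")


holder = ResourceHolder()
bucket = holder.resource.Bucket("example-bucket")      # triggers the one-time build
obj = holder.resource.Object("example-bucket", "key")  # reuses the cached resource
```

Each `S3Hook` instance now builds one boto3 resource and methods such as `get_bucket()` share it instead of constructing a fresh resource per call.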
```diff
--- a/airflow/providers/amazon/aws/operators/athena.py
+++ b/airflow/providers/amazon/aws/operators/athena.py
@@ -165,12 +165,12 @@ class AthenaOperator(AwsBaseOperator[AthenaHook]):
 
         if query_status in AthenaHook.FAILURE_STATES:
             error_message = self.hook.get_state_change_reason(self.query_execution_id)
-            raise Exception(
+            raise AirflowException(
                 f"Final state of Athena job is {query_status}, query_execution_id is "
                 f"{self.query_execution_id}. Error: {error_message}"
             )
         elif not query_status or query_status in AthenaHook.INTERMEDIATE_STATES:
-            raise Exception(
+            raise AirflowException(
                 f"Final state of Athena job is {query_status}. Max tries of poll status exceeded, "
                 f"query_execution_id is {self.query_execution_id}."
             )
```
```diff
--- a/airflow/providers/amazon/aws/operators/base_aws.py
+++ b/airflow/providers/amazon/aws/operators/base_aws.py
@@ -26,6 +26,7 @@ from airflow.providers.amazon.aws.utils.mixins import (
     AwsHookType,
     aws_template_fields,
 )
+from airflow.utils.types import NOTSET, ArgNotSet
 
 
 class AwsBaseOperator(BaseOperator, AwsBaseHookMixin[AwsHookType]):
@@ -85,10 +86,12 @@ class AwsBaseOperator(BaseOperator, AwsBaseHookMixin[AwsHookType]):
         region_name: str | None = None,
         verify: bool | str | None = None,
         botocore_config: dict | None = None,
+        region: str | None | ArgNotSet = NOTSET,  # Required for `.partial` signature check
         **kwargs,
     ):
+        additional_params = {} if region is NOTSET else {"region": region}
         hook_params = AwsHookParams.from_constructor(
-            aws_conn_id, region_name, verify, botocore_config, additional_params=kwargs
+            aws_conn_id, region_name, verify, botocore_config, additional_params=additional_params
         )
         super().__init__(**kwargs)
         self.aws_conn_id = hook_params.aws_conn_id
```
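The new `region` parameter exists purely so deprecated callers keep working under dynamic task mapping: `.partial()` validates keyword arguments against the constructor signature, so an argument that was previously swallowed by `**kwargs` would now be rejected outright. A minimal sketch of the `NOTSET`-sentinel idiom used above (not the provider code; `Client` and its parameter names are illustrative):

```python
import warnings

_NOTSET = object()  # unique sentinel; unlike None, it can never be a real user value


class Client:
    def __init__(self, region_name=None, region=_NOTSET):
        # Declaring `region` explicitly keeps it visible to signature checks,
        # while the sentinel distinguishes "not passed" from "passed None".
        if region is not _NOTSET:
            warnings.warn(
                "`region` is deprecated, use `region_name` instead.",
                DeprecationWarning,
                stacklevel=2,
            )
            region_name = region_name or region
        self.region_name = region_name


Client(region="eu-west-1")  # warns, but still works
Client(region=None)         # also warns: the user did pass the argument
```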
```diff
--- a/airflow/providers/amazon/aws/operators/batch.py
+++ b/airflow/providers/amazon/aws/operators/batch.py
@@ -22,6 +22,7 @@
 - https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/batch.html
 - https://docs.aws.amazon.com/batch/latest/APIReference/Welcome.html
 """
+
 from __future__ import annotations
 
 import warnings
@@ -46,6 +47,7 @@ from airflow.providers.amazon.aws.triggers.batch import (
 )
 from airflow.providers.amazon.aws.utils import trim_none_values, validate_execute_complete_event
 from airflow.providers.amazon.aws.utils.task_log_fetcher import AwsTaskLogFetcher
+from airflow.utils.types import NOTSET
 
 if TYPE_CHECKING:
     from airflow.utils.context import Context
@@ -479,16 +481,16 @@ class BatchCreateComputeEnvironmentOperator(BaseOperator):
         aws_conn_id: str | None = None,
         region_name: str | None = None,
         deferrable: bool = conf.getboolean("operators", "default_deferrable", fallback=False),
+        status_retries=NOTSET,
         **kwargs,
     ):
-        if "status_retries" in kwargs:
+        if status_retries is not NOTSET:
             warnings.warn(
                 "The `status_retries` parameter is unused and should be removed. "
                 "It'll be deleted in a future version.",
                 AirflowProviderDeprecationWarning,
                 stacklevel=2,
             )
-            kwargs.pop("status_retries")  # remove before calling super() to prevent unexpected arg error
 
         super().__init__(**kwargs)
 
```
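`status_retries` gets the same treatment: instead of being popped out of `kwargs`, the unused argument is now an explicit parameter that survives signature checks, warns, and is otherwise ignored. A hypothetical test of that path — the compute-environment kwargs below are illustrative, not prescribed by this diff:

```python
import pytest

from airflow.exceptions import AirflowProviderDeprecationWarning
from airflow.providers.amazon.aws.operators.batch import BatchCreateComputeEnvironmentOperator


def test_status_retries_warns_but_is_ignored():
    with pytest.warns(AirflowProviderDeprecationWarning):
        BatchCreateComputeEnvironmentOperator(
            task_id="create_env",
            compute_environment_name="my-env",
            environment_type="MANAGED",
            state="ENABLED",
            compute_resources={},
            status_retries=3,  # unused; only triggers the deprecation warning
        )
```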
```diff
--- /dev/null
+++ b/airflow/providers/amazon/aws/operators/bedrock.py
@@ -0,0 +1,252 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+import json
+from typing import TYPE_CHECKING, Any, Sequence
+
+from botocore.exceptions import ClientError
+
+from airflow.configuration import conf
+from airflow.exceptions import AirflowException
+from airflow.providers.amazon.aws.hooks.bedrock import BedrockHook, BedrockRuntimeHook
+from airflow.providers.amazon.aws.operators.base_aws import AwsBaseOperator
+from airflow.providers.amazon.aws.triggers.bedrock import BedrockCustomizeModelCompletedTrigger
+from airflow.providers.amazon.aws.utils import validate_execute_complete_event
+from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
+from airflow.utils.helpers import prune_dict
+from airflow.utils.timezone import utcnow
+
+if TYPE_CHECKING:
+    from airflow.utils.context import Context
+
+
+class BedrockInvokeModelOperator(AwsBaseOperator[BedrockRuntimeHook]):
+    """
+    Invoke the specified Bedrock model to run inference using the input provided.
+
+    Use InvokeModel to run inference for text models, image models, and embedding models.
+    To see the format and content of the input_data field for different models, refer to
+    `Inference parameters docs <https://docs.aws.amazon.com/bedrock/latest/userguide/model-parameters.html>`_.
+
+    .. seealso::
+        For more information on how to use this operator, take a look at the guide:
+        :ref:`howto/operator:BedrockInvokeModelOperator`
+
+    :param model_id: The ID of the Bedrock model. (templated)
+    :param input_data: Input data in the format specified in the content-type request header. (templated)
+    :param content_type: The MIME type of the input data in the request. (templated) Default: application/json
+    :param accept: The desired MIME type of the inference body in the response.
+        (templated) Default: application/json
+
+    :param aws_conn_id: The Airflow connection used for AWS credentials.
+        If this is ``None`` or empty then the default boto3 behaviour is used. If
+        running Airflow in a distributed manner and aws_conn_id is None or
+        empty, then default boto3 configuration would be used (and must be
+        maintained on each worker node).
+    :param region_name: AWS region_name. If not specified then the default boto3 behaviour is used.
+    :param verify: Whether or not to verify SSL certificates. See:
+        https://boto3.amazonaws.com/v1/documentation/api/latest/reference/core/session.html
+    :param botocore_config: Configuration dictionary (key-values) for botocore client. See:
+        https://botocore.amazonaws.com/v1/documentation/api/latest/reference/config.html
+    """
+
+    aws_hook_class = BedrockRuntimeHook
+    template_fields: Sequence[str] = aws_template_fields(
+        "model_id", "input_data", "content_type", "accept_type"
+    )
+
+    def __init__(
+        self,
+        model_id: str,
+        input_data: dict[str, Any],
+        content_type: str | None = None,
+        accept_type: str | None = None,
+        **kwargs,
+    ):
+        super().__init__(**kwargs)
+        self.model_id = model_id
+        self.input_data = input_data
+        self.content_type = content_type
+        self.accept_type = accept_type
+
+    def execute(self, context: Context) -> dict[str, str | int]:
+        # These are optional values which the API defaults to "application/json" if not provided here.
+        invoke_kwargs = prune_dict({"contentType": self.content_type, "accept": self.accept_type})
+
+        response = self.hook.conn.invoke_model(
+            body=json.dumps(self.input_data),
+            modelId=self.model_id,
+            **invoke_kwargs,
+        )
+
+        response_body = json.loads(response["body"].read())
+        self.log.info("Bedrock %s prompt: %s", self.model_id, self.input_data)
+        self.log.info("Bedrock model response: %s", response_body)
+        return response_body
+
+
+class BedrockCustomizeModelOperator(AwsBaseOperator[BedrockHook]):
+    """
+    Create a fine-tuning job to customize a base model.
+
+    .. seealso::
+        For more information on how to use this operator, take a look at the guide:
+        :ref:`howto/operator:BedrockCustomizeModelOperator`
+
+    :param job_name: A unique name for the fine-tuning job.
+    :param custom_model_name: A name for the custom model being created.
+    :param role_arn: The Amazon Resource Name (ARN) of an IAM role that Amazon Bedrock can assume
+        to perform tasks on your behalf.
+    :param base_model_id: Name of the base model.
+    :param training_data_uri: The S3 URI where the training data is stored.
+    :param output_data_uri: The S3 URI where the output data is stored.
+    :param hyperparameters: Parameters related to tuning the model.
+    :param ensure_unique_job_name: If set to true, operator will check whether a model customization
+        job already exists for the name in the config and append the current timestamp if there is a
+        name conflict. (Default: True)
+    :param customization_job_kwargs: Any optional parameters to pass to the API.
+
+    :param wait_for_completion: Whether to wait for cluster to stop. (default: True)
+    :param waiter_delay: Time in seconds to wait between status checks. (default: 120)
+    :param waiter_max_attempts: Maximum number of attempts to check for job completion. (default: 75)
+    :param deferrable: If True, the operator will wait asynchronously for the cluster to stop.
+        This implies waiting for completion. This mode requires aiobotocore module to be installed.
+        (default: False)
+    :param aws_conn_id: The Airflow connection used for AWS credentials.
+        If this is ``None`` or empty then the default boto3 behaviour is used. If
+        running Airflow in a distributed manner and aws_conn_id is None or
+        empty, then default boto3 configuration would be used (and must be
+        maintained on each worker node).
+    :param region_name: AWS region_name. If not specified then the default boto3 behaviour is used.
+    :param verify: Whether or not to verify SSL certificates. See:
+        https://boto3.amazonaws.com/v1/documentation/api/latest/reference/core/session.html
+    :param botocore_config: Configuration dictionary (key-values) for botocore client. See:
+        https://botocore.amazonaws.com/v1/documentation/api/latest/reference/config.html
+    """
+
+    aws_hook_class = BedrockHook
+    template_fields: Sequence[str] = aws_template_fields(
+        "job_name",
+        "custom_model_name",
+        "role_arn",
+        "base_model_id",
+        "hyperparameters",
+        "ensure_unique_job_name",
+        "customization_job_kwargs",
+    )
+
+    def __init__(
+        self,
+        job_name: str,
+        custom_model_name: str,
+        role_arn: str,
+        base_model_id: str,
+        training_data_uri: str,
+        output_data_uri: str,
+        hyperparameters: dict[str, str],
+        ensure_unique_job_name: bool = True,
+        customization_job_kwargs: dict[str, Any] | None = None,
+        wait_for_completion: bool = True,
+        waiter_delay: int = 120,
+        waiter_max_attempts: int = 75,
+        deferrable: bool = conf.getboolean("operators", "default_deferrable", fallback=False),
+        **kwargs,
+    ):
+        super().__init__(**kwargs)
+        self.wait_for_completion = wait_for_completion
+        self.waiter_delay = waiter_delay
+        self.waiter_max_attempts = waiter_max_attempts
+        self.deferrable = deferrable
+
+        self.job_name = job_name
+        self.custom_model_name = custom_model_name
+        self.role_arn = role_arn
+        self.base_model_id = base_model_id
+        self.training_data_config = {"s3Uri": training_data_uri}
+        self.output_data_config = {"s3Uri": output_data_uri}
+        self.hyperparameters = hyperparameters
+        self.ensure_unique_job_name = ensure_unique_job_name
+        self.customization_job_kwargs = customization_job_kwargs or {}
+
+        self.valid_action_if_job_exists: set[str] = {"timestamp", "fail"}
+
+    def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> str:
+        event = validate_execute_complete_event(event)
+
+        if event["status"] != "success":
+            raise AirflowException(f"Error while running job: {event}")
+
+        self.log.info("Bedrock model customization job `%s` complete.", self.job_name)
+        return self.hook.conn.get_model_customization_job(jobIdentifier=event["job_name"])["jobArn"]
+
+    def execute(self, context: Context) -> dict:
+        response = {}
+        retry = True
+        while retry:
+            # If there is a name conflict and ensure_unique_job_name is True, append the current timestamp
+            # to the name and retry until there is no name conflict.
+            # - Break the loop when the API call returns success.
+            # - If the API returns an exception other than a name conflict, raise that exception.
+            # - If the API returns a name conflict and ensure_unique_job_name is false, raise that exception.
+            try:
+                # Ensure the loop is executed at least once, and not repeat unless explicitly set to do so.
+                retry = False
+                self.log.info("Creating Bedrock model customization job '%s'.", self.job_name)
+
+                response = self.hook.conn.create_model_customization_job(
+                    jobName=self.job_name,
+                    customModelName=self.custom_model_name,
+                    roleArn=self.role_arn,
+                    baseModelIdentifier=self.base_model_id,
+                    trainingDataConfig=self.training_data_config,
+                    outputDataConfig=self.output_data_config,
+                    hyperParameters=self.hyperparameters,
+                    **self.customization_job_kwargs,
+                )
+            except ClientError as error:
+                if error.response["Error"]["Message"] != "The provided job name is currently in use.":
+                    raise error
+                if not self.ensure_unique_job_name:
+                    raise error
+                retry = True
+                self.job_name = f"{self.job_name}-{int(utcnow().timestamp())}"
+                self.log.info("Changed job name to '%s' to avoid collision.", self.job_name)
+
+        if response["ResponseMetadata"]["HTTPStatusCode"] != 201:
+            raise AirflowException(f"Bedrock model customization job creation failed: {response}")
+
+        task_description = f"Bedrock model customization job {self.job_name} to complete."
+        if self.deferrable:
+            self.log.info("Deferring for %s", task_description)
+            self.defer(
+                trigger=BedrockCustomizeModelCompletedTrigger(
+                    job_name=self.job_name,
+                    waiter_delay=self.waiter_delay,
+                    waiter_max_attempts=self.waiter_max_attempts,
+                    aws_conn_id=self.aws_conn_id,
+                ),
+                method_name="execute_complete",
+            )
+        elif self.wait_for_completion:
+            self.log.info("Waiting for %s", task_description)
+            self.hook.get_waiter("model_customization_job_complete").wait(
+                jobIdentifier=self.job_name,
+                WaiterConfig={"Delay": self.waiter_delay, "MaxAttempts": self.waiter_max_attempts},
+            )
+
+        return response["jobArn"]
```
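`bedrock.py` is an entirely new module in 8.20.0, adding the two operators shown above. A sketch of how they might be wired into a DAG — the model IDs, role ARN, S3 URIs, and prompt body are placeholders, and each Bedrock model expects its own `input_data` format per the inference-parameters docs linked in the operator docstring:

```python
from datetime import datetime

from airflow import DAG
from airflow.providers.amazon.aws.operators.bedrock import (
    BedrockCustomizeModelOperator,
    BedrockInvokeModelOperator,
)

with DAG(dag_id="bedrock_example", start_date=datetime(2024, 1, 1), schedule=None):
    invoke = BedrockInvokeModelOperator(
        task_id="invoke_model",
        model_id="amazon.titan-text-express-v1",  # placeholder model
        input_data={"inputText": "What is Apache Airflow?"},
    )

    customize = BedrockCustomizeModelOperator(
        task_id="customize_model",
        job_name="my-finetune-job",
        custom_model_name="my-custom-model",
        role_arn="arn:aws:iam::123456789012:role/BedrockRole",  # placeholder ARN
        base_model_id="amazon.titan-text-express-v1",
        training_data_uri="s3://my-bucket/train.jsonl",
        output_data_uri="s3://my-bucket/output/",
        hyperparameters={"epochCount": "1"},  # placeholder tuning params
    )

    invoke >> customize
```

Note the collision handling in `execute()`: with `ensure_unique_job_name=True` (the default), a "job name is currently in use" error from the API appends a timestamp to `job_name` and retries rather than failing.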
```diff
--- a/airflow/providers/amazon/aws/operators/ecs.py
+++ b/airflow/providers/amazon/aws/operators/ecs.py
@@ -40,6 +40,7 @@ from airflow.providers.amazon.aws.utils.identifiers import generate_uuid
 from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
 from airflow.providers.amazon.aws.utils.task_log_fetcher import AwsTaskLogFetcher
 from airflow.utils.helpers import prune_dict
+from airflow.utils.types import NOTSET
 
 if TYPE_CHECKING:
     import boto3
@@ -257,19 +258,18 @@ class EcsDeregisterTaskDefinitionOperator(EcsBaseOperator):
         self,
         *,
         task_definition: str,
+        wait_for_completion=NOTSET,
+        waiter_delay=NOTSET,
+        waiter_max_attempts=NOTSET,
         **kwargs,
     ):
-        if any(arg in kwargs for arg in ["wait_for_completion", "waiter_delay", "waiter_max_attempts"]):
+        if any(arg is not NOTSET for arg in [wait_for_completion, waiter_delay, waiter_max_attempts]):
             warnings.warn(
                 "'wait_for_completion' and waiter related params have no effect and are deprecated, "
                 "please remove them.",
                 AirflowProviderDeprecationWarning,
                 stacklevel=2,
             )
-            # remove args to not trigger Invalid arguments exception
-            kwargs.pop("wait_for_completion", None)
-            kwargs.pop("waiter_delay", None)
-            kwargs.pop("waiter_max_attempts", None)
 
         super().__init__(**kwargs)
         self.task_definition = task_definition
@@ -311,19 +311,18 @@ class EcsRegisterTaskDefinitionOperator(EcsBaseOperator):
         family: str,
         container_definitions: list[dict],
         register_task_kwargs: dict | None = None,
+        wait_for_completion=NOTSET,
+        waiter_delay=NOTSET,
+        waiter_max_attempts=NOTSET,
         **kwargs,
     ):
-        if any(arg in kwargs for arg in ["wait_for_completion", "waiter_delay", "waiter_max_attempts"]):
+        if any(arg is not NOTSET for arg in [wait_for_completion, waiter_delay, waiter_max_attempts]):
             warnings.warn(
                 "'wait_for_completion' and waiter related params have no effect and are deprecated, "
                 "please remove them.",
                 AirflowProviderDeprecationWarning,
                 stacklevel=2,
             )
-            # remove args to not trigger Invalid arguments exception
-            kwargs.pop("wait_for_completion", None)
-            kwargs.pop("waiter_delay", None)
-            kwargs.pop("waiter_max_attempts", None)
 
         super().__init__(**kwargs)
         self.family = family
```