apache-airflow-providers-amazon 8.20.0rc1__py3-none-any.whl → 8.21.0rc1__py3-none-any.whl

This diff compares publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the versions as they appear in their respective public registries.
Files changed (23)
  1. airflow/providers/amazon/__init__.py +3 -3
  2. airflow/providers/amazon/aws/auth_manager/avp/entities.py +1 -1
  3. airflow/providers/amazon/aws/auth_manager/avp/facade.py +5 -2
  4. airflow/providers/amazon/aws/auth_manager/aws_auth_manager.py +1 -11
  5. airflow/providers/amazon/aws/auth_manager/constants.py +0 -1
  6. airflow/providers/amazon/aws/executors/batch/__init__.py +5 -0
  7. airflow/providers/amazon/aws/executors/ecs/__init__.py +5 -0
  8. airflow/providers/amazon/aws/executors/ecs/ecs_executor.py +1 -1
  9. airflow/providers/amazon/aws/hooks/batch_client.py +2 -0
  10. airflow/providers/amazon/aws/hooks/emr.py +8 -6
  11. airflow/providers/amazon/aws/hooks/lambda_function.py +10 -0
  12. airflow/providers/amazon/aws/operators/bedrock.py +102 -1
  13. airflow/providers/amazon/aws/operators/glue.py +7 -1
  14. airflow/providers/amazon/aws/sensors/bedrock.py +126 -23
  15. airflow/providers/amazon/aws/sensors/emr.py +8 -2
  16. airflow/providers/amazon/aws/transfers/azure_blob_to_s3.py +7 -1
  17. airflow/providers/amazon/aws/triggers/bedrock.py +36 -0
  18. airflow/providers/amazon/aws/waiters/bedrock.json +31 -0
  19. airflow/providers/amazon/get_provider_info.py +4 -2
  20. {apache_airflow_providers_amazon-8.20.0rc1.dist-info → apache_airflow_providers_amazon-8.21.0rc1.dist-info}/METADATA +10 -8
  21. {apache_airflow_providers_amazon-8.20.0rc1.dist-info → apache_airflow_providers_amazon-8.21.0rc1.dist-info}/RECORD +23 -23
  22. {apache_airflow_providers_amazon-8.20.0rc1.dist-info → apache_airflow_providers_amazon-8.21.0rc1.dist-info}/WHEEL +0 -0
  23. {apache_airflow_providers_amazon-8.20.0rc1.dist-info → apache_airflow_providers_amazon-8.21.0rc1.dist-info}/entry_points.txt +0 -0
@@ -27,7 +27,7 @@ import packaging.version
 
 __all__ = ["__version__"]
 
-__version__ = "8.20.0"
+__version__ = "8.21.0"
 
 try:
     from airflow import __version__ as airflow_version
@@ -35,8 +35,8 @@ except ImportError:
     from airflow.version import version as airflow_version
 
 if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
-    "2.6.0"
+    "2.7.0"
 ):
     raise RuntimeError(
-        f"The package `apache-airflow-providers-amazon:{__version__}` needs Apache Airflow 2.6.0+"
+        f"The package `apache-airflow-providers-amazon:{__version__}` needs Apache Airflow 2.7.0+"
     )
@@ -55,7 +55,7 @@ def get_entity_type(resource_type: AvpEntities) -> str:
     return AVP_PREFIX_ENTITIES + resource_type.value
 
 
-def get_action_id(resource_type: AvpEntities, method: ResourceMethod):
+def get_action_id(resource_type: AvpEntities, method: ResourceMethod | str):
     """
     Return action id.
 
@@ -75,7 +75,7 @@ class AwsAuthManagerAmazonVerifiedPermissionsFacade(LoggingMixin):
     def is_authorized(
         self,
         *,
-        method: ResourceMethod,
+        method: ResourceMethod | str,
         entity_type: AvpEntities,
         user: AwsAuthManagerUser | None,
         entity_id: str | None = None,
@@ -86,7 +86,10 @@ class AwsAuthManagerAmazonVerifiedPermissionsFacade(LoggingMixin):
 
         Check whether the user has permissions to access given resource.
 
-        :param method: the method to perform
+        :param method: the method to perform.
+            The method can also be a string if the action has been defined in a plugin.
+            In that case, the action can be anything (e.g. can_do).
+            See https://github.com/apache/airflow/issues/39144
         :param entity_type: the entity type the user accesses
         :param user: the user
         :param entity_id: the entity ID the user accesses. If not provided, all entities of the type will be
@@ -24,7 +24,6 @@ from typing import TYPE_CHECKING, Container, Sequence, cast
 from flask import session, url_for
 
 from airflow.cli.cli_config import CLICommand, DefaultHelpParser, GroupCommand
-from airflow.configuration import conf
 from airflow.exceptions import AirflowOptionalProviderFeatureException
 from airflow.providers.amazon.aws.auth_manager.avp.entities import AvpEntities
 from airflow.providers.amazon.aws.auth_manager.avp.facade import (
@@ -34,10 +33,6 @@ from airflow.providers.amazon.aws.auth_manager.avp.facade import (
 from airflow.providers.amazon.aws.auth_manager.cli.definition import (
     AWS_AUTH_MANAGER_COMMANDS,
 )
-from airflow.providers.amazon.aws.auth_manager.constants import (
-    CONF_ENABLE_KEY,
-    CONF_SECTION_NAME,
-)
 from airflow.providers.amazon.aws.auth_manager.security_manager.aws_security_manager_override import (
     AwsSecurityManagerOverride,
 )
@@ -87,12 +82,7 @@ class AwsAuthManager(BaseAuthManager):
 
     def __init__(self, appbuilder: AirflowAppBuilder) -> None:
         super().__init__(appbuilder)
-        enable = conf.getboolean(CONF_SECTION_NAME, CONF_ENABLE_KEY)
         self._check_avp_schema_version()
-        if not enable:
-            raise NotImplementedError(
-                "The AWS auth manager is currently being built. It is not finalized. It is not intended to be used yet."
-            )
 
     @cached_property
     def avp_facade(self):
@@ -207,7 +197,7 @@ class AwsAuthManager(BaseAuthManager):
         )
 
     def is_authorized_custom_view(
-        self, *, method: ResourceMethod, resource_name: str, user: BaseUser | None = None
+        self, *, method: ResourceMethod | str, resource_name: str, user: BaseUser | None = None
     ):
         return self.avp_facade.is_authorized(
             method=method,
@@ -18,7 +18,6 @@
 # Configuration keys
 from __future__ import annotations
 
-CONF_ENABLE_KEY = "enable"
 CONF_SECTION_NAME = "aws_auth_manager"
 CONF_CONN_ID_KEY = "conn_id"
 CONF_REGION_NAME_KEY = "region_name"
@@ -14,3 +14,8 @@
 # KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations
 # under the License.
+from __future__ import annotations  # Added by precommit hooks
+
+__all__ = ["AwsBatchExecutor"]
+
+from airflow.providers.amazon.aws.executors.batch.batch_executor import AwsBatchExecutor
@@ -14,3 +14,8 @@
 # KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations
 # under the License.
+from __future__ import annotations  # Added by precommit hooks
+
+__all__ = ["AwsEcsExecutor"]
+
+from airflow.providers.amazon.aws.executors.ecs.ecs_executor import AwsEcsExecutor
@@ -515,7 +515,7 @@ class AwsEcsExecutor(BaseExecutor):
         task_descriptions = self.__describe_tasks(task_arns).get("tasks", [])
 
         for task in task_descriptions:
-            ti = [ti for ti in tis if ti.external_executor_id == task.task_arn][0]
+            ti = next(ti for ti in tis if ti.external_executor_id == task.task_arn)
             self.active_workers.add_task(
                 task,
                 ti.key,
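
The switch from a subscripted list comprehension to next() stops scanning at the first matching task instance instead of materializing every match first. A minimal illustration with hypothetical values (not the executor's real data structures):

    arns = ["arn:task/a", "arn:task/b", "arn:task/c"]

    # Before: builds the full filtered list, then indexes; IndexError if empty.
    first = [a for a in arns if a.endswith("/b")][0]

    # After: lazily yields the first match; StopIteration if nothing matches.
    first = next(a for a in arns if a.endswith("/b"))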
@@ -397,6 +397,8 @@ class BatchClientHook(AwsBaseHook):
         try:
             response = self.get_conn().describe_jobs(jobs=[job_id])
             return self.parse_job_description(job_id, response)
+        except AirflowException as err:
+            self.log.warning(err)
         except botocore.exceptions.ClientError as err:
             # Allow it to retry in case of exceeded quota limit of requests to AWS API
             if err.response.get("Error", {}).get("Code") != "TooManyRequestsException":
@@ -494,21 +494,23 @@ class EmrContainerHook(AwsBaseHook):
         :param poll_interval: Time (in seconds) to wait between calls to check query status on EMR
         :param max_polling_attempts: Number of times to poll for query state before function exits
         """
-        try_number = 1
+        poll_attempt = 1
         while True:
             query_state = self.check_query_status(job_id)
             if query_state in self.TERMINAL_STATES:
-                self.log.info("Try %s: Query execution completed. Final state is %s", try_number, query_state)
+                self.log.info(
+                    "Try %s: Query execution completed. Final state is %s", poll_attempt, query_state
+                )
                 return query_state
             if query_state is None:
-                self.log.info("Try %s: Invalid query state. Retrying again", try_number)
+                self.log.info("Try %s: Invalid query state. Retrying again", poll_attempt)
             else:
-                self.log.info("Try %s: Query is still in non-terminal state - %s", try_number, query_state)
+                self.log.info("Try %s: Query is still in non-terminal state - %s", poll_attempt, query_state)
             if (
-                max_polling_attempts and try_number >= max_polling_attempts
+                max_polling_attempts and poll_attempt >= max_polling_attempts
             ):  # Break loop if max_polling_attempts reached
                 return query_state
-            try_number += 1
+            poll_attempt += 1
             time.sleep(poll_interval)
 
     def stop_query(self, job_id: str) -> dict:
@@ -106,6 +106,9 @@ class LambdaHook(AwsBaseHook):
         image_config: Any | None = None,
         code_signing_config_arn: str | None = None,
         architectures: list[str] | None = None,
+        ephemeral_storage: Any | None = None,
+        snap_start: Any | None = None,
+        logging_config: Any | None = None,
     ) -> dict:
         """
         Create a Lambda function.
@@ -151,6 +154,10 @@ class LambdaHook(AwsBaseHook):
             A code-signing configuration includes a set of signing profiles,
             which define the trusted publishers for this function.
         :param architectures: The instruction set architecture that the function supports.
+        :param ephemeral_storage: The size of the function's /tmp directory in MB.
+            The default value is 512, but can be any whole number between 512 and 10,240 MB
+        :param snap_start: The function's SnapStart setting
+        :param logging_config: The function's Amazon CloudWatch Logs configuration settings
         """
         if package_type == "Zip":
             if handler is None:
@@ -181,6 +188,9 @@ class LambdaHook(AwsBaseHook):
             "ImageConfig": image_config,
             "CodeSigningConfigArn": code_signing_config_arn,
             "Architectures": architectures,
+            "EphemeralStorage": ephemeral_storage,
+            "SnapStart": snap_start,
+            "LoggingConfig": logging_config,
         }
         return self.conn.create_function(**trim_none_values(create_function_args))
 
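
The three new arguments are forwarded (after trim_none_values) to boto3's create_function call, so they take boto3's request shapes. A minimal usage sketch; the connection id, role ARN, bucket, and key are placeholders:

    from airflow.providers.amazon.aws.hooks.lambda_function import LambdaHook

    hook = LambdaHook(aws_conn_id="aws_default")
    hook.create_function(
        function_name="demo-function",
        runtime="python3.11",
        role="arn:aws:iam::123456789012:role/demo-lambda-role",  # placeholder
        handler="app.handler",
        code={"S3Bucket": "demo-bucket", "S3Key": "lambda.zip"},
        ephemeral_storage={"Size": 1024},  # /tmp sized to 1024 MB
        snap_start={"ApplyOn": "PublishedVersions"},
        logging_config={"LogFormat": "JSON"},
    )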
@@ -25,7 +25,10 @@ from airflow.configuration import conf
 from airflow.exceptions import AirflowException
 from airflow.providers.amazon.aws.hooks.bedrock import BedrockHook, BedrockRuntimeHook
 from airflow.providers.amazon.aws.operators.base_aws import AwsBaseOperator
-from airflow.providers.amazon.aws.triggers.bedrock import BedrockCustomizeModelCompletedTrigger
+from airflow.providers.amazon.aws.triggers.bedrock import (
+    BedrockCustomizeModelCompletedTrigger,
+    BedrockProvisionModelThroughputCompletedTrigger,
+)
 from airflow.providers.amazon.aws.utils import validate_execute_complete_event
 from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
 from airflow.utils.helpers import prune_dict
@@ -250,3 +253,101 @@ class BedrockCustomizeModelOperator(AwsBaseOperator[BedrockHook]):
         )
 
         return response["jobArn"]
+
+
+class BedrockCreateProvisionedModelThroughputOperator(AwsBaseOperator[BedrockHook]):
+    """
+    Create provisioned throughput with dedicated capacity for a foundation or custom model.
+
+    .. seealso::
+        For more information on how to use this operator, take a look at the guide:
+        :ref:`howto/operator:BedrockCreateProvisionedModelThroughputOperator`
+
+    :param model_units: Number of model units to allocate. (templated)
+    :param provisioned_model_name: Unique name for this provisioned throughput. (templated)
+    :param model_id: Name or ARN of the model to associate with this provisioned throughput. (templated)
+    :param create_throughput_kwargs: Any optional parameters to pass to the API.
+
+    :param wait_for_completion: Whether to wait for the provisioned throughput to be ready. (default: True)
+    :param waiter_delay: Time in seconds to wait between status checks. (default: 60)
+    :param waiter_max_attempts: Maximum number of attempts to check for job completion. (default: 20)
+    :param deferrable: If True, the operator will wait asynchronously for the provisioned throughput
+        to be ready. This implies waiting for completion. This mode requires aiobotocore module to be
+        installed. (default: False)
+    :param aws_conn_id: The Airflow connection used for AWS credentials.
+        If this is ``None`` or empty then the default boto3 behaviour is used. If
+        running Airflow in a distributed manner and aws_conn_id is None or
+        empty, then default boto3 configuration would be used (and must be
+        maintained on each worker node).
+    :param region_name: AWS region_name. If not specified then the default boto3 behaviour is used.
+    :param verify: Whether or not to verify SSL certificates. See:
+        https://boto3.amazonaws.com/v1/documentation/api/latest/reference/core/session.html
+    :param botocore_config: Configuration dictionary (key-values) for botocore client. See:
+        https://botocore.amazonaws.com/v1/documentation/api/latest/reference/config.html
+    """
+
+    aws_hook_class = BedrockHook
+    template_fields: Sequence[str] = aws_template_fields(
+        "model_units",
+        "provisioned_model_name",
+        "model_id",
+    )
+
+    def __init__(
+        self,
+        model_units: int,
+        provisioned_model_name: str,
+        model_id: str,
+        create_throughput_kwargs: dict[str, Any] | None = None,
+        wait_for_completion: bool = True,
+        waiter_delay: int = 60,
+        waiter_max_attempts: int = 20,
+        deferrable: bool = conf.getboolean("operators", "default_deferrable", fallback=False),
+        **kwargs,
+    ):
+        super().__init__(**kwargs)
+        self.model_units = model_units
+        self.provisioned_model_name = provisioned_model_name
+        self.model_id = model_id
+        self.create_throughput_kwargs = create_throughput_kwargs or {}
+        self.wait_for_completion = wait_for_completion
+        self.waiter_delay = waiter_delay
+        self.waiter_max_attempts = waiter_max_attempts
+        self.deferrable = deferrable
+
+    def execute(self, context: Context) -> str:
+        provisioned_model_id = self.hook.conn.create_provisioned_model_throughput(
+            modelUnits=self.model_units,
+            provisionedModelName=self.provisioned_model_name,
+            modelId=self.model_id,
+            **self.create_throughput_kwargs,
+        )["provisionedModelArn"]
+
+        if self.deferrable:
+            self.log.info("Deferring for provisioned throughput.")
+            self.defer(
+                trigger=BedrockProvisionModelThroughputCompletedTrigger(
+                    provisioned_model_id=provisioned_model_id,
+                    waiter_delay=self.waiter_delay,
+                    waiter_max_attempts=self.waiter_max_attempts,
+                    aws_conn_id=self.aws_conn_id,
+                ),
+                method_name="execute_complete",
+            )
+        if self.wait_for_completion:
+            self.log.info("Waiting for provisioned throughput.")
+            self.hook.get_waiter("provisioned_model_throughput_complete").wait(
+                provisionedModelId=provisioned_model_id,
+                WaiterConfig={"Delay": self.waiter_delay, "MaxAttempts": self.waiter_max_attempts},
+            )
+
+        return provisioned_model_id
+
+    def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> str:
+        event = validate_execute_complete_event(event)
+
+        if event["status"] != "success":
+            raise AirflowException(f"Error while running job: {event}")
+
+        self.log.info("Bedrock provisioned throughput job `%s` complete.", event["provisioned_model_id"])
+        return event["provisioned_model_id"]
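
A minimal usage sketch of the new operator inside a DAG; the task id, model name, model id, and the commitment term passed through create_throughput_kwargs are placeholder values:

    from airflow.providers.amazon.aws.operators.bedrock import (
        BedrockCreateProvisionedModelThroughputOperator,
    )

    provision_throughput = BedrockCreateProvisionedModelThroughputOperator(
        task_id="provision_throughput",
        model_units=1,
        provisioned_model_name="demo-provisioned-model",
        model_id="amazon.titan-text-express-v1",  # placeholder foundation model
        create_throughput_kwargs={"commitmentDuration": "OneMonth"},
    )

The task returns (and pushes to XCom) the provisioned model ARN, which the new sensor below can poll.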
@@ -64,6 +64,7 @@ class GlueJobOperator(BaseOperator):
         (default: False)
     :param verbose: If True, Glue Job Run logs show in the Airflow Task Logs. (default: False)
     :param update_config: If True, Operator will update job configuration. (default: False)
+    :param replace_script_file: If True, the script file will be replaced in S3. (default: False)
     :param stop_job_run_on_kill: If True, Operator will stop the job run when task is killed.
     """
 
@@ -105,6 +106,7 @@ class GlueJobOperator(BaseOperator):
105
106
  wait_for_completion: bool = True,
106
107
  deferrable: bool = conf.getboolean("operators", "default_deferrable", fallback=False),
107
108
  verbose: bool = False,
109
+ replace_script_file: bool = False,
108
110
  update_config: bool = False,
109
111
  job_poll_interval: int | float = 6,
110
112
  stop_job_run_on_kill: bool = False,
@@ -130,6 +132,7 @@ class GlueJobOperator(BaseOperator):
         self.wait_for_completion = wait_for_completion
         self.verbose = verbose
         self.update_config = update_config
+        self.replace_script_file = replace_script_file
         self.deferrable = deferrable
         self.job_poll_interval = job_poll_interval
         self.stop_job_run_on_kill = stop_job_run_on_kill
@@ -143,7 +146,10 @@ class GlueJobOperator(BaseOperator):
             s3_hook = S3Hook(aws_conn_id=self.aws_conn_id)
             script_name = os.path.basename(self.script_location)
             s3_hook.load_file(
-                self.script_location, self.s3_artifacts_prefix + script_name, bucket_name=self.s3_bucket
+                self.script_location,
+                self.s3_artifacts_prefix + script_name,
+                bucket_name=self.s3_bucket,
+                replace=self.replace_script_file,
             )
             s3_script_location = f"s3://{self.s3_bucket}/{self.s3_artifacts_prefix}{script_name}"
         else:
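
With the new flag, a re-run can overwrite a script that already exists under the artifact prefix instead of failing the S3 upload. A usage sketch with placeholder names:

    from airflow.providers.amazon.aws.operators.glue import GlueJobOperator

    submit_glue_job = GlueJobOperator(
        task_id="submit_glue_job",
        job_name="demo_glue_job",
        script_location="/files/etl_script.py",  # local script, uploaded to S3
        s3_bucket="demo-artifacts-bucket",
        iam_role_name="demo-glue-role",
        replace_script_file=True,  # overwrite any previously uploaded copy
    )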
@@ -17,21 +17,73 @@
 # under the License.
 from __future__ import annotations
 
+import abc
 from typing import TYPE_CHECKING, Any, Sequence
 
 from airflow.configuration import conf
+from airflow.exceptions import AirflowException, AirflowSkipException
+from airflow.providers.amazon.aws.hooks.bedrock import BedrockHook
 from airflow.providers.amazon.aws.sensors.base_aws import AwsBaseSensor
-from airflow.providers.amazon.aws.triggers.bedrock import BedrockCustomizeModelCompletedTrigger
+from airflow.providers.amazon.aws.triggers.bedrock import (
+    BedrockCustomizeModelCompletedTrigger,
+    BedrockProvisionModelThroughputCompletedTrigger,
+)
 from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
 
 if TYPE_CHECKING:
     from airflow.utils.context import Context
 
-from airflow.exceptions import AirflowException, AirflowSkipException
-from airflow.providers.amazon.aws.hooks.bedrock import BedrockHook
+
+class BedrockBaseSensor(AwsBaseSensor[BedrockHook]):
+    """
+    General sensor behavior for Amazon Bedrock.
+
+    Subclasses must implement the following methods:
+        - ``get_state()``
+
+    Subclasses must set the following fields:
+        - ``INTERMEDIATE_STATES``
+        - ``FAILURE_STATES``
+        - ``SUCCESS_STATES``
+        - ``FAILURE_MESSAGE``
+
+    :param deferrable: If True, the sensor will operate in deferrable mode. This mode requires aiobotocore
+        module to be installed.
+        (default: False, but can be overridden in config file by setting default_deferrable to True)
+    """
+
+    INTERMEDIATE_STATES: tuple[str, ...] = ()
+    FAILURE_STATES: tuple[str, ...] = ()
+    SUCCESS_STATES: tuple[str, ...] = ()
+    FAILURE_MESSAGE = ""
+
+    aws_hook_class = BedrockHook
+    ui_color = "#66c3ff"
+
+    def __init__(
+        self,
+        deferrable: bool = conf.getboolean("operators", "default_deferrable", fallback=False),
+        **kwargs: Any,
+    ):
+        super().__init__(**kwargs)
+        self.deferrable = deferrable
+
+    def poke(self, context: Context) -> bool:
+        state = self.get_state()
+        if state in self.FAILURE_STATES:
+            # TODO: remove this if block when min_airflow_version is set to higher than 2.7.1
+            if self.soft_fail:
+                raise AirflowSkipException(self.FAILURE_MESSAGE)
+            raise AirflowException(self.FAILURE_MESSAGE)
+
+        return state not in self.INTERMEDIATE_STATES
+
+    @abc.abstractmethod
+    def get_state(self) -> str:
+        """Implement in subclasses."""
 
 
-class BedrockCustomizeModelCompletedSensor(AwsBaseSensor[BedrockHook]):
+class BedrockCustomizeModelCompletedSensor(BedrockBaseSensor):
     """
     Poll the state of the model customization job until it reaches a terminal state; fails if the job fails.
 
@@ -39,14 +91,13 @@ class BedrockCustomizeModelCompletedSensor(AwsBaseSensor[BedrockHook]):
         For more information on how to use this sensor, take a look at the guide:
         :ref:`howto/sensor:BedrockCustomizeModelCompletedSensor`
 
-
     :param job_name: The name of the Bedrock model customization job.
 
     :param deferrable: If True, the sensor will operate in deferrable mode. This mode requires aiobotocore
         module to be installed.
         (default: False, but can be overridden in config file by setting default_deferrable to True)
-    :param max_retries: Number of times before returning the current state. (default: 75)
     :param poke_interval: Polling period in seconds to check for the status of the job. (default: 120)
+    :param max_retries: Number of times before returning the current state. (default: 75)
     :param aws_conn_id: The Airflow connection used for AWS credentials.
         If this is ``None`` or empty then the default boto3 behaviour is used. If
         running Airflow in a distributed manner and aws_conn_id is None or
@@ -59,14 +110,12 @@ class BedrockCustomizeModelCompletedSensor(AwsBaseSensor[BedrockHook]):
         https://botocore.amazonaws.com/v1/documentation/api/latest/reference/config.html
     """
 
-    INTERMEDIATE_STATES = ("InProgress",)
-    FAILURE_STATES = ("Failed", "Stopping", "Stopped")
-    SUCCESS_STATES = ("Completed",)
+    INTERMEDIATE_STATES: tuple[str, ...] = ("InProgress",)
+    FAILURE_STATES: tuple[str, ...] = ("Failed", "Stopping", "Stopped")
+    SUCCESS_STATES: tuple[str, ...] = ("Completed",)
     FAILURE_MESSAGE = "Bedrock model customization job sensor failed."
 
-    aws_hook_class = BedrockHook
     template_fields: Sequence[str] = aws_template_fields("job_name")
-    ui_color = "#66c3ff"
 
     def __init__(
         self,
@@ -74,14 +123,12 @@ class BedrockCustomizeModelCompletedSensor(AwsBaseSensor[BedrockHook]):
         job_name: str,
         max_retries: int = 75,
         poke_interval: int = 120,
-        deferrable: bool = conf.getboolean("operators", "default_deferrable", fallback=False),
         **kwargs: Any,
     ) -> None:
         super().__init__(**kwargs)
-        self.job_name = job_name
         self.poke_interval = poke_interval
         self.max_retries = max_retries
-        self.deferrable = deferrable
+        self.job_name = job_name
 
     def execute(self, context: Context) -> Any:
         if self.deferrable:
@@ -97,14 +144,70 @@ class BedrockCustomizeModelCompletedSensor(AwsBaseSensor[BedrockHook]):
         else:
             super().execute(context=context)
 
-    def poke(self, context: Context) -> bool:
-        state = self.hook.conn.get_model_customization_job(jobIdentifier=self.job_name)["status"]
-        self.log.info("Job '%s' state: %s", self.job_name, state)
+    def get_state(self) -> str:
+        return self.hook.conn.get_model_customization_job(jobIdentifier=self.job_name)["status"]
 
-        if state in self.FAILURE_STATES:
-            # TODO: remove this if block when min_airflow_version is set to higher than 2.7.1
-            if self.soft_fail:
-                raise AirflowSkipException(self.FAILURE_MESSAGE)
-            raise AirflowException(self.FAILURE_MESSAGE)
 
-        return state not in self.INTERMEDIATE_STATES
+class BedrockProvisionModelThroughputCompletedSensor(BedrockBaseSensor):
+    """
+    Poll the provisioned model throughput job until it reaches a terminal state; fails if the job fails.
+
+    .. seealso::
+        For more information on how to use this sensor, take a look at the guide:
+        :ref:`howto/sensor:BedrockProvisionModelThroughputCompletedSensor`
+
+    :param model_id: The ARN or name of the provisioned throughput.
+
+    :param deferrable: If True, the sensor will operate in deferrable mode. This mode requires aiobotocore
+        module to be installed.
+        (default: False, but can be overridden in config file by setting default_deferrable to True)
+    :param poke_interval: Polling period in seconds to check for the status of the job. (default: 60)
+    :param max_retries: Number of times before returning the current state (default: 20)
+    :param aws_conn_id: The Airflow connection used for AWS credentials.
+        If this is ``None`` or empty then the default boto3 behaviour is used. If
+        running Airflow in a distributed manner and aws_conn_id is None or
+        empty, then default boto3 configuration would be used (and must be
+        maintained on each worker node).
+    :param region_name: AWS region_name. If not specified then the default boto3 behaviour is used.
+    :param verify: Whether or not to verify SSL certificates. See:
+        https://boto3.amazonaws.com/v1/documentation/api/latest/reference/core/session.html
+    :param botocore_config: Configuration dictionary (key-values) for botocore client. See:
+        https://botocore.amazonaws.com/v1/documentation/api/latest/reference/config.html
+    """
+
+    INTERMEDIATE_STATES: tuple[str, ...] = ("Creating", "Updating")
+    FAILURE_STATES: tuple[str, ...] = ("Failed",)
+    SUCCESS_STATES: tuple[str, ...] = ("InService",)
+    FAILURE_MESSAGE = "Bedrock provision model throughput sensor failed."
+
+    template_fields: Sequence[str] = aws_template_fields("model_id")
+
+    def __init__(
+        self,
+        *,
+        model_id: str,
+        poke_interval: int = 60,
+        max_retries: int = 20,
+        **kwargs,
+    ) -> None:
+        super().__init__(**kwargs)
+        self.poke_interval = poke_interval
+        self.max_retries = max_retries
+        self.model_id = model_id
+
+    def get_state(self) -> str:
+        return self.hook.conn.get_provisioned_model_throughput(provisionedModelId=self.model_id)["status"]
+
+    def execute(self, context: Context) -> Any:
+        if self.deferrable:
+            self.defer(
+                trigger=BedrockProvisionModelThroughputCompletedTrigger(
+                    provisioned_model_id=self.model_id,
+                    waiter_delay=int(self.poke_interval),
+                    waiter_max_attempts=self.max_retries,
+                    aws_conn_id=self.aws_conn_id,
+                ),
+                method_name="poke",
+            )
+        else:
+            super().execute(context=context)
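
A usage sketch pairing the new sensor with the operator example above; model_id is templated, so the ARN can be wired in from the create task's XCom (task id is a placeholder):

    from airflow.providers.amazon.aws.sensors.bedrock import (
        BedrockProvisionModelThroughputCompletedSensor,
    )

    await_throughput = BedrockProvisionModelThroughputCompletedSensor(
        task_id="await_throughput",
        model_id=provision_throughput.output,  # ARN returned by the create operator
        deferrable=True,  # hand polling off to the triggerer via the new trigger
    )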
@@ -24,7 +24,11 @@ from typing import TYPE_CHECKING, Any, Iterable, Sequence
 from deprecated import deprecated
 
 from airflow.configuration import conf
-from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning, AirflowSkipException
+from airflow.exceptions import (
+    AirflowException,
+    AirflowProviderDeprecationWarning,
+    AirflowSkipException,
+)
 from airflow.providers.amazon.aws.hooks.emr import EmrContainerHook, EmrHook, EmrServerlessHook
 from airflow.providers.amazon.aws.links.emr import EmrClusterLink, EmrLogsLink, get_log_uri
 from airflow.providers.amazon.aws.triggers.emr import (
@@ -231,7 +235,9 @@ class EmrServerlessApplicationSensor(BaseSensorOperator):
 
         if state in EmrServerlessHook.APPLICATION_FAILURE_STATES:
             # TODO: remove this if check when min_airflow_version is set to higher than 2.7.1
-            failure_message = f"EMR Serverless job failed: {self.failure_message_from_response(response)}"
+            failure_message = (
+                f"EMR Serverless application failed: {self.failure_message_from_response(response)}"
+            )
             if self.soft_fail:
                 raise AirflowSkipException(failure_message)
             raise AirflowException(failure_message)
@@ -23,7 +23,13 @@ from typing import TYPE_CHECKING, Sequence
 
 from airflow.models import BaseOperator
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
-from airflow.providers.microsoft.azure.hooks.wasb import WasbHook
+
+try:
+    from airflow.providers.microsoft.azure.hooks.wasb import WasbHook
+except ModuleNotFoundError as e:
+    from airflow.exceptions import AirflowOptionalProviderFeatureException
+
+    raise AirflowOptionalProviderFeatureException(e)
 
 if TYPE_CHECKING:
     from airflow.utils.context import Context
@@ -59,3 +59,39 @@ class BedrockCustomizeModelCompletedTrigger(AwsBaseWaiterTrigger):
 
     def hook(self) -> AwsGenericHook:
         return BedrockHook(aws_conn_id=self.aws_conn_id)
+
+
+class BedrockProvisionModelThroughputCompletedTrigger(AwsBaseWaiterTrigger):
+    """
+    Trigger when a provisioned throughput job is complete.
+
+    :param provisioned_model_id: The ARN or name of the provisioned throughput.
+    :param waiter_delay: The amount of time in seconds to wait between attempts. (default: 120)
+    :param waiter_max_attempts: The maximum number of attempts to be made. (default: 75)
+    :param aws_conn_id: The Airflow connection used for AWS credentials.
+    """
+
+    def __init__(
+        self,
+        *,
+        provisioned_model_id: str,
+        waiter_delay: int = 120,
+        waiter_max_attempts: int = 75,
+        aws_conn_id: str | None = None,
+    ) -> None:
+        super().__init__(
+            serialized_fields={"provisioned_model_id": provisioned_model_id},
+            waiter_name="provisioned_model_throughput_complete",
+            waiter_args={"provisionedModelId": provisioned_model_id},
+            failure_message="Bedrock provisioned throughput job failed.",
+            status_message="Status of Bedrock provisioned throughput job is",
+            status_queries=["status"],
+            return_key="provisioned_model_id",
+            return_value=provisioned_model_id,
+            waiter_delay=waiter_delay,
+            waiter_max_attempts=waiter_max_attempts,
+            aws_conn_id=aws_conn_id,
+        )
+
+    def hook(self) -> AwsGenericHook:
+        return BedrockHook(aws_conn_id=self.aws_conn_id)
@@ -37,6 +37,37 @@
                     "state": "failure"
                 }
             ]
+        },
+        "provisioned_model_throughput_complete": {
+            "delay": 60,
+            "maxAttempts": 20,
+            "operation": "getProvisionedModelThroughput",
+            "acceptors": [
+                {
+                    "matcher": "path",
+                    "argument": "status",
+                    "expected": "InService",
+                    "state": "success"
+                },
+                {
+                    "matcher": "path",
+                    "argument": "status",
+                    "expected": "Creating",
+                    "state": "retry"
+                },
+                {
+                    "matcher": "path",
+                    "argument": "status",
+                    "expected": "Updating",
+                    "state": "retry"
+                },
+                {
+                    "matcher": "path",
+                    "argument": "status",
+                    "expected": "Failed",
+                    "state": "failure"
+                }
+            ]
         }
     }
 }
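
The same waiter is also reachable directly from the hook, which is what the operator's synchronous path uses. A minimal sketch with a placeholder ARN:

    from airflow.providers.amazon.aws.hooks.bedrock import BedrockHook

    BedrockHook(aws_conn_id="aws_default").get_waiter("provisioned_model_throughput_complete").wait(
        provisionedModelId="arn:aws:bedrock:us-east-1:123456789012:provisioned-model/abc123",
        WaiterConfig={"Delay": 60, "MaxAttempts": 20},
    )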
@@ -28,8 +28,9 @@ def get_provider_info():
         "name": "Amazon",
         "description": "Amazon integration (including `Amazon Web Services (AWS) <https://aws.amazon.com/>`__).\n",
         "state": "ready",
-        "source-date-epoch": 1712664326,
+        "source-date-epoch": 1714475383,
         "versions": [
+            "8.21.0",
             "8.20.0",
             "8.19.0",
             "8.18.0",
@@ -87,7 +88,7 @@ def get_provider_info():
             "1.0.0",
         ],
         "dependencies": [
-            "apache-airflow>=2.6.0",
+            "apache-airflow>=2.7.0",
             "apache-airflow-providers-common-sql>=1.3.1",
             "apache-airflow-providers-http",
             "boto3>=1.33.0",
@@ -99,6 +100,7 @@ def get_provider_info():
             "sqlalchemy_redshift>=0.8.6",
             "asgiref",
             "PyAthena>=3.0.10",
+            "xmlsec<1.3.14",
         ],
         "additional-extras": [
             {"name": "pandas", "dependencies": ["pandas>=1.2.5,<2.2"]},
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: apache-airflow-providers-amazon
-Version: 8.20.0rc1
+Version: 8.21.0rc1
 Summary: Provider package apache-airflow-providers-amazon for Apache Airflow
 Keywords: airflow-provider,amazon,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -24,7 +24,7 @@ Classifier: Topic :: System :: Monitoring
 Requires-Dist: PyAthena>=3.0.10
 Requires-Dist: apache-airflow-providers-common-sql>=1.3.1rc0
 Requires-Dist: apache-airflow-providers-http
-Requires-Dist: apache-airflow>=2.6.0rc0
+Requires-Dist: apache-airflow>=2.7.0rc0
 Requires-Dist: asgiref
 Requires-Dist: boto3>=1.33.0
 Requires-Dist: botocore>=1.33.0
@@ -33,6 +33,7 @@ Requires-Dist: jsonpath_ng>=1.5.3
 Requires-Dist: redshift_connector>=2.0.918
 Requires-Dist: sqlalchemy_redshift>=0.8.6
 Requires-Dist: watchtower>=2.0.1,<4
+Requires-Dist: xmlsec<1.3.14
 Requires-Dist: aiobotocore[boto3]>=2.5.3 ; extra == "aiobotocore"
 Requires-Dist: apache-airflow-providers-apache-hive ; extra == "apache.hive"
 Requires-Dist: apache-airflow-providers-cncf-kubernetes>=7.2.0rc0 ; extra == "cncf.kubernetes"
@@ -51,8 +52,8 @@ Requires-Dist: s3fs>=2023.10.0 ; extra == "s3fs"
 Requires-Dist: apache-airflow-providers-salesforce ; extra == "salesforce"
 Requires-Dist: apache-airflow-providers-ssh ; extra == "ssh"
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-amazon/8.20.0/changelog.html
-Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-amazon/8.20.0
+Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-amazon/8.21.0/changelog.html
+Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-amazon/8.21.0
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
 Project-URL: Twitter, https://twitter.com/ApacheAirflow
@@ -119,7 +120,7 @@ Provides-Extra: ssh
 
 Package ``apache-airflow-providers-amazon``
 
-Release: ``8.20.0.rc1``
+Release: ``8.21.0.rc1``
 
 
 Amazon integration (including `Amazon Web Services (AWS) <https://aws.amazon.com/>`__).
@@ -132,7 +133,7 @@ This is a provider package for ``amazon`` provider. All classes for this provide
 are in ``airflow.providers.amazon`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-amazon/8.20.0/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-amazon/8.21.0/>`_.
 
 Installation
 ------------
@@ -149,7 +150,7 @@ Requirements
 ======================================= ==================
 PIP package                             Version required
 ======================================= ==================
-``apache-airflow``                      ``>=2.6.0``
+``apache-airflow``                      ``>=2.7.0``
 ``apache-airflow-providers-common-sql`` ``>=1.3.1``
 ``apache-airflow-providers-http``
 ``boto3``                               ``>=1.33.0``
@@ -161,6 +162,7 @@ PIP package Version required
 ``sqlalchemy_redshift``                 ``>=0.8.6``
 ``asgiref``
 ``PyAthena``                            ``>=3.0.10``
+``xmlsec``                              ``<1.3.14``
 ======================================= ==================
 
 Cross provider package dependencies
@@ -195,4 +197,4 @@ Dependent package
 ====================================================================================================================== ===================
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-amazon/8.20.0/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-amazon/8.21.0/changelog.html>`_.
@@ -1,15 +1,15 @@
 airflow/providers/amazon/LICENSE,sha256=ywUBpKZc7Jb96rVt5I3IDbg7dIJAbUSHkuoDcF3jbH4,13569
-airflow/providers/amazon/__init__.py,sha256=R8BBhCd76vWdc8EGaNmdAsPkrRihUonA0ZFq-Lkf3FM,1582
-airflow/providers/amazon/get_provider_info.py,sha256=CPzaK0yGmSccZrNEwMpxCBHdvOXj8PLIb6IZZlNwXAA,64085
+airflow/providers/amazon/__init__.py,sha256=ObBKYGps0vpdjT5woWVLANqJtnxS3PU6IUr-sDxPr0w,1582
+airflow/providers/amazon/get_provider_info.py,sha256=_SdLzwOp4igo2MQ-zhoDSrqr9kMUXWNLaQaesIKWEP4,64136
 airflow/providers/amazon/aws/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/amazon/aws/exceptions.py,sha256=uRGNMgXvgdzfphpOTiyj74lQhjzb70J-X8n6fsx5Jog,1864
 airflow/providers/amazon/aws/auth_manager/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/amazon/aws/auth_manager/aws_auth_manager.py,sha256=AzBDCi09nt3A25XGppUU1h5uQtzwzEtJDFkzWx5I7QM,16511
-airflow/providers/amazon/aws/auth_manager/constants.py,sha256=Na8YvT1JVgtkZRpWHuc2aWfbXoVPHnFtdNPdDZnr2pM,1077
+airflow/providers/amazon/aws/auth_manager/aws_auth_manager.py,sha256=dSvuhLw5npmNMIIhZp1_yGx2OrV0UrgBfdxsSWw64TM,16100
+airflow/providers/amazon/aws/auth_manager/constants.py,sha256=Jdluo42InhyNGkYHB_dRtoFMpKanJLJdH0hyR9-5AZg,1050
 airflow/providers/amazon/aws/auth_manager/user.py,sha256=SoiiA3sVB1-G02qhQDSTst_25MjW4xbSE0vVDxwR-uw,1882
 airflow/providers/amazon/aws/auth_manager/avp/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/amazon/aws/auth_manager/avp/entities.py,sha256=qDp7CjaAeU8TrUAaL6Hhe2nQffLKCeYVDZzPft-ZTyw,1965
-airflow/providers/amazon/aws/auth_manager/avp/facade.py,sha256=697E7VAgeQSr0LGBtIsw3haxacCx7hCUgfLOs1Zql_w,10951
+airflow/providers/amazon/aws/auth_manager/avp/entities.py,sha256=YhV-oa4dDp3MmzS14FcWma4w8WvNvuysR8X4T1pqdk0,1971
+airflow/providers/amazon/aws/auth_manager/avp/facade.py,sha256=9Y866BnSj-fzXRjhiaXiQR2vFQe8kL0KkAQkao585CI,11177
 airflow/providers/amazon/aws/auth_manager/avp/schema.json,sha256=BQhsP11UvFEGD2v3GFduAbEw0bp1ZMB5IkirWhlkaFA,6375
 airflow/providers/amazon/aws/auth_manager/cli/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/amazon/aws/auth_manager/cli/avp_commands.py,sha256=SCTdbAsedEpDuoAAQGLbAeKxcoGzjacgmQAxbkkvUbU,5549
@@ -21,14 +21,14 @@ airflow/providers/amazon/aws/auth_manager/views/__init__.py,sha256=9hdXHABrVpkbp
 airflow/providers/amazon/aws/auth_manager/views/auth.py,sha256=e5InDh2jYEBClkgn9xm6fYl8qK-miP692WuOGa5gC6g,5884
 airflow/providers/amazon/aws/executors/Dockerfile,sha256=VZ-YOR59KSMoztJV_g7v5hUwetKR0Ii4wNNaKqDIfyQ,4275
 airflow/providers/amazon/aws/executors/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/amazon/aws/executors/batch/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
+airflow/providers/amazon/aws/executors/batch/__init__.py,sha256=TPSNZJ6E3zqN7mvdrMrarqwHeFYN9Efd2jD3hpN7tr0,970
 airflow/providers/amazon/aws/executors/batch/batch_executor.py,sha256=HDMinOB1KHo5OinQjlS0MSqgzfrlDGX0ZiIAyijUzOU,18503
 airflow/providers/amazon/aws/executors/batch/batch_executor_config.py,sha256=7yYLKB1jRoBy0AeW5chcpz7i2UfvSQob9QLvMhYUWDQ,3223
 airflow/providers/amazon/aws/executors/batch/boto_schema.py,sha256=Rqr_uk6Tx6hNVYsQRPNlLj0zC8TC_awWk2rv3tkUuYU,2445
 airflow/providers/amazon/aws/executors/batch/utils.py,sha256=Jugs8lvvtWey_CcwMkHnRVe9G0Sn8wyVmbROVrjgk9A,5286
-airflow/providers/amazon/aws/executors/ecs/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
+airflow/providers/amazon/aws/executors/ecs/__init__.py,sha256=J_B7TIPPQmn67Y7kzr4pgzcpFRr0wUp6gVsyfz5GKc4,962
 airflow/providers/amazon/aws/executors/ecs/boto_schema.py,sha256=hxj76uoo4y9koshb5Ou2hyjvNKCtrSK5wXea3iVtPqs,3762
-airflow/providers/amazon/aws/executors/ecs/ecs_executor.py,sha256=mQejWxB3oQz64JrO-Cp9yHoqgBWZY0-GYB6WSeJQazI,23918
+airflow/providers/amazon/aws/executors/ecs/ecs_executor.py,sha256=7C8tS4G685eaWHTrcYvhbN-M8IfWvnTnokozC47lGj8,23919
 airflow/providers/amazon/aws/executors/ecs/ecs_executor_config.py,sha256=iRP39ynsYFOisLN9NQsiLikTrBBN54bWaFQs60Snrsw,5436
 airflow/providers/amazon/aws/executors/ecs/utils.py,sha256=RLsmPN5MpLpXQftkyoIb8i8HxAw2R3vQWK1zM_M5XDg,9477
 airflow/providers/amazon/aws/executors/utils/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
@@ -41,7 +41,7 @@ airflow/providers/amazon/aws/hooks/appflow.py,sha256=-le6RsIMWIqTav7KGknsph9Td42
 airflow/providers/amazon/aws/hooks/athena.py,sha256=0jVsdD56D_eJ16tMJlhmKf06LI2gFVna35aTKx2c67w,13567
 airflow/providers/amazon/aws/hooks/athena_sql.py,sha256=GDQ2VM0ECs3wRBPYPLB3yJ4EfKecyJCUfc6X3cnScR0,6839
 airflow/providers/amazon/aws/hooks/base_aws.py,sha256=U5ASy6AAi2tS19SiEsnyLvzPPa1qhing4u2Zbg6QZaM,49224
-airflow/providers/amazon/aws/hooks/batch_client.py,sha256=eoz3ytB-VM5ucu4SpJPGk5cb-0C1VDQFDwiTdL1_uo4,21349
+airflow/providers/amazon/aws/hooks/batch_client.py,sha256=CHdPR14XxU4dBQMK4NIU7NwYUPNdF0qfy2XsCnhkVQU,21431
 airflow/providers/amazon/aws/hooks/batch_waiters.json,sha256=eoN5YDgeTNZ2Xz17TrbKBPhd7z9-6KD3RhaDKXXOvqU,2511
 airflow/providers/amazon/aws/hooks/batch_waiters.py,sha256=VlAf3NYyGsfmOw9S4Ods8VKua3mBWSLHEAr8hHCHdmY,10579
 airflow/providers/amazon/aws/hooks/bedrock.py,sha256=Y9Y1p0uxzBF6beVxDNe9rydGR9PmEpRMe6At1ktOW6A,2077
@@ -55,7 +55,7 @@ airflow/providers/amazon/aws/hooks/ecr.py,sha256=X_VnUgSlqqxCftWE6R_sSfp_FFOnBQ9
 airflow/providers/amazon/aws/hooks/ecs.py,sha256=p_7qHGZbZWY-WHJEJD186o6mP5HBXmUfCJZ7usdydO0,6666
 airflow/providers/amazon/aws/hooks/eks.py,sha256=rj15KoRbcYicLUO4SOFq4-G-T_iLh0WzRkVtZ8wVM6M,24440
 airflow/providers/amazon/aws/hooks/elasticache_replication_group.py,sha256=x6kkaR2nzDF8w1kqolbaS3-XCbHl5qlJMcpGYmlsxuU,12089
-airflow/providers/amazon/aws/hooks/emr.py,sha256=EK99rlcx_qKjC20rmyTVxOAyE7mXkCQDX6a73ZPJMHQ,21380
+airflow/providers/amazon/aws/hooks/emr.py,sha256=7x2YCGKr5nylRfbBitqRXmvW0RwYgARXv4pXXeu_WJ0,21430
 airflow/providers/amazon/aws/hooks/eventbridge.py,sha256=dSaKbFB8ueOUJGl6YLIz70zXy0Xzr3yMflKS2wGFDSM,3364
 airflow/providers/amazon/aws/hooks/glacier.py,sha256=BTDavN3NUwWe1hBCLWYWhmpliuwybEByMGtWnYPRL5Q,3463
 airflow/providers/amazon/aws/hooks/glue.py,sha256=WqT0yv9k24jZW2kCKTbZE_BKY1L2K1znIHgw5N28uDE,17230
@@ -63,7 +63,7 @@ airflow/providers/amazon/aws/hooks/glue_catalog.py,sha256=XQu9v_b37TXO7F_V3u7WuL
 airflow/providers/amazon/aws/hooks/glue_crawler.py,sha256=C9O2YG63BiNS6UvvB1Mn1aHWdRYzDBf2a5brimLU9IQ,7926
 airflow/providers/amazon/aws/hooks/glue_databrew.py,sha256=96duZVYtLDQgfJ02XUdov-QWPoG2Wp0O0RFuwB-6nkU,2580
 airflow/providers/amazon/aws/hooks/kinesis.py,sha256=aR7Oi_2_ZtBfH1--U83wWjSYzzy_Uh2tqplLAPRQugA,1997
-airflow/providers/amazon/aws/hooks/lambda_function.py,sha256=ORNMoYSXSXaVqZMgb0Qaq923n30Bj3jK5KwJcDcdqLU,8932
+airflow/providers/amazon/aws/hooks/lambda_function.py,sha256=m08jo6R9qoFBMaBuJwgDIvw7i-IbIJ2F1jZD4NbhHLA,9520
 airflow/providers/amazon/aws/hooks/logs.py,sha256=pS987tewWr8HY7KTzIhImoYpdaconHRjcLV2bQIMGJ8,9818
 airflow/providers/amazon/aws/hooks/neptune.py,sha256=IvQjAdtdDKWwHmfs-t7YG516p6Y3xHe453WgU0C47j0,3264
 airflow/providers/amazon/aws/hooks/quicksight.py,sha256=MFTlrWV88wLky2swo-b5fFQDLbMQCw6w6lcgAvJqveU,7957
@@ -101,7 +101,7 @@ airflow/providers/amazon/aws/operators/appflow.py,sha256=PoHw62T_lD4burfRzXPJsjA
 airflow/providers/amazon/aws/operators/athena.py,sha256=CmaaSn0sCe6RZEbb_l-9xKXb-9DW21IiL2ewpA3jZnI,14172
 airflow/providers/amazon/aws/operators/base_aws.py,sha256=cdc5GZkl_YGDDtlV9CVsdbTH3j7bza6d3RrDm93seOo,3864
 airflow/providers/amazon/aws/operators/batch.py,sha256=-ZmD-kFu99KXg3tKKNd8DcMhj0N3_GTL9bum7h-oE_U,22341
-airflow/providers/amazon/aws/operators/bedrock.py,sha256=79TwPRDx12_0tkbikRhxvXWRPEDNUYv5SbfX0OVnL1g,12066
+airflow/providers/amazon/aws/operators/bedrock.py,sha256=AfNTc3NlFwWVs7oXKwu5iHlihzzqwTdQ8p-IotUsf70,16774
 airflow/providers/amazon/aws/operators/cloud_formation.py,sha256=-WMYq-oA8WpPN2i5aTgBenFj9-CjbeEcy9NuRCnSwpM,5066
 airflow/providers/amazon/aws/operators/datasync.py,sha256=Pxxt1G1FbmwSqXChZjeQKuS0uUS4QS_qbK69F8Y1SqE,18920
 airflow/providers/amazon/aws/operators/dms.py,sha256=6RhUtbELAjp0LLkUWl73kdcH4MRmyTzwHi1NxOlkE0Q,12313
@@ -111,7 +111,7 @@ airflow/providers/amazon/aws/operators/eks.py,sha256=0xHtPzxZx3ymr6-iqGvAoZsnA24
 airflow/providers/amazon/aws/operators/emr.py,sha256=bbXCN-qRoaXuJykNLJ8fxQpeissP6ZgardZilNT_N5M,87889
 airflow/providers/amazon/aws/operators/eventbridge.py,sha256=e686XFhVi54DbaCk7oVc0fhvH6GIPU3p8jgyCie1yBU,10394
 airflow/providers/amazon/aws/operators/glacier.py,sha256=zxwC6lLk6sWerjlogXq6HgNOJx4h0hkqpGpqn23hJWk,3654
-airflow/providers/amazon/aws/operators/glue.py,sha256=_wKO-SXB9qPgO9vfByJCRLGUZOqENc_yJHMSi3MaYsk,10148
+airflow/providers/amazon/aws/operators/glue.py,sha256=7UfrdxD1-hMNjmbinUISNZiEM0BHeLXYxgaNE_zUpsk,10427
 airflow/providers/amazon/aws/operators/glue_crawler.py,sha256=KNf-f7HUFfTKVVd1cyVrNS5suKAF4LibzP3aQT_aREM,4835
 airflow/providers/amazon/aws/operators/glue_databrew.py,sha256=zXP0_ZoqFmjcG0REDj-e7GdZmrSvJtfvZrqg1yKSH_U,4475
 airflow/providers/amazon/aws/operators/lambda_function.py,sha256=96KtK5KUpMPW2i8Xay1UdKPMX211hS6FqweFnRNuTFQ,10619
@@ -132,14 +132,14 @@ airflow/providers/amazon/aws/sensors/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOF
 airflow/providers/amazon/aws/sensors/athena.py,sha256=CS7sTcQrMW6VVxaUhOY0pIm8382_KpkCBmXl5fCCMk4,3863
 airflow/providers/amazon/aws/sensors/base_aws.py,sha256=vkSs3jwtvrdAs4z2flrOLW8zHHPqc7Pfb-EuCTSs7ZY,3853
 airflow/providers/amazon/aws/sensors/batch.py,sha256=_--bT2Wh59x4C3W9Xu4h7xrC--bCOPn29YzbsB6TRzw,11487
-airflow/providers/amazon/aws/sensors/bedrock.py,sha256=g9uuz68R4m8dTd3lgAVpmkw5kNvaPsuIya-5-UCB-no,4853
+airflow/providers/amazon/aws/sensors/bedrock.py,sha256=iq6a3_R-XHZGbcDqcalI1p01cOMQzRMFawGIrMsTo24,8870
 airflow/providers/amazon/aws/sensors/cloud_formation.py,sha256=kLINQol-ZFpjOpSBFQ7I4JXZkBjEICT-g8MT-gXscLw,5426
 airflow/providers/amazon/aws/sensors/dms.py,sha256=SzKUgEtL0agejWKWJvjqmrJdtwbd2vJriHcCsYV0j8s,5744
 airflow/providers/amazon/aws/sensors/dynamodb.py,sha256=P43g73ACBoDo0Lrxzm9wOoEepbRVAOjqtwWYvIq1Tls,5008
 airflow/providers/amazon/aws/sensors/ec2.py,sha256=GpEVRZEl4oW4eB4ION8H2nfAHp-MGptwav7DpqL3Uo0,4119
 airflow/providers/amazon/aws/sensors/ecs.py,sha256=Bju2xJHNI8SdddD1muDcqtihL__EAHQwa-RtYxPjfoI,7087
 airflow/providers/amazon/aws/sensors/eks.py,sha256=TeSQ__B3eUYfdkr4a8x0oT0JUl_8JdiLs3p0p92YRlo,9831
-airflow/providers/amazon/aws/sensors/emr.py,sha256=Aiya22VyC-ZAoCOi0CF2_kbEj1Hn0OV0bJYFM0EjNQQ,25923
+airflow/providers/amazon/aws/sensors/emr.py,sha256=vxZhqM0-FUFB8huCwiPXv84pwBi5n91QSX8JbweaWxA,25980
 airflow/providers/amazon/aws/sensors/glacier.py,sha256=2UUI-y-x07DH8I5OikA_d5_FHCMQjBpxMKxRvlZSlS4,4282
 airflow/providers/amazon/aws/sensors/glue.py,sha256=Jxq4kXNzhX3hOsodWFIFez19M_HhyHcZBzTAHXEz64A,3547
 airflow/providers/amazon/aws/sensors/glue_catalog_partition.py,sha256=YYahteocna91s3TZ7yrmiAKBKNQgvfs6D8mey0TRrrg,5610
@@ -153,7 +153,7 @@ airflow/providers/amazon/aws/sensors/sagemaker.py,sha256=Pd8S0hExbaqdqOKglQAi51E
 airflow/providers/amazon/aws/sensors/sqs.py,sha256=GFzHT5nFSyIMATqwqjhEmOWZfwdOcAe4T6yUFNUlvWk,11329
 airflow/providers/amazon/aws/sensors/step_function.py,sha256=pqAtBJd3m003qvaJwr4BrKBHhYWGrJ67yaqczjcE1_w,4089
 airflow/providers/amazon/aws/transfers/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/amazon/aws/transfers/azure_blob_to_s3.py,sha256=TgElDGp084SGEKR07xWnQ9WWPsE7XqMqSQVk7NSTGyI,7049
+airflow/providers/amazon/aws/transfers/azure_blob_to_s3.py,sha256=NtfuaKa5Jp5rTafp71xoTaOR52jrfmhYeZKGEppJJ3U,7221
 airflow/providers/amazon/aws/transfers/base.py,sha256=LMIgzvDgCMMojkhoJCSJh4egcpUZ9V2FoX8yMF3xeOk,2987
 airflow/providers/amazon/aws/transfers/dynamodb_to_s3.py,sha256=c-wOOXL1tUFPKACjWZiiDewsfCcMoCdcofZ2S5Af330,8352
 airflow/providers/amazon/aws/transfers/exasol_to_s3.py,sha256=kevKzRidFKgibSKSz5-blkxHDV0isc8euU08iHn83z0,4418
@@ -179,7 +179,7 @@ airflow/providers/amazon/aws/triggers/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvR
 airflow/providers/amazon/aws/triggers/athena.py,sha256=TAn2snAGHPfRT-pG8r0EMKlTSgwKr4APsJMqAE8pORI,2666
 airflow/providers/amazon/aws/triggers/base.py,sha256=QT-omauJw_ksUb6gS2erm_FVXSZdwHIpyznFXTkoMXo,6384
 airflow/providers/amazon/aws/triggers/batch.py,sha256=rQYVBgcChS1PuOTFiyEKMdC2nbYLPH_Gkm1AAr1Tuas,10849
-airflow/providers/amazon/aws/triggers/bedrock.py,sha256=lPEdS92QxIeB4VKZ4nnAqMFquPC7hsSShgS8ACvyvDg,2443
+airflow/providers/amazon/aws/triggers/bedrock.py,sha256=_YbNfSU0FJow7R84xJHLPjaJR7S5Zin4t3rn35tktes,3901
 airflow/providers/amazon/aws/triggers/ec2.py,sha256=gMY3EP4TmL6SodLw12FNSLttlHd7hRhOu-q3CiG7y2w,3245
 airflow/providers/amazon/aws/triggers/ecs.py,sha256=wdHCqhTfOUDsf3MRWlwQmxZ-jxgGIN2ug9JA1bZiG9k,9131
 airflow/providers/amazon/aws/triggers/eks.py,sha256=t69h3uIgeQTG3d7n7faOJyMPfsbL6OtkmCTUrpq_jqI,16980
@@ -217,7 +217,7 @@ airflow/providers/amazon/aws/waiters/appflow.json,sha256=aeYUa6gDxvOjDxarOUOPVjp
 airflow/providers/amazon/aws/waiters/athena.json,sha256=sn4OEGG6PKVbd2JVCL3M8iqj0k-7EisesRxdPMnGrqI,892
 airflow/providers/amazon/aws/waiters/base_waiter.py,sha256=CTzi5Q1mgboCeOxUeRL5zZ-lRnMdWHQ-Y-Xv3LDz7MQ,1910
 airflow/providers/amazon/aws/waiters/batch.json,sha256=Viw4UP1nm2D80sG-4ezMAed9FsdBWd1ID1SuM9uaKpA,1206
-airflow/providers/amazon/aws/waiters/bedrock.json,sha256=DqRR3fc33TFmv9DiTNFsYOMBeSbQujf2cLg-u4AGji8,1254
+airflow/providers/amazon/aws/waiters/bedrock.json,sha256=HtnOezFlA28a39LZc0Q1L7vnepCn-UXrlbOyuG_Ga9I,2269
 airflow/providers/amazon/aws/waiters/databrew.json,sha256=2phkNTbNjBWmnqggtXeT5YtoWOPYRtQ-J18lGUm2zTI,1017
 airflow/providers/amazon/aws/waiters/dynamodb.json,sha256=wKG3hryTq3bNrSMzSKWbH-SPBaIPtZAKSTTflr3E5rM,895
 airflow/providers/amazon/aws/waiters/ecs.json,sha256=msjoYqzJMf8SCldtlzG4sGBcyzy2sN3W91JS3akXKy4,1674
@@ -230,7 +230,7 @@ airflow/providers/amazon/aws/waiters/neptune.json,sha256=4IP0FPqdItVmyP_au9hxpMT
 airflow/providers/amazon/aws/waiters/redshift.json,sha256=jOBotCgbkko1b_CHcGEbhhRvusgt0YSzVuFiZrqVP30,1742
 airflow/providers/amazon/aws/waiters/sagemaker.json,sha256=JPHuQtUFZ1B7EMLfVmCRevNZ9jgpB71LM0dva8ZEO9A,5254
 airflow/providers/amazon/aws/waiters/stepfunctions.json,sha256=aBaAZaGv8ZZGdN-2gvYEbq3fL_WHI_7s6SSDL-nWS1A,1034
-apache_airflow_providers_amazon-8.20.0rc1.dist-info/entry_points.txt,sha256=vlc0ZzhBkMrav1maTRofgksnAw4SwoQLFX9cmnTgktk,102
-apache_airflow_providers_amazon-8.20.0rc1.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81
-apache_airflow_providers_amazon-8.20.0rc1.dist-info/METADATA,sha256=THFJIZ3ksDRO5DA-OkyIv-HOihCNXh1Fj9c8tYWdRp4,10061
-apache_airflow_providers_amazon-8.20.0rc1.dist-info/RECORD,,
+apache_airflow_providers_amazon-8.21.0rc1.dist-info/entry_points.txt,sha256=vlc0ZzhBkMrav1maTRofgksnAw4SwoQLFX9cmnTgktk,102
+apache_airflow_providers_amazon-8.21.0rc1.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81
+apache_airflow_providers_amazon-8.21.0rc1.dist-info/METADATA,sha256=Kg60xUaneHYCZnLlLNdDxkufzWohbfGV_wM_peF1hz8,10143
+apache_airflow_providers_amazon-8.21.0rc1.dist-info/RECORD,,