apache-airflow-providers-amazon 9.9.1rc1__py3-none-any.whl → 9.10.0rc1__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
--- a/airflow/providers/amazon/__init__.py
+++ b/airflow/providers/amazon/__init__.py
@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version
 
 __all__ = ["__version__"]
 
-__version__ = "9.9.1"
+__version__ = "9.10.0"
 
 if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
     "2.10.0"
--- a/airflow/providers/amazon/aws/auth_manager/avp/facade.py
+++ b/airflow/providers/amazon/aws/auth_manager/avp/facade.py
@@ -37,6 +37,13 @@ from airflow.utils.log.logging_mixin import LoggingMixin
 
 if TYPE_CHECKING:
     from airflow.api_fastapi.auth.managers.base_auth_manager import ResourceMethod
+
+    try:
+        from airflow.api_fastapi.auth.managers.base_auth_manager import ExtendedResourceMethod
+    except ImportError:
+        from airflow.api_fastapi.auth.managers.base_auth_manager import (
+            ResourceMethod as ExtendedResourceMethod,
+        )
     from airflow.providers.amazon.aws.auth_manager.user import AwsAuthManagerUser
 
 
@@ -48,7 +55,7 @@ NB_REQUESTS_PER_BATCH = 30
 class IsAuthorizedRequest(TypedDict, total=False):
     """Represent the parameters of ``is_authorized`` method in AVP facade."""
 
-    method: ResourceMethod
+    method: ExtendedResourceMethod
     entity_type: AvpEntities
     entity_id: str | None
     context: dict | None
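
For orientation, a request matching this TypedDict mirrors the dicts the auth manager assembles for the facade; the "GET" method and connection id below are illustrative values, not taken from the diff:

    request: IsAuthorizedRequest = {
        "method": "GET",
        "entity_type": AvpEntities.CONNECTION,
        "entity_id": "aws_default",
    }
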
--- a/airflow/providers/amazon/aws/auth_manager/aws_auth_manager.py
+++ b/airflow/providers/amazon/aws/auth_manager/aws_auth_manager.py
@@ -44,10 +44,7 @@ from airflow.providers.amazon.version_compat import AIRFLOW_V_3_0_PLUS
 if TYPE_CHECKING:
     from airflow.api_fastapi.auth.managers.base_auth_manager import ResourceMethod
     from airflow.api_fastapi.auth.managers.models.batch_apis import (
-        IsAuthorizedConnectionRequest,
         IsAuthorizedDagRequest,
-        IsAuthorizedPoolRequest,
-        IsAuthorizedVariableRequest,
     )
     from airflow.api_fastapi.auth.managers.models.resource_details import (
         AccessView,
@@ -247,24 +244,6 @@ class AwsAuthManager(BaseAuthManager[AwsAuthManagerUser]):
 
         return [menu_item for menu_item in menu_items if _has_access_to_menu_item(requests[menu_item.value])]
 
-    def batch_is_authorized_connection(
-        self,
-        requests: Sequence[IsAuthorizedConnectionRequest],
-        *,
-        user: AwsAuthManagerUser,
-    ) -> bool:
-        facade_requests: Sequence[IsAuthorizedRequest] = [
-            {
-                "method": request["method"],
-                "entity_type": AvpEntities.CONNECTION,
-                "entity_id": cast("ConnectionDetails", request["details"]).conn_id
-                if request.get("details")
-                else None,
-            }
-            for request in requests
-        ]
-        return self.avp_facade.batch_is_authorized(requests=facade_requests, user=user)
-
     def batch_is_authorized_dag(
         self,
         requests: Sequence[IsAuthorizedDagRequest],
@@ -288,40 +267,6 @@ class AwsAuthManager(BaseAuthManager[AwsAuthManagerUser]):
         ]
         return self.avp_facade.batch_is_authorized(requests=facade_requests, user=user)
 
-    def batch_is_authorized_pool(
-        self,
-        requests: Sequence[IsAuthorizedPoolRequest],
-        *,
-        user: AwsAuthManagerUser,
-    ) -> bool:
-        facade_requests: Sequence[IsAuthorizedRequest] = [
-            {
-                "method": request["method"],
-                "entity_type": AvpEntities.POOL,
-                "entity_id": cast("PoolDetails", request["details"]).name if request.get("details") else None,
-            }
-            for request in requests
-        ]
-        return self.avp_facade.batch_is_authorized(requests=facade_requests, user=user)
-
-    def batch_is_authorized_variable(
-        self,
-        requests: Sequence[IsAuthorizedVariableRequest],
-        *,
-        user: AwsAuthManagerUser,
-    ) -> bool:
-        facade_requests: Sequence[IsAuthorizedRequest] = [
-            {
-                "method": request["method"],
-                "entity_type": AvpEntities.VARIABLE,
-                "entity_id": cast("VariableDetails", request["details"]).key
-                if request.get("details")
-                else None,
-            }
-            for request in requests
-        ]
-        return self.avp_facade.batch_is_authorized(requests=facade_requests, user=user)
-
     def filter_authorized_dag_ids(
         self,
         *,
--- /dev/null
+++ b/airflow/providers/amazon/aws/bundles/__init__.py
@@ -0,0 +1,16 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
--- /dev/null
+++ b/airflow/providers/amazon/aws/bundles/s3.py
@@ -0,0 +1,152 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+import os
+from pathlib import Path
+
+import structlog
+
+from airflow.dag_processing.bundles.base import BaseDagBundle
+from airflow.exceptions import AirflowException
+from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
+from airflow.providers.amazon.aws.hooks.s3 import S3Hook
+
+
+class S3DagBundle(BaseDagBundle):
+    """
+    S3 DAG bundle - exposes a directory in S3 as a DAG bundle.
+
+    This allows Airflow to load DAGs directly from an S3 bucket.
+
+    :param aws_conn_id: Airflow connection ID for AWS. Defaults to AwsBaseHook.default_conn_name.
+    :param bucket_name: The name of the S3 bucket containing the DAG files.
+    :param prefix: Optional subdirectory within the S3 bucket where the DAGs are stored.
+        If None, DAGs are assumed to be at the root of the bucket (Optional).
+    """
+
+    supports_versioning = False
+
+    def __init__(
+        self,
+        *,
+        aws_conn_id: str = AwsBaseHook.default_conn_name,
+        bucket_name: str,
+        prefix: str = "",
+        **kwargs,
+    ) -> None:
+        super().__init__(**kwargs)
+        self.aws_conn_id = aws_conn_id
+        self.bucket_name = bucket_name
+        self.prefix = prefix
+        # Local path where S3 DAGs are downloaded
+        self.s3_dags_dir: Path = self.base_dir
+
+        log = structlog.get_logger(__name__)
+        self._log = log.bind(
+            bundle_name=self.name,
+            version=self.version,
+            bucket_name=self.bucket_name,
+            prefix=self.prefix,
+            aws_conn_id=self.aws_conn_id,
+        )
+        self._s3_hook: S3Hook | None = None
+
+    def _initialize(self):
+        with self.lock():
+            if not self.s3_dags_dir.exists():
+                self._log.info("Creating local DAGs directory: %s", self.s3_dags_dir)
+                os.makedirs(self.s3_dags_dir)
+
+            if not self.s3_dags_dir.is_dir():
+                raise AirflowException(f"Local DAGs path: {self.s3_dags_dir} is not a directory.")
+
+            if not self.s3_hook.check_for_bucket(bucket_name=self.bucket_name):
+                raise AirflowException(f"S3 bucket '{self.bucket_name}' does not exist.")
+
+            if self.prefix:
+                # don't check when prefix is ""
+                if not self.s3_hook.check_for_prefix(
+                    bucket_name=self.bucket_name, prefix=self.prefix, delimiter="/"
+                ):
+                    raise AirflowException(
+                        f"S3 prefix 's3://{self.bucket_name}/{self.prefix}' does not exist."
+                    )
+            self.refresh()
+
+    def initialize(self) -> None:
+        self._initialize()
+        super().initialize()
+
+    @property
+    def s3_hook(self):
+        if self._s3_hook is None:
+            try:
+                self._s3_hook: S3Hook = S3Hook(aws_conn_id=self.aws_conn_id)  # Initialize S3 hook.
+            except AirflowException as e:
+                self._log.warning("Could not create S3Hook for connection %s: %s", self.aws_conn_id, e)
+        return self._s3_hook
+
+    def __repr__(self):
+        return (
+            f"<S3DagBundle("
+            f"name={self.name!r}, "
+            f"bucket_name={self.bucket_name!r}, "
+            f"prefix={self.prefix!r}, "
+            f"version={self.version!r}"
+            f")>"
+        )
+
+    def get_current_version(self) -> str | None:
+        """Return the current version of the DAG bundle. Currently not supported."""
+        return None
+
+    @property
+    def path(self) -> Path:
+        """Return the local path to the DAG files."""
+        return self.s3_dags_dir  # Path where DAGs are downloaded.
+
+    def refresh(self) -> None:
+        """Refresh the DAG bundle by re-downloading the DAGs from S3."""
+        if self.version:
+            raise AirflowException("Refreshing a specific version is not supported")
+
+        with self.lock():
+            self._log.debug(
+                "Downloading DAGs from s3://%s/%s to %s", self.bucket_name, self.prefix, self.s3_dags_dir
+            )
+            self.s3_hook.sync_to_local_dir(
+                bucket_name=self.bucket_name,
+                s3_prefix=self.prefix,
+                local_dir=self.s3_dags_dir,
+                delete_stale=True,
+            )
+
+    def view_url(self, version: str | None = None) -> str | None:
+        """Return a URL for viewing the DAGs in S3. Currently, versioning is not supported."""
+        if self.version:
+            raise AirflowException("S3 url with version is not supported")
+
+        # https://<bucket-name>.s3.<region>.amazonaws.com/<object-key>
+        url = f"https://{self.bucket_name}.s3"
+        if self.s3_hook.region_name:
+            url += f".{self.s3_hook.region_name}"
+        url += ".amazonaws.com"
+        if self.prefix:
+            url += f"/{self.prefix}"
+
+        return url
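
A rough usage sketch of the new bundle (the bundle name, bucket, and prefix are placeholders; the local base_dir comes from Airflow's configured bundle storage path):

    from airflow.providers.amazon.aws.bundles.s3 import S3DagBundle

    bundle = S3DagBundle(name="s3_dags", bucket_name="my-dag-bucket", prefix="dags/")
    bundle.initialize()  # validates the bucket and prefix, then refresh() downloads the DAGs
    print(bundle.path)   # local directory the DAG processor parses
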
--- a/airflow/providers/amazon/aws/hooks/base_aws.py
+++ b/airflow/providers/amazon/aws/hooks/base_aws.py
@@ -60,13 +60,9 @@ from airflow.exceptions import (
 from airflow.providers.amazon.aws.utils.connection_wrapper import AwsConnectionWrapper
 from airflow.providers.amazon.aws.utils.identifiers import generate_uuid
 from airflow.providers.amazon.aws.utils.suppress import return_on_error
+from airflow.providers.amazon.version_compat import BaseHook
 from airflow.providers.common.compat.version_compat import AIRFLOW_V_3_0_PLUS
 from airflow.providers_manager import ProvidersManager
-
-try:
-    from airflow.sdk import BaseHook
-except ImportError:
-    from airflow.hooks.base import BaseHook  # type: ignore[attr-defined,no-redef]
 from airflow.utils.helpers import exactly_one
 from airflow.utils.log.logging_mixin import LoggingMixin
 
@@ -82,7 +78,7 @@ BaseAwsConnection = TypeVar("BaseAwsConnection", bound=Union[BaseClient, Service
 
 
 if AIRFLOW_V_3_0_PLUS:
-    from airflow.sdk.exceptions import AirflowRuntimeError
+    from airflow.sdk.exceptions import AirflowRuntimeError, ErrorType
 
 if TYPE_CHECKING:
     from aiobotocore.session import AioSession
@@ -623,19 +619,16 @@ class AwsGenericHook(BaseHook, Generic[BaseAwsConnection]):
         """Get the Airflow Connection object and wrap it in helper (cached)."""
         connection = None
         if self.aws_conn_id:
-            possible_exceptions: tuple[type[Exception], ...]
-
-            if AIRFLOW_V_3_0_PLUS:
-                possible_exceptions = (AirflowNotFoundException, AirflowRuntimeError)
-            else:
-                possible_exceptions = (AirflowNotFoundException,)
-
             try:
                 connection = self.get_connection(self.aws_conn_id)
-            except possible_exceptions as e:
-                if isinstance(
-                    e, AirflowNotFoundException
-                ) or f"Connection with ID {self.aws_conn_id} not found" in str(e):
+            except Exception as e:
+                not_found_exc_via_core = isinstance(e, AirflowNotFoundException)
+                not_found_exc_via_task_sdk = (
+                    AIRFLOW_V_3_0_PLUS
+                    and isinstance(e, AirflowRuntimeError)
+                    and e.error.error == ErrorType.CONNECTION_NOT_FOUND
+                )
+                if not_found_exc_via_core or not_found_exc_via_task_sdk:
                     self.log.warning(
                         "Unable to find AWS Connection ID '%s', switching to empty.", self.aws_conn_id
                     )
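
The observable behavior appears unchanged: a connection id that cannot be resolved degrades to botocore's default credential chain instead of raising. A hypothetical illustration (the connection id is a placeholder):

    hook = AwsBaseHook(aws_conn_id="missing-conn-id")
    # logs: "Unable to find AWS Connection ID 'missing-conn-id', switching to empty."
    session = hook.get_session()  # credentials resolved from the environment instead
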
--- a/airflow/providers/amazon/aws/hooks/s3.py
+++ b/airflow/providers/amazon/aws/hooks/s3.py
@@ -635,6 +635,10 @@ class S3Hook(AwsBaseHook):
         delimiter: str | None = "/",
     ) -> list[Any]:
         """Get a list of files in the bucket."""
+        # Validate that bucket_keys is in fact a list, otherwise, the characters will be split
+        if isinstance(bucket_keys, str):
+            bucket_keys = [bucket_keys]
+
         keys: list[Any] = []
        for key in bucket_keys:
            prefix = key
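
The guard matters because iterating a bare string yields its characters:

    >>> [key for key in "logs/2024"]
    ['l', 'o', 'g', 's', '/', '2', '0', '2', '4']
    >>> [key for key in ["logs/2024"]]
    ['logs/2024']
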
@@ -652,7 +656,9 @@ class S3Hook(AwsBaseHook):
         response = paginator.paginate(**params)
         async for page in response:
             if "Contents" in page:
-                keys.extend(k for k in page["Contents"] if isinstance(k.get("Size"), (int, float)))
+                keys.extend(
+                    k.get("Key") for k in page["Contents"] if isinstance(k.get("Size"), (int, float))
+                )
         return keys
 
     async def _list_keys_async(
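
For context, each entry of a list_objects_v2 page's "Contents" is a dict such as {"Key": "dags/a.py", "Size": 123, "LastModified": ...}, so the old generator collected whole object dicts where key strings were expected; extracting k.get("Key") restores the list-of-keys return value.
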
@@ -1683,3 +1689,80 @@ class S3Hook(AwsBaseHook):
         """
         s3_client = self.get_conn()
         s3_client.delete_bucket_tagging(Bucket=bucket_name)
+
+    def _sync_to_local_dir_delete_stale_local_files(self, current_s3_objects: list[Path], local_dir: Path):
+        current_s3_keys = {key for key in current_s3_objects}
+
+        for item in local_dir.iterdir():
+            item: Path  # type: ignore[no-redef]
+            absolute_item_path = item.resolve()
+
+            if absolute_item_path not in current_s3_keys:
+                try:
+                    if item.is_file():
+                        item.unlink(missing_ok=True)
+                        self.log.debug("Deleted stale local file: %s", item)
+                    elif item.is_dir():
+                        # delete only when the folder is empty
+                        if not os.listdir(item):
+                            item.rmdir()
+                            self.log.debug("Deleted stale empty directory: %s", item)
+                    else:
+                        self.log.debug("Skipping stale item of unknown type: %s", item)
+                except OSError as e:
+                    self.log.error("Error deleting stale item %s: %s", item, e)
+                    raise e
+
+    def _sync_to_local_dir_if_changed(self, s3_bucket, s3_object, local_target_path: Path):
+        should_download = False
+        download_msg = ""
+        if not local_target_path.exists():
+            should_download = True
+            download_msg = f"Local file {local_target_path} does not exist."
+        else:
+            local_stats = local_target_path.stat()
+
+            if s3_object.size != local_stats.st_size:
+                should_download = True
+                download_msg = (
+                    f"S3 object size ({s3_object.size}) and local file size ({local_stats.st_size}) differ."
+                )
+
+            s3_last_modified = s3_object.last_modified
+            if local_stats.st_mtime < s3_last_modified.microsecond:
+                should_download = True
+                download_msg = f"S3 object last modified ({s3_last_modified.microsecond}) and local file last modified ({local_stats.st_mtime}) differ."
+
+        if should_download:
+            s3_bucket.download_file(s3_object.key, local_target_path)
+            self.log.debug(
+                "%s Downloaded %s to %s", download_msg, s3_object.key, local_target_path.as_posix()
+            )
+        else:
+            self.log.debug(
+                "Local file %s is up-to-date with S3 object %s. Skipping download.",
+                local_target_path.as_posix(),
+                s3_object.key,
+            )
+
+    def sync_to_local_dir(self, bucket_name: str, local_dir: Path, s3_prefix="", delete_stale: bool = True):
+        """Download S3 files from the S3 bucket to the local directory."""
+        self.log.debug("Downloading data from s3://%s/%s to %s", bucket_name, s3_prefix, local_dir)
+
+        local_s3_objects = []
+        s3_bucket = self.get_bucket(bucket_name)
+        for obj in s3_bucket.objects.filter(Prefix=s3_prefix):
+            obj_path = Path(obj.key)
+            local_target_path = local_dir.joinpath(obj_path.relative_to(s3_prefix))
+            if not local_target_path.parent.exists():
+                local_target_path.parent.mkdir(parents=True, exist_ok=True)
+                self.log.debug("Created local directory: %s", local_target_path.parent)
+            self._sync_to_local_dir_if_changed(
+                s3_bucket=s3_bucket, s3_object=obj, local_target_path=local_target_path
+            )
+            local_s3_objects.append(local_target_path)
+
+        if delete_stale:
+            self._sync_to_local_dir_delete_stale_local_files(
+                current_s3_objects=local_s3_objects, local_dir=local_dir
+            )
--- a/airflow/providers/amazon/aws/hooks/sagemaker_unified_studio.py
+++ b/airflow/providers/amazon/aws/hooks/sagemaker_unified_studio.py
@@ -26,11 +26,7 @@ from sagemaker_studio.sagemaker_studio_api import SageMakerStudioAPI
 
 from airflow.exceptions import AirflowException
 from airflow.providers.amazon.aws.utils.sagemaker_unified_studio import is_local_runner
-
-try:
-    from airflow.sdk import BaseHook
-except ImportError:
-    from airflow.hooks.base import BaseHook  # type: ignore[attr-defined,no-redef]
+from airflow.providers.amazon.version_compat import BaseHook
 
 
 class SageMakerNotebookHook(BaseHook):
--- a/airflow/providers/amazon/aws/operators/glue.py
+++ b/airflow/providers/amazon/aws/operators/glue.py
@@ -249,6 +249,7 @@ class GlueJobOperator(AwsBaseOperator[GlueJobHook]):
                 aws_conn_id=self.aws_conn_id,
                 waiter_delay=self.waiter_delay,
                 waiter_max_attempts=self.waiter_max_attempts,
+                region_name=self.region_name,
             ),
             method_name="execute_complete",
         )
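
With this fix, a deferrable Glue job resumes its waiter in the operator's own region; a hypothetical task definition (job name and region are placeholders):

    run_job = GlueJobOperator(
        task_id="run_glue_job",
        job_name="my-glue-job",
        region_name="eu-west-1",  # now also forwarded to the completion trigger
        deferrable=True,
    )
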
--- a/airflow/providers/amazon/aws/sensors/glue.py
+++ b/airflow/providers/amazon/aws/sensors/glue.py
@@ -105,6 +105,7 @@ class GlueJobSensor(AwsBaseSensor[GlueJobHook]):
                 aws_conn_id=self.aws_conn_id,
                 waiter_delay=int(self.poke_interval),
                 waiter_max_attempts=self.max_retries,
+                region_name=self.region_name,
             ),
             method_name="execute_complete",
         )
--- a/airflow/providers/amazon/aws/transfers/google_api_to_s3.py
+++ b/airflow/providers/amazon/aws/transfers/google_api_to_s3.py
@@ -24,7 +24,7 @@ import sys
 from collections.abc import Sequence
 from typing import TYPE_CHECKING
 
-from airflow.models.xcom import MAX_XCOM_SIZE, XCOM_RETURN_KEY
+from airflow.models.xcom import XCOM_RETURN_KEY
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
 from airflow.providers.amazon.version_compat import BaseOperator
 from airflow.providers.google.common.hooks.discovery_api import GoogleDiscoveryApiHook
@@ -36,6 +36,10 @@ if TYPE_CHECKING:
     from airflow.models import TaskInstance as RuntimeTaskInstanceProtocol  # type: ignore[assignment]
     from airflow.utils.context import Context
 
+# MAX XCOM Size is 48KB
+# https://github.com/apache/airflow/pull/1618#discussion_r68249677
+MAX_XCOM_SIZE = 49344
+
 
 class GoogleApiToS3Operator(BaseOperator):
     """
--- a/airflow/providers/amazon/aws/transfers/s3_to_sql.py
+++ b/airflow/providers/amazon/aws/transfers/s3_to_sql.py
@@ -23,12 +23,7 @@ from typing import TYPE_CHECKING
 
 from airflow.exceptions import AirflowException
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
-from airflow.providers.amazon.version_compat import BaseOperator
-
-try:
-    from airflow.sdk import BaseHook
-except ImportError:
-    from airflow.hooks.base import BaseHook  # type: ignore[attr-defined,no-redef]
+from airflow.providers.amazon.version_compat import BaseHook, BaseOperator
 
 if TYPE_CHECKING:
     from airflow.utils.context import Context
--- a/airflow/providers/amazon/aws/transfers/sql_to_s3.py
+++ b/airflow/providers/amazon/aws/transfers/sql_to_s3.py
@@ -26,12 +26,7 @@ from typing import TYPE_CHECKING, Any, Literal, cast
 
 from airflow.exceptions import AirflowException
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
-from airflow.providers.amazon.version_compat import BaseOperator
-
-try:
-    from airflow.sdk import BaseHook
-except ImportError:
-    from airflow.hooks.base import BaseHook  # type: ignore[attr-defined,no-redef]
+from airflow.providers.amazon.version_compat import BaseHook, BaseOperator
 
 if TYPE_CHECKING:
     import pandas as pd
--- a/airflow/providers/amazon/aws/utils/connection_wrapper.py
+++ b/airflow/providers/amazon/aws/utils/connection_wrapper.py
@@ -248,6 +248,12 @@ class AwsConnectionWrapper(LoggingMixin):
             config_kwargs["signature_version"] = UNSIGNED
         self.botocore_config = Config(**config_kwargs)
 
+        if "endpoint_url" not in extra:
+            self.log.debug(
+                "Missing endpoint_url in extra config of AWS Connection with id %s. Using default AWS service endpoint",
+                conn.conn_id,
+            )
+
         self.endpoint_url = extra.get("endpoint_url")
 
         # Retrieve Assume Role Configuration
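
The message appears only when the connection extras omit an endpoint; setting one explicitly suppresses it. A hypothetical LocalStack-style connection (id and URL are placeholders):

    from airflow.models.connection import Connection

    conn = Connection(
        conn_id="aws_localstack",
        conn_type="aws",
        extra='{"endpoint_url": "http://localhost:4566"}',
    )
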
--- a/airflow/providers/amazon/version_compat.py
+++ b/airflow/providers/amazon/version_compat.py
@@ -33,6 +33,12 @@ def get_base_airflow_version_tuple() -> tuple[int, int, int]:
 
 
 AIRFLOW_V_3_0_PLUS = get_base_airflow_version_tuple() >= (3, 0, 0)
+AIRFLOW_V_3_1_PLUS: bool = get_base_airflow_version_tuple() >= (3, 1, 0)
+
+if AIRFLOW_V_3_1_PLUS:
+    from airflow.sdk import BaseHook
+else:
+    from airflow.hooks.base import BaseHook  # type: ignore[attr-defined,no-redef]
 
 if AIRFLOW_V_3_0_PLUS:
     from airflow.sdk import BaseOperator, BaseOperatorLink, BaseSensorOperator
@@ -42,4 +48,12 @@ else:
     from airflow.models.baseoperatorlink import BaseOperatorLink  # type: ignore[no-redef]
     from airflow.sensors.base import BaseSensorOperator  # type: ignore[no-redef]
 
-__all__ = ["AIRFLOW_V_3_0_PLUS", "BaseOperator", "BaseOperatorLink", "BaseSensorOperator", "XCom"]
+__all__ = [
+    "AIRFLOW_V_3_0_PLUS",
+    "AIRFLOW_V_3_1_PLUS",
+    "BaseHook",
+    "BaseOperator",
+    "BaseOperatorLink",
+    "BaseSensorOperator",
+    "XCom",
+]
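
This shim is what lets the earlier hunks collapse their per-module try/except imports into a single line; downstream code now needs only (MyCustomAwsHook is a hypothetical subclass):

    from airflow.providers.amazon.version_compat import BaseHook, BaseOperator

    class MyCustomAwsHook(BaseHook):  # airflow.sdk.BaseHook on Airflow >= 3.1, legacy BaseHook otherwise
        ...
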
--- a/apache_airflow_providers_amazon-9.9.1rc1.dist-info/METADATA
+++ b/apache_airflow_providers_amazon-9.10.0rc1.dist-info/METADATA
@@ -1,11 +1,11 @@
 Metadata-Version: 2.4
 Name: apache-airflow-providers-amazon
-Version: 9.9.1rc1
+Version: 9.10.0rc1
 Summary: Provider package apache-airflow-providers-amazon for Apache Airflow
 Keywords: airflow-provider,amazon,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
 Maintainer-email: Apache Software Foundation <dev@airflow.apache.org>
-Requires-Python: ~=3.10
+Requires-Python: >=3.10
 Description-Content-Type: text/x-rst
 Classifier: Development Status :: 5 - Production/Stable
 Classifier: Environment :: Console
@@ -54,8 +54,8 @@ Requires-Dist: apache-airflow-providers-salesforce ; extra == "salesforce"
 Requires-Dist: apache-airflow-providers-ssh ; extra == "ssh"
 Requires-Dist: apache-airflow-providers-standard ; extra == "standard"
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.staged.apache.org/docs/apache-airflow-providers-amazon/9.9.1/changelog.html
-Project-URL: Documentation, https://airflow.staged.apache.org/docs/apache-airflow-providers-amazon/9.9.1
+Project-URL: Changelog, https://airflow.staged.apache.org/docs/apache-airflow-providers-amazon/9.10.0/changelog.html
+Project-URL: Documentation, https://airflow.staged.apache.org/docs/apache-airflow-providers-amazon/9.10.0
 Project-URL: Mastodon, https://fosstodon.org/@airflow
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
@@ -103,7 +103,7 @@ Provides-Extra: standard
 
 Package ``apache-airflow-providers-amazon``
 
-Release: ``9.9.1``
+Release: ``9.10.0``
 
 
 Amazon integration (including `Amazon Web Services (AWS) <https://aws.amazon.com/>`__).
@@ -116,7 +116,7 @@ This is a provider package for ``amazon`` provider. All classes for this provide
 are in ``airflow.providers.amazon`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-amazon/9.9.1/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-amazon/9.10.0/>`_.
 
 Installation
 ------------
@@ -184,5 +184,5 @@ Dependent package
 ======================================================================================================================== ====================
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-amazon/9.9.1/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-amazon/9.10.0/changelog.html>`_.
 
--- a/apache_airflow_providers_amazon-9.9.1rc1.dist-info/RECORD
+++ b/apache_airflow_providers_amazon-9.10.0rc1.dist-info/RECORD
@@ -1,18 +1,18 @@
 airflow/providers/amazon/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
-airflow/providers/amazon/__init__.py,sha256=emD5t6Jn8eKGfk8-p1sTibSWaBHqIMut49LxbHGheDA,1495
+airflow/providers/amazon/__init__.py,sha256=iSLz0UFeGo5Z6gcCXR4EgRzkhPiEW_Ztl-UsBA-puG0,1496
 airflow/providers/amazon/get_provider_info.py,sha256=TV5uNdxIE2kTx-CJJAkfApueVYUXBscm8jdwA8MQYV8,72655
-airflow/providers/amazon/version_compat.py,sha256=y2xh9tpCOBJS_c3NucAdax6s7FDPVYwVW3rWjuebxGo,2047
+airflow/providers/amazon/version_compat.py,sha256=7Czyw4hDHy79lHEc0n01cv1Q8sAMF9xQSxDWnkmgh4M,2335
 airflow/providers/amazon/aws/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/amazon/aws/exceptions.py,sha256=uRGNMgXvgdzfphpOTiyj74lQhjzb70J-X8n6fsx5Jog,1864
 airflow/providers/amazon/aws/assets/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/amazon/aws/assets/s3.py,sha256=wNaJiOM90-SCauD4EQneZVXMO54yDRjLPfI8D5o0-fw,1861
 airflow/providers/amazon/aws/auth_manager/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/amazon/aws/auth_manager/aws_auth_manager.py,sha256=WG5DkZ70peGfCAVIjlNKanqHtC2y6E1Z2s5-rfTEGpA,15168
+airflow/providers/amazon/aws/auth_manager/aws_auth_manager.py,sha256=8Gc7luaHkKWfgoeJu5CXsNF3UdnpE_JVRvKwh2LKigs,13189
 airflow/providers/amazon/aws/auth_manager/constants.py,sha256=Jdluo42InhyNGkYHB_dRtoFMpKanJLJdH0hyR9-5AZg,1050
 airflow/providers/amazon/aws/auth_manager/user.py,sha256=zds3U6gHmwAy1MuxFFPtGTYikMj-RjYVki9-TSdfnbg,2043
 airflow/providers/amazon/aws/auth_manager/avp/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/amazon/aws/auth_manager/avp/entities.py,sha256=fmoJeukg7b4XdsLjLD1tCtbU-SLiiGgPEgPYnss4KJ8,2036
-airflow/providers/amazon/aws/auth_manager/avp/facade.py,sha256=bfz9w9qvoeh_JI8PmhmTsGBW_NBq2rvSWOpiz8dMVS4,11158
+airflow/providers/amazon/aws/auth_manager/avp/facade.py,sha256=KpVWCnibHd3Lg-FmpT0skjNbSapMpfI5n1sB4No0Sbg,11433
 airflow/providers/amazon/aws/auth_manager/avp/schema.json,sha256=pflVyo9hfa-sYDAFv8HcEI7WOwXXmymwQLqb5ji6qRc,7203
 airflow/providers/amazon/aws/auth_manager/cli/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/amazon/aws/auth_manager/cli/avp_commands.py,sha256=dnKfF-PFvkaZMicHjVPxrLkQr1y003898i2AyjT1XnY,5376
@@ -21,6 +21,8 @@ airflow/providers/amazon/aws/auth_manager/datamodels/__init__.py,sha256=9hdXHABr
 airflow/providers/amazon/aws/auth_manager/datamodels/login.py,sha256=s2mwJ3Hg3PArNgWT9GJxfQdeIGxDJ3remaLNEQDYw7Y,978
 airflow/providers/amazon/aws/auth_manager/routes/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/amazon/aws/auth_manager/routes/login.py,sha256=gfEhqrrTIIzktTZe_kwOryRloURmZdzSUJDoj23_cLM,6061
+airflow/providers/amazon/aws/bundles/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
+airflow/providers/amazon/aws/bundles/s3.py,sha256=I9vjYnn5kQyOlwMcANTxHeH79zGzocz_9UWpUCBgCaI,5623
 airflow/providers/amazon/aws/executors/Dockerfile,sha256=VZ-YOR59KSMoztJV_g7v5hUwetKR0Ii4wNNaKqDIfyQ,4275
 airflow/providers/amazon/aws/executors/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/amazon/aws/executors/aws_lambda/__init__.py,sha256=1PebDNZ6KXaXd3Zojp8lhULD6Elk-Pi_NiK3qi4G45s,950
@@ -48,7 +50,7 @@ airflow/providers/amazon/aws/hooks/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2
 airflow/providers/amazon/aws/hooks/appflow.py,sha256=-le6RsIMWIqTav7KGknsph9Td42znSm_eIYztxc_RsE,5263
 airflow/providers/amazon/aws/hooks/athena.py,sha256=ubpgNafkIRzFh1h7Bexhj4L83SLvyZf-yDcmGmAIVt8,13463
 airflow/providers/amazon/aws/hooks/athena_sql.py,sha256=0LzWXpmUYVphkuhnH5dUgnopmuAAvjkC0nAFq-C0Xk8,7201
-airflow/providers/amazon/aws/hooks/base_aws.py,sha256=EgB2o0ObLq_gR7D_RkMd51Bcgd7xZxYFQ68_kHCO5JE,46976
+airflow/providers/amazon/aws/hooks/base_aws.py,sha256=vwHLkdL-o99CmNNNh2VRafQ7zw8wTxsTlgPTS2a-1ig,46851
 airflow/providers/amazon/aws/hooks/batch_client.py,sha256=0FGUSCtcgvuO6A8oKNykZ6T8ZZGQav4OP-F2PcrIw7o,22016
 airflow/providers/amazon/aws/hooks/batch_waiters.json,sha256=eoN5YDgeTNZ2Xz17TrbKBPhd7z9-6KD3RhaDKXXOvqU,2511
 airflow/providers/amazon/aws/hooks/batch_waiters.py,sha256=V-ZvO6cAAVAJqOHx8aU5QEzaCYnPnCC8jBWLESi9-qs,10981
@@ -83,9 +85,9 @@ airflow/providers/amazon/aws/hooks/rds.py,sha256=bAcaGeP7uNN0lp_FZtIPlt2JCZxcTEr
 airflow/providers/amazon/aws/hooks/redshift_cluster.py,sha256=hyX_ldjn_gNOehcPV-3M3wzO1HdIdujG8JjrUZmCnNg,7962
 airflow/providers/amazon/aws/hooks/redshift_data.py,sha256=JxyXEyFeJHUtMxjjtMlCMJSW9P-cnixISd3R4Ob7fy8,11841
 airflow/providers/amazon/aws/hooks/redshift_sql.py,sha256=MzFW0erCvc3pJ-_1PPrp-H3gYcwRpMvY31xx09GvQts,10980
-airflow/providers/amazon/aws/hooks/s3.py,sha256=s5cdMk50aCwLQ2V3h-JnySuyG5iCRqIZBNChbE2391Y,63882
+airflow/providers/amazon/aws/hooks/s3.py,sha256=EB5pN_091fOdJYlljqT5ETKwjknlOtP-cC6Jc6F8ycI,67760
 airflow/providers/amazon/aws/hooks/sagemaker.py,sha256=mq-zL8hQHa6SoXArbHzZ31IFylF-P2vhLW310Ggb9Ts,60418
-airflow/providers/amazon/aws/hooks/sagemaker_unified_studio.py,sha256=AtBiVF9dVEVAyl1MR5VBM-nAB49iBs8DtrkQ23IC-8U,8074
+airflow/providers/amazon/aws/hooks/sagemaker_unified_studio.py,sha256=d3A50iQGOtqvQP9FywkZONr7JU0vMMP2MoqfjoZ_554,7989
 airflow/providers/amazon/aws/hooks/secrets_manager.py,sha256=6srh3jUeSGoqyrSj1M6aSOaA9xT5kna0VGUC0kzH-q0,2690
 airflow/providers/amazon/aws/hooks/ses.py,sha256=DuCJwFhtg3D3mu0RSjRrebyUpwBAhrWSr-kdu8VR9qU,4174
 airflow/providers/amazon/aws/hooks/sns.py,sha256=SwhLeurqaV8TLhgCGsZgtf98D0_kLVLlhXer_96Anv4,4101
@@ -130,7 +132,7 @@ airflow/providers/amazon/aws/operators/eks.py,sha256=KXcsb8ZvBsnBiI0Z2Hn_AI4lpCY
 airflow/providers/amazon/aws/operators/emr.py,sha256=eMAZjklENP7bNDwWKTKgrrkHLObPSvMDfpukOFCrNTQ,75908
 airflow/providers/amazon/aws/operators/eventbridge.py,sha256=NacTdvRzZZFizSzC3rb0Z7g8dHQWkKQEXGYzFKOp3fc,10421
 airflow/providers/amazon/aws/operators/glacier.py,sha256=6TFC07B0EOmtRxLs7Bok4jwV84po2yVDa-DnlbnAOVg,3681
-airflow/providers/amazon/aws/operators/glue.py,sha256=e0_TyndqM5HumpGaYf_AxxUzjP9kqIiMYvkmssBofYQ,30484
+airflow/providers/amazon/aws/operators/glue.py,sha256=2LA7KZp7mhitk9SrcqKBUVnS_NlqSrOwRnCDeuBCuGE,30534
 airflow/providers/amazon/aws/operators/glue_crawler.py,sha256=lO9xbliRKkPhrVemGyEBr0kTcpkQU5ovQ3IItCr2Iyo,5310
 airflow/providers/amazon/aws/operators/glue_databrew.py,sha256=SZQmTpVK2_fSxyD_lgK0B0oJ6zDZFrAgIsc28mGOnN0,6169
 airflow/providers/amazon/aws/operators/kinesis_analytics.py,sha256=uO8_IEYYzDt6A_BEHJr-J88FpCaZL7nM3E0xOTZjSiU,15920
@@ -166,7 +168,7 @@ airflow/providers/amazon/aws/sensors/ecs.py,sha256=0O7N9x5SJNYHdcuQP1QnNJWdZ_-cX
 airflow/providers/amazon/aws/sensors/eks.py,sha256=90FiEWLzbAUt10H8JMTgkAZRUemSjlJrAaxmt7x8G-k,10795
 airflow/providers/amazon/aws/sensors/emr.py,sha256=F84xKy0SmYcbP5gLIRmfEp8ZbZeGX5wrMYzH7FwbQaY,25270
 airflow/providers/amazon/aws/sensors/glacier.py,sha256=QbQXgO6C1KiosC5hP1X4Jn5SM4o1xM4t-Vo6R3ELE9g,4050
-airflow/providers/amazon/aws/sensors/glue.py,sha256=lPq0nXhnK9NIq9VtOSznW8hUndj-e4ZF0jCyTu9MpHA,16516
+airflow/providers/amazon/aws/sensors/glue.py,sha256=_dXh0PwHhmIho1ffV03R9X2WUor0R7p-JcR5WXSSBpw,16566
 airflow/providers/amazon/aws/sensors/glue_catalog_partition.py,sha256=YXvkQRnu5TnM2E8Tfi5B_0STqRUvRE9pgdoluz8oNCw,5831
 airflow/providers/amazon/aws/sensors/glue_crawler.py,sha256=ps-fG09QV_QNs8dxlmFKXNVKhvPIuvJoMJCeYi-6QBc,3375
 airflow/providers/amazon/aws/sensors/kinesis_analytics.py,sha256=TDsQKi5nx10CgMoTSVbYRo4m-PiKFDhyhnO7dQZSnuI,9933
@@ -189,7 +191,7 @@ airflow/providers/amazon/aws/transfers/exasol_to_s3.py,sha256=k86XX7pKCLCE07i1HZ
 airflow/providers/amazon/aws/transfers/ftp_to_s3.py,sha256=nVhQCGNcAc35DlcLl99Qs1pU-DZiU8m2ZgKzFlrN4JI,6418
 airflow/providers/amazon/aws/transfers/gcs_to_s3.py,sha256=XhHR4Y8bnB8kBY46EEQWM-MUJOYNk_4Q3Dq2_zxU5OA,9246
 airflow/providers/amazon/aws/transfers/glacier_to_gcs.py,sha256=Qt6KE1qJbFRQYrA6WQ3MVcfnuACq2XwTGl6PxMaaMEI,4762
-airflow/providers/amazon/aws/transfers/google_api_to_s3.py,sha256=--8NHs_B_QwXTySDYvUhOnBdVJnWKmVcL4TKegkeAJI,9152
+airflow/providers/amazon/aws/transfers/google_api_to_s3.py,sha256=9scBbrYP2Rrkn2XyYD7uMjI8hKq9jz7hyxM7G7TAiIg,9251
 airflow/providers/amazon/aws/transfers/hive_to_dynamodb.py,sha256=_jLPbK_6Z2YDSFgNy2qZC_SX0lEoe54tnLW9G4Glj98,4244
 airflow/providers/amazon/aws/transfers/http_to_s3.py,sha256=PTms3EA-7sZTRVvE2H_J2CnqLGQCavnzc783urzAgSE,7563
 airflow/providers/amazon/aws/transfers/imap_attachment_to_s3.py,sha256=9CEwqrEOlStYXI_AQdmmsxL-gnUFxJ8IfA8mp0a1UX4,4588
@@ -200,10 +202,10 @@ airflow/providers/amazon/aws/transfers/s3_to_dynamodb.py,sha256=hjSEVKiUUp1Pl7tA
 airflow/providers/amazon/aws/transfers/s3_to_ftp.py,sha256=V9a12bCtqXIpscOrO95dHw7umRRy_jJmB8zA25qVEM8,3018
 airflow/providers/amazon/aws/transfers/s3_to_redshift.py,sha256=ZwzHcM48bjmvdU5AUVvy0iI-VRnY4Y-acyPSQ3vGMWI,12019
 airflow/providers/amazon/aws/transfers/s3_to_sftp.py,sha256=sor0_phlRN-yltJiaz0lZ_ZPWIqqgv_GEwTHH1Ftq2M,3540
-airflow/providers/amazon/aws/transfers/s3_to_sql.py,sha256=_7C1Zj3OfXjgh07cEaMZ1XyeV1nL3Vq-uVsAQAYrfcY,5107
+airflow/providers/amazon/aws/transfers/s3_to_sql.py,sha256=RLuAwBHJOMcGaZcDgrivAhLRsOuZsjwTxJEOcLB_1MY,4971
 airflow/providers/amazon/aws/transfers/salesforce_to_s3.py,sha256=noALwo6dpjEHF33ZDDZY0z47HK0Gsv-BU3Zr2NE3zRA,5738
 airflow/providers/amazon/aws/transfers/sftp_to_s3.py,sha256=o5IDLFmeHzqBH6_Uh_fGTk9iymjQYsuGznnH-qZ1M-Y,4234
-airflow/providers/amazon/aws/transfers/sql_to_s3.py,sha256=_IMUnicQyZIk9YyfIBjxwmCjTOo9dre9tESX1C3Kd-g,11219
+airflow/providers/amazon/aws/transfers/sql_to_s3.py,sha256=EmRN8Hp6GVNbsQWzeUm4tP1RI-9C-pfDZy83ScspHr8,11083
 airflow/providers/amazon/aws/triggers/README.md,sha256=ax2F0w2CuQSDN4ghJADozrrv5W4OeCDPA8Vzp00BXOU,10919
 airflow/providers/amazon/aws/triggers/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
 airflow/providers/amazon/aws/triggers/athena.py,sha256=62ty40zejcm5Y0d1rTQZuYzSjq3hUkmAs0d_zxM_Kjw,2596
@@ -233,7 +235,7 @@ airflow/providers/amazon/aws/triggers/sagemaker_unified_studio.py,sha256=1WGrng4
 airflow/providers/amazon/aws/triggers/sqs.py,sha256=NIwBc2ZisY164xfkSYyox3dcuYIEPrn9MhsEFS0eXDA,8590
 airflow/providers/amazon/aws/triggers/step_function.py,sha256=M1HGdrnxL_T9KSCBNy2t531xMNJaFc-Y792T9cSmLGM,2685
 airflow/providers/amazon/aws/utils/__init__.py,sha256=-Q5XK8ZV7EK6unj_4hlciqztACPuftMjNKMuBA21q84,3178
-airflow/providers/amazon/aws/utils/connection_wrapper.py,sha256=81myzLKQM4YylT01ZKMm5LtVaBP-oKp4yBCHmaGeoe8,16201
+airflow/providers/amazon/aws/utils/connection_wrapper.py,sha256=KJsYG3qnESxxh2PFWvf83gHKzqEEAE9jBANTMoyRn3A,16435
 airflow/providers/amazon/aws/utils/eks_get_token.py,sha256=q4utFF2c02T2Lm6KIZLABOiXJeglVZKCOxq6gn14dsk,2342
 airflow/providers/amazon/aws/utils/emailer.py,sha256=y-bzg1BZzOQ8J9-ed-74LY3VMv6LrLfBDtw5S4t3Tv4,1855
 airflow/providers/amazon/aws/utils/identifiers.py,sha256=KqkEJ96mz4BYt0wuKX-_DaFk-8Lv9CuDVo-VrlAK29U,1944
@@ -275,7 +277,7 @@ airflow/providers/amazon/aws/waiters/rds.json,sha256=HNmNQm5J-VaFHzjWb1pE5P7-Ix-
 airflow/providers/amazon/aws/waiters/redshift.json,sha256=jOBotCgbkko1b_CHcGEbhhRvusgt0YSzVuFiZrqVP30,1742
 airflow/providers/amazon/aws/waiters/sagemaker.json,sha256=JPHuQtUFZ1B7EMLfVmCRevNZ9jgpB71LM0dva8ZEO9A,5254
 airflow/providers/amazon/aws/waiters/stepfunctions.json,sha256=GsOH-emGerKGBAUFmI5lpMfNGH4c0ol_PSiea25DCEY,1033
-apache_airflow_providers_amazon-9.9.1rc1.dist-info/entry_points.txt,sha256=vlc0ZzhBkMrav1maTRofgksnAw4SwoQLFX9cmnTgktk,102
-apache_airflow_providers_amazon-9.9.1rc1.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
-apache_airflow_providers_amazon-9.9.1rc1.dist-info/METADATA,sha256=Mw5rW0jhkfxWBq9RcwzEvqQyLMjqEfJKiTjaxC7HMdE,9988
-apache_airflow_providers_amazon-9.9.1rc1.dist-info/RECORD,,
+apache_airflow_providers_amazon-9.10.0rc1.dist-info/entry_points.txt,sha256=vlc0ZzhBkMrav1maTRofgksnAw4SwoQLFX9cmnTgktk,102
+apache_airflow_providers_amazon-9.10.0rc1.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
+apache_airflow_providers_amazon-9.10.0rc1.dist-info/METADATA,sha256=qYPMrN1BxtFb9h-Kx9szM_omTtbzrlLIxKLn1IF7vgU,9994
+apache_airflow_providers_amazon-9.10.0rc1.dist-info/RECORD,,