apache-airflow-providers-amazon 9.0.0rc1__py3-none-any.whl → 9.1.0__py3-none-any.whl

This diff compares publicly available package versions as released to their public registries. It is provided for informational purposes only and reflects the packages exactly as they appear in those registries.
Files changed (32)
  1. airflow/providers/amazon/__init__.py +1 -1
  2. airflow/providers/amazon/aws/assets/s3.py +7 -7
  3. airflow/providers/amazon/aws/auth_manager/aws_auth_manager.py +12 -1
  4. airflow/providers/amazon/aws/hooks/athena.py +25 -15
  5. airflow/providers/amazon/aws/hooks/eks.py +2 -2
  6. airflow/providers/amazon/aws/hooks/glue.py +5 -1
  7. airflow/providers/amazon/aws/hooks/redshift_sql.py +1 -1
  8. airflow/providers/amazon/aws/hooks/s3.py +79 -31
  9. airflow/providers/amazon/aws/hooks/sagemaker.py +2 -0
  10. airflow/providers/amazon/aws/operators/appflow.py +1 -1
  11. airflow/providers/amazon/aws/operators/athena.py +3 -1
  12. airflow/providers/amazon/aws/operators/comprehend.py +3 -3
  13. airflow/providers/amazon/aws/operators/dms.py +3 -3
  14. airflow/providers/amazon/aws/operators/ecs.py +11 -3
  15. airflow/providers/amazon/aws/operators/eks.py +4 -2
  16. airflow/providers/amazon/aws/operators/glue.py +10 -1
  17. airflow/providers/amazon/aws/operators/kinesis_analytics.py +3 -3
  18. airflow/providers/amazon/aws/operators/redshift_data.py +43 -20
  19. airflow/providers/amazon/aws/operators/sagemaker.py +2 -2
  20. airflow/providers/amazon/aws/sensors/sagemaker.py +32 -0
  21. airflow/providers/amazon/aws/transfers/redshift_to_s3.py +106 -7
  22. airflow/providers/amazon/aws/transfers/s3_to_dynamodb.py +1 -1
  23. airflow/providers/amazon/aws/transfers/s3_to_redshift.py +1 -1
  24. airflow/providers/amazon/aws/triggers/athena.py +1 -2
  25. airflow/providers/amazon/aws/triggers/ecs.py +6 -6
  26. airflow/providers/amazon/aws/triggers/glue.py +1 -1
  27. airflow/providers/amazon/get_provider_info.py +5 -5
  28. {apache_airflow_providers_amazon-9.0.0rc1.dist-info → apache_airflow_providers_amazon-9.1.0.dist-info}/METADATA +22 -24
  29. {apache_airflow_providers_amazon-9.0.0rc1.dist-info → apache_airflow_providers_amazon-9.1.0.dist-info}/RECORD +31 -32
  30. {apache_airflow_providers_amazon-9.0.0rc1.dist-info → apache_airflow_providers_amazon-9.1.0.dist-info}/WHEEL +1 -1
  31. airflow/providers/amazon/aws/utils/asset_compat_lineage_collector.py +0 -106
  32. {apache_airflow_providers_amazon-9.0.0rc1.dist-info → apache_airflow_providers_amazon-9.1.0.dist-info}/entry_points.txt +0 -0
@@ -28,7 +28,10 @@ from airflow.providers.amazon.aws.utils import validate_execute_complete_event
 from airflow.providers.amazon.aws.utils.mixins import aws_template_fields

 if TYPE_CHECKING:
-    from mypy_boto3_redshift_data.type_defs import GetStatementResultResponseTypeDef
+    from mypy_boto3_redshift_data.type_defs import (
+        DescribeStatementResponseTypeDef,
+        GetStatementResultResponseTypeDef,
+    )

     from airflow.utils.context import Context

@@ -37,7 +40,7 @@ class RedshiftDataOperator(AwsBaseOperator[RedshiftDataHook]):
     """
     Executes SQL Statements against an Amazon Redshift cluster using Redshift Data.

     .. seealso::
         For more information on how to use this operator, take a look at the guide:
         :ref:`howto/operator:RedshiftDataOperator`

@@ -84,7 +87,6 @@ class RedshiftDataOperator(AwsBaseOperator[RedshiftDataHook]):
     )
     template_ext = (".sql",)
     template_fields_renderers = {"sql": "sql"}
-    statement_id: str | None

     def __init__(
         self,
@@ -124,12 +126,11 @@ class RedshiftDataOperator(AwsBaseOperator[RedshiftDataHook]):
             poll_interval,
         )
         self.return_sql_result = return_sql_result
-        self.statement_id: str | None = None
         self.deferrable = deferrable
         self.session_id = session_id
         self.session_keep_alive_seconds = session_keep_alive_seconds

-    def execute(self, context: Context) -> GetStatementResultResponseTypeDef | str:
+    def execute(self, context: Context) -> list[GetStatementResultResponseTypeDef] | list[str]:
         """Execute a statement against Amazon Redshift."""
         self.log.info("Executing statement: %s", self.sql)

@@ -154,13 +155,14 @@ class RedshiftDataOperator(AwsBaseOperator[RedshiftDataHook]):
             session_keep_alive_seconds=self.session_keep_alive_seconds,
         )

-        self.statement_id = query_execution_output.statement_id
+        # Pull the statement ID, session ID
+        self.statement_id: str = query_execution_output.statement_id

         if query_execution_output.session_id:
             self.xcom_push(context, key="session_id", value=query_execution_output.session_id)

         if self.deferrable and self.wait_for_completion:
-            is_finished = self.hook.check_query_is_finished(self.statement_id)
+            is_finished: bool = self.hook.check_query_is_finished(self.statement_id)
             if not is_finished:
                 self.defer(
                     timeout=self.execution_timeout,
@@ -176,16 +178,13 @@ class RedshiftDataOperator(AwsBaseOperator[RedshiftDataHook]):
                     method_name="execute_complete",
                 )

-        if self.return_sql_result:
-            result = self.hook.conn.get_statement_result(Id=self.statement_id)
-            self.log.debug("Statement result: %s", result)
-            return result
-        else:
-            return self.statement_id
+        # Use the get_sql_results method to return the results of the SQL query, or the statement_ids,
+        # depending on the value of self.return_sql_result
+        return self.get_sql_results(statement_id=self.statement_id, return_sql_result=self.return_sql_result)

     def execute_complete(
         self, context: Context, event: dict[str, Any] | None = None
-    ) -> GetStatementResultResponseTypeDef | str:
+    ) -> list[GetStatementResultResponseTypeDef] | list[str]:
         event = validate_execute_complete_event(event)

         if event["status"] == "error":
@@ -197,16 +196,40 @@ class RedshiftDataOperator(AwsBaseOperator[RedshiftDataHook]):
             raise AirflowException("statement_id should not be empty.")

         self.log.info("%s completed successfully.", self.task_id)
-        if self.return_sql_result:
-            result = self.hook.conn.get_statement_result(Id=statement_id)
-            self.log.debug("Statement result: %s", result)
-            return result

-        return statement_id
+        # Use the get_sql_results method to return the results of the SQL query, or the statement_ids,
+        # depending on the value of self.return_sql_result
+        return self.get_sql_results(statement_id=statement_id, return_sql_result=self.return_sql_result)
+
+    def get_sql_results(
+        self, statement_id: str, return_sql_result: bool
+    ) -> list[GetStatementResultResponseTypeDef] | list[str]:
+        """
+        Retrieve either the result of the SQL query, or the statement ID(s).
+
+        :param statement_id: Statement ID of the running queries
+        :param return_sql_result: Boolean, true if results should be returned
+        """
+        # ISSUE-40427: Pull the statement, and check to see if there are sub-statements. If that is the
+        # case, pull each of the sub-statement ID's, and grab the results. Otherwise, just use statement_id
+        statement: DescribeStatementResponseTypeDef = self.hook.conn.describe_statement(Id=statement_id)
+        statement_ids: list[str] = (
+            [sub_statement["Id"] for sub_statement in statement["SubStatements"]]
+            if len(statement.get("SubStatements", [])) > 0
+            else [statement_id]
+        )
+
+        # If returning the SQL result, use get_statement_result to return the records for each query
+        if return_sql_result:
+            results: list = [self.hook.conn.get_statement_result(Id=sid) for sid in statement_ids]
+            self.log.debug("Statement result(s): %s", results)
+            return results
+        else:
+            return statement_ids

     def on_kill(self) -> None:
         """Cancel the submitted redshift query."""
-        if self.statement_id:
+        if hasattr(self, "statement_id"):
             self.log.info("Received a kill signal.")
             self.log.info("Stopping Query with statementId - %s", self.statement_id)

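Editor's note on the RedshiftDataOperator hunks above: multi-statement SQL submitted through the Data API is split by Redshift into sub-statements, and the new get_sql_results helper fans out over them, which is why execute() and execute_complete() now return lists instead of a single value. A minimal usage sketch from the DAG side, with placeholder cluster, database, and table names that are not taken from the diff:

from airflow.providers.amazon.aws.operators.redshift_data import RedshiftDataOperator

# Two statements in one call become two Data API sub-statements.
fetch_counts = RedshiftDataOperator(
    task_id="fetch_counts",
    cluster_identifier="example-cluster",  # placeholder
    database="dev",  # placeholder
    sql="SELECT count(*) FROM t1; SELECT count(*) FROM t2;",
    return_sql_result=True,  # execute() now returns list[GetStatementResultResponseTypeDef]
)
# With return_sql_result=False the operator instead returns list[str]: one statement ID per
# (sub-)statement, rather than the single ID or single result it returned in 9.0.0.
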
@@ -20,7 +20,7 @@ import datetime
 import json
 import time
 from functools import cached_property
-from typing import TYPE_CHECKING, Any, Callable, Sequence
+from typing import TYPE_CHECKING, Any, Callable, ClassVar, Sequence

 from botocore.exceptions import ClientError

@@ -65,7 +65,7 @@ class SageMakerBaseOperator(BaseOperator):

     template_fields: Sequence[str] = ("config",)
     template_ext: Sequence[str] = ()
-    template_fields_renderers: dict = {"config": "json"}
+    template_fields_renderers: ClassVar[dict] = {"config": "json"}
     ui_color: str = "#ededed"
     integer_fields: list[list[Any]] = []

@@ -330,3 +330,35 @@ class SageMakerAutoMLSensor(SageMakerBaseSensor):

     def state_from_response(self, response: dict) -> str:
         return response["AutoMLJobStatus"]
+
+
+class SageMakerProcessingSensor(SageMakerBaseSensor):
+    """
+    Poll the processing job until it reaches a terminal state; raise AirflowException with the failure reason.
+
+    .. seealso::
+        For more information on how to use this sensor, take a look at the guide:
+        :ref:`howto/sensor:SageMakerProcessingSensor`
+
+    :param job_name: Name of the processing job to watch.
+    """
+
+    template_fields: Sequence[str] = ("job_name",)
+    template_ext: Sequence[str] = ()
+
+    def __init__(self, *, job_name: str, **kwargs):
+        super().__init__(**kwargs)
+        self.job_name = job_name
+
+    def non_terminal_states(self) -> set[str]:
+        return SageMakerHook.processing_job_non_terminal_states
+
+    def failed_states(self) -> set[str]:
+        return SageMakerHook.processing_job_failed_states
+
+    def get_sagemaker_response(self) -> dict:
+        self.log.info("Poking Sagemaker ProcessingJob %s", self.job_name)
+        return self.hook.describe_processing_job(self.job_name)
+
+    def state_from_response(self, response: dict) -> str:
+        return response["ProcessingJobStatus"]
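Editor's note: the new SageMakerProcessingSensor reuses the SageMakerBaseSensor poll loop, so it is wired into a DAG like the other SageMaker sensors. A short hedged sketch; the job name and timings below are placeholders, not part of the diff:

from airflow.providers.amazon.aws.sensors.sagemaker import SageMakerProcessingSensor

wait_for_processing = SageMakerProcessingSensor(
    task_id="wait_for_processing",
    job_name="example-processing-job",  # placeholder: name of an existing processing job
    poke_interval=60,  # standard BaseSensorOperator kwargs still apply
    timeout=60 * 60,
)
# The sensor keeps poking while the job reports a non-terminal state and raises
# AirflowException with the failure reason if it ends in a failed state, behaviour
# inherited from SageMakerBaseSensor.
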
@@ -152,6 +152,10 @@ class RedshiftToS3Operator(BaseOperator):
             table = self.table
         return f"SELECT * FROM {table}"

+    @property
+    def use_redshift_data(self):
+        return bool(self.redshift_data_api_kwargs)
+
     def execute(self, context: Context) -> None:
         if self.table and self.table_as_file_name:
             self.s3_key = f"{self.s3_key}/{self.table}_"
@@ -164,14 +168,13 @@ class RedshiftToS3Operator(BaseOperator):
         if self.include_header and "HEADER" not in [uo.upper().strip() for uo in self.unload_options]:
             self.unload_options = [*self.unload_options, "HEADER"]

-        redshift_hook: RedshiftDataHook | RedshiftSQLHook
-        if self.redshift_data_api_kwargs:
-            redshift_hook = RedshiftDataHook(aws_conn_id=self.redshift_conn_id)
+        if self.use_redshift_data:
+            redshift_data_hook = RedshiftDataHook(aws_conn_id=self.redshift_conn_id)
             for arg in ["sql", "parameters"]:
                 if arg in self.redshift_data_api_kwargs:
                     raise AirflowException(f"Cannot include param '{arg}' in Redshift Data API kwargs")
         else:
-            redshift_hook = RedshiftSQLHook(redshift_conn_id=self.redshift_conn_id)
+            redshift_sql_hook = RedshiftSQLHook(redshift_conn_id=self.redshift_conn_id)
         conn = S3Hook.get_connection(conn_id=self.aws_conn_id) if self.aws_conn_id else None
         if conn and conn.extra_dejson.get("role_arn", False):
             credentials_block = f"aws_iam_role={conn.extra_dejson['role_arn']}"
@@ -187,10 +190,106 @@ class RedshiftToS3Operator(BaseOperator):
         )

         self.log.info("Executing UNLOAD command...")
-        if isinstance(redshift_hook, RedshiftDataHook):
-            redshift_hook.execute_query(
+        if self.use_redshift_data:
+            redshift_data_hook.execute_query(
                 sql=unload_query, parameters=self.parameters, **self.redshift_data_api_kwargs
             )
         else:
-            redshift_hook.run(unload_query, self.autocommit, parameters=self.parameters)
+            redshift_sql_hook.run(unload_query, self.autocommit, parameters=self.parameters)
         self.log.info("UNLOAD command complete...")
+
+    def get_openlineage_facets_on_complete(self, task_instance):
+        """Implement on_complete as we may query for table details."""
+        from airflow.providers.amazon.aws.utils.openlineage import (
+            get_facets_from_redshift_table,
+            get_identity_column_lineage_facet,
+        )
+        from airflow.providers.common.compat.openlineage.facet import (
+            Dataset,
+            Error,
+            ExtractionErrorRunFacet,
+        )
+        from airflow.providers.openlineage.extractors import OperatorLineage
+
+        output_dataset = Dataset(
+            namespace=f"s3://{self.s3_bucket}",
+            name=self.s3_key,
+        )
+
+        if self.use_redshift_data:
+            redshift_data_hook = RedshiftDataHook(aws_conn_id=self.redshift_conn_id)
+            database = self.redshift_data_api_kwargs.get("database")
+            identifier = self.redshift_data_api_kwargs.get(
+                "cluster_identifier", self.redshift_data_api_kwargs.get("workgroup_name")
+            )
+            port = self.redshift_data_api_kwargs.get("port", "5439")
+            authority = f"{identifier}.{redshift_data_hook.region_name}:{port}"
+        else:
+            redshift_sql_hook = RedshiftSQLHook(redshift_conn_id=self.redshift_conn_id)
+            database = redshift_sql_hook.conn.schema
+            authority = redshift_sql_hook.get_openlineage_database_info(redshift_sql_hook.conn).authority
+
+        if self.select_query == self.default_select_query:
+            if self.use_redshift_data:
+                input_dataset_facets = get_facets_from_redshift_table(
+                    redshift_data_hook, self.table, self.redshift_data_api_kwargs, self.schema
+                )
+            else:
+                input_dataset_facets = get_facets_from_redshift_table(
+                    redshift_sql_hook, self.table, {}, self.schema
+                )
+
+            input_dataset = Dataset(
+                namespace=f"redshift://{authority}",
+                name=f"{database}.{self.schema}.{self.table}" if database else f"{self.schema}.{self.table}",
+                facets=input_dataset_facets,
+            )
+
+            # If default select query is used (SELECT *) output file matches the input table.
+            output_dataset.facets = {
+                "schema": input_dataset_facets["schema"],
+                "columnLineage": get_identity_column_lineage_facet(
+                    field_names=[field.name for field in input_dataset_facets["schema"].fields],
+                    input_datasets=[input_dataset],
+                ),
+            }
+
+            return OperatorLineage(inputs=[input_dataset], outputs=[output_dataset])
+
+        try:
+            from airflow.providers.openlineage.sqlparser import SQLParser, from_table_meta
+        except ImportError:
+            return OperatorLineage(outputs=[output_dataset])
+
+        run_facets = {}
+        parse_result = SQLParser(dialect="redshift", default_schema=self.schema).parse(self.select_query)
+        if parse_result.errors:
+            run_facets["extractionError"] = ExtractionErrorRunFacet(
+                totalTasks=1,
+                failedTasks=1,
+                errors=[
+                    Error(
+                        errorMessage=error.message,
+                        stackTrace=None,
+                        task=error.origin_statement,
+                        taskNumber=error.index,
+                    )
+                    for error in parse_result.errors
+                ],
+            )
+
+        input_datasets = []
+        for in_tb in parse_result.in_tables:
+            ds = from_table_meta(in_tb, database, f"redshift://{authority}", False)
+            schema, table = ds.name.split(".")[-2:]
+            if self.use_redshift_data:
+                input_dataset_facets = get_facets_from_redshift_table(
+                    redshift_data_hook, table, self.redshift_data_api_kwargs, schema
+                )
+            else:
+                input_dataset_facets = get_facets_from_redshift_table(redshift_sql_hook, table, {}, schema)
+
+            ds.facets = input_dataset_facets
+            input_datasets.append(ds)
+
+        return OperatorLineage(inputs=input_datasets, outputs=[output_dataset], run_facets=run_facets)
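Editor's note on the RedshiftToS3Operator hunks above: use_redshift_data is derived purely from whether redshift_data_api_kwargs is non-empty, and that single switch now drives both the UNLOAD execution and the new OpenLineage facet collection. A hedged sketch of the Data API path; bucket, key, cluster, and database values are placeholders, not from the diff:

from airflow.providers.amazon.aws.transfers.redshift_to_s3 import RedshiftToS3Operator

unload_orders = RedshiftToS3Operator(
    task_id="unload_orders",
    s3_bucket="example-bucket",  # placeholder
    s3_key="exports/orders",  # placeholder
    schema="public",
    table="orders",
    redshift_data_api_kwargs={  # any non-empty dict selects the RedshiftDataHook path
        "database": "dev",  # placeholder
        "cluster_identifier": "example-cluster",  # placeholder
    },
)
# Leaving redshift_data_api_kwargs empty keeps the RedshiftSQLHook path; "sql" and
# "parameters" keys are rejected because the operator builds the UNLOAD statement itself.
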
@@ -240,7 +240,7 @@ class S3ToDynamoDBOperator(BaseOperator):
         finally:
             self.log.info("Delete tmp DynamoDB table %s", self.tmp_table_name)
             client.delete_table(TableName=self.tmp_table_name)
-            return dynamodb_hook.get_conn().Table(self.dynamodb_table_name).table_arn
+        return dynamodb_hook.get_conn().Table(self.dynamodb_table_name).table_arn

     def execute(self, context: Context) -> str:
         """
@@ -241,7 +241,7 @@ class S3ToRedshiftOperator(BaseOperator):

         output_dataset = Dataset(
             namespace=f"redshift://{authority}",
-            name=f"{database}.{self.schema}.{self.table}",
+            name=f"{database}.{self.schema}.{self.table}" if database else f"{self.schema}.{self.table}",
             facets=output_dataset_facets,
         )

@@ -29,8 +29,7 @@ class AthenaTrigger(AwsBaseWaiterTrigger):
     """
     Trigger for AthenaOperator.

-    The trigger will asynchronously poll the boto3 API and wait for the
-    Redshift cluster to be in the `available` state.
+    The trigger will asynchronously poll the boto3 API

     :param query_execution_id: ID of the Athena query execution to watch
     :param waiter_delay: The amount of time in seconds to wait between attempts.
@@ -18,7 +18,8 @@
 from __future__ import annotations

 import asyncio
-from typing import TYPE_CHECKING, Any, AsyncIterator
+from collections.abc import AsyncIterator
+from typing import TYPE_CHECKING, Any

 from botocore.exceptions import ClientError, WaiterError

@@ -165,11 +166,10 @@ class TaskDoneTrigger(BaseTrigger):
         )

     async def run(self) -> AsyncIterator[TriggerEvent]:
-        async with EcsHook(
-            aws_conn_id=self.aws_conn_id, region_name=self.region
-        ).async_conn as ecs_client, AwsLogsHook(
-            aws_conn_id=self.aws_conn_id, region_name=self.region
-        ).async_conn as logs_client:
+        async with (
+            EcsHook(aws_conn_id=self.aws_conn_id, region_name=self.region).async_conn as ecs_client,
+            AwsLogsHook(aws_conn_id=self.aws_conn_id, region_name=self.region).async_conn as logs_client,
+        ):
             waiter = ecs_client.get_waiter("tasks_stopped")
             logs_token = None
             while self.waiter_max_attempts:
@@ -62,7 +62,7 @@ class GlueJobCompleteTrigger(BaseTrigger):
             {
                 "job_name": self.job_name,
                 "run_id": self.run_id,
-                "verbose": str(self.verbose),
+                "verbose": self.verbose,
                 "aws_conn_id": self.aws_conn_id,
                 "job_poll_interval": self.job_poll_interval,
             },
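Editor's note: the GlueJobCompleteTrigger change above matters because a trigger is rebuilt from its serialized kwargs; serializing verbose through str() means the restored value is a non-empty string and therefore truthy even when verbose was False. A plain-Python illustration of the pitfall (not Airflow code, only a sketch of the truthiness issue):

# Why str(self.verbose) is lossy: the round-tripped value is a string, not a bool.
verbose = False
serialized = str(verbose)  # "False"
restored = bool(serialized)  # True -- any non-empty string is truthy
assert restored is True
# Serializing the raw bool, as this change does, round-trips False as False.
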
@@ -28,8 +28,9 @@ def get_provider_info():
         "name": "Amazon",
         "description": "Amazon integration (including `Amazon Web Services (AWS) <https://aws.amazon.com/>`__).\n",
         "state": "ready",
-        "source-date-epoch": 1728484310,
+        "source-date-epoch": 1730011042,
         "versions": [
+            "9.1.0",
             "9.0.0",
             "8.29.0",
             "8.28.0",
@@ -98,8 +99,8 @@ def get_provider_info():
         ],
         "dependencies": [
             "apache-airflow>=2.8.0",
-            "apache-airflow-providers-common-compat>=1.1.0",
-            "apache-airflow-providers-common-sql>=1.3.1",
+            "apache-airflow-providers-common-compat>=1.2.1",
+            "apache-airflow-providers-common-sql>=1.20.0",
             "apache-airflow-providers-http",
             "boto3>=1.34.90",
             "botocore>=1.34.90",
@@ -107,7 +108,6 @@ def get_provider_info():
             "watchtower>=3.0.0,!=3.3.0,<4",
             "jsonpath_ng>=1.5.3",
             "redshift_connector>=2.0.918",
-            "sqlalchemy_redshift>=0.8.6",
             "asgiref>=2.3.0",
             "PyAthena>=3.0.10",
             "jmespath>=0.7.0",
@@ -130,7 +130,7 @@ def get_provider_info():
             "aiobotocore>=2.13.0",
             "aws_xray_sdk>=2.12.0",
             "moto[cloudformation,glue]>=5.0.0",
-            "mypy-boto3-appflow>=1.34.0",
+            "mypy-boto3-appflow>=1.34.0,<1.35.39",
             "mypy-boto3-rds>=1.34.90",
             "mypy-boto3-redshift-data>=1.34.0",
             "mypy-boto3-s3>=1.34.90",
@@ -1,6 +1,6 @@
-Metadata-Version: 2.1
+Metadata-Version: 2.3
 Name: apache-airflow-providers-amazon
-Version: 9.0.0rc1
+Version: 9.1.0
 Summary: Provider package apache-airflow-providers-amazon for Apache Airflow
 Keywords: airflow-provider,amazon,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -21,10 +21,10 @@ Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Topic :: System :: Monitoring
 Requires-Dist: PyAthena>=3.0.10
-Requires-Dist: apache-airflow-providers-common-compat>=1.1.0rc0
-Requires-Dist: apache-airflow-providers-common-sql>=1.3.1rc0
+Requires-Dist: apache-airflow-providers-common-compat>=1.2.1
+Requires-Dist: apache-airflow-providers-common-sql>=1.20.0
 Requires-Dist: apache-airflow-providers-http
-Requires-Dist: apache-airflow>=2.8.0rc0
+Requires-Dist: apache-airflow>=2.8.0
 Requires-Dist: asgiref>=2.3.0
 Requires-Dist: boto3>=1.34.90
 Requires-Dist: botocore>=1.34.90
@@ -33,19 +33,18 @@ Requires-Dist: jmespath>=0.7.0
 Requires-Dist: jsonpath_ng>=1.5.3
 Requires-Dist: python3-saml>=1.16.0
 Requires-Dist: redshift_connector>=2.0.918
-Requires-Dist: sqlalchemy_redshift>=0.8.6
 Requires-Dist: watchtower>=3.0.0,!=3.3.0,<4
 Requires-Dist: aiobotocore[boto3]>=2.13.0 ; extra == "aiobotocore"
-Requires-Dist: apache-airflow-providers-apache-hive ; extra == "apache.hive"
-Requires-Dist: apache-airflow-providers-cncf-kubernetes>=7.2.0rc0 ; extra == "cncf.kubernetes"
-Requires-Dist: apache-airflow-providers-common-compat ; extra == "common.compat"
-Requires-Dist: apache-airflow-providers-common-sql ; extra == "common.sql"
+Requires-Dist: apache-airflow-providers-apache-hive ; extra == "apache-hive"
+Requires-Dist: apache-airflow-providers-cncf-kubernetes>=7.2.0 ; extra == "cncf-kubernetes"
+Requires-Dist: apache-airflow-providers-common-compat ; extra == "common-compat"
+Requires-Dist: apache-airflow-providers-common-sql ; extra == "common-sql"
 Requires-Dist: apache-airflow-providers-exasol ; extra == "exasol"
 Requires-Dist: apache-airflow-providers-ftp ; extra == "ftp"
 Requires-Dist: apache-airflow-providers-google ; extra == "google"
 Requires-Dist: apache-airflow-providers-http ; extra == "http"
 Requires-Dist: apache-airflow-providers-imap ; extra == "imap"
-Requires-Dist: apache-airflow-providers-microsoft-azure ; extra == "microsoft.azure"
+Requires-Dist: apache-airflow-providers-microsoft-azure ; extra == "microsoft-azure"
 Requires-Dist: apache-airflow-providers-mongo ; extra == "mongo"
 Requires-Dist: apache-airflow-providers-openlineage ; extra == "openlineage"
 Requires-Dist: pandas>=2.1.2,<2.2 ; extra == "pandas" and (python_version>="3.9")
@@ -55,23 +54,23 @@ Requires-Dist: s3fs>=2023.10.0 ; extra == "s3fs"
 Requires-Dist: apache-airflow-providers-salesforce ; extra == "salesforce"
 Requires-Dist: apache-airflow-providers-ssh ; extra == "ssh"
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-amazon/9.0.0/changelog.html
-Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-amazon/9.0.0
+Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-amazon/9.1.0/changelog.html
+Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-amazon/9.1.0
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
 Project-URL: Twitter, https://twitter.com/ApacheAirflow
 Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/
 Provides-Extra: aiobotocore
-Provides-Extra: apache.hive
-Provides-Extra: cncf.kubernetes
-Provides-Extra: common.compat
-Provides-Extra: common.sql
+Provides-Extra: apache-hive
+Provides-Extra: cncf-kubernetes
+Provides-Extra: common-compat
+Provides-Extra: common-sql
 Provides-Extra: exasol
 Provides-Extra: ftp
 Provides-Extra: google
 Provides-Extra: http
 Provides-Extra: imap
-Provides-Extra: microsoft.azure
+Provides-Extra: microsoft-azure
 Provides-Extra: mongo
 Provides-Extra: openlineage
 Provides-Extra: pandas
@@ -124,7 +123,7 @@ Provides-Extra: ssh

 Package ``apache-airflow-providers-amazon``

-Release: ``9.0.0.rc1``
+Release: ``9.1.0``


 Amazon integration (including `Amazon Web Services (AWS) <https://aws.amazon.com/>`__).
@@ -137,7 +136,7 @@ This is a provider package for ``amazon`` provider. All classes for this provide
 are in ``airflow.providers.amazon`` python package.

 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-amazon/9.0.0/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-amazon/9.1.0/>`_.

 Installation
 ------------
@@ -155,8 +154,8 @@ Requirements
 PIP package                                 Version required
 =========================================== ======================
 ``apache-airflow``                          ``>=2.8.0``
-``apache-airflow-providers-common-compat``  ``>=1.1.0``
-``apache-airflow-providers-common-sql``     ``>=1.3.1``
+``apache-airflow-providers-common-compat``  ``>=1.2.1``
+``apache-airflow-providers-common-sql``     ``>=1.20.0``
 ``apache-airflow-providers-http``
 ``boto3``                                   ``>=1.34.90``
 ``botocore``                                ``>=1.34.90``
@@ -164,7 +163,6 @@ PIP package Version required
 ``watchtower``                              ``>=3.0.0,!=3.3.0,<4``
 ``jsonpath_ng``                             ``>=1.5.3``
 ``redshift_connector``                      ``>=2.0.918``
-``sqlalchemy_redshift``                     ``>=0.8.6``
 ``asgiref``                                 ``>=2.3.0``
 ``PyAthena``                                ``>=3.0.10``
 ``jmespath``                                ``>=0.7.0``
@@ -204,4 +202,4 @@ Dependent package
 ====================================================================================================================== ===================

 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-amazon/9.0.0/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-amazon/9.1.0/changelog.html>`_.