apache-airflow-providers-amazon 8.24.0rc2__py3-none-any.whl → 8.25.0__py3-none-any.whl
This diff shows the content of publicly available package versions that have been released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries. Note that several removed (-) lines are truncated in this extract; they are reproduced as-is below.
- airflow/providers/amazon/__init__.py +1 -1
- airflow/providers/amazon/aws/hooks/comprehend.py +33 -0
- airflow/providers/amazon/aws/hooks/redshift_sql.py +8 -1
- airflow/providers/amazon/aws/operators/comprehend.py +148 -1
- airflow/providers/amazon/aws/sensors/comprehend.py +112 -1
- airflow/providers/amazon/aws/transfers/redshift_to_s3.py +1 -1
- airflow/providers/amazon/aws/triggers/comprehend.py +36 -0
- airflow/providers/amazon/aws/utils/__init__.py +2 -3
- airflow/providers/amazon/aws/waiters/comprehend.json +55 -0
- airflow/providers/amazon/get_provider_info.py +4 -3
- {apache_airflow_providers_amazon-8.24.0rc2.dist-info → apache_airflow_providers_amazon-8.25.0.dist-info}/METADATA +11 -11
- {apache_airflow_providers_amazon-8.24.0rc2.dist-info → apache_airflow_providers_amazon-8.25.0.dist-info}/RECORD +14 -14
- {apache_airflow_providers_amazon-8.24.0rc2.dist-info → apache_airflow_providers_amazon-8.25.0.dist-info}/WHEEL +0 -0
- {apache_airflow_providers_amazon-8.24.0rc2.dist-info → apache_airflow_providers_amazon-8.25.0.dist-info}/entry_points.txt +0 -0
airflow/providers/amazon/__init__.py (+1 -1)

@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version
 
 __all__ = ["__version__"]
 
-__version__ = "8.24.0rc2"
+__version__ = "8.25.0"
 
 if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
     "2.7.0"
airflow/providers/amazon/aws/hooks/comprehend.py (+33 -0)

@@ -16,6 +16,7 @@
 # under the License.
 from __future__ import annotations
 
+from airflow.exceptions import AirflowException
 from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
 
 
@@ -35,3 +36,35 @@ class ComprehendHook(AwsBaseHook):
     def __init__(self, *args, **kwargs) -> None:
         kwargs["client_type"] = "comprehend"
         super().__init__(*args, **kwargs)
+
+    def validate_document_classifier_training_status(
+        self, document_classifier_arn: str, fail_on_warnings: bool = False
+    ) -> None:
+        """
+        Log the Information about the document classifier.
+
+        NumberOfLabels
+        NumberOfTrainedDocuments
+        NumberOfTestDocuments
+        EvaluationMetrics
+
+        """
+        response = self.conn.describe_document_classifier(DocumentClassifierArn=document_classifier_arn)
+
+        status = response["DocumentClassifierProperties"]["Status"]
+
+        if status == "TRAINED_WITH_WARNING":
+            self.log.info(
+                "AWS Comprehend document classifier training completed with %s, Message: %s please review the skipped files folder in the output location %s",
+                status,
+                response["DocumentClassifierProperties"]["Message"],
+                response["DocumentClassifierProperties"]["OutputDataConfig"]["S3Uri"],
+            )
+
+            if fail_on_warnings:
+                raise AirflowException("Warnings in AWS Comprehend document classifier training.")
+
+        self.log.info(
+            "AWS Comprehend document classifier metadata: %s",
+            response["DocumentClassifierProperties"]["ClassifierMetadata"],
+        )
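A minimal sketch (not from the package) of calling the new hook method directly; the connection id and classifier ARN are hypothetical placeholders:

from airflow.providers.amazon.aws.hooks.comprehend import ComprehendHook

# Hypothetical ARN; in practice it is returned by create_document_classifier().
classifier_arn = "arn:aws:comprehend:us-east-1:123456789012:document-classifier/example"

hook = ComprehendHook(aws_conn_id="aws_default")
# Logs the classifier metadata (NumberOfLabels, NumberOfTrainedDocuments,
# NumberOfTestDocuments, EvaluationMetrics) and, when fail_on_warnings=True,
# raises AirflowException if the training status is TRAINED_WITH_WARNING.
hook.validate_document_classifier_training_status(
    document_classifier_arn=classifier_arn,
    fail_on_warnings=True,
)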
airflow/providers/amazon/aws/hooks/redshift_sql.py (+8 -1)

@@ -20,14 +20,19 @@ from functools import cached_property
 from typing import TYPE_CHECKING
 
 import redshift_connector
+from packaging.version import Version
 from redshift_connector import Connection as RedshiftConnection
 from sqlalchemy import create_engine
 from sqlalchemy.engine.url import URL
 
+from airflow import __version__ as AIRFLOW_VERSION
 from airflow.exceptions import AirflowException
 from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
 from airflow.providers.common.sql.hooks.sql import DbApiHook
 
+_IS_AIRFLOW_2_10_OR_HIGHER = Version(Version(AIRFLOW_VERSION).base_version) >= Version("2.10.0")
+
+
 if TYPE_CHECKING:
     from airflow.models.connection import Connection
     from airflow.providers.openlineage.sqlparser import DatabaseInfo
@@ -257,4 +262,6 @@ class RedshiftSQLHook(DbApiHook):
 
     def get_openlineage_default_schema(self) -> str | None:
         """Return current schema. This is usually changed with ``SEARCH_PATH`` parameter."""
-        return self.get_first("SELECT CURRENT_SCHEMA();")[0]
+        if _IS_AIRFLOW_2_10_OR_HIGHER:
+            return self.get_first("SELECT CURRENT_SCHEMA();")[0]
+        return super().get_openlineage_default_schema()
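The nested Version(...) call matters because base_version strips pre-release and dev suffixes before the comparison; a small standalone check of the gate used above:

from packaging.version import Version

# base_version drops suffixes such as "rc1", so an Airflow release candidate
# of 2.10 still enables the CURRENT_SCHEMA query path above.
for raw in ("2.9.3", "2.10.0rc1", "2.10.1"):
    print(raw, Version(Version(raw).base_version) >= Version("2.10.0"))
# 2.9.3 False, 2.10.0rc1 True, 2.10.1 True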
airflow/providers/amazon/aws/operators/comprehend.py (+148 -1)

@@ -23,7 +23,10 @@ from airflow.configuration import conf
 from airflow.exceptions import AirflowException
 from airflow.providers.amazon.aws.hooks.comprehend import ComprehendHook
 from airflow.providers.amazon.aws.operators.base_aws import AwsBaseOperator
-from airflow.providers.amazon.aws.triggers.comprehend import ComprehendPiiEntitiesDetectionJobCompletedTrigger
+from airflow.providers.amazon.aws.triggers.comprehend import (
+    ComprehendCreateDocumentClassifierCompletedTrigger,
+    ComprehendPiiEntitiesDetectionJobCompletedTrigger,
+)
 from airflow.providers.amazon.aws.utils import validate_execute_complete_event
 from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
 from airflow.utils.timezone import utcnow
@@ -190,3 +193,147 @@ class ComprehendStartPiiEntitiesDetectionJobOperator(ComprehendBaseOperator):
 
         self.log.info("Comprehend pii entities detection job `%s` complete.", event["job_id"])
         return event["job_id"]
+
+
+class ComprehendCreateDocumentClassifierOperator(AwsBaseOperator[ComprehendHook]):
+    """
+    Create a comprehend document classifier that can categorize documents.
+
+    Provide a set of training documents that are labeled with the categories.
+
+    .. seealso::
+        For more information on how to use this operator, take a look at the guide:
+        :ref:`howto/operator:ComprehendCreateDocumentClassifierOperator`
+
+    :param document_classifier_name: The name of the document classifier. (templated)
+    :param input_data_config: Specifies the format and location of the input data for the job. (templated)
+    :param mode: Indicates the mode in which the classifier will be trained. (templated)
+    :param data_access_role_arn: The Amazon Resource Name (ARN) of the IAM role that grants Amazon Comprehend
+        read access to your input data. (templated)
+    :param language_code: The language of the input documents. You can specify any of the languages supported by
+        Amazon Comprehend. All documents must be in the same language. (templated)
+    :param fail_on_warnings: If set to True, the document classifier training job will throw an error when the
+        status is TRAINED_WITH_WARNING. (default False)
+    :param output_data_config: Specifies the location for the output files from a custom classifier job.
+        This parameter is required for a request that creates a native document model. (templated)
+    :param document_classifier_kwargs: Any optional parameters to pass to the document classifier. (templated)
+
+    :param wait_for_completion: Whether to wait for job to stop. (default: True)
+    :param waiter_delay: Time in seconds to wait between status checks. (default: 60)
+    :param waiter_max_attempts: Maximum number of attempts to check for job completion. (default: 20)
+    :param deferrable: If True, the operator will wait asynchronously for the job to stop.
+        This implies waiting for completion. This mode requires aiobotocore module to be installed.
+        (default: False)
+    :param aws_conn_id: The Airflow connection used for AWS credentials.
+        If this is ``None`` or empty then the default boto3 behaviour is used. If
+        running Airflow in a distributed manner and aws_conn_id is None or
+        empty, then default boto3 configuration would be used (and must be
+        maintained on each worker node).
+    :param region_name: AWS region_name. If not specified then the default boto3 behaviour is used.
+    :param verify: Whether to verify SSL certificates. See:
+        https://boto3.amazonaws.com/v1/documentation/api/latest/reference/core/session.html
+    :param botocore_config: Configuration dictionary (key-values) for botocore client. See:
+        https://botocore.amazonaws.com/v1/documentation/api/latest/reference/config.html
+    """
+
+    aws_hook_class = ComprehendHook
+
+    template_fields: Sequence[str] = aws_template_fields(
+        "document_classifier_name",
+        "input_data_config",
+        "mode",
+        "data_access_role_arn",
+        "language_code",
+        "output_data_config",
+        "document_classifier_kwargs",
+    )
+
+    template_fields_renderers: dict = {
+        "input_data_config": "json",
+        "output_data_config": "json",
+        "document_classifier_kwargs": "json",
+    }
+
+    def __init__(
+        self,
+        document_classifier_name: str,
+        input_data_config: dict[str, Any],
+        mode: str,
+        data_access_role_arn: str,
+        language_code: str,
+        fail_on_warnings: bool = False,
+        output_data_config: dict[str, Any] | None = None,
+        document_classifier_kwargs: dict[str, Any] | None = None,
+        wait_for_completion: bool = True,
+        waiter_delay: int = 60,
+        waiter_max_attempts: int = 20,
+        deferrable: bool = conf.getboolean("operators", "default_deferrable", fallback=False),
+        aws_conn_id: str | None = "aws_default",
+        **kwargs,
+    ):
+        super().__init__(**kwargs)
+        self.document_classifier_name = document_classifier_name
+        self.input_data_config = input_data_config
+        self.mode = mode
+        self.data_access_role_arn = data_access_role_arn
+        self.language_code = language_code
+        self.fail_on_warnings = fail_on_warnings
+        self.output_data_config = output_data_config
+        self.document_classifier_kwargs = document_classifier_kwargs or {}
+        self.wait_for_completion = wait_for_completion
+        self.waiter_delay = waiter_delay
+        self.waiter_max_attempts = waiter_max_attempts
+        self.deferrable = deferrable
+        self.aws_conn_id = aws_conn_id
+
+    def execute(self, context: Context) -> str:
+        if self.output_data_config:
+            self.document_classifier_kwargs["OutputDataConfig"] = self.output_data_config
+
+        document_classifier_arn = self.hook.conn.create_document_classifier(
+            DocumentClassifierName=self.document_classifier_name,
+            InputDataConfig=self.input_data_config,
+            Mode=self.mode,
+            DataAccessRoleArn=self.data_access_role_arn,
+            LanguageCode=self.language_code,
+            **self.document_classifier_kwargs,
+        )["DocumentClassifierArn"]
+
+        message_description = f"document classifier {document_classifier_arn} to complete."
+        if self.deferrable:
+            self.log.info("Deferring %s", message_description)
+            self.defer(
+                trigger=ComprehendCreateDocumentClassifierCompletedTrigger(
+                    document_classifier_arn=document_classifier_arn,
+                    waiter_delay=self.waiter_delay,
+                    waiter_max_attempts=self.waiter_max_attempts,
+                    aws_conn_id=self.aws_conn_id,
+                ),
+                method_name="execute_complete",
+            )
+        elif self.wait_for_completion:
+            self.log.info("Waiting for %s", message_description)
+
+            self.hook.get_waiter("create_document_classifier_complete").wait(
+                DocumentClassifierArn=document_classifier_arn,
+                WaiterConfig={"Delay": self.waiter_delay, "MaxAttempts": self.waiter_max_attempts},
+            )
+
+            self.hook.validate_document_classifier_training_status(
+                document_classifier_arn=document_classifier_arn, fail_on_warnings=self.fail_on_warnings
+            )
+
+        return document_classifier_arn
+
+    def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> str:
+        event = validate_execute_complete_event(event)
+        if event["status"] != "success":
+            raise AirflowException("Error while running comprehend create document classifier: %s", event)
+
+        self.hook.validate_document_classifier_training_status(
+            document_classifier_arn=event["document_classifier_arn"], fail_on_warnings=self.fail_on_warnings
+        )
+
+        self.log.info("Comprehend document classifier `%s` complete.", event["document_classifier_arn"])
+
+        return event["document_classifier_arn"]
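A minimal usage sketch for the new operator (not part of the package); the bucket, role ARN, and DAG id are hypothetical placeholders:

from datetime import datetime

from airflow import DAG
from airflow.providers.amazon.aws.operators.comprehend import (
    ComprehendCreateDocumentClassifierOperator,
)

with DAG("example_comprehend_classifier", start_date=datetime(2024, 1, 1), schedule=None) as dag:
    create_classifier = ComprehendCreateDocumentClassifierOperator(
        task_id="create_document_classifier",
        document_classifier_name="sample-classifier",
        input_data_config={
            "DataFormat": "COMPREHEND_CSV",
            "S3Uri": "s3://example-bucket/training-docs/",
        },
        mode="MULTI_CLASS",
        data_access_role_arn="arn:aws:iam::123456789012:role/ComprehendExecutionRole",
        language_code="en",
        fail_on_warnings=False,
        wait_for_completion=True,  # block on the custom waiter, then validate status
    )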
airflow/providers/amazon/aws/sensors/comprehend.py (+112 -1)

@@ -23,7 +23,10 @@ from airflow.configuration import conf
 from airflow.exceptions import AirflowException, AirflowSkipException
 from airflow.providers.amazon.aws.hooks.comprehend import ComprehendHook
 from airflow.providers.amazon.aws.sensors.base_aws import AwsBaseSensor
-from airflow.providers.amazon.aws.triggers.comprehend import ComprehendPiiEntitiesDetectionJobCompletedTrigger
+from airflow.providers.amazon.aws.triggers.comprehend import (
+    ComprehendCreateDocumentClassifierCompletedTrigger,
+    ComprehendPiiEntitiesDetectionJobCompletedTrigger,
+)
 from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
 
 if TYPE_CHECKING:
@@ -145,3 +148,111 @@ class ComprehendStartPiiEntitiesDetectionJobCompletedSensor(ComprehendBaseSensor
         return self.hook.conn.describe_pii_entities_detection_job(JobId=self.job_id)[
             "PiiEntitiesDetectionJobProperties"
         ]["JobStatus"]
+
+
+class ComprehendCreateDocumentClassifierCompletedSensor(AwsBaseSensor[ComprehendHook]):
+    """
+    Poll the state of the document classifier until it reaches a completed state; fails if the job fails.
+
+    .. seealso::
+        For more information on how to use this sensor, take a look at the guide:
+        :ref:`howto/sensor:ComprehendCreateDocumentClassifierCompletedSensor`
+
+    :param document_classifier_arn: The arn of the Comprehend document classifier.
+    :param fail_on_warnings: If set to True, the document classifier training job will throw an error when the
+        status is TRAINED_WITH_WARNING. (default False)
+
+    :param deferrable: If True, the sensor will operate in deferrable mode. This mode requires aiobotocore
+        module to be installed.
+        (default: False, but can be overridden in config file by setting default_deferrable to True)
+    :param poke_interval: Polling period in seconds to check for the status of the job. (default: 120)
+    :param max_retries: Number of times before returning the current state. (default: 75)
+    :param aws_conn_id: The Airflow connection used for AWS credentials.
+        If this is ``None`` or empty then the default boto3 behaviour is used. If
+        running Airflow in a distributed manner and aws_conn_id is None or
+        empty, then default boto3 configuration would be used (and must be
+        maintained on each worker node).
+    :param region_name: AWS region_name. If not specified then the default boto3 behaviour is used.
+    :param verify: Whether to verify SSL certificates. See:
+        https://boto3.amazonaws.com/v1/documentation/api/latest/reference/core/session.html
+    :param botocore_config: Configuration dictionary (key-values) for botocore client. See:
+        https://botocore.amazonaws.com/v1/documentation/api/latest/reference/config.html
+    """
+
+    aws_hook_class = ComprehendHook
+
+    INTERMEDIATE_STATES: tuple[str, ...] = (
+        "SUBMITTED",
+        "TRAINING",
+    )
+    FAILURE_STATES: tuple[str, ...] = (
+        "DELETING",
+        "STOP_REQUESTED",
+        "STOPPED",
+        "IN_ERROR",
+    )
+    SUCCESS_STATES: tuple[str, ...] = ("TRAINED", "TRAINED_WITH_WARNING")
+    FAILURE_MESSAGE = "Comprehend document classifier failed."
+
+    template_fields: Sequence[str] = aws_template_fields("document_classifier_arn")
+
+    def __init__(
+        self,
+        *,
+        document_classifier_arn: str,
+        fail_on_warnings: bool = False,
+        max_retries: int = 75,
+        poke_interval: int = 120,
+        deferrable: bool = conf.getboolean("operators", "default_deferrable", fallback=False),
+        aws_conn_id: str | None = "aws_default",
+        **kwargs: Any,
+    ) -> None:
+        super().__init__(**kwargs)
+        self.document_classifier_arn = document_classifier_arn
+        self.fail_on_warnings = fail_on_warnings
+        self.max_retries = max_retries
+        self.poke_interval = poke_interval
+        self.deferrable = deferrable
+        self.aws_conn_id = aws_conn_id
+
+    def execute(self, context: Context) -> Any:
+        if self.deferrable:
+            self.defer(
+                trigger=ComprehendCreateDocumentClassifierCompletedTrigger(
+                    document_classifier_arn=self.document_classifier_arn,
+                    waiter_delay=int(self.poke_interval),
+                    waiter_max_attempts=self.max_retries,
+                    aws_conn_id=self.aws_conn_id,
+                ),
+                method_name="poke",
+            )
+        else:
+            super().execute(context=context)
+
+    def poke(self, context: Context, **kwargs) -> bool:
+        status = self.hook.conn.describe_document_classifier(
+            DocumentClassifierArn=self.document_classifier_arn
+        )["DocumentClassifierProperties"]["Status"]
+
+        self.log.info(
+            "Poking for AWS Comprehend document classifier arn: %s status: %s",
+            self.document_classifier_arn,
+            status,
+        )
+
+        if status in self.FAILURE_STATES:
+            # TODO: remove this if block when min_airflow_version is set to higher than 2.7.1
+            if self.soft_fail:
+                raise AirflowSkipException(self.FAILURE_MESSAGE)
+            raise AirflowException(self.FAILURE_MESSAGE)
+
+        if status in self.SUCCESS_STATES:
+            self.hook.validate_document_classifier_training_status(
+                document_classifier_arn=self.document_classifier_arn, fail_on_warnings=self.fail_on_warnings
+            )
+
+            self.log.info("Comprehend document classifier `%s` complete.", self.document_classifier_arn)
+
+            return True
+
+        return False
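Continuing the sketch above, the sensor can watch a classifier created elsewhere; the XCom pull below assumes the operator task id from the previous example:

from airflow.providers.amazon.aws.sensors.comprehend import (
    ComprehendCreateDocumentClassifierCompletedSensor,
)

await_classifier = ComprehendCreateDocumentClassifierCompletedSensor(
    task_id="await_document_classifier",
    document_classifier_arn="{{ ti.xcom_pull(task_ids='create_document_classifier') }}",
    fail_on_warnings=True,
    deferrable=True,  # hand polling off to the triggerer via the waiter trigger
)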
airflow/providers/amazon/aws/transfers/redshift_to_s3.py (+1 -1)

@@ -128,7 +128,7 @@ class RedshiftToS3Operator(BaseOperator):
         self, credentials_block: str, select_query: str, s3_key: str, unload_options: str
     ) -> str:
         # Un-escape already escaped queries
-        select_query = re.sub(r"''(
+        select_query = re.sub(r"''(.+?)''", r"'\1'", select_query)
         return f"""
                     UNLOAD ($${select_query}$$)
                     TO 's3://{self.s3_bucket}/{s3_key}'
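The removed line is truncated in this extract, so only the replacement is shown in full; its behavior can be sanity-checked standalone:

import re

# The non-greedy pattern un-escapes each doubled-quoted literal separately,
# so two escaped strings in one query are not merged into a single match.
query = "SELECT * FROM t WHERE a = ''x'' AND b = ''y''"
print(re.sub(r"''(.+?)''", r"'\1'", query))
# SELECT * FROM t WHERE a = 'x' AND b = 'y'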
airflow/providers/amazon/aws/triggers/comprehend.py (+36 -0)

@@ -59,3 +59,39 @@ class ComprehendPiiEntitiesDetectionJobCompletedTrigger(AwsBaseWaiterTrigger):
 
     def hook(self) -> AwsGenericHook:
         return ComprehendHook(aws_conn_id=self.aws_conn_id)
+
+
+class ComprehendCreateDocumentClassifierCompletedTrigger(AwsBaseWaiterTrigger):
+    """
+    Trigger when a Comprehend document classifier is complete.
+
+    :param document_classifier_arn: The arn of the Comprehend document classifier.
+    :param waiter_delay: The amount of time in seconds to wait between attempts. (default: 120)
+    :param waiter_max_attempts: The maximum number of attempts to be made. (default: 75)
+    :param aws_conn_id: The Airflow connection used for AWS credentials.
+    """
+
+    def __init__(
+        self,
+        *,
+        document_classifier_arn: str,
+        waiter_delay: int = 120,
+        waiter_max_attempts: int = 75,
+        aws_conn_id: str | None = "aws_default",
+    ) -> None:
+        super().__init__(
+            serialized_fields={"document_classifier_arn": document_classifier_arn},
+            waiter_name="create_document_classifier_complete",
+            waiter_args={"DocumentClassifierArn": document_classifier_arn},
+            failure_message="Comprehend create document classifier failed.",
+            status_message="Status of Comprehend create document classifier is",
+            status_queries=["DocumentClassifierProperties.Status"],
+            return_key="document_classifier_arn",
+            return_value=document_classifier_arn,
+            waiter_delay=waiter_delay,
+            waiter_max_attempts=waiter_max_attempts,
+            aws_conn_id=aws_conn_id,
+        )
+
+    def hook(self) -> AwsGenericHook:
+        return ComprehendHook(aws_conn_id=self.aws_conn_id)
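A sketch of how the trigger is meant to be used (the ARN is a placeholder); like the PII trigger above it, it only wraps the create_document_classifier_complete waiter, and the operator resumes in execute_complete() once it fires:

from airflow.providers.amazon.aws.triggers.comprehend import (
    ComprehendCreateDocumentClassifierCompletedTrigger,
)

trigger = ComprehendCreateDocumentClassifierCompletedTrigger(
    document_classifier_arn="arn:aws:comprehend:us-east-1:123456789012:document-classifier/example",
    waiter_delay=120,
    waiter_max_attempts=75,
)
# What the scheduler persists so the triggerer process can re-create the trigger.
classpath, kwargs = trigger.serialize()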
airflow/providers/amazon/aws/utils/__init__.py (+2 -3)

@@ -20,10 +20,9 @@ import logging
 import re
 from datetime import datetime, timezone
 from enum import Enum
+from importlib import metadata
 from typing import Any
 
-import importlib_metadata
-
 from airflow.exceptions import AirflowException
 from airflow.utils.helpers import prune_dict
 from airflow.version import version
@@ -78,7 +77,7 @@ def get_airflow_version() -> tuple[int, ...]:
 
 def get_botocore_version() -> tuple[int, ...]:
     """Return the version number of the installed botocore package in the form of a tuple[int,...]."""
-    return tuple(map(int, importlib_metadata.version("botocore").split(".")[:3]))
+    return tuple(map(int, metadata.version("botocore").split(".")[:3]))
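The replacement is behavior-preserving: the stdlib importlib.metadata provides the same version lookup as the dropped importlib_metadata backport:

from importlib import metadata

# Mirrors what get_botocore_version() now does with the standard library only.
print(tuple(map(int, metadata.version("botocore").split(".")[:3])))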
airflow/providers/amazon/aws/waiters/comprehend.json (+55 -0)

@@ -44,6 +44,61 @@
                 }
 
             ]
+        },
+        "create_document_classifier_complete": {
+            "delay": 120,
+            "maxAttempts": 75,
+            "operation": "DescribeDocumentClassifier",
+            "acceptors": [
+                {
+                    "matcher": "path",
+                    "argument": "DocumentClassifierProperties.Status",
+                    "expected": "SUBMITTED",
+                    "state": "retry"
+                },
+                {
+                    "matcher": "path",
+                    "argument": "DocumentClassifierProperties.Status",
+                    "expected": "TRAINING",
+                    "state": "retry"
+                },
+                {
+                    "matcher": "path",
+                    "argument": "DocumentClassifierProperties.Status",
+                    "expected": "DELETING",
+                    "state": "failure"
+                },
+                {
+                    "matcher": "path",
+                    "argument": "DocumentClassifierProperties.Status",
+                    "expected": "STOP_REQUESTED",
+                    "state": "failure"
+                },
+                {
+                    "matcher": "path",
+                    "argument": "DocumentClassifierProperties.Status",
+                    "expected": "STOPPED",
+                    "state": "failure"
+                },
+                {
+                    "matcher": "path",
+                    "argument": "DocumentClassifierProperties.Status",
+                    "expected": "IN_ERROR",
+                    "state": "failure"
+                },
+                {
+                    "matcher": "path",
+                    "argument": "DocumentClassifierProperties.Status",
+                    "expected": "TRAINED",
+                    "state": "success"
+                },
+                {
+                    "matcher": "path",
+                    "argument": "DocumentClassifierProperties.Status",
+                    "expected": "TRAINED_WITH_WARNING",
+                    "state": "success"
+                }
+            ]
         }
     }
 }
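The JSON above is picked up by the provider's custom-waiter machinery, so it is fetched through the hook rather than plain boto3; a short sketch with a placeholder ARN:

from airflow.providers.amazon.aws.hooks.comprehend import ComprehendHook

waiter = ComprehendHook().get_waiter("create_document_classifier_complete")
# Polls DescribeDocumentClassifier until a success acceptor (TRAINED or
# TRAINED_WITH_WARNING) matches, or raises WaiterError on a failure state.
waiter.wait(
    DocumentClassifierArn="arn:aws:comprehend:us-east-1:123456789012:document-classifier/example",
    WaiterConfig={"Delay": 120, "MaxAttempts": 75},
)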
airflow/providers/amazon/get_provider_info.py (+4 -3)

@@ -28,8 +28,9 @@ def get_provider_info():
         "name": "Amazon",
         "description": "Amazon integration (including `Amazon Web Services (AWS) <https://aws.amazon.com/>`__).\n",
         "state": "ready",
-        "source-date-epoch":
+        "source-date-epoch": 1718602112,
         "versions": [
+            "8.25.0",
             "8.24.0",
             "8.23.0",
             "8.22.0",
@@ -109,8 +110,8 @@ def get_provider_info():
         {
             "name": "pandas",
             "dependencies": [
-                'pandas>=1.
-                'pandas>=
+                'pandas>=2.1.2,<2.2;python_version>="3.9"',
+                'pandas>=1.5.3,<2.2;python_version<"3.9"',
             ],
         },
         {"name": "aiobotocore", "dependencies": ["aiobotocore[boto3]>=2.13.0"]},
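How an installer decides between the two pandas pins can be reproduced with the packaging library (illustration only):

from packaging.requirements import Requirement

for line in ('pandas>=2.1.2,<2.2; python_version >= "3.9"',
             'pandas>=1.5.3,<2.2; python_version < "3.9"'):
    req = Requirement(line)
    # Exactly one marker evaluates to True on any given interpreter.
    print(req.name, req.specifier, req.marker.evaluate())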
{apache_airflow_providers_amazon-8.24.0rc2.dist-info → apache_airflow_providers_amazon-8.25.0.dist-info}/METADATA (+11 -11)

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: apache-airflow-providers-amazon
-Version: 8.24.0rc2
+Version: 8.25.0
 Summary: Provider package apache-airflow-providers-amazon for Apache Airflow
 Keywords: airflow-provider,amazon,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -22,9 +22,9 @@ Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Topic :: System :: Monitoring
 Requires-Dist: PyAthena>=3.0.10
-Requires-Dist: apache-airflow-providers-common-sql>=1.3.
+Requires-Dist: apache-airflow-providers-common-sql>=1.3.1
 Requires-Dist: apache-airflow-providers-http
-Requires-Dist: apache-airflow>=2.7.
+Requires-Dist: apache-airflow>=2.7.0
 Requires-Dist: asgiref>=2.3.0
 Requires-Dist: boto3>=1.34.90
 Requires-Dist: botocore>=1.34.90
@@ -36,7 +36,7 @@ Requires-Dist: sqlalchemy_redshift>=0.8.6
 Requires-Dist: watchtower>=3.0.0,<4
 Requires-Dist: aiobotocore[boto3]>=2.13.0 ; extra == "aiobotocore"
 Requires-Dist: apache-airflow-providers-apache-hive ; extra == "apache.hive"
-Requires-Dist: apache-airflow-providers-cncf-kubernetes>=7.2.
+Requires-Dist: apache-airflow-providers-cncf-kubernetes>=7.2.0 ; extra == "cncf.kubernetes"
 Requires-Dist: apache-airflow-providers-common-sql ; extra == "common.sql"
 Requires-Dist: apache-airflow-providers-exasol ; extra == "exasol"
 Requires-Dist: apache-airflow-providers-ftp ; extra == "ftp"
@@ -46,15 +46,15 @@ Requires-Dist: apache-airflow-providers-imap ; extra == "imap"
 Requires-Dist: apache-airflow-providers-microsoft-azure ; extra == "microsoft.azure"
 Requires-Dist: apache-airflow-providers-mongo ; extra == "mongo"
 Requires-Dist: apache-airflow-providers-openlineage ; extra == "openlineage"
-Requires-Dist: pandas>=1.
-Requires-Dist: pandas>=
+Requires-Dist: pandas>=2.1.2,<2.2 ; extra == "pandas" and (python_version>="3.9")
+Requires-Dist: pandas>=1.5.3,<2.2 ; extra == "pandas" and (python_version<"3.9")
 Requires-Dist: python3-saml>=1.16.0 ; extra == "python3-saml"
 Requires-Dist: s3fs>=2023.10.0 ; extra == "s3fs"
 Requires-Dist: apache-airflow-providers-salesforce ; extra == "salesforce"
 Requires-Dist: apache-airflow-providers-ssh ; extra == "ssh"
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-amazon/8.
-Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-amazon/8.
+Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-amazon/8.25.0/changelog.html
+Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-amazon/8.25.0
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
 Project-URL: Twitter, https://twitter.com/ApacheAirflow
@@ -121,7 +121,7 @@ Provides-Extra: ssh
 
 Package ``apache-airflow-providers-amazon``
 
-Release: ``8.24.0rc2``
+Release: ``8.25.0``
 
 
 Amazon integration (including `Amazon Web Services (AWS) <https://aws.amazon.com/>`__).
@@ -134,7 +134,7 @@ This is a provider package for ``amazon`` provider. All classes for this provide
 are in ``airflow.providers.amazon`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-amazon/8.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-amazon/8.25.0/>`_.
 
 Installation
 ------------
@@ -198,4 +198,4 @@ Dependent package
 ====================================================================================================================== ===================
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-amazon/8.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-amazon/8.25.0/changelog.html>`_.
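The Requires-Dist lines above are queryable at runtime once the wheel is installed; for example, the split pandas pins surface with their extra marker:

from importlib import metadata

for req in metadata.requires("apache-airflow-providers-amazon") or []:
    if 'extra == "pandas"' in req:
        print(req)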
{apache_airflow_providers_amazon-8.24.0rc2.dist-info → apache_airflow_providers_amazon-8.25.0.dist-info}/RECORD (+14 -14)

@@ -1,6 +1,6 @@
 airflow/providers/amazon/LICENSE,sha256=FFb4jd2AXnOOf7XLP04pQW6jbdhG49TxlGY6fFpCV1Y,13609
-airflow/providers/amazon/__init__.py,sha256=
-airflow/providers/amazon/get_provider_info.py,sha256=
+airflow/providers/amazon/__init__.py,sha256=U0WbQRG2O1-e7Mlg9B36FeT87led9l4ZhTg4JiibftA,1494
+airflow/providers/amazon/get_provider_info.py,sha256=5bSM3uuzz5JMGVQnrXbkjby6UF7V819gOxvc6lvf-2g,66835
 airflow/providers/amazon/aws/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/amazon/aws/exceptions.py,sha256=uRGNMgXvgdzfphpOTiyj74lQhjzb70J-X8n6fsx5Jog,1864
 airflow/providers/amazon/aws/auth_manager/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
@@ -47,7 +47,7 @@ airflow/providers/amazon/aws/hooks/batch_waiters.py,sha256=VlAf3NYyGsfmOw9S4Ods8
 airflow/providers/amazon/aws/hooks/bedrock.py,sha256=TZHEBOcDyeGSx-QLL8LydiytHEMdUETji_4emJfv4Ng,3343
 airflow/providers/amazon/aws/hooks/chime.py,sha256=h5ogRdWiZrxTm-dwbFJOw36KGD7fHmcQalFLdLOGOpE,4401
 airflow/providers/amazon/aws/hooks/cloud_formation.py,sha256=7UVGmlmrT8q8rPcEfgPURZ_n-Sd1qNVHGERAi39iqIs,3436
-airflow/providers/amazon/aws/hooks/comprehend.py,sha256=
+airflow/providers/amazon/aws/hooks/comprehend.py,sha256=Xggr7GCReowgTAVWNXboFSGmT4r5YpMBauZVZfUWIzg,2734
 airflow/providers/amazon/aws/hooks/datasync.py,sha256=h0P7C6geYY8T1Uek0ZNOCdi_86HvSsOwkP_5SKoRi5s,13593
 airflow/providers/amazon/aws/hooks/dms.py,sha256=vIrALXycH2TTfG83ssefG2yefYXrgtgZVFSNKI8Ez5M,7903
 airflow/providers/amazon/aws/hooks/dynamodb.py,sha256=14HDVrIeeK6TlLI767qsgGw1mu4OfQ7yCnVKkLUgBd0,3992
@@ -72,7 +72,7 @@ airflow/providers/amazon/aws/hooks/quicksight.py,sha256=MFTlrWV88wLky2swo-b5fFQD
 airflow/providers/amazon/aws/hooks/rds.py,sha256=zbJNUyejxu_UHCYn5v_2-lxsFDYKiOgwA1k-rpK1oS8,15148
 airflow/providers/amazon/aws/hooks/redshift_cluster.py,sha256=wpgoCZ5bENxK-gvcejA1EbcGz20EEXbl7QDUMJ5buWc,13015
 airflow/providers/amazon/aws/hooks/redshift_data.py,sha256=DHsp-_I1wv_P_vlOoYyckekeuj74_BRowQfeWBtENGM,10235
-airflow/providers/amazon/aws/hooks/redshift_sql.py,sha256=
+airflow/providers/amazon/aws/hooks/redshift_sql.py,sha256=ue6Ho9bJtv_0OsLmzdxosKNqWLCobOEolecmvv5adJM,11295
 airflow/providers/amazon/aws/hooks/s3.py,sha256=O4j3LLx3eiN8Cyw7FSTpvIjahQUs3uNVg3gdhjb5cxA,59318
 airflow/providers/amazon/aws/hooks/sagemaker.py,sha256=vmiBSYEQOfaQLii_LGuJcTLKF7kP4YLsNW2eADq-70U,61805
 airflow/providers/amazon/aws/hooks/secrets_manager.py,sha256=BTsEHm3GTcoNnE8jBJO403q6wuGkXZFwglQ7l-P9b5M,2667
@@ -105,7 +105,7 @@ airflow/providers/amazon/aws/operators/base_aws.py,sha256=cdc5GZkl_YGDDtlV9CVsdb
 airflow/providers/amazon/aws/operators/batch.py,sha256=3HezoNJDWxZudSqbuimO7GvSJirDPQs0n5bPiEyH-kc,22721
 airflow/providers/amazon/aws/operators/bedrock.py,sha256=PJcsRoTht4w23XG8W1B4Rl6BEJiomQLnwTcuPfMM3pI,40140
 airflow/providers/amazon/aws/operators/cloud_formation.py,sha256=-WMYq-oA8WpPN2i5aTgBenFj9-CjbeEcy9NuRCnSwpM,5066
-airflow/providers/amazon/aws/operators/comprehend.py,sha256=
+airflow/providers/amazon/aws/operators/comprehend.py,sha256=JL0UfGpAekOeRFx3IT32u3fWhMhCwTyziA_OWB6xgjk,15954
 airflow/providers/amazon/aws/operators/datasync.py,sha256=Pxxt1G1FbmwSqXChZjeQKuS0uUS4QS_qbK69F8Y1SqE,18920
 airflow/providers/amazon/aws/operators/dms.py,sha256=6RhUtbELAjp0LLkUWl73kdcH4MRmyTzwHi1NxOlkE0Q,12313
 airflow/providers/amazon/aws/operators/ec2.py,sha256=aQj6cL3nZzu0tcn3dq6RBSPsByZe8fNtn6qcpQYtlNI,17051
@@ -137,7 +137,7 @@ airflow/providers/amazon/aws/sensors/base_aws.py,sha256=vkSs3jwtvrdAs4z2flrOLW8z
 airflow/providers/amazon/aws/sensors/batch.py,sha256=_--bT2Wh59x4C3W9Xu4h7xrC--bCOPn29YzbsB6TRzw,11487
 airflow/providers/amazon/aws/sensors/bedrock.py,sha256=fq8xeYWUapJZmAM0OWYeD9nMNJitvZNY8D9vfu2gcSQ,15907
 airflow/providers/amazon/aws/sensors/cloud_formation.py,sha256=kLINQol-ZFpjOpSBFQ7I4JXZkBjEICT-g8MT-gXscLw,5426
-airflow/providers/amazon/aws/sensors/comprehend.py,sha256=
+airflow/providers/amazon/aws/sensors/comprehend.py,sha256=pbxEo0MYgvXdmtgWMAfHU0HOKD4_3ixPGcZErrXJi4I,10721
 airflow/providers/amazon/aws/sensors/dms.py,sha256=SzKUgEtL0agejWKWJvjqmrJdtwbd2vJriHcCsYV0j8s,5744
 airflow/providers/amazon/aws/sensors/dynamodb.py,sha256=P43g73ACBoDo0Lrxzm9wOoEepbRVAOjqtwWYvIq1Tls,5008
 airflow/providers/amazon/aws/sensors/ec2.py,sha256=GpEVRZEl4oW4eB4ION8H2nfAHp-MGptwav7DpqL3Uo0,4119
@@ -171,7 +171,7 @@ airflow/providers/amazon/aws/transfers/http_to_s3.py,sha256=J1HjIdGsd4Zl8kk-RJAX
 airflow/providers/amazon/aws/transfers/imap_attachment_to_s3.py,sha256=xYJ94xNDsadluJpUUv1fURLW7YhSgL-9GaebZ6l4RLU,4536
 airflow/providers/amazon/aws/transfers/local_to_s3.py,sha256=yp9m7aZuL6YgzYRsFcyZ1wcGTXZTMO0F0CuBfkH1eGo,4165
 airflow/providers/amazon/aws/transfers/mongo_to_s3.py,sha256=QGlzhywUdRjNwxaS8lESrmmCjLaW_0dpNNggzgmf25M,5997
-airflow/providers/amazon/aws/transfers/redshift_to_s3.py,sha256=
+airflow/providers/amazon/aws/transfers/redshift_to_s3.py,sha256=9gNDuNDG8rjOFUB2NAWkrJoRDROTaUlhYLmM9r5ZS3o,8243
 airflow/providers/amazon/aws/transfers/s3_to_dynamodb.py,sha256=tOPcY_XcUE2jbMzehAVUz5JmH1Hu9ltwAQGH1GgRKbY,11653
 airflow/providers/amazon/aws/transfers/s3_to_ftp.py,sha256=cxyNRW_FJQNNluuYr5fVluGLYnNRUvN75iHSSEHrVnY,2966
 airflow/providers/amazon/aws/transfers/s3_to_redshift.py,sha256=xjBe8iy_bX4mqFhwasLdieaGtuJBI6ePwIyVlWN4cPk,8636
@@ -186,7 +186,7 @@ airflow/providers/amazon/aws/triggers/athena.py,sha256=TAn2snAGHPfRT-pG8r0EMKlTS
 airflow/providers/amazon/aws/triggers/base.py,sha256=QT-omauJw_ksUb6gS2erm_FVXSZdwHIpyznFXTkoMXo,6384
 airflow/providers/amazon/aws/triggers/batch.py,sha256=rQYVBgcChS1PuOTFiyEKMdC2nbYLPH_Gkm1AAr1Tuas,10849
 airflow/providers/amazon/aws/triggers/bedrock.py,sha256=IiKyl0UUax-ex4siLjZpQGDZWyAOOhvJ-9USyRi_r3c,7260
-airflow/providers/amazon/aws/triggers/comprehend.py,sha256=
+airflow/providers/amazon/aws/triggers/comprehend.py,sha256=atK02t-G6e-Rgd-a-IHc4n-wGZ3oC4pKueOwNeaLCrI,4063
 airflow/providers/amazon/aws/triggers/ec2.py,sha256=gMY3EP4TmL6SodLw12FNSLttlHd7hRhOu-q3CiG7y2w,3245
 airflow/providers/amazon/aws/triggers/ecs.py,sha256=wdHCqhTfOUDsf3MRWlwQmxZ-jxgGIN2ug9JA1bZiG9k,9131
 airflow/providers/amazon/aws/triggers/eks.py,sha256=t69h3uIgeQTG3d7n7faOJyMPfsbL6OtkmCTUrpq_jqI,16980
@@ -204,7 +204,7 @@ airflow/providers/amazon/aws/triggers/s3.py,sha256=J2MvoiQoQ3GKf8OtFF9awvY6SwVXn
 airflow/providers/amazon/aws/triggers/sagemaker.py,sha256=C52lhVsiDEiw-1Kswc0kvNDOyPdDvV5bF7vjbuBN45o,11542
 airflow/providers/amazon/aws/triggers/sqs.py,sha256=tVA1i8XzV5AqbVQAdWrgrLKoZF8ewqgEwV7ggk1hrQM,8257
 airflow/providers/amazon/aws/triggers/step_function.py,sha256=M1HGdrnxL_T9KSCBNy2t531xMNJaFc-Y792T9cSmLGM,2685
-airflow/providers/amazon/aws/utils/__init__.py,sha256=
+airflow/providers/amazon/aws/utils/__init__.py,sha256=yUkoHb2LuqSyHvj-HAhc2r2s04Kv_PhdyLMq52KarO8,3878
 airflow/providers/amazon/aws/utils/connection_wrapper.py,sha256=sgXJixQkwPKh5Di3kpAbJdjbWDVHj-AQol5RmWKKKnU,22952
 airflow/providers/amazon/aws/utils/eks_get_token.py,sha256=q4utFF2c02T2Lm6KIZLABOiXJeglVZKCOxq6gn14dsk,2342
 airflow/providers/amazon/aws/utils/emailer.py,sha256=y-bzg1BZzOQ8J9-ed-74LY3VMv6LrLfBDtw5S4t3Tv4,1855
@@ -227,7 +227,7 @@ airflow/providers/amazon/aws/waiters/base_waiter.py,sha256=CTzi5Q1mgboCeOxUeRL5z
 airflow/providers/amazon/aws/waiters/batch.json,sha256=Viw4UP1nm2D80sG-4ezMAed9FsdBWd1ID1SuM9uaKpA,1206
 airflow/providers/amazon/aws/waiters/bedrock-agent.json,sha256=bYIMkOCB317fYccd-DjjN4TyGA5SasGlUSdRa5CNqJE,1751
 airflow/providers/amazon/aws/waiters/bedrock.json,sha256=HtnOezFlA28a39LZc0Q1L7vnepCn-UXrlbOyuG_Ga9I,2269
-airflow/providers/amazon/aws/waiters/comprehend.json,sha256=
+airflow/providers/amazon/aws/waiters/comprehend.json,sha256=TcEKnLrmq0ByKH5x8oNC1drTKCYCJ_hsNb5DNihHGyk,3760
 airflow/providers/amazon/aws/waiters/databrew.json,sha256=2phkNTbNjBWmnqggtXeT5YtoWOPYRtQ-J18lGUm2zTI,1017
 airflow/providers/amazon/aws/waiters/dynamodb.json,sha256=8Y3FqDtNKGw100NNfQ9fqXcsk6paz7bed6vQ8kk6pss,2225
 airflow/providers/amazon/aws/waiters/ecs.json,sha256=msjoYqzJMf8SCldtlzG4sGBcyzy2sN3W91JS3akXKy4,1674
@@ -241,7 +241,7 @@ airflow/providers/amazon/aws/waiters/opensearchserverless.json,sha256=7UkPgv_tBm
 airflow/providers/amazon/aws/waiters/redshift.json,sha256=jOBotCgbkko1b_CHcGEbhhRvusgt0YSzVuFiZrqVP30,1742
 airflow/providers/amazon/aws/waiters/sagemaker.json,sha256=JPHuQtUFZ1B7EMLfVmCRevNZ9jgpB71LM0dva8ZEO9A,5254
 airflow/providers/amazon/aws/waiters/stepfunctions.json,sha256=aBaAZaGv8ZZGdN-2gvYEbq3fL_WHI_7s6SSDL-nWS1A,1034
-apache_airflow_providers_amazon-8.24.0rc2.dist-info/entry_points.txt,sha256=
-apache_airflow_providers_amazon-8.24.0rc2.dist-info/WHEEL,sha256=
-apache_airflow_providers_amazon-8.24.0rc2.dist-info/METADATA,sha256=
-apache_airflow_providers_amazon-8.24.0rc2.dist-info/RECORD,,
+apache_airflow_providers_amazon-8.25.0.dist-info/entry_points.txt,sha256=vlc0ZzhBkMrav1maTRofgksnAw4SwoQLFX9cmnTgktk,102
+apache_airflow_providers_amazon-8.25.0.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81
+apache_airflow_providers_amazon-8.25.0.dist-info/METADATA,sha256=tVBqQWgIVOZlHU27BJKtSaGr_51aJpGr3Yi2BTnITEc,10291
+apache_airflow_providers_amazon-8.25.0.dist-info/RECORD,,
{apache_airflow_providers_amazon-8.24.0rc2.dist-info → apache_airflow_providers_amazon-8.25.0.dist-info}/WHEEL and entry_points.txt: renamed only; file contents unchanged.