apache-airflow-providers-amazon 9.5.0rc2__py3-none-any.whl → 9.6.0rc1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- airflow/providers/amazon/__init__.py +1 -1
- airflow/providers/amazon/aws/auth_manager/aws_auth_manager.py +13 -15
- airflow/providers/amazon/aws/auth_manager/router/login.py +4 -2
- airflow/providers/amazon/aws/executors/ecs/ecs_executor.py +53 -1
- airflow/providers/amazon/aws/hooks/athena_sql.py +2 -2
- airflow/providers/amazon/aws/hooks/batch_client.py +1 -2
- airflow/providers/amazon/aws/hooks/batch_waiters.py +11 -3
- airflow/providers/amazon/aws/hooks/dms.py +3 -1
- airflow/providers/amazon/aws/hooks/glue.py +17 -2
- airflow/providers/amazon/aws/hooks/mwaa.py +1 -1
- airflow/providers/amazon/aws/hooks/redshift_cluster.py +9 -9
- airflow/providers/amazon/aws/hooks/redshift_data.py +1 -2
- airflow/providers/amazon/aws/hooks/s3.py +0 -4
- airflow/providers/amazon/aws/hooks/sagemaker.py +1 -1
- airflow/providers/amazon/aws/links/athena.py +1 -2
- airflow/providers/amazon/aws/log/cloudwatch_task_handler.py +174 -54
- airflow/providers/amazon/aws/log/s3_task_handler.py +123 -86
- airflow/providers/amazon/aws/operators/bedrock.py +119 -0
- airflow/providers/amazon/aws/operators/ec2.py +1 -1
- airflow/providers/amazon/aws/operators/eks.py +3 -3
- airflow/providers/amazon/aws/operators/rds.py +83 -18
- airflow/providers/amazon/aws/operators/redshift_cluster.py +10 -3
- airflow/providers/amazon/aws/operators/sagemaker.py +3 -5
- airflow/providers/amazon/aws/sensors/bedrock.py +110 -0
- airflow/providers/amazon/aws/sensors/glacier.py +1 -1
- airflow/providers/amazon/aws/sensors/mwaa.py +2 -1
- airflow/providers/amazon/aws/sensors/rds.py +23 -20
- airflow/providers/amazon/aws/sensors/s3.py +1 -1
- airflow/providers/amazon/aws/sensors/step_function.py +2 -1
- airflow/providers/amazon/aws/transfers/mongo_to_s3.py +2 -2
- airflow/providers/amazon/aws/transfers/sql_to_s3.py +1 -1
- airflow/providers/amazon/aws/triggers/bedrock.py +98 -0
- airflow/providers/amazon/aws/utils/waiter_with_logging.py +9 -1
- airflow/providers/amazon/aws/waiters/bedrock.json +134 -0
- airflow/providers/amazon/get_provider_info.py +0 -124
- {apache_airflow_providers_amazon-9.5.0rc2.dist-info → apache_airflow_providers_amazon-9.6.0rc1.dist-info}/METADATA +18 -18
- {apache_airflow_providers_amazon-9.5.0rc2.dist-info → apache_airflow_providers_amazon-9.6.0rc1.dist-info}/RECORD +39 -39
- {apache_airflow_providers_amazon-9.5.0rc2.dist-info → apache_airflow_providers_amazon-9.6.0rc1.dist-info}/WHEEL +1 -1
- {apache_airflow_providers_amazon-9.5.0rc2.dist-info → apache_airflow_providers_amazon-9.6.0rc1.dist-info}/entry_points.txt +0 -0
@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version
 
 __all__ = ["__version__"]
 
-__version__ = "9.5.0"
+__version__ = "9.6.0"
 
 if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
     "2.9.0"
@@ -27,15 +27,6 @@ from fastapi import FastAPI
 
 from airflow.api_fastapi.app import AUTH_MANAGER_FASTAPI_APP_PREFIX
 from airflow.api_fastapi.auth.managers.base_auth_manager import BaseAuthManager
-from airflow.api_fastapi.auth.managers.models.resource_details import (
-    AccessView,
-    BackfillDetails,
-    ConnectionDetails,
-    DagAccessEntity,
-    DagDetails,
-    PoolDetails,
-    VariableDetails,
-)
 from airflow.cli.cli_config import CLICommand, DefaultHelpParser, GroupCommand
 from airflow.configuration import conf
 from airflow.exceptions import AirflowOptionalProviderFeatureException
@@ -59,9 +50,16 @@ if TYPE_CHECKING:
         IsAuthorizedVariableRequest,
     )
     from airflow.api_fastapi.auth.managers.models.resource_details import (
+        AccessView,
         AssetAliasDetails,
         AssetDetails,
+        BackfillDetails,
         ConfigurationDetails,
+        ConnectionDetails,
+        DagAccessEntity,
+        DagDetails,
+        PoolDetails,
+        VariableDetails,
     )
     from airflow.api_fastapi.common.types import MenuItem
 
@@ -89,7 +87,7 @@ class AwsAuthManager(BaseAuthManager[AwsAuthManagerUser]):
 
     @cached_property
     def apiserver_endpoint(self) -> str:
-        return conf.get("api", "base_url")
+        return conf.get("api", "base_url", fallback="/")
 
     def deserialize_user(self, token: dict[str, Any]) -> AwsAuthManagerUser:
         return AwsAuthManagerUser(user_id=token.pop("sub"), **token)
@@ -256,7 +254,7 @@ class AwsAuthManager(BaseAuthManager[AwsAuthManagerUser]):
             {
                 "method": request["method"],
                 "entity_type": AvpEntities.CONNECTION,
-                "entity_id": cast(ConnectionDetails, request["details"]).conn_id
+                "entity_id": cast("ConnectionDetails", request["details"]).conn_id
                 if request.get("details")
                 else None,
             }
@@ -274,10 +272,10 @@ class AwsAuthManager(BaseAuthManager[AwsAuthManagerUser]):
             {
                 "method": request["method"],
                 "entity_type": AvpEntities.DAG,
-                "entity_id": cast(DagDetails, request["details"]).id if request.get("details") else None,
+                "entity_id": cast("DagDetails", request["details"]).id if request.get("details") else None,
                 "context": {
                     "dag_entity": {
-                        "string": cast(DagAccessEntity, request["access_entity"]).value,
+                        "string": cast("DagAccessEntity", request["access_entity"]).value,
                     },
                 }
                 if request.get("access_entity")
@@ -297,7 +295,7 @@ class AwsAuthManager(BaseAuthManager[AwsAuthManagerUser]):
             {
                 "method": request["method"],
                 "entity_type": AvpEntities.POOL,
-                "entity_id": cast(PoolDetails, request["details"]).name if request.get("details") else None,
+                "entity_id": cast("PoolDetails", request["details"]).name if request.get("details") else None,
             }
             for request in requests
         ]
@@ -313,7 +311,7 @@ class AwsAuthManager(BaseAuthManager[AwsAuthManagerUser]):
             {
                 "method": request["method"],
                 "entity_type": AvpEntities.VARIABLE,
-                "entity_id": cast(VariableDetails, request["details"]).key
+                "entity_id": cast("VariableDetails", request["details"]).key
                 if request.get("details")
                 else None,
             }
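
Note: the four hunks above only switch `cast(SomeType, ...)` to `cast("SomeType", ...)`. A minimal sketch of why the string form works, using stdlib names rather than the provider's classes: the target types are imported only under `TYPE_CHECKING`, and `typing.cast` accepts the type as a string, so no runtime import is needed.

from __future__ import annotations

from typing import TYPE_CHECKING, cast

if TYPE_CHECKING:
    # Only the type checker sees this import; nothing is imported at runtime.
    from datetime import datetime


def newest(values: list) -> datetime:
    # cast("datetime", ...) takes the type name as a string, so the call works
    # even though the class itself is never imported at runtime.
    return cast("datetime", max(values))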
@@ -80,10 +80,12 @@ def login_callback(request: Request):
         username=saml_auth.get_nameid(),
         email=attributes["email"][0] if "email" in attributes else None,
     )
-    url = conf.get("api", "base_url")
+    url = conf.get("api", "base_url", fallback="/")
     token = get_auth_manager().generate_jwt(user)
     response = RedirectResponse(url=url, status_code=303)
-    response.set_cookie(COOKIE_NAME_JWT_TOKEN, token, secure=True)
+
+    secure = conf.has_option("api", "ssl_cert")
+    response.set_cookie(COOKIE_NAME_JWT_TOKEN, token, secure=secure)
     return response
 
 
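
Note: a hedged sketch of the cookie pattern introduced above, with stand-in names (`jwt_token`, an inline config) instead of the provider's constants: the JWT cookie is flagged `secure` only when TLS appears to be configured, rather than unconditionally.

from configparser import ConfigParser

from fastapi.responses import RedirectResponse

conf = ConfigParser()
conf.read_string("[api]\nbase_url = /\n")  # no ssl_cert option set in this example


def build_login_redirect(token: str) -> RedirectResponse:
    url = conf.get("api", "base_url", fallback="/")
    response = RedirectResponse(url=url, status_code=303)
    # Only mark the cookie as secure when the API is actually served over TLS.
    secure = conf.has_option("api", "ssl_cert")
    response.set_cookie("jwt_token", token, secure=secure)
    return response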
@@ -49,12 +49,16 @@ from airflow.providers.amazon.aws.executors.utils.exponential_backoff_retry impo
     exponential_backoff_retry,
 )
 from airflow.providers.amazon.aws.hooks.ecs import EcsHook
+from airflow.providers.amazon.version_compat import AIRFLOW_V_3_0_PLUS
 from airflow.stats import Stats
 from airflow.utils import timezone
 from airflow.utils.helpers import merge_dicts
 from airflow.utils.state import State
 
 if TYPE_CHECKING:
+    from sqlalchemy.orm import Session
+
+    from airflow.executors import workloads
     from airflow.models.taskinstance import TaskInstance, TaskInstanceKey
     from airflow.providers.amazon.aws.executors.ecs.utils import (
         CommandType,
@@ -100,6 +104,11 @@ class AwsEcsExecutor(BaseExecutor):
     # AWS limits the maximum number of ARNs in the describe_tasks function.
     DESCRIBE_TASKS_BATCH_SIZE = 99
 
+    if TYPE_CHECKING and AIRFLOW_V_3_0_PLUS:
+        # In the v3 path, we store workloads, not commands as strings.
+        # TODO: TaskSDK: move this type change into BaseExecutor
+        queued_tasks: dict[TaskInstanceKey, workloads.All]  # type: ignore[assignment]
+
     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
         self.active_workers: EcsTaskCollection = EcsTaskCollection()
@@ -114,6 +123,31 @@ class AwsEcsExecutor(BaseExecutor):
 
         self.run_task_kwargs = self._load_run_kwargs()
 
+    def queue_workload(self, workload: workloads.All, session: Session | None) -> None:
+        from airflow.executors import workloads
+
+        if not isinstance(workload, workloads.ExecuteTask):
+            raise RuntimeError(f"{type(self)} cannot handle workloads of type {type(workload)}")
+        ti = workload.ti
+        self.queued_tasks[ti.key] = workload
+
+    def _process_workloads(self, workloads: list[workloads.All]) -> None:
+        from airflow.executors.workloads import ExecuteTask
+
+        # Airflow V3 version
+        for w in workloads:
+            if not isinstance(w, ExecuteTask):
+                raise RuntimeError(f"{type(self)} cannot handle workloads of type {type(w)}")
+
+            command = [w]
+            key = w.ti.key
+            queue = w.ti.queue
+            executor_config = w.ti.executor_config or {}
+
+            del self.queued_tasks[key]
+            self.execute_async(key=key, command=command, queue=queue, executor_config=executor_config)  # type: ignore[arg-type]
+            self.running.add(key)
+
     def start(self):
         """Call this when the Executor is run for the first time by the scheduler."""
         check_health = conf.getboolean(
@@ -278,7 +312,7 @@ class AwsEcsExecutor(BaseExecutor):
         if not has_exit_codes:
             return ""
         reasons = [
-            f'{container["container_arn"]} - {container["reason"]}'
+            f"{container['container_arn']} - {container['reason']}"
             for container in containers
             if "reason" in container
         ]
@@ -462,6 +496,24 @@ class AwsEcsExecutor(BaseExecutor):
         """Save the task to be executed in the next sync by inserting the commands into a queue."""
         if executor_config and ("name" in executor_config or "command" in executor_config):
             raise ValueError('Executor Config should never override "name" or "command"')
+        if len(command) == 1:
+            from airflow.executors.workloads import ExecuteTask
+
+            if isinstance(command[0], ExecuteTask):
+                workload = command[0]
+                ser_input = workload.model_dump_json()
+                command = [
+                    "python",
+                    "-m",
+                    "airflow.sdk.execution_time.execute_workload",
+                    "--json-string",
+                    ser_input,
+                ]
+            else:
+                raise ValueError(
+                    f"EcsExecutor doesn't know how to handle workload of type: {type(command[0])}"
+                )
+
         self.pending_tasks.append(
             EcsQueuedTask(key, command, queue, executor_config or {}, 1, timezone.utcnow())
         )
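
Note: a minimal sketch of the serialization step above, using a hypothetical pydantic model in place of `workloads.ExecuteTask` and a placeholder runner module name: the workload is dumped to JSON, shipped as a CLI argument in the container command, and rebuilt on the worker side.

from pydantic import BaseModel


class ExampleWorkload(BaseModel):  # hypothetical stand-in for workloads.ExecuteTask
    dag_id: str
    task_id: str


workload = ExampleWorkload(dag_id="example_dag", task_id="example_task")
ser_input = workload.model_dump_json()

# The executor hands the serialized workload to the container as plain CLI arguments.
command = ["python", "-m", "example_runner_module", "--json-string", ser_input]

# Inside the container, the runner parses the JSON string back into the model.
rebuilt = ExampleWorkload.model_validate_json(ser_input)
assert rebuilt == workload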
@@ -146,10 +146,10 @@ class AthenaSQLHook(AwsBaseHook, DbApiHook):
         creds = self.get_credentials(region_name=conn_params["region_name"])
 
         return URL.create(
-            f'awsathena+{conn_params["driver"]}',
+            f"awsathena+{conn_params['driver']}",
             username=creds.access_key,
             password=creds.secret_key,
-            host=f'athena.{conn_params["region_name"]}.{conn_params["aws_domain"]}',
+            host=f"athena.{conn_params['region_name']}.{conn_params['aws_domain']}",
             port=443,
             database=conn_params["schema_name"],
             query={"aws_session_token": creds.token, **self.conn.extra_dejson},
@@ -416,8 +416,7 @@ class BatchClientHook(AwsBaseHook):
             )
         else:
             raise AirflowException(
-                f"AWS Batch job ({job_id}) description error: exceeded status_retries "
-                f"({self.status_retries})"
+                f"AWS Batch job ({job_id}) description error: exceeded status_retries ({self.status_retries})"
             )
 
     @staticmethod
@@ -30,7 +30,7 @@ import json
 import sys
 from copy import deepcopy
 from pathlib import Path
-from typing import TYPE_CHECKING, Callable
+from typing import TYPE_CHECKING, Any, Callable
 
 import botocore.client
 import botocore.exceptions
@@ -144,7 +144,12 @@ class BatchWaitersHook(BatchClientHook):
         return self._waiter_model
 
     def get_waiter(
-        self, waiter_name: str, _: dict[str, str] | None = None, deferrable: bool = False, client=None
+        self,
+        waiter_name: str,
+        parameters: dict[str, str] | None = None,
+        config_overrides: dict[str, Any] | None = None,
+        deferrable: bool = False,
+        client=None,
     ) -> botocore.waiter.Waiter:
         """
         Get an AWS Batch service waiter, using the configured ``.waiter_model``.
@@ -175,7 +180,10 @@ class BatchWaitersHook(BatchClientHook):
         the name (including the casing) of the key name in the waiter
         model file (typically this is CamelCasing); see ``.list_waiters``.
 
-        :param _: unused, just here to match the method signature in base_aws
+        :param parameters: unused, just here to match the method signature in base_aws
+        :param config_overrides: unused, just here to match the method signature in base_aws
+        :param deferrable: unused, just here to match the method signature in base_aws
+        :param client: unused, just here to match the method signature in base_aws
 
         :return: a waiter object for the named AWS Batch service
         """
@@ -292,7 +292,9 @@ class DmsHook(AwsBaseHook):
             return arn
 
         except ClientError as err:
-            err_str = f"Error: {err.get('Error', '').get('Code', '')}: {err.get('Error', '').get('Message', '')}"
+            err_str = (
+                f"Error: {err.get('Error', '').get('Code', '')}: {err.get('Error', '').get('Message', '')}"
+            )
             self.log.error("Error while creating replication config: %s", err_str)
             raise err
 
@@ -19,12 +19,13 @@ from __future__ import annotations
 
 import asyncio
 import time
+import warnings
 from functools import cached_property
 from typing import Any
 
 from botocore.exceptions import ClientError
 
-from airflow.exceptions import AirflowException
+from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
 from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
 from airflow.providers.amazon.aws.hooks.logs import AwsLogsHook
 
@@ -145,7 +146,7 @@ class GlueJobHook(AwsBaseHook):
 
         return config
 
-    def list_jobs(self) -> list:
+    def describe_jobs(self) -> list:
         """
         Get list of Jobs.
 
@@ -154,6 +155,20 @@ class GlueJobHook(AwsBaseHook):
         """
         return self.conn.get_jobs()
 
+    def list_jobs(self) -> list:
+        """
+        Get list of Jobs.
+
+        .. deprecated::
+            - Use :meth:`describe_jobs` instead.
+        """
+        warnings.warn(
+            "The method `list_jobs` is deprecated. Use the method `describe_jobs` instead.",
+            AirflowProviderDeprecationWarning,
+            stacklevel=2,
+        )
+        return self.describe_jobs()
+
     def get_iam_execution_role(self) -> dict:
         try:
             iam_client = self.get_session(region_name=self.region_name).client(
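
Note: a small, generic sketch of the deprecation shim added above, using only the standard library; `LegacyWarning` stands in for `AirflowProviderDeprecationWarning`, and the functions are module-level rather than hook methods.

import warnings


class LegacyWarning(DeprecationWarning):
    """Stand-in for a project-specific deprecation warning class."""


def describe_jobs() -> list:
    return ["job-a", "job-b"]


def list_jobs() -> list:
    """Deprecated alias kept for backwards compatibility; use describe_jobs() instead."""
    warnings.warn(
        "list_jobs is deprecated; use describe_jobs instead.",
        LegacyWarning,
        stacklevel=2,  # attribute the warning to the caller, not to this shim
    )
    return describe_jobs()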
@@ -26,7 +26,7 @@ from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
 
 
 class MwaaHook(AwsBaseHook):
     """
-    Interact with AWS Manager Workflows for Apache Airflow.
+    Interact with AWS Managed Workflows for Apache Airflow.
 
     Provide thin wrapper around :external+boto3:py:class:`boto3.client("mwaa") <MWAA.Client>`
 
@@ -67,7 +67,7 @@ class RedshiftHook(AwsBaseHook):
             for the cluster that is being created.
         :param params: Remaining AWS Create cluster API params.
         """
-        response = self.get_conn().create_cluster(
+        response = self.conn.create_cluster(
             ClusterIdentifier=cluster_identifier,
             NodeType=node_type,
             MasterUsername=master_username,
@@ -87,9 +87,9 @@ class RedshiftHook(AwsBaseHook):
         :param cluster_identifier: unique identifier of a cluster
         """
         try:
-            response = self.get_conn().describe_clusters(ClusterIdentifier=cluster_identifier)["Clusters"]
+            response = self.conn.describe_clusters(ClusterIdentifier=cluster_identifier)["Clusters"]
             return response[0]["ClusterStatus"] if response else None
-        except self.get_conn().exceptions.ClusterNotFoundFault:
+        except self.conn.exceptions.ClusterNotFoundFault:
             return "cluster_not_found"
 
     async def cluster_status_async(self, cluster_identifier: str) -> str:
|
@@ -115,7 +115,7 @@ class RedshiftHook(AwsBaseHook):
|
|
115
115
|
"""
|
116
116
|
final_cluster_snapshot_identifier = final_cluster_snapshot_identifier or ""
|
117
117
|
|
118
|
-
response = self.
|
118
|
+
response = self.conn.delete_cluster(
|
119
119
|
ClusterIdentifier=cluster_identifier,
|
120
120
|
SkipFinalClusterSnapshot=skip_final_cluster_snapshot,
|
121
121
|
FinalClusterSnapshotIdentifier=final_cluster_snapshot_identifier,
|
@@ -131,7 +131,7 @@ class RedshiftHook(AwsBaseHook):
 
         :param cluster_identifier: unique identifier of a cluster
         """
-        response = self.get_conn().describe_cluster_snapshots(ClusterIdentifier=cluster_identifier)
+        response = self.conn.describe_cluster_snapshots(ClusterIdentifier=cluster_identifier)
         if "Snapshots" not in response:
             return None
         snapshots = response["Snapshots"]
@@ -149,7 +149,7 @@ class RedshiftHook(AwsBaseHook):
         :param cluster_identifier: unique identifier of a cluster
         :param snapshot_identifier: unique identifier for a snapshot of a cluster
         """
-        response = self.get_conn().restore_from_cluster_snapshot(
+        response = self.conn.restore_from_cluster_snapshot(
             ClusterIdentifier=cluster_identifier, SnapshotIdentifier=snapshot_identifier
         )
         return response["Cluster"] if response["Cluster"] else None
@@ -175,7 +175,7 @@ class RedshiftHook(AwsBaseHook):
         """
         if tags is None:
             tags = []
-        response = self.get_conn().create_cluster_snapshot(
+        response = self.conn.create_cluster_snapshot(
             SnapshotIdentifier=snapshot_identifier,
             ClusterIdentifier=cluster_identifier,
             ManualSnapshotRetentionPeriod=retention_period,
@@ -192,11 +192,11 @@ class RedshiftHook(AwsBaseHook):
         :param snapshot_identifier: A unique identifier for the snapshot that you are requesting
         """
         try:
-            response = self.get_conn().describe_cluster_snapshots(
+            response = self.conn.describe_cluster_snapshots(
                 SnapshotIdentifier=snapshot_identifier,
             )
             snapshot = response.get("Snapshots")[0]
             snapshot_status: str = snapshot.get("Status")
             return snapshot_status
-        except self.get_conn().exceptions.ClusterSnapshotNotFoundFault:
+        except self.conn.exceptions.ClusterSnapshotNotFoundFault:
             return None
@@ -186,8 +186,7 @@ class RedshiftDataHook(AwsGenericHook["RedshiftDataAPIServiceClient"]):
                 RedshiftDataQueryFailedError if status == FAILED_STATE else RedshiftDataQueryAbortedError
             )
             raise exception_cls(
-                f"Statement {resp['Id']} terminated with status {status}. "
-                f"Response details: {pformat(resp)}"
+                f"Statement {resp['Id']} terminated with status {status}. Response details: {pformat(resp)}"
             )
 
         self.log.info("Query status: %s", status)
@@ -790,10 +790,6 @@ class S3Hook(AwsBaseHook):
                 "FAILURE: Inactivity Period passed, not enough objects found in %s",
                 path,
             )
-            return {
-                "status": "error",
-                "message": f"FAILURE: Inactivity Period passed, not enough objects found in {path}",
-            }
         return {
             "status": "pending",
             "previous_objects": previous_objects,
@@ -131,7 +131,7 @@ def secondary_training_status_message(
     status_strs = []
     for transition in transitions_to_print:
         message = transition["StatusMessage"]
-        time_utc = timezone.convert_to_utc(cast(datetime, job_description["LastModifiedTime"]))
+        time_utc = timezone.convert_to_utc(cast("datetime", job_description["LastModifiedTime"]))
         status_strs.append(f"{time_utc:%Y-%m-%d %H:%M:%S} {transition['Status']} - {message}")
 
     return "\n".join(status_strs)
@@ -25,6 +25,5 @@ class AthenaQueryResultsLink(BaseAwsLink):
     name = "Query Results"
     key = "_athena_query_results"
     format_str = (
-        BASE_AWS_CONSOLE_LINK + "/athena/home?region={region_name}#"
-        "/query-editor/history/{query_execution_id}"
+        BASE_AWS_CONSOLE_LINK + "/athena/home?region={region_name}#/query-editor/history/{query_execution_id}"
     )
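
Note on the hunk just above, illustrated with standalone strings (the `BASE_AWS_CONSOLE_LINK` prefix is omitted): Python joins adjacent string literals at compile time, so the old two-line form and the merged single literal produce the same template, but the single literal is harder to misread.

split_form = (
    "/athena/home?region={region_name}#"
    "/query-editor/history/{query_execution_id}"
)
merged_form = "/athena/home?region={region_name}#/query-editor/history/{query_execution_id}"
assert split_form == merged_form  # adjacent literals are concatenated automatically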