apache-airflow-providers-standard 1.0.0.dev1-py3-none-any.whl → 1.0.0rc1-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- airflow/providers/standard/LICENSE +0 -52
- airflow/providers/standard/__init__.py +23 -1
- airflow/providers/standard/decorators/__init__.py +16 -0
- airflow/providers/standard/decorators/bash.py +121 -0
- airflow/providers/standard/decorators/branch_external_python.py +63 -0
- airflow/providers/standard/decorators/branch_python.py +62 -0
- airflow/providers/standard/decorators/branch_virtualenv.py +62 -0
- airflow/providers/standard/decorators/external_python.py +70 -0
- airflow/providers/standard/decorators/python.py +86 -0
- airflow/providers/standard/decorators/python_virtualenv.py +67 -0
- airflow/providers/standard/decorators/sensor.py +83 -0
- airflow/providers/standard/decorators/short_circuit.py +65 -0
- airflow/providers/standard/get_provider_info.py +78 -7
- airflow/providers/standard/hooks/__init__.py +16 -0
- airflow/providers/standard/hooks/filesystem.py +89 -0
- airflow/providers/standard/hooks/package_index.py +95 -0
- airflow/providers/standard/hooks/subprocess.py +119 -0
- airflow/providers/standard/operators/bash.py +73 -56
- airflow/providers/standard/operators/branch.py +105 -0
- airflow/providers/standard/operators/datetime.py +15 -5
- airflow/providers/standard/operators/empty.py +39 -0
- airflow/providers/standard/operators/latest_only.py +115 -0
- airflow/providers/standard/operators/python.py +1143 -0
- airflow/providers/standard/operators/smooth.py +38 -0
- airflow/providers/standard/operators/trigger_dagrun.py +370 -0
- airflow/providers/standard/operators/weekday.py +19 -9
- airflow/providers/standard/sensors/bash.py +15 -11
- airflow/providers/standard/sensors/date_time.py +32 -8
- airflow/providers/standard/sensors/external_task.py +593 -0
- airflow/providers/standard/sensors/filesystem.py +158 -0
- airflow/providers/standard/sensors/python.py +84 -0
- airflow/providers/standard/sensors/time.py +28 -5
- airflow/providers/standard/sensors/time_delta.py +68 -15
- airflow/providers/standard/sensors/weekday.py +25 -7
- airflow/providers/standard/triggers/__init__.py +16 -0
- airflow/providers/standard/triggers/external_task.py +288 -0
- airflow/providers/standard/triggers/file.py +131 -0
- airflow/providers/standard/triggers/temporal.py +113 -0
- airflow/providers/standard/utils/__init__.py +16 -0
- airflow/providers/standard/utils/python_virtualenv.py +209 -0
- airflow/providers/standard/utils/python_virtualenv_script.jinja2 +82 -0
- airflow/providers/standard/utils/sensor_helper.py +137 -0
- airflow/providers/standard/utils/skipmixin.py +192 -0
- airflow/providers/standard/utils/weekday.py +77 -0
- airflow/providers/standard/version_compat.py +36 -0
- {apache_airflow_providers_standard-1.0.0.dev1.dist-info → apache_airflow_providers_standard-1.0.0rc1.dist-info}/METADATA +12 -31
- apache_airflow_providers_standard-1.0.0rc1.dist-info/RECORD +51 -0
- {apache_airflow_providers_standard-1.0.0.dev1.dist-info → apache_airflow_providers_standard-1.0.0rc1.dist-info}/WHEEL +1 -1
- apache_airflow_providers_standard-1.0.0.dev1.dist-info/RECORD +0 -17
- {apache_airflow_providers_standard-1.0.0.dev1.dist-info → apache_airflow_providers_standard-1.0.0rc1.dist-info}/entry_points.txt +0 -0
--- /dev/null
+++ b/airflow/providers/standard/operators/smooth.py
@@ -0,0 +1,38 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from airflow.models.baseoperator import BaseOperator
+
+if TYPE_CHECKING:
+    from airflow.sdk.definitions.context import Context
+
+
+class SmoothOperator(BaseOperator):
+    """Operator that logs a YouTube link to Sade song "Smooth Operator"."""
+
+    ui_color = "#e8f7e4"
+    yt_link: str = "https://www.youtube.com/watch?v=4TYv2PhG89A"
+
+    def __init__(self, **kwargs) -> None:
+        super().__init__(**kwargs)
+
+    def execute(self, context: Context):
+        self.log.info("Enjoy Sade - Smooth Operator: %s", self.yt_link)
--- /dev/null
+++ b/airflow/providers/standard/operators/trigger_dagrun.py
@@ -0,0 +1,370 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+import datetime
+import json
+import time
+from collections.abc import Sequence
+from typing import TYPE_CHECKING, Any
+
+from sqlalchemy import select
+from sqlalchemy.orm.exc import NoResultFound
+
+from airflow.api.common.trigger_dag import trigger_dag
+from airflow.configuration import conf
+from airflow.exceptions import (
+    AirflowException,
+    AirflowSkipException,
+    DagNotFound,
+    DagRunAlreadyExists,
+)
+from airflow.models import BaseOperator
+from airflow.models.dag import DagModel
+from airflow.models.dagbag import DagBag
+from airflow.models.dagrun import DagRun
+from airflow.providers.standard.triggers.external_task import DagStateTrigger
+from airflow.providers.standard.version_compat import AIRFLOW_V_3_0_PLUS
+from airflow.utils import timezone
+from airflow.utils.state import DagRunState
+from airflow.utils.types import NOTSET, ArgNotSet, DagRunType
+
+XCOM_LOGICAL_DATE_ISO = "trigger_logical_date_iso"
+XCOM_RUN_ID = "trigger_run_id"
+
+
+if TYPE_CHECKING:
+    from sqlalchemy.orm.session import Session
+
+    from airflow.models.taskinstancekey import TaskInstanceKey
+
+    try:
+        from airflow.sdk.definitions.context import Context
+    except ImportError:
+        # TODO: Remove once provider drops support for Airflow 2
+        from airflow.utils.context import Context
+
+if AIRFLOW_V_3_0_PLUS:
+    from airflow.sdk import BaseOperatorLink
+    from airflow.sdk.execution_time.xcom import XCom
+else:
+    from airflow.models import XCom  # type: ignore[no-redef]
+    from airflow.models.baseoperatorlink import BaseOperatorLink  # type: ignore[no-redef]
+
+
+class TriggerDagRunLink(BaseOperatorLink):
+    """
+    Operator link for TriggerDagRunOperator.
+
+    It allows users to access the DAG triggered by the task that uses TriggerDagRunOperator.
+    """
+
+    name = "Triggered DAG"
+
+    def get_link(self, operator: BaseOperator, *, ti_key: TaskInstanceKey) -> str:
+        if TYPE_CHECKING:
+            assert isinstance(operator, TriggerDagRunOperator)
+
+        trigger_dag_id = operator.trigger_dag_id
+        if not AIRFLOW_V_3_0_PLUS:
+            from airflow.models.renderedtifields import RenderedTaskInstanceFields
+
+            if template_fields := RenderedTaskInstanceFields.get_templated_fields(ti_key):
+                trigger_dag_id: str = template_fields.get("trigger_dag_id", operator.trigger_dag_id)  # type: ignore[no-redef]
+
+        # Fetch the correct dag_run_id for the triggerED dag which is
+        # stored in xcom during execution of the triggerING task.
+        triggered_dag_run_id = XCom.get_value(ti_key=ti_key, key=XCOM_RUN_ID)
+
+        if AIRFLOW_V_3_0_PLUS:
+            from airflow.utils.helpers import build_airflow_dagrun_url
+
+            return build_airflow_dagrun_url(dag_id=trigger_dag_id, run_id=triggered_dag_run_id)
+        from airflow.utils.helpers import build_airflow_url_with_query  # type:ignore[attr-defined]
+
+        query = {"dag_id": trigger_dag_id, "dag_run_id": triggered_dag_run_id}
+        return build_airflow_url_with_query(query)
+
+
+class TriggerDagRunOperator(BaseOperator):
+    """
+    Triggers a DAG run for a specified DAG ID.
+
+    Note that if database isolation mode is enabled, not all features are supported.
+
+    :param trigger_dag_id: The ``dag_id`` of the DAG to trigger (templated).
+    :param trigger_run_id: The run ID to use for the triggered DAG run (templated).
+        If not provided, a run ID will be automatically generated.
+    :param conf: Configuration for the DAG run (templated).
+    :param logical_date: Logical date for the triggered DAG (templated).
+    :param reset_dag_run: Whether to clear an existing DAG run if it already exists.
+        This is useful when backfilling or rerunning an existing DAG run.
+        This only resets (not recreates) the DAG run.
+        DAG run conf is immutable and will not be reset on rerun of an existing DAG run.
+        When reset_dag_run=False and a dag run exists, DagRunAlreadyExists will be raised.
+        When reset_dag_run=True and a dag run exists, the existing DAG run will be cleared to rerun.
+    :param wait_for_completion: Whether or not to wait for DAG run completion. (default: False)
+    :param poke_interval: Poke interval to check DAG run status when wait_for_completion=True.
+        (default: 60)
+    :param allowed_states: Optional list of allowed DAG run states of the triggered DAG. This is useful when
+        setting ``wait_for_completion`` to True. Must be a valid DagRunState.
+        Default is ``[DagRunState.SUCCESS]``.
+    :param failed_states: Optional list of failed or disallowed DAG run states of the triggered DAG. This is
+        useful when setting ``wait_for_completion`` to True. Must be a valid DagRunState.
+        Default is ``[DagRunState.FAILED]``.
+    :param skip_when_already_exists: Set to true to mark the task as SKIPPED if a DAG run of the triggered
+        DAG for the same logical date already exists.
+    :param deferrable: If waiting for completion, whether or not to defer the task until done,
+        default is ``False``.
+    """
+
+    template_fields: Sequence[str] = (
+        "trigger_dag_id",
+        "trigger_run_id",
+        "logical_date",
+        "conf",
+        "wait_for_completion",
+        "skip_when_already_exists",
+    )
+    template_fields_renderers = {"conf": "py"}
+    ui_color = "#ffefeb"
+    operator_extra_links = [TriggerDagRunLink()]
+
+    def __init__(
+        self,
+        *,
+        trigger_dag_id: str,
+        trigger_run_id: str | None = None,
+        conf: dict | None = None,
+        logical_date: str | datetime.datetime | None | ArgNotSet = NOTSET,
+        reset_dag_run: bool = False,
+        wait_for_completion: bool = False,
+        poke_interval: int = 60,
+        allowed_states: list[str | DagRunState] | None = None,
+        failed_states: list[str | DagRunState] | None = None,
+        skip_when_already_exists: bool = False,
+        deferrable: bool = conf.getboolean("operators", "default_deferrable", fallback=False),
+        **kwargs,
+    ) -> None:
+        super().__init__(**kwargs)
+        self.trigger_dag_id = trigger_dag_id
+        self.trigger_run_id = trigger_run_id
+        self.conf = conf
+        self.reset_dag_run = reset_dag_run
+        self.wait_for_completion = wait_for_completion
+        self.poke_interval = poke_interval
+        if allowed_states:
+            self.allowed_states = [DagRunState(s) for s in allowed_states]
+        else:
+            self.allowed_states = [DagRunState.SUCCESS]
+        if failed_states is not None:
+            self.failed_states = [DagRunState(s) for s in failed_states]
+        else:
+            self.failed_states = [DagRunState.FAILED]
+        self.skip_when_already_exists = skip_when_already_exists
+        self._defer = deferrable
+        self.logical_date = logical_date
+        if logical_date is NOTSET:
+            self.logical_date = NOTSET
+        elif logical_date is None or isinstance(logical_date, (str, datetime.datetime)):
+            self.logical_date = logical_date
+        else:
+            raise TypeError(
+                f"Expected str, datetime.datetime, or None for parameter 'logical_date'. Got {type(logical_date).__name__}"
+            )
+
+    def execute(self, context: Context):
+        if self.logical_date is NOTSET:
+            # If no logical_date is provided we will set utcnow()
+            parsed_logical_date = timezone.utcnow()
+        elif self.logical_date is None or isinstance(self.logical_date, datetime.datetime):
+            parsed_logical_date = self.logical_date  # type: ignore
+        elif isinstance(self.logical_date, str):
+            parsed_logical_date = timezone.parse(self.logical_date)
+
+        try:
+            json.dumps(self.conf)
+        except TypeError:
+            raise ValueError("conf parameter should be JSON Serializable")
+
+        if self.trigger_run_id:
+            run_id = str(self.trigger_run_id)
+        else:
+            if AIRFLOW_V_3_0_PLUS:
+                run_id = DagRun.generate_run_id(
+                    run_type=DagRunType.MANUAL,
+                    logical_date=parsed_logical_date,
+                    run_after=parsed_logical_date or timezone.utcnow(),
+                )
+            else:
+                run_id = DagRun.generate_run_id(DagRunType.MANUAL, parsed_logical_date or timezone.utcnow())  # type: ignore[misc,call-arg]
+
+        if AIRFLOW_V_3_0_PLUS:
+            self._trigger_dag_af_3(context=context, run_id=run_id, parsed_logical_date=parsed_logical_date)
+        else:
+            self._trigger_dag_af_2(context=context, run_id=run_id, parsed_logical_date=parsed_logical_date)
+
+    def _trigger_dag_af_3(self, context, run_id, parsed_logical_date):
+        from airflow.exceptions import DagRunTriggerException
+
+        raise DagRunTriggerException(
+            trigger_dag_id=self.trigger_dag_id,
+            dag_run_id=run_id,
+            conf=self.conf,
+            logical_date=parsed_logical_date,
+            reset_dag_run=self.reset_dag_run,
+            skip_when_already_exists=self.skip_when_already_exists,
+            wait_for_completion=self.wait_for_completion,
+            allowed_states=self.allowed_states,
+            failed_states=self.failed_states,
+            poke_interval=self.poke_interval,
+            deferrable=self._defer,
+        )
+
+    def _trigger_dag_af_2(self, context, run_id, parsed_logical_date):
+        try:
+            dag_run = trigger_dag(
+                dag_id=self.trigger_dag_id,
+                run_id=run_id,
+                conf=self.conf,
+                execution_date=parsed_logical_date,
+                replace_microseconds=False,
+            )
+
+        except DagRunAlreadyExists as e:
+            if self.reset_dag_run:
+                dag_run = e.dag_run
+                self.log.info("Clearing %s on %s", self.trigger_dag_id, dag_run.run_id)
+
+                # Get target dag object and call clear()
+                dag_model = DagModel.get_current(self.trigger_dag_id)
+                if dag_model is None:
+                    raise DagNotFound(f"Dag id {self.trigger_dag_id} not found in DagModel")
+
+                # Note: here execution fails on database isolation mode. Needs structural changes for AIP-72
+                dag_bag = DagBag(dag_folder=dag_model.fileloc, read_dags_from_db=True)
+                dag = dag_bag.get_dag(self.trigger_dag_id)
+                dag.clear(start_date=dag_run.logical_date, end_date=dag_run.logical_date)
+            else:
+                if self.skip_when_already_exists:
+                    raise AirflowSkipException(
+                        "Skipping due to skip_when_already_exists is set to True and DagRunAlreadyExists"
+                    )
+                raise e
+        if dag_run is None:
+            raise RuntimeError("The dag_run should be set here!")
+        # Store the run id from the dag run (either created or found above) to
+        # be used when creating the extra link on the webserver.
+        ti = context["task_instance"]
+        ti.xcom_push(key=XCOM_RUN_ID, value=dag_run.run_id)
+
+        if self.wait_for_completion:
+            # Kick off the deferral process
+            if self._defer:
+                self.defer(
+                    trigger=DagStateTrigger(
+                        dag_id=self.trigger_dag_id,
+                        states=self.allowed_states + self.failed_states,
+                        execution_dates=[dag_run.logical_date],
+                        run_ids=[run_id],
+                        poll_interval=self.poke_interval,
+                    ),
+                    method_name="execute_complete",
+                )
+            # wait for dag to complete
+            while True:
+                self.log.info(
+                    "Waiting for %s on %s to become allowed state %s ...",
+                    self.trigger_dag_id,
+                    run_id,
+                    self.allowed_states,
+                )
+                time.sleep(self.poke_interval)
+
+                # Note: here execution fails on database isolation mode. Needs structural changes for AIP-72
+                dag_run.refresh_from_db()
+                state = dag_run.state
+                if state in self.failed_states:
+                    raise AirflowException(f"{self.trigger_dag_id} failed with failed states {state}")
+                if state in self.allowed_states:
+                    self.log.info("%s finished with allowed state %s", self.trigger_dag_id, state)
+                    return
+
+    def execute_complete(self, context: Context, event: tuple[str, dict[str, Any]]):
+        if AIRFLOW_V_3_0_PLUS:
+            self._trigger_dag_run_af_3_execute_complete(event=event)
+        else:
+            self._trigger_dag_run_af_2_execute_complete(event=event)
+
+    def _trigger_dag_run_af_3_execute_complete(self, event: tuple[str, dict[str, Any]]):
+        run_ids = event[1]["run_ids"]
+        event_data = event[1]
+        failed_run_id_conditions = []
+
+        for run_id in run_ids:
+            state = event_data.get(run_id)
+            if state in self.failed_states:
+                failed_run_id_conditions.append(run_id)
+                continue
+            if state in self.allowed_states:
+                self.log.info(
+                    "%s finished with allowed state %s for run_id %s",
+                    self.trigger_dag_id,
+                    state,
+                    run_id,
+                )
+
+        if failed_run_id_conditions:
+            raise AirflowException(
+                f"{self.trigger_dag_id} failed with failed states {self.failed_states} for run_ids"
+                f" {failed_run_id_conditions}"
+            )
+
+    if not AIRFLOW_V_3_0_PLUS:
+        from airflow.utils.session import NEW_SESSION, provide_session  # type: ignore[misc]
+
+        @provide_session
+        def _trigger_dag_run_af_2_execute_complete(
+            self, event: tuple[str, dict[str, Any]], session: Session = NEW_SESSION
+        ):
+            # This logical_date is parsed from the return trigger event
+            provided_logical_date = event[1]["execution_dates"][0]
+            try:
+                # Note: here execution fails on database isolation mode. Needs structural changes for AIP-72
+                dag_run = session.execute(
+                    select(DagRun).where(
+                        DagRun.dag_id == self.trigger_dag_id, DagRun.execution_date == provided_logical_date
+                    )
+                ).scalar_one()
+            except NoResultFound:
+                raise AirflowException(
+                    f"No DAG run found for DAG {self.trigger_dag_id} and logical date {self.logical_date}"
+                )
+
+            state = dag_run.state
+
+            if state in self.failed_states:
+                raise AirflowException(f"{self.trigger_dag_id} failed with failed state {state}")
+            if state in self.allowed_states:
+                self.log.info("%s finished with allowed state %s", self.trigger_dag_id, state)
+                return
+
+            raise AirflowException(
+                f"{self.trigger_dag_id} return {state} which is not in {self.failed_states}"
+                f" or {self.allowed_states}"
+            )
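As a usage sketch (not part of the diff; the dag_id values, schedule, and conf payload are hypothetical), the operator above could be wired up like this:

import datetime

from airflow import DAG
from airflow.providers.standard.operators.trigger_dagrun import TriggerDagRunOperator

# Hypothetical controller DAG; dag_ids, dates, and conf are illustrative only.
with DAG(dag_id="controller", schedule=None, start_date=datetime.datetime(2025, 1, 1)):
    trigger = TriggerDagRunOperator(
        task_id="trigger_target",
        trigger_dag_id="target_dag",    # the DAG to trigger (templated)
        conf={"source": "controller"},  # must survive json.dumps(), per execute() above
        wait_for_completion=True,       # poll until an allowed or failed state is reached
        poke_interval=30,               # seconds between status checks
        allowed_states=["success"],     # coerced to DagRunState in __init__
        failed_states=["failed"],
    )

The conf check mirrors the try/except around json.dumps(self.conf) in execute(): a non-serializable value raises ValueError before anything is triggered.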
--- a/airflow/providers/standard/operators/weekday.py
+++ b/airflow/providers/standard/operators/weekday.py
@@ -17,14 +17,19 @@
 # under the License.
 from __future__ import annotations
 
-from typing import TYPE_CHECKING, Iterable
+from collections.abc import Iterable
+from typing import TYPE_CHECKING
 
-from airflow.operators.branch import BaseBranchOperator
+from airflow.providers.standard.operators.branch import BaseBranchOperator
+from airflow.providers.standard.utils.weekday import WeekDay
 from airflow.utils import timezone
-from airflow.utils.weekday import WeekDay
 
 if TYPE_CHECKING:
-    from airflow.utils.context import Context
+    try:
+        from airflow.sdk.definitions.context import Context
+    except ImportError:
+        # TODO: Remove once provider drops support for Airflow 2
+        from airflow.utils.context import Context
 
 
 class BranchDayOfWeekOperator(BaseBranchOperator):
@@ -38,7 +43,8 @@ class BranchDayOfWeekOperator(BaseBranchOperator):
 
     .. code-block:: python
 
-        from airflow.operators.empty import EmptyOperator
+        from airflow.providers.standard.operators.empty import EmptyOperator
+        from airflow.operators.weekday import BranchDayOfWeekOperator
 
         monday = EmptyOperator(task_id="monday")
        other_day = EmptyOperator(task_id="other_day")
@@ -57,8 +63,9 @@ class BranchDayOfWeekOperator(BaseBranchOperator):
     .. code-block:: python
 
         # import WeekDay Enum
-        from airflow.utils.weekday import WeekDay
-        from airflow.operators.empty import EmptyOperator
+        from airflow.providers.standard.utils.weekday import WeekDay
+        from airflow.providers.standard.operators.empty import EmptyOperator
+        from airflow.operators.weekday import BranchDayOfWeekOperator
 
         workday = EmptyOperator(task_id="workday")
         weekend = EmptyOperator(task_id="weekend")
@@ -109,10 +116,13 @@ class BranchDayOfWeekOperator(BaseBranchOperator):
 
     def choose_branch(self, context: Context) -> str | Iterable[str]:
         if self.use_task_logical_date:
-            now = context["logical_date"]
+            now = context.get("logical_date")
+            if not now:
+                dag_run = context.get("dag_run")
+                now = dag_run.run_after  # type: ignore[union-attr, assignment]
         else:
             now = timezone.make_naive(timezone.utcnow(), self.dag.timezone)
 
-        if now.isoweekday() in self._week_day_num:
+        if now.isoweekday() in self._week_day_num:  # type: ignore[union-attr]
             return self.follow_task_ids_if_true
         return self.follow_task_ids_if_false
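A brief usage sketch of the branching behavior after this change (task ids are hypothetical; the imports follow the updated docstring above):

from airflow.operators.weekday import BranchDayOfWeekOperator
from airflow.providers.standard.operators.empty import EmptyOperator
from airflow.providers.standard.utils.weekday import WeekDay

# Hypothetical branch: follow "weekend" on Saturday/Sunday, otherwise "workday".
branch = BranchDayOfWeekOperator(
    task_id="branch_on_day",
    week_day={WeekDay.SATURDAY, WeekDay.SUNDAY},
    follow_task_ids_if_true="weekend",
    follow_task_ids_if_false="workday",
    use_task_logical_date=True,  # per choose_branch above, falls back to dag_run.run_after
)
branch >> [EmptyOperator(task_id="weekend"), EmptyOperator(task_id="workday")]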
--- a/airflow/providers/standard/sensors/bash.py
+++ b/airflow/providers/standard/sensors/bash.py
@@ -18,15 +18,20 @@
 from __future__ import annotations
 
 import os
+from collections.abc import Sequence
 from subprocess import PIPE, STDOUT, Popen
 from tempfile import NamedTemporaryFile, TemporaryDirectory, gettempdir
-from typing import TYPE_CHECKING, Sequence
+from typing import TYPE_CHECKING
 
 from airflow.exceptions import AirflowFailException
 from airflow.sensors.base import BaseSensorOperator
 
 if TYPE_CHECKING:
-    from airflow.utils.context import Context
+    try:
+        from airflow.sdk.definitions.context import Context
+    except ImportError:
+        # TODO: Remove once provider drops support for Airflow 2
+        from airflow.utils.context import Context
 
 
 class BashSensor(BaseSensorOperator):
@@ -70,9 +75,10 @@ class BashSensor(BaseSensorOperator):
         """Execute the bash command in a temporary directory."""
         bash_command = self.bash_command
         self.log.info("Tmp dir root location: %s", gettempdir())
-        with TemporaryDirectory(prefix="airflowtmp") as tmp_dir, NamedTemporaryFile(
-            dir=tmp_dir, prefix=self.task_id
-        ) as f:
+        with (
+            TemporaryDirectory(prefix="airflowtmp") as tmp_dir,
+            NamedTemporaryFile(dir=tmp_dir, prefix=self.task_id) as f,
+        ):
             f.write(bytes(bash_command, "utf_8"))
             f.flush()
             fname = f.name
@@ -101,14 +107,12 @@ class BashSensor(BaseSensorOperator):
             return True
 
         # we have a retry exit code, sensor retries if return code matches, otherwise error
-
+        if self.retry_exit_code is not None:
             if resp.returncode == self.retry_exit_code:
                 self.log.info("Return code matches retry code, will retry later")
                 return False
-
-            raise AirflowFailException(f"Command exited with return code {resp.returncode}")
+            raise AirflowFailException(f"Command exited with return code {resp.returncode}")
 
         # backwards compatibility: sensor retries no matter the error code
-
-
-        return False
+        self.log.info("Non-zero return code and no retry code set, will retry later")
+        return False
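To illustrate the retry semantics settled by the last hunk (a hypothetical sketch, not from the diff): exit code 0 succeeds, a return code equal to retry_exit_code pokes again later, and any other non-zero code raises AirflowFailException; without retry_exit_code the sensor keeps retrying on any non-zero code:

from airflow.providers.standard.sensors.bash import BashSensor

# Hypothetical sensor: exit 0 -> success; exit 99 -> retry later; anything else -> hard fail.
wait_for_flag = BashSensor(
    task_id="wait_for_flag_file",
    bash_command="test -f /tmp/flag || exit 99",
    retry_exit_code=99,
    poke_interval=60,
)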
--- a/airflow/providers/standard/sensors/date_time.py
+++ b/airflow/providers/standard/sensors/date_time.py
@@ -18,15 +18,37 @@
 from __future__ import annotations
 
 import datetime
-from typing import TYPE_CHECKING, Any, NoReturn, Sequence
+from collections.abc import Sequence
+from dataclasses import dataclass
+from typing import TYPE_CHECKING, Any, NoReturn
 
+from airflow.providers.standard.triggers.temporal import DateTimeTrigger
+from airflow.providers.standard.version_compat import AIRFLOW_V_3_0_PLUS
 from airflow.sensors.base import BaseSensorOperator
-from airflow.triggers.base import StartTriggerArgs
-from airflow.triggers.temporal import DateTimeTrigger
+
+try:
+    from airflow.triggers.base import StartTriggerArgs
+except ImportError:
+    # TODO: Remove this when min airflow version is 2.10.0 for standard provider
+    @dataclass
+    class StartTriggerArgs:  # type: ignore[no-redef]
+        """Arguments required for start task execution from triggerer."""
+
+        trigger_cls: str
+        next_method: str
+        trigger_kwargs: dict[str, Any] | None = None
+        next_kwargs: dict[str, Any] | None = None
+        timeout: datetime.timedelta | None = None
+
+
 from airflow.utils import timezone
 
 if TYPE_CHECKING:
-    from airflow.utils.context import Context
+    try:
+        from airflow.sdk.definitions.context import Context
+    except ImportError:
+        # TODO: Remove once provider drops support for Airflow 2
+        from airflow.utils.context import Context
 
 
 class DateTimeSensor(BaseSensorOperator):
@@ -37,7 +59,7 @@ class DateTimeSensor(BaseSensorOperator):
     It handles some cases for which ``TimeSensor`` and ``TimeDeltaSensor`` are not suited.
 
     **Example** 1 :
-        If a task needs to wait for 11am on each ``execution_date``. Using
+        If a task needs to wait for 11am on each ``logical_date``. Using
         ``TimeSensor`` or ``TimeDeltaSensor``, all backfill tasks started at
         1am have to wait for 10 hours. This is unnecessary, e.g. a backfill
         task with ``{{ ds }} = '1970-01-01'`` does not need to wait because
@@ -52,7 +74,7 @@ class DateTimeSensor(BaseSensorOperator):
 
         DateTimeSensor(
             task_id="wait_for_0100",
-            target_time="{{ next_execution_date.tomorrow().replace(hour=1) }}",
+            target_time="{{ data_interval_end.tomorrow().replace(hour=1) }}",
         )
 
     :param target_time: datetime after which the job succeeds. (templated)
@@ -93,7 +115,7 @@ class DateTimeSensorAsync(DateTimeSensor):
     """
 
     start_trigger_args = StartTriggerArgs(
-        trigger_cls="airflow.triggers.temporal.DateTimeTrigger",
+        trigger_cls="airflow.providers.standard.triggers.temporal.DateTimeTrigger",
         trigger_kwargs={"moment": "", "end_from_trigger": False},
         next_method="execute_complete",
         next_kwargs=None,
@@ -125,7 +147,9 @@ class DateTimeSensorAsync(DateTimeSensor):
             trigger=DateTimeTrigger(
                 moment=timezone.parse(self.target_time),
                 end_from_trigger=self.end_from_trigger,
-            ),
+            )
+            if AIRFLOW_V_3_0_PLUS
+            else DateTimeTrigger(moment=timezone.parse(self.target_time)),
         )
 
     def execute_complete(self, context: Context, event: Any = None) -> None:
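For reference, a minimal usage sketch of the async sensor after this change; the target_time template mirrors the updated docstring example:

from airflow.providers.standard.sensors.date_time import DateTimeSensorAsync

# Defers on the provider DateTimeTrigger; on Airflow 2 the trigger is built
# without end_from_trigger, per the version branch above.
wait_for_0100 = DateTimeSensorAsync(
    task_id="wait_for_0100",
    target_time="{{ data_interval_end.tomorrow().replace(hour=1) }}",
)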