apache-airflow-providers-standard 1.0.0.dev1__py3-none-any.whl → 1.1.0rc1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- airflow/providers/standard/LICENSE +0 -52
- airflow/providers/standard/__init__.py +23 -1
- airflow/providers/standard/decorators/__init__.py +16 -0
- airflow/providers/standard/decorators/bash.py +121 -0
- airflow/providers/standard/decorators/branch_external_python.py +63 -0
- airflow/providers/standard/decorators/branch_python.py +62 -0
- airflow/providers/standard/decorators/branch_virtualenv.py +62 -0
- airflow/providers/standard/decorators/external_python.py +70 -0
- airflow/providers/standard/decorators/python.py +86 -0
- airflow/providers/standard/decorators/python_virtualenv.py +67 -0
- airflow/providers/standard/decorators/sensor.py +83 -0
- airflow/providers/standard/decorators/short_circuit.py +65 -0
- airflow/providers/standard/get_provider_info.py +89 -7
- airflow/providers/standard/hooks/__init__.py +16 -0
- airflow/providers/standard/hooks/filesystem.py +89 -0
- airflow/providers/standard/hooks/package_index.py +95 -0
- airflow/providers/standard/hooks/subprocess.py +119 -0
- airflow/providers/standard/operators/bash.py +73 -56
- airflow/providers/standard/operators/branch.py +105 -0
- airflow/providers/standard/operators/datetime.py +15 -5
- airflow/providers/standard/operators/empty.py +39 -0
- airflow/providers/standard/operators/latest_only.py +127 -0
- airflow/providers/standard/operators/python.py +1143 -0
- airflow/providers/standard/operators/smooth.py +38 -0
- airflow/providers/standard/operators/trigger_dagrun.py +391 -0
- airflow/providers/standard/operators/weekday.py +19 -9
- airflow/providers/standard/sensors/bash.py +15 -11
- airflow/providers/standard/sensors/date_time.py +32 -8
- airflow/providers/standard/sensors/external_task.py +593 -0
- airflow/providers/standard/sensors/filesystem.py +158 -0
- airflow/providers/standard/sensors/python.py +84 -0
- airflow/providers/standard/sensors/time.py +28 -5
- airflow/providers/standard/sensors/time_delta.py +68 -15
- airflow/providers/standard/sensors/weekday.py +25 -7
- airflow/providers/standard/triggers/__init__.py +16 -0
- airflow/providers/standard/triggers/external_task.py +288 -0
- airflow/providers/standard/triggers/file.py +131 -0
- airflow/providers/standard/triggers/temporal.py +113 -0
- airflow/providers/standard/utils/__init__.py +16 -0
- airflow/providers/standard/utils/python_virtualenv.py +209 -0
- airflow/providers/standard/utils/python_virtualenv_script.jinja2 +82 -0
- airflow/providers/standard/utils/sensor_helper.py +137 -0
- airflow/providers/standard/utils/skipmixin.py +192 -0
- airflow/providers/standard/utils/weekday.py +77 -0
- airflow/providers/standard/version_compat.py +36 -0
- {apache_airflow_providers_standard-1.0.0.dev1.dist-info → apache_airflow_providers_standard-1.1.0rc1.dist-info}/METADATA +16 -35
- apache_airflow_providers_standard-1.1.0rc1.dist-info/RECORD +51 -0
- {apache_airflow_providers_standard-1.0.0.dev1.dist-info → apache_airflow_providers_standard-1.1.0rc1.dist-info}/WHEEL +1 -1
- apache_airflow_providers_standard-1.0.0.dev1.dist-info/RECORD +0 -17
- {apache_airflow_providers_standard-1.0.0.dev1.dist-info → apache_airflow_providers_standard-1.1.0rc1.dist-info}/entry_points.txt +0 -0
airflow/providers/standard/operators/bash.py
@@ -19,19 +19,29 @@ from __future__ import annotations
 
 import os
 import shutil
-import warnings
+import tempfile
+from collections.abc import Container, Sequence
 from functools import cached_property
-from typing import TYPE_CHECKING, Any, Callable, Container, Sequence, cast
+from typing import TYPE_CHECKING, Any, Callable, cast
 
 from airflow.exceptions import AirflowException, AirflowSkipException
-from airflow.hooks.subprocess import SubprocessHook
 from airflow.models.baseoperator import BaseOperator
-from airflow.utils.operator_helpers import context_to_airflow_vars
-from airflow.utils.types import ArgNotSet
+from airflow.providers.standard.hooks.subprocess import SubprocessHook, SubprocessResult, working_directory
+from airflow.providers.standard.version_compat import AIRFLOW_V_3_0_PLUS
+
+if AIRFLOW_V_3_0_PLUS:
+    from airflow.sdk.execution_time.context import context_to_airflow_vars
+else:
+    from airflow.utils.operator_helpers import context_to_airflow_vars  # type: ignore[no-redef, attr-defined]
 
 if TYPE_CHECKING:
-    from airflow.models.taskinstance import TaskInstance
-    from airflow.utils.context import Context
+    from airflow.utils.types import ArgNotSet
+
+    try:
+        from airflow.sdk.definitions.context import Context
+    except ImportError:
+        # TODO: Remove once provider drops support for Airflow 2
+        from airflow.utils.context import Context
 
 
 class BashOperator(BaseOperator):
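The runtime imports are now gated on ``AIRFLOW_V_3_0_PLUS`` from the provider's new ``version_compat`` module (+36 lines in this release; its body is not part of this excerpt). A hypothetical sketch of what such a gate typically boils down to, assuming it parses ``airflow.__version__`` with ``packaging``:

.. code-block:: python

    # Hypothetical sketch only; the real version_compat.py is not shown in this diff.
    from __future__ import annotations

    from packaging.version import Version

    import airflow


    def _base_version() -> tuple[int, int, int]:
        # Strip pre-release/dev suffixes: "3.0.0.dev1" -> (3, 0, 0)
        v = Version(airflow.__version__)
        return (v.major, v.minor, v.micro)


    AIRFLOW_V_3_0_PLUS = _base_version() >= (3, 0, 0)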
@@ -62,7 +72,10 @@ class BashOperator(BaseOperator):
     :param cwd: Working directory to execute the command in (templated).
         If None (default), the command is run in a temporary directory.
         To use current DAG folder as the working directory,
-        you might set template ``{{ dag_run.dag.folder }}``.
+        you might set template ``{{ task.dag.folder }}``.
+        When bash_command is a '.sh' or '.bash' file, Airflow must have write
+        access to the working directory. The script will be rendered (Jinja
+        template) into a new temporary file in this directory.
     :param output_processor: Function to further process the output of the bash script
         (default is lambda output: output).
 
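For illustration only (task id invented), the documented ``cwd`` behavior in use:

.. code-block:: python

    from airflow.providers.standard.operators.bash import BashOperator

    # cwd is templated, so the DAG folder is resolved at runtime.
    list_dag_folder = BashOperator(
        task_id="list_dag_folder",
        bash_command="ls -la",
        cwd="{{ task.dag.folder }}",
    )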
@@ -93,14 +106,18 @@ class BashOperator(BaseOperator):
 
     .. code-block:: python
 
-        bash_command = "set -e; python3 script.py '{{ next_execution_date }}'"
+        bash_command = "set -e; python3 script.py '{{ data_interval_end }}'"
 
     .. note::
 
-        Add a space after the script name when directly calling a ``.sh`` script with the
-        ``bash_command`` argument -- for example ``bash_command="my_script.sh "``. This
-        is because Airflow tries to load this file and process it as a Jinja template when
-        it ends with ``.sh``.
+        To simply execute a ``.sh`` or ``.bash`` script (without any Jinja template), add a space after the
+        script name ``bash_command`` argument -- for example ``bash_command="my_script.sh "``. This
+        is because Airflow tries to load this file and process it as a Jinja template when
+        it ends with ``.sh`` or ``.bash``.
+
+        If you have Jinja template in your script, do not put any blank space. And add the script's directory
+        in the DAG's ``template_searchpath``. If you specify a ``cwd``, Airflow must have write access to
+        this directory. The script will be rendered (Jinja template) into a new temporary file in this directory.
 
     .. warning::
 
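A short sketch of the two invocation styles the note distinguishes (script and task names are illustrative):

.. code-block:: python

    from airflow.providers.standard.operators.bash import BashOperator

    # Trailing space: the script is executed as-is, with no Jinja rendering.
    run_plain = BashOperator(task_id="run_plain", bash_command="my_script.sh ")

    # No trailing space: the file is loaded and rendered as a Jinja template
    # first, so its directory must be on the DAG's template_searchpath.
    run_templated = BashOperator(task_id="run_templated", bash_command="my_script.sh")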
@@ -149,7 +166,6 @@ class BashOperator(BaseOperator):
         env: dict[str, str] | None = None,
         append_env: bool = False,
         output_encoding: str = "utf-8",
-        skip_exit_code: int | None = None,
         skip_on_exit_code: int | Container[int] | None = 99,
         cwd: str | None = None,
         output_processor: Callable[[str], Any] = lambda result: result,
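With the deprecated ``skip_exit_code`` parameter removed from the signature, exit-code based skipping goes through ``skip_on_exit_code`` alone. An illustrative use (task id invented):

.. code-block:: python

    from airflow.providers.standard.operators.bash import BashOperator

    # Exit code 42 skips the task instead of failing it; any other non-zero
    # exit code still raises AirflowException.
    maybe_skip = BashOperator(
        task_id="maybe_skip",
        bash_command="exit 42",
        skip_on_exit_code=[42],
    )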
@@ -159,11 +175,6 @@ class BashOperator(BaseOperator):
         self.bash_command = bash_command
         self.env = env
         self.output_encoding = output_encoding
-        if skip_exit_code is not None:
-            warnings.warn(
-                "skip_exit_code is deprecated. Please use skip_on_exit_code", DeprecationWarning, stacklevel=2
-            )
-            skip_on_exit_code = skip_exit_code
         self.skip_on_exit_code = (
             skip_on_exit_code
             if isinstance(skip_on_exit_code, Container)
@@ -174,33 +185,16 @@ class BashOperator(BaseOperator):
         self.cwd = cwd
         self.append_env = append_env
         self.output_processor = output_processor
-
-        # When using the @task.bash decorator, the Bash command is not known until the underlying Python
-        # callable is executed and therefore set to NOTSET initially. This flag is useful during execution to
-        # determine whether the bash_command value needs to re-rendered.
-        self._init_bash_command_not_set = isinstance(self.bash_command, ArgNotSet)
+        self._is_inline_cmd = None
+        if isinstance(bash_command, str):
+            self._is_inline_cmd = self._is_inline_command(bash_command=bash_command)
 
     @cached_property
     def subprocess_hook(self):
         """Returns hook for running the bash command."""
         return SubprocessHook()
 
-    @staticmethod
-    def refresh_bash_command(ti: TaskInstance) -> None:
-        """
-        Rewrite the underlying rendered bash_command value for a task instance in the metadatabase.
-
-        TaskInstance.get_rendered_template_fields() cannot be used because this will retrieve the
-        RenderedTaskInstanceFields from the metadatabase which doesn't have the runtime-evaluated bash_command
-        value.
-
-        :meta private:
-        """
-        from airflow.models.renderedtifields import RenderedTaskInstanceFields
-
-        RenderedTaskInstanceFields._update_runtime_evaluated_template_fields(ti)
-
-    def get_env(self, context):
+    def get_env(self, context) -> dict:
         """Build the set of environment variables to be exposed for the bash command."""
         system_env = os.environ.copy()
         env = self.env
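The new ``_is_inline_cmd`` flag computed in ``__init__`` hinges on ``template_ext``, assumed here to be ``(".sh", ".bash")`` for ``BashOperator``. A standalone mirror of the check:

.. code-block:: python

    TEMPLATE_EXT = (".sh", ".bash")  # assumed value of BashOperator.template_ext


    def is_inline_command(bash_command: str) -> bool:
        # Mirrors BashOperator._is_inline_command: anything not ending in a
        # templated script extension is treated as an inline command string.
        return not bash_command.endswith(TEMPLATE_EXT)


    assert is_inline_command("echo hello")
    assert not is_inline_command("my_script.sh")
    assert is_inline_command("my_script.sh ")  # the trailing-space trick keeps it inline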
@@ -220,7 +214,7 @@ class BashOperator(BaseOperator):
         return env
 
     def execute(self, context: Context):
-        bash_path = shutil.which("bash") or "bash"
+        bash_path: str = shutil.which("bash") or "bash"
         if self.cwd is not None:
             if not os.path.exists(self.cwd):
                 raise AirflowException(f"Can not find the cwd: {self.cwd}")
@@ -228,29 +222,52 @@ class BashOperator(BaseOperator):
                 raise AirflowException(f"The cwd {self.cwd} must be a directory")
         env = self.get_env(context)
 
-        # Because the bash_command value is evaluated at runtime using the @task.bash decorator, the
-        # RenderedTaskInstanceField data needs to be rewritten and the bash_command value re-rendered -- the
-        # latter because the returned command from the decorated callable could contain a Jinja expression.
-        # Both will ensure the correct Bash command is executed and that the Rendered Template view in the UI
-        # displays the executed command (otherwise it will display as an ArgNotSet type).
-        if self._init_bash_command_not_set:
-            ti = cast("TaskInstance", context["ti"])
-            self.refresh_bash_command(ti)
+        if self._is_inline_cmd:
+            result = self._run_inline_command(bash_path=bash_path, env=env)
+        else:
+            result = self._run_rendered_script_file(bash_path=bash_path, env=env)
 
-        result = self.subprocess_hook.run_command(
-            command=[bash_path, "-c", self.bash_command],
-            env=env,
-            output_encoding=self.output_encoding,
-            cwd=self.cwd,
-        )
         if result.exit_code in self.skip_on_exit_code:
             raise AirflowSkipException(f"Bash command returned exit code {result.exit_code}. Skipping.")
-        elif result.exit_code != 0:
+        if result.exit_code != 0:
             raise AirflowException(
                 f"Bash command failed. The command returned a non-zero exit code {result.exit_code}."
             )
 
         return self.output_processor(result.output)
 
+    def _run_inline_command(self, bash_path: str, env: dict) -> SubprocessResult:
+        """Pass the bash command as string directly in the subprocess."""
+        return self.subprocess_hook.run_command(
+            command=[bash_path, "-c", self.bash_command],
+            env=env,
+            output_encoding=self.output_encoding,
+            cwd=self.cwd,
+        )
+
+    def _run_rendered_script_file(self, bash_path: str, env: dict) -> SubprocessResult:
+        """
+        Save the bash command into a file and execute this file.
+
+        This allows for longer commands, and prevents "Argument list too long error".
+        """
+        with working_directory(cwd=self.cwd) as cwd:
+            with tempfile.NamedTemporaryFile(mode="w", dir=cwd, suffix=".sh") as file:
+                file.write(cast("str", self.bash_command))
+                file.flush()
+
+                bash_script = os.path.basename(file.name)
+                return self.subprocess_hook.run_command(
+                    command=[bash_path, bash_script],
+                    env=env,
+                    output_encoding=self.output_encoding,
+                    cwd=cwd,
+                )
+
+    @classmethod
+    def _is_inline_command(cls, bash_command: str) -> bool:
+        """Return True if the bash command is an inline string. False if it's a bash script file."""
+        return not bash_command.endswith(tuple(cls.template_ext))
+
     def on_kill(self) -> None:
         self.subprocess_hook.send_sigterm()
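A self-contained sketch of what the new script-file path buys: writing a (possibly very long) command to a temporary file and handing bash the file name, rather than a ``-c`` string, sidesteps the kernel's argument-length limit. Plain stdlib, outside Airflow:

.. code-block:: python

    import os
    import shutil
    import subprocess
    import tempfile

    bash_path = shutil.which("bash") or "bash"
    # A command long enough to risk "Argument list too long" if passed via -c.
    long_command = "echo " + " ".join(str(i) for i in range(100_000))

    with tempfile.NamedTemporaryFile(mode="w", suffix=".sh") as f:
        f.write(long_command)
        f.flush()
        # Run by file name from the script's own directory, as
        # _run_rendered_script_file does through SubprocessHook.
        result = subprocess.run(
            [bash_path, os.path.basename(f.name)],
            cwd=os.path.dirname(f.name),
            capture_output=True,
            text=True,
        )
    print(result.returncode)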
airflow/providers/standard/operators/branch.py (new file)
@@ -0,0 +1,105 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""Branching operators."""
+
+from __future__ import annotations
+
+from collections.abc import Iterable
+from typing import TYPE_CHECKING
+
+from airflow.models.baseoperator import BaseOperator
+from airflow.providers.standard.version_compat import AIRFLOW_V_3_0_PLUS
+
+if AIRFLOW_V_3_0_PLUS:
+    from airflow.providers.standard.utils.skipmixin import SkipMixin
+else:
+    from airflow.models.skipmixin import SkipMixin
+
+if TYPE_CHECKING:
+    from airflow.sdk.definitions.context import Context
+    from airflow.sdk.types import RuntimeTaskInstanceProtocol
+
+
+class BranchMixIn(SkipMixin):
+    """Utility helper which handles the branching as one-liner."""
+
+    def do_branch(self, context: Context, branches_to_execute: str | Iterable[str]) -> str | Iterable[str]:
+        """Implement the handling of branching including logging."""
+        self.log.info("Branch into %s", branches_to_execute)
+        branch_task_ids = self._expand_task_group_roots(context["ti"], branches_to_execute)
+        self.skip_all_except(context["ti"], branch_task_ids)
+        return branches_to_execute
+
+    def _expand_task_group_roots(
+        self, ti: RuntimeTaskInstanceProtocol, branches_to_execute: str | Iterable[str]
+    ) -> Iterable[str]:
+        """Expand any task group into its root task ids."""
+        if TYPE_CHECKING:
+            assert ti.task
+
+        task = ti.task
+        dag = task.dag
+        if TYPE_CHECKING:
+            assert dag
+
+        if branches_to_execute is None:
+            return
+        elif isinstance(branches_to_execute, str) or not isinstance(branches_to_execute, Iterable):
+            branches_to_execute = [branches_to_execute]
+
+        for branch in branches_to_execute:
+            if branch in dag.task_group_dict:
+                tg = dag.task_group_dict[branch]
+                root_ids = [root.task_id for root in tg.roots]
+                self.log.info("Expanding task group %s into %s", tg.group_id, root_ids)
+                yield from root_ids
+            else:
+                yield branch
+
+
+class BaseBranchOperator(BaseOperator, BranchMixIn):
+    """
+    A base class for creating operators with branching functionality, like to BranchPythonOperator.
+
+    Users should create a subclass from this operator and implement the function
+    `choose_branch(self, context)`. This should run whatever business logic
+    is needed to determine the branch, and return one of the following:
+    - A single task_id (as a str)
+    - A single task_group_id (as a str)
+    - A list containing a combination of task_ids and task_group_ids
+
+    The operator will continue with the returned task_id(s) and/or task_group_id(s), and all other
+    tasks directly downstream of this operator will be skipped.
+    """
+
+    inherits_from_skipmixin = True
+
+    def choose_branch(self, context: Context) -> str | Iterable[str]:
+        """
+        Abstract method to choose which branch to run.
+
+        Subclasses should implement this, running whatever logic is
+        necessary to choose a branch and returning a task_id or list of
+        task_ids.
+
+        :param context: Context dictionary as passed to execute()
+        """
+        raise NotImplementedError
+
+    def execute(self, context: Context):
+        return self.do_branch(context, self.choose_branch(context))
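A minimal subclass sketch for the new ``BaseBranchOperator`` (class and task ids invented):

.. code-block:: python

    import datetime

    from airflow.providers.standard.operators.branch import BaseBranchOperator


    class WeekendBranchOperator(BaseBranchOperator):
        """Route to 'weekend_task' on Saturday/Sunday, otherwise 'weekday_task'."""

        def choose_branch(self, context):
            if datetime.datetime.now().weekday() >= 5:
                return "weekend_task"
            # A task_group_id or a list mixing task and group ids is also valid.
            return "weekday_task"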
airflow/providers/standard/operators/datetime.py
@@ -17,14 +17,19 @@
 from __future__ import annotations
 
 import datetime
-from typing import TYPE_CHECKING, Iterable
+from collections.abc import Iterable
+from typing import TYPE_CHECKING
 
 from airflow.exceptions import AirflowException
-from airflow.operators.branch import BaseBranchOperator
+from airflow.providers.standard.operators.branch import BaseBranchOperator
 from airflow.utils import timezone
 
 if TYPE_CHECKING:
-    from airflow.utils.context import Context
+    try:
+        from airflow.sdk.definitions.context import Context
+    except ImportError:
+        # TODO: Remove once provider drops support for Airflow 2
+        from airflow.utils.context import Context
 
 
 class BranchDateTimeOperator(BaseBranchOperator):
@@ -44,7 +49,7 @@ class BranchDateTimeOperator(BaseBranchOperator):
     :param target_lower: target lower bound.
     :param target_upper: target upper bound.
     :param use_task_logical_date: If ``True``, uses task's logical date to compare with targets.
-        Execution date is useful for backfilling. If ``False``, uses system's date.
+        Logical date is useful for backfilling. If ``False``, uses system's date.
     """
 
     def __init__(
@@ -72,9 +77,14 @@ class BranchDateTimeOperator(BaseBranchOperator):
 
     def choose_branch(self, context: Context) -> str | Iterable[str]:
         if self.use_task_logical_date:
-            now = context["logical_date"]
+            now = context.get("logical_date")
+            if not now:
+                dag_run = context.get("dag_run")
+                now = dag_run.run_after  # type: ignore[union-attr, assignment]
         else:
             now = timezone.coerce_datetime(timezone.utcnow())
+        if TYPE_CHECKING:
+            assert isinstance(now, datetime.datetime)
         lower, upper = target_times_as_dates(now, self.target_lower, self.target_upper)
         lower = timezone.coerce_datetime(lower, self.dag.timezone)
         upper = timezone.coerce_datetime(upper, self.dag.timezone)
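For reference, a typical use of the operator whose branching logic changed above (task ids invented):

.. code-block:: python

    import datetime

    from airflow.providers.standard.operators.datetime import BranchDateTimeOperator

    # Follow "in_window" only when the comparison date falls between 09:00 and
    # 17:00; with use_task_logical_date=True that date is the logical date (or,
    # per the change above, dag_run.run_after when no logical date exists).
    branch = BranchDateTimeOperator(
        task_id="datetime_branch",
        target_lower=datetime.time(9, 0),
        target_upper=datetime.time(17, 0),
        follow_task_ids_if_true="in_window",
        follow_task_ids_if_false="out_of_window",
        use_task_logical_date=True,
    )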
airflow/providers/standard/operators/empty.py (new file)
@@ -0,0 +1,39 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from airflow.models.baseoperator import BaseOperator
+
+if TYPE_CHECKING:
+    from airflow.sdk.definitions.context import Context
+
+
+class EmptyOperator(BaseOperator):
+    """
+    Operator that does literally nothing.
+
+    It can be used to group tasks in a DAG.
+    The task is evaluated by the scheduler but never processed by the executor.
+    """
+
+    ui_color = "#e8f7e4"
+    inherits_from_empty_operator = True
+
+    def execute(self, context: Context):
+        pass
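Typical use of the new ``EmptyOperator`` as a structural no-op (dag and task ids invented):

.. code-block:: python

    from airflow import DAG
    from airflow.providers.standard.operators.empty import EmptyOperator

    with DAG(dag_id="empty_example") as dag:
        # Fan-in/fan-out joints that carry no work of their own.
        start = EmptyOperator(task_id="start")
        end = EmptyOperator(task_id="end")
        start >> end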
airflow/providers/standard/operators/latest_only.py (new file)
@@ -0,0 +1,127 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""Contains an operator to run downstream tasks only for the latest scheduled DagRun."""
+
+from __future__ import annotations
+
+from collections.abc import Iterable
+from datetime import timedelta
+from typing import TYPE_CHECKING
+
+import pendulum
+
+from airflow.providers.standard.operators.branch import BaseBranchOperator
+from airflow.providers.standard.version_compat import AIRFLOW_V_3_0_PLUS
+from airflow.utils.types import DagRunType
+
+if TYPE_CHECKING:
+    from pendulum.datetime import DateTime
+
+    from airflow.models import DagRun
+
+    try:
+        from airflow.sdk.definitions.context import Context
+    except ImportError:
+        # TODO: Remove once provider drops support for Airflow 2
+        from airflow.utils.context import Context
+
+
+class LatestOnlyOperator(BaseBranchOperator):
+    """
+    Skip tasks that are not running during the most recent schedule interval.
+
+    If the task is run outside the latest schedule interval (i.e. run_type == DagRunType.MANUAL),
+    all directly downstream tasks will be skipped.
+
+    Note that downstream tasks are never skipped if the given DAG_Run is
+    marked as externally triggered.
+
+    Note that when used with timetables that produce zero-length or point-in-time data intervals
+    (e.g., ``DeltaTriggerTimetable``), this operator assumes each run is the latest
+    and does not skip downstream tasks.
+    """
+
+    ui_color = "#e9ffdb"  # nyanza
+
+    def choose_branch(self, context: Context) -> str | Iterable[str]:
+        # If the DAG Run is externally triggered, then return without
+        # skipping downstream tasks
+        dag_run: DagRun = context["dag_run"]  # type: ignore[assignment]
+        if dag_run.run_type == DagRunType.MANUAL:
+            self.log.info("Manually triggered DAG_Run: allowing execution to proceed.")
+            return list(self.get_direct_relative_ids(upstream=False))
+
+        dates = self._get_compare_dates(dag_run)
+
+        if dates is None:
+            self.log.info("Last scheduled execution: allowing execution to proceed.")
+            return list(self.get_direct_relative_ids(upstream=False))
+
+        now = pendulum.now("UTC")
+        left_window, right_window = dates
+        self.log.info(
+            "Checking latest only with left_window: %s right_window: %s now: %s",
+            left_window,
+            right_window,
+            now,
+        )
+
+        if not left_window < now <= right_window:
+            self.log.info("Not latest execution, skipping downstream.")
+            # we return an empty list, thus the parent BaseBranchOperator
+            # won't exclude any downstream tasks from skipping.
+            return []
+
+        self.log.info("Latest, allowing execution to proceed.")
+        return list(self.get_direct_relative_ids(upstream=False))
+
+    def _get_compare_dates(self, dag_run: DagRun) -> tuple[DateTime, DateTime] | None:
+        dagrun_date: DateTime
+        if AIRFLOW_V_3_0_PLUS:
+            dagrun_date = dag_run.logical_date or dag_run.run_after
+        else:
+            dagrun_date = dag_run.logical_date
+
+        from airflow.timetables.base import DataInterval, TimeRestriction
+
+        current_interval = DataInterval(
+            start=dag_run.data_interval_start or dagrun_date,
+            end=dag_run.data_interval_end or dagrun_date,
+        )
+
+        time_restriction = TimeRestriction(
+            earliest=None, latest=current_interval.end - timedelta(microseconds=1), catchup=True
+        )
+        if prev_info := self.dag.timetable.next_dagrun_info(
+            last_automated_data_interval=current_interval,
+            restriction=time_restriction,
+        ):
+            left = prev_info.data_interval.end
+        else:
+            left = current_interval.start
+
+        time_restriction = TimeRestriction(earliest=current_interval.end, latest=None, catchup=True)
+        next_info = self.dag.timetable.next_dagrun_info(
+            last_automated_data_interval=current_interval,
+            restriction=time_restriction,
+        )
+
+        if not next_info:
+            return None
+
+        return (left, next_info.data_interval.end)
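Finally, a typical placement of the new ``LatestOnlyOperator``, gating downstream work during catchup (dag and task ids invented):

.. code-block:: python

    import datetime

    from airflow import DAG
    from airflow.providers.standard.operators.empty import EmptyOperator
    from airflow.providers.standard.operators.latest_only import LatestOnlyOperator

    with DAG(
        dag_id="latest_only_example",
        start_date=datetime.datetime(2024, 1, 1),
        schedule="@daily",
        catchup=True,
    ) as dag:
        latest_only = LatestOnlyOperator(task_id="latest_only")
        # Skipped on every backfill run except the one covering "now".
        notify = EmptyOperator(task_id="notify")
        latest_only >> notify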