apache-airflow-providers-standard 1.0.0.dev0__py3-none-any.whl → 1.0.0rc1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of apache-airflow-providers-standard might be problematic.
Files changed (50)
  1. airflow/providers/standard/LICENSE +0 -52
  2. airflow/providers/standard/__init__.py +23 -1
  3. airflow/providers/standard/decorators/__init__.py +16 -0
  4. airflow/providers/standard/decorators/bash.py +121 -0
  5. airflow/providers/standard/decorators/branch_external_python.py +63 -0
  6. airflow/providers/standard/decorators/branch_python.py +62 -0
  7. airflow/providers/standard/decorators/branch_virtualenv.py +62 -0
  8. airflow/providers/standard/decorators/external_python.py +70 -0
  9. airflow/providers/standard/decorators/python.py +86 -0
  10. airflow/providers/standard/decorators/python_virtualenv.py +67 -0
  11. airflow/providers/standard/decorators/sensor.py +83 -0
  12. airflow/providers/standard/decorators/short_circuit.py +65 -0
  13. airflow/providers/standard/get_provider_info.py +80 -7
  14. airflow/providers/standard/hooks/__init__.py +16 -0
  15. airflow/providers/standard/hooks/filesystem.py +89 -0
  16. airflow/providers/standard/hooks/package_index.py +95 -0
  17. airflow/providers/standard/hooks/subprocess.py +119 -0
  18. airflow/providers/standard/operators/bash.py +273 -0
  19. airflow/providers/standard/operators/branch.py +105 -0
  20. airflow/providers/standard/operators/datetime.py +15 -5
  21. airflow/providers/standard/operators/empty.py +39 -0
  22. airflow/providers/standard/operators/latest_only.py +115 -0
  23. airflow/providers/standard/operators/python.py +1143 -0
  24. airflow/providers/standard/operators/smooth.py +38 -0
  25. airflow/providers/standard/operators/trigger_dagrun.py +370 -0
  26. airflow/providers/standard/operators/weekday.py +19 -9
  27. airflow/providers/standard/sensors/bash.py +118 -0
  28. airflow/providers/standard/sensors/date_time.py +32 -8
  29. airflow/providers/standard/sensors/external_task.py +593 -0
  30. airflow/providers/standard/sensors/filesystem.py +158 -0
  31. airflow/providers/standard/sensors/python.py +84 -0
  32. airflow/providers/standard/sensors/time.py +28 -5
  33. airflow/providers/standard/sensors/time_delta.py +68 -15
  34. airflow/providers/standard/sensors/weekday.py +25 -7
  35. airflow/providers/standard/triggers/__init__.py +16 -0
  36. airflow/providers/standard/triggers/external_task.py +288 -0
  37. airflow/providers/standard/triggers/file.py +131 -0
  38. airflow/providers/standard/triggers/temporal.py +113 -0
  39. airflow/providers/standard/utils/__init__.py +16 -0
  40. airflow/providers/standard/utils/python_virtualenv.py +209 -0
  41. airflow/providers/standard/utils/python_virtualenv_script.jinja2 +82 -0
  42. airflow/providers/standard/utils/sensor_helper.py +137 -0
  43. airflow/providers/standard/utils/skipmixin.py +192 -0
  44. airflow/providers/standard/utils/weekday.py +77 -0
  45. airflow/providers/standard/version_compat.py +36 -0
  46. {apache_airflow_providers_standard-1.0.0.dev0.dist-info → apache_airflow_providers_standard-1.0.0rc1.dist-info}/METADATA +12 -31
  47. apache_airflow_providers_standard-1.0.0rc1.dist-info/RECORD +51 -0
  48. {apache_airflow_providers_standard-1.0.0.dev0.dist-info → apache_airflow_providers_standard-1.0.0rc1.dist-info}/WHEEL +1 -1
  49. apache_airflow_providers_standard-1.0.0.dev0.dist-info/RECORD +0 -15
  50. {apache_airflow_providers_standard-1.0.0.dev0.dist-info → apache_airflow_providers_standard-1.0.0rc1.dist-info}/entry_points.txt +0 -0
@@ -0,0 +1,273 @@
+ #
+ # Licensed to the Apache Software Foundation (ASF) under one
+ # or more contributor license agreements. See the NOTICE file
+ # distributed with this work for additional information
+ # regarding copyright ownership. The ASF licenses this file
+ # to you under the Apache License, Version 2.0 (the
+ # "License"); you may not use this file except in compliance
+ # with the License. You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing,
+ # software distributed under the License is distributed on an
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ # KIND, either express or implied. See the License for the
+ # specific language governing permissions and limitations
+ # under the License.
+ from __future__ import annotations
+
+ import os
+ import shutil
+ import tempfile
+ from collections.abc import Container, Sequence
+ from functools import cached_property
+ from typing import TYPE_CHECKING, Any, Callable, cast
+
+ from airflow.exceptions import AirflowException, AirflowSkipException
+ from airflow.models.baseoperator import BaseOperator
+ from airflow.providers.standard.hooks.subprocess import SubprocessHook, SubprocessResult, working_directory
+ from airflow.providers.standard.version_compat import AIRFLOW_V_3_0_PLUS
+
+ if AIRFLOW_V_3_0_PLUS:
+     from airflow.sdk.execution_time.context import context_to_airflow_vars
+ else:
+     from airflow.utils.operator_helpers import context_to_airflow_vars  # type: ignore[no-redef, attr-defined]
+
+ if TYPE_CHECKING:
+     from airflow.utils.types import ArgNotSet
+
+     try:
+         from airflow.sdk.definitions.context import Context
+     except ImportError:
+         # TODO: Remove once provider drops support for Airflow 2
+         from airflow.utils.context import Context
+
+
+ class BashOperator(BaseOperator):
+     r"""
+     Execute a Bash script, command or set of commands.
+
+     .. seealso::
+         For more information on how to use this operator, take a look at the guide:
+         :ref:`howto/operator:BashOperator`
+
+     If ``BaseOperator.do_xcom_push`` is True, the last line written to stdout
+     will also be pushed to an XCom when the bash command completes.
+
+     :param bash_command: The command, set of commands or reference to a
+         Bash script (must be '.sh' or '.bash') to be executed. (templated)
+     :param env: If env is not None, it must be a dict that defines the
+         environment variables for the new process; these are used instead
+         of inheriting the current process environment, which is the default
+         behavior. (templated)
+     :param append_env: If False (default), uses only the environment variables passed
+         in the ``env`` parameter and does not inherit the current process environment.
+         If True, inherits the environment variables from the current process and then
+         updates or extends them with the variables passed in ``env``.
+     :param output_encoding: Output encoding of Bash command
+     :param skip_on_exit_code: If task exits with this exit code, leave the task
+         in ``skipped`` state (default: 99). If set to ``None``, any non-zero
+         exit code will be treated as a failure.
+     :param cwd: Working directory to execute the command in (templated).
+         If None (default), the command is run in a temporary directory.
+         To use current DAG folder as the working directory,
+         you might set template ``{{ task.dag.folder }}``.
+         When bash_command is a '.sh' or '.bash' file, Airflow must have write
+         access to the working directory. The script will be rendered (Jinja
+         template) into a new temporary file in this directory.
+     :param output_processor: Function to further process the output of the bash script
+         (default is lambda output: output).
+
+     Airflow will evaluate the exit code of the Bash command. In general, a non-zero exit code will result in
+     task failure and zero will result in task success.
+     Exit code ``99`` (or another set in ``skip_on_exit_code``)
+     will throw an :class:`airflow.exceptions.AirflowSkipException`, which will leave the task in ``skipped``
+     state. You can have all non-zero exit codes be treated as a failure by setting ``skip_on_exit_code=None``.
+
+     .. list-table::
+        :widths: 25 25
+        :header-rows: 1
+
+        * - Exit code
+          - Behavior
+        * - 0
+          - success
+        * - `skip_on_exit_code` (default: 99)
+          - raise :class:`airflow.exceptions.AirflowSkipException`
+        * - otherwise
+          - raise :class:`airflow.exceptions.AirflowException`
+
+     .. note::
+
+         Airflow will not recognize a non-zero exit code unless the whole shell exits with a non-zero exit
+         code. This can be an issue if the non-zero exit arises from a sub-command. The easiest way of
+         addressing this is to prefix the command with ``set -e;``
+
+     .. code-block:: python
+
+         bash_command = "set -e; python3 script.py '{{ data_interval_end }}'"
+
+     .. note::
+
+         To simply execute a ``.sh`` or ``.bash`` script (without any Jinja template), add a space after the
+         script name ``bash_command`` argument -- for example ``bash_command="my_script.sh "``. This
+         is because Airflow tries to load this file and process it as a Jinja template when
+         it ends with ``.sh`` or ``.bash``.
+
+         If your script contains Jinja templates, do not add the trailing space, and add the script's
+         directory to the DAG's ``template_searchpath``. If you specify a ``cwd``, Airflow must have
+         write access to this directory. The script will be rendered (Jinja template) into a new
+         temporary file in this directory.
+
+     .. warning::
+
+         Care should be taken with "user" input or when using Jinja templates in the
+         ``bash_command``, as this bash operator does not perform any escaping or
+         sanitization of the command.
+
+         This applies mostly to using "dag_run" conf, as that can be submitted via
+         users in the Web UI. Most of the default template variables are not at
+         risk.
+
+         For example, do **not** do this:
+
+         .. code-block:: python
+
+             bash_task = BashOperator(
+                 task_id="bash_task",
+                 bash_command='echo "Here is the message: \'{{ dag_run.conf["message"] if dag_run else "" }}\'"',
+             )
+
+         Instead, you should pass this via the ``env`` kwarg and use double-quotes
+         inside the bash_command, as below:
+
+         .. code-block:: python
+
+             bash_task = BashOperator(
+                 task_id="bash_task",
+                 bash_command="echo \"here is the message: '$message'\"",
+                 env={"message": '{{ dag_run.conf["message"] if dag_run else "" }}'},
+             )
+
+     .. versionadded:: 2.10.0
+         The `output_processor` parameter.
+
+     """
+
+     template_fields: Sequence[str] = ("bash_command", "env", "cwd")
+     template_fields_renderers = {"bash_command": "bash", "env": "json"}
+     template_ext: Sequence[str] = (".sh", ".bash")
+     ui_color = "#f0ede4"
+
+     def __init__(
+         self,
+         *,
+         bash_command: str | ArgNotSet,
+         env: dict[str, str] | None = None,
+         append_env: bool = False,
+         output_encoding: str = "utf-8",
+         skip_on_exit_code: int | Container[int] | None = 99,
+         cwd: str | None = None,
+         output_processor: Callable[[str], Any] = lambda result: result,
+         **kwargs,
+     ) -> None:
+         super().__init__(**kwargs)
+         self.bash_command = bash_command
+         self.env = env
+         self.output_encoding = output_encoding
+         self.skip_on_exit_code = (
+             skip_on_exit_code
+             if isinstance(skip_on_exit_code, Container)
+             else [skip_on_exit_code]
+             if skip_on_exit_code is not None
+             else []
+         )
+         self.cwd = cwd
+         self.append_env = append_env
+         self.output_processor = output_processor
+         self._is_inline_cmd = None
+         if isinstance(bash_command, str):
+             self._is_inline_cmd = self._is_inline_command(bash_command=bash_command)
+
+     @cached_property
+     def subprocess_hook(self):
+         """Returns hook for running the bash command."""
+         return SubprocessHook()
+
+     def get_env(self, context) -> dict:
+         """Build the set of environment variables to be exposed for the bash command."""
+         system_env = os.environ.copy()
+         env = self.env
+         if env is None:
+             env = system_env
+         else:
+             if self.append_env:
+                 system_env.update(env)
+                 env = system_env
+
+         airflow_context_vars = context_to_airflow_vars(context, in_env_var_format=True)
+         self.log.debug(
+             "Exporting env vars: %s",
+             " ".join(f"{k}={v!r}" for k, v in airflow_context_vars.items()),
+         )
+         env.update(airflow_context_vars)
+         return env
+
+     def execute(self, context: Context):
+         bash_path: str = shutil.which("bash") or "bash"
+         if self.cwd is not None:
+             if not os.path.exists(self.cwd):
+                 raise AirflowException(f"Can not find the cwd: {self.cwd}")
+             if not os.path.isdir(self.cwd):
+                 raise AirflowException(f"The cwd {self.cwd} must be a directory")
+         env = self.get_env(context)
+
+         if self._is_inline_cmd:
+             result = self._run_inline_command(bash_path=bash_path, env=env)
+         else:
+             result = self._run_rendered_script_file(bash_path=bash_path, env=env)
+
+         if result.exit_code in self.skip_on_exit_code:
+             raise AirflowSkipException(f"Bash command returned exit code {result.exit_code}. Skipping.")
+         if result.exit_code != 0:
+             raise AirflowException(
+                 f"Bash command failed. The command returned a non-zero exit code {result.exit_code}."
+             )
+
+         return self.output_processor(result.output)
+
+     def _run_inline_command(self, bash_path: str, env: dict) -> SubprocessResult:
+         """Pass the bash command as string directly in the subprocess."""
+         return self.subprocess_hook.run_command(
+             command=[bash_path, "-c", self.bash_command],
+             env=env,
+             output_encoding=self.output_encoding,
+             cwd=self.cwd,
+         )
+
+     def _run_rendered_script_file(self, bash_path: str, env: dict) -> SubprocessResult:
+         """
+         Save the bash command into a file and execute this file.
+
+         This allows for longer commands, and prevents the "Argument list too long" error.
+         """
+         with working_directory(cwd=self.cwd) as cwd:
+             with tempfile.NamedTemporaryFile(mode="w", dir=cwd, suffix=".sh") as file:
+                 file.write(cast("str", self.bash_command))
+                 file.flush()
+
+                 bash_script = os.path.basename(file.name)
+                 return self.subprocess_hook.run_command(
+                     command=[bash_path, bash_script],
+                     env=env,
+                     output_encoding=self.output_encoding,
+                     cwd=cwd,
+                 )
+
+     @classmethod
+     def _is_inline_command(cls, bash_command: str) -> bool:
+         """Return True if the bash command is an inline string. False if it's a bash script file."""
+         return not bash_command.endswith(tuple(cls.template_ext))
+
+     def on_kill(self) -> None:
+         self.subprocess_hook.send_sigterm()
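
For orientation, a minimal usage sketch of the operator added above; the DAG id, commands, and processor function are illustrative, not taken from the diff:

    import json

    from airflow import DAG
    from airflow.providers.standard.operators.bash import BashOperator

    with DAG(dag_id="bash_example") as dag:
        # Inline command: executed via `bash -c`; with do_xcom_push the last
        # stdout line is pushed to XCom after output_processor runs on it.
        emit_json = BashOperator(
            task_id="emit_json",
            bash_command="echo '{\"rows\": 42}'",
            env={"STAGE": "dev"},  # merged on top of os.environ because append_env=True
            append_env=True,
            skip_on_exit_code=99,  # exit code 99 marks the task skipped rather than failed
            output_processor=json.loads,  # parse the last stdout line before the XCom push
        )

        # A bash_command ending in ".sh"/".bash" is loaded and rendered as a
        # Jinja template, then executed from a temporary file; a trailing
        # space ("cleanup.sh ") would instead run it inline without rendering.
        run_script = BashOperator(task_id="run_script", bash_command="cleanup.sh")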
@@ -0,0 +1,105 @@
+ #
+ # Licensed to the Apache Software Foundation (ASF) under one
+ # or more contributor license agreements. See the NOTICE file
+ # distributed with this work for additional information
+ # regarding copyright ownership. The ASF licenses this file
+ # to you under the Apache License, Version 2.0 (the
+ # "License"); you may not use this file except in compliance
+ # with the License. You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing,
+ # software distributed under the License is distributed on an
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ # KIND, either express or implied. See the License for the
+ # specific language governing permissions and limitations
+ # under the License.
+ """Branching operators."""
+
+ from __future__ import annotations
+
+ from collections.abc import Iterable
+ from typing import TYPE_CHECKING
+
+ from airflow.models.baseoperator import BaseOperator
+ from airflow.providers.standard.version_compat import AIRFLOW_V_3_0_PLUS
+
+ if AIRFLOW_V_3_0_PLUS:
+     from airflow.providers.standard.utils.skipmixin import SkipMixin
+ else:
+     from airflow.models.skipmixin import SkipMixin
+
+ if TYPE_CHECKING:
+     from airflow.sdk.definitions.context import Context
+     from airflow.sdk.types import RuntimeTaskInstanceProtocol
+
+
+ class BranchMixIn(SkipMixin):
39
+ """Utility helper which handles the branching as one-liner."""
40
+
41
+ def do_branch(self, context: Context, branches_to_execute: str | Iterable[str]) -> str | Iterable[str]:
42
+ """Implement the handling of branching including logging."""
43
+ self.log.info("Branch into %s", branches_to_execute)
44
+ branch_task_ids = self._expand_task_group_roots(context["ti"], branches_to_execute)
45
+ self.skip_all_except(context["ti"], branch_task_ids)
46
+ return branches_to_execute
47
+
48
+ def _expand_task_group_roots(
49
+ self, ti: RuntimeTaskInstanceProtocol, branches_to_execute: str | Iterable[str]
50
+ ) -> Iterable[str]:
51
+ """Expand any task group into its root task ids."""
52
+ if TYPE_CHECKING:
53
+ assert ti.task
54
+
55
+ task = ti.task
56
+ dag = task.dag
57
+ if TYPE_CHECKING:
58
+ assert dag
59
+
60
+ if branches_to_execute is None:
61
+ return
62
+ elif isinstance(branches_to_execute, str) or not isinstance(branches_to_execute, Iterable):
63
+ branches_to_execute = [branches_to_execute]
64
+
65
+ for branch in branches_to_execute:
66
+ if branch in dag.task_group_dict:
67
+ tg = dag.task_group_dict[branch]
68
+ root_ids = [root.task_id for root in tg.roots]
69
+ self.log.info("Expanding task group %s into %s", tg.group_id, root_ids)
70
+ yield from root_ids
71
+ else:
72
+ yield branch
73
+
74
+
75
+ class BaseBranchOperator(BaseOperator, BranchMixIn):
76
+ """
+     A base class for creating operators with branching functionality, similar to BranchPythonOperator.
+
+     Users should create a subclass from this operator and implement the function
+     `choose_branch(self, context)`. This should run whatever business logic
+     is needed to determine the branch, and return one of the following:
+     - A single task_id (as a str)
+     - A single task_group_id (as a str)
+     - A list containing a combination of task_ids and task_group_ids
+
+     The operator will continue with the returned task_id(s) and/or task_group_id(s), and all other
+     tasks directly downstream of this operator will be skipped.
+     """
+
+     inherits_from_skipmixin = True
+
+     def choose_branch(self, context: Context) -> str | Iterable[str]:
+         """
+         Abstract method to choose which branch to run.
+
+         Subclasses should implement this, running whatever logic is
+         necessary to choose a branch and returning a task_id or list of
+         task_ids.
+
+         :param context: Context dictionary as passed to execute()
+         """
+         raise NotImplementedError
+
+     def execute(self, context: Context):
+         return self.do_branch(context, self.choose_branch(context))
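
The contract described in the BaseBranchOperator docstring above reduces to implementing choose_branch. A hypothetical subclass as a sketch (the class name and task ids are invented for illustration):

    from airflow.providers.standard.operators.branch import BaseBranchOperator


    class WeekendBranchOperator(BaseBranchOperator):
        """Follow one of two downstream task ids depending on the day of the week."""

        def choose_branch(self, context):
            # execute() hands the returned id(s) to do_branch(), which skips
            # every direct downstream task that is not selected; a returned
            # task_group_id would be expanded to the group's root task ids.
            if context["logical_date"].weekday() >= 5:  # Saturday or Sunday
                return "weekend_processing"
            return "weekday_processing"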
@@ -17,14 +17,19 @@
  from __future__ import annotations

  import datetime
- from typing import TYPE_CHECKING, Iterable
+ from collections.abc import Iterable
+ from typing import TYPE_CHECKING

  from airflow.exceptions import AirflowException
- from airflow.operators.branch import BaseBranchOperator
+ from airflow.providers.standard.operators.branch import BaseBranchOperator
  from airflow.utils import timezone

  if TYPE_CHECKING:
-     from airflow.utils.context import Context
+     try:
+         from airflow.sdk.definitions.context import Context
+     except ImportError:
+         # TODO: Remove once provider drops support for Airflow 2
+         from airflow.utils.context import Context


  class BranchDateTimeOperator(BaseBranchOperator):
@@ -44,7 +49,7 @@ class BranchDateTimeOperator(BaseBranchOperator):
      :param target_lower: target lower bound.
      :param target_upper: target upper bound.
      :param use_task_logical_date: If ``True``, uses task's logical date to compare with targets.
-         Execution date is useful for backfilling. If ``False``, uses system's date.
+         Logical date is useful for backfilling. If ``False``, uses system's date.
      """

      def __init__(
@@ -72,9 +77,14 @@ class BranchDateTimeOperator(BaseBranchOperator):

      def choose_branch(self, context: Context) -> str | Iterable[str]:
          if self.use_task_logical_date:
-             now = context["logical_date"]
+             now = context.get("logical_date")
+             if not now:
+                 dag_run = context.get("dag_run")
+                 now = dag_run.run_after  # type: ignore[union-attr, assignment]
          else:
              now = timezone.coerce_datetime(timezone.utcnow())
+         if TYPE_CHECKING:
+             assert isinstance(now, datetime.datetime)
          lower, upper = target_times_as_dates(now, self.target_lower, self.target_upper)
          lower = timezone.coerce_datetime(lower, self.dag.timezone)
          upper = timezone.coerce_datetime(upper, self.dag.timezone)
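
For reference, the operator changed in this hunk is typically configured as in the sketch below (bounds and task ids are illustrative). With use_task_logical_date=True, the comparison value now falls back to dag_run.run_after when the context carries no logical_date, per the added lines above:

    import datetime

    from airflow.providers.standard.operators.datetime import BranchDateTimeOperator

    branch = BranchDateTimeOperator(
        task_id="datetime_branch",
        # Follow "in_window" when the reference time falls between 08:00 and 17:00.
        target_lower=datetime.time(8, 0),
        target_upper=datetime.time(17, 0),
        follow_task_ids_if_true="in_window",
        follow_task_ids_if_false="out_of_window",
        use_task_logical_date=True,
    )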
@@ -0,0 +1,39 @@
+ # Licensed to the Apache Software Foundation (ASF) under one
+ # or more contributor license agreements. See the NOTICE file
+ # distributed with this work for additional information
+ # regarding copyright ownership. The ASF licenses this file
+ # to you under the Apache License, Version 2.0 (the
+ # "License"); you may not use this file except in compliance
+ # with the License. You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing,
+ # software distributed under the License is distributed on an
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ # KIND, either express or implied. See the License for the
+ # specific language governing permissions and limitations
+ # under the License.
+ from __future__ import annotations
+
+ from typing import TYPE_CHECKING
+
+ from airflow.models.baseoperator import BaseOperator
+
+ if TYPE_CHECKING:
+     from airflow.sdk.definitions.context import Context
+
+
+ class EmptyOperator(BaseOperator):
+     """
+     Operator that does literally nothing.
+
+     It can be used to group tasks in a DAG.
+     The task is evaluated by the scheduler but never processed by the executor.
+     """
+
+     ui_color = "#e8f7e4"
+     inherits_from_empty_operator = True
+
+     def execute(self, context: Context):
+         pass
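
Since execute() is a no-op, the operator is purely structural; a common pattern is to use it as a fan-in point, sketched below with invented task ids:

    from airflow.providers.standard.operators.empty import EmptyOperator

    # "join" runs nothing itself; it only gives parallel branches a common
    # downstream anchor, e.g. extract_a >> join and extract_b >> join.
    join = EmptyOperator(task_id="join")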
@@ -0,0 +1,115 @@
+ #
+ # Licensed to the Apache Software Foundation (ASF) under one
+ # or more contributor license agreements. See the NOTICE file
+ # distributed with this work for additional information
+ # regarding copyright ownership. The ASF licenses this file
+ # to you under the Apache License, Version 2.0 (the
+ # "License"); you may not use this file except in compliance
+ # with the License. You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing,
+ # software distributed under the License is distributed on an
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ # KIND, either express or implied. See the License for the
+ # specific language governing permissions and limitations
+ # under the License.
+ """Contains an operator to run downstream tasks only for the latest scheduled DagRun."""
+
+ from __future__ import annotations
+
+ from collections.abc import Iterable
+ from typing import TYPE_CHECKING
+
+ import pendulum
+
+ from airflow.providers.standard.operators.branch import BaseBranchOperator
+ from airflow.providers.standard.version_compat import AIRFLOW_V_3_0_PLUS
+ from airflow.utils.types import DagRunType
+
+ if TYPE_CHECKING:
+     from airflow.models import DAG, DagRun
+     from airflow.timetables.base import DagRunInfo
+
+     try:
+         from airflow.sdk.definitions.context import Context
+     except ImportError:
+         # TODO: Remove once provider drops support for Airflow 2
+         from airflow.utils.context import Context
+
+
+ class LatestOnlyOperator(BaseBranchOperator):
+     """
+     Skip tasks that are not running during the most recent schedule interval.
+
+     If the task is run outside the latest schedule interval, all directly
+     downstream tasks will be skipped. Downstream tasks are never skipped if
+     the DagRun is manually triggered (``run_type == DagRunType.MANUAL``).
+
+     Note that when used with timetables that produce zero-length or point-in-time data intervals
+     (e.g., ``DeltaTriggerTimetable``), this operator assumes each run is the latest
+     and does not skip downstream tasks.
+     """
+
+     ui_color = "#e9ffdb"  # nyanza
+
+     def choose_branch(self, context: Context) -> str | Iterable[str]:
+         # If the DAG Run is externally triggered, then return without
+         # skipping downstream tasks
+         dag_run: DagRun = context["dag_run"]  # type: ignore[assignment]
+         if dag_run.run_type == DagRunType.MANUAL:
+             self.log.info("Manually triggered DAG_Run: allowing execution to proceed.")
+             return list(context["task"].get_direct_relative_ids(upstream=False))
+
+         next_info = self._get_next_run_info(context, dag_run)
+         now = pendulum.now("UTC")
+
+         if next_info is None:
+             self.log.info("Last scheduled execution: allowing execution to proceed.")
+             return list(context["task"].get_direct_relative_ids(upstream=False))
+
+         left_window, right_window = next_info.data_interval
+         self.log.info(
+             "Checking latest only with left_window: %s right_window: %s now: %s",
+             left_window,
+             right_window,
+             now,
+         )
+
+         if left_window == right_window:
+             self.log.info(
+                 "Zero-length interval [%s, %s) from timetable (%s); treating current run as latest.",
+                 left_window,
+                 right_window,
+                 self.dag.timetable.__class__,
+             )
+             return list(context["task"].get_direct_relative_ids(upstream=False))
+
+         if not left_window < now <= right_window:
+             self.log.info("Not latest execution, skipping downstream.")
+             # we return an empty list, thus the parent BaseBranchOperator
+             # won't exclude any downstream tasks from skipping.
+             return []
+         self.log.info("Latest, allowing execution to proceed.")
+         return list(context["task"].get_direct_relative_ids(upstream=False))
+
+     def _get_next_run_info(self, context: Context, dag_run: DagRun) -> DagRunInfo | None:
+         dag: DAG = context["dag"]  # type: ignore[assignment]
+
+         if AIRFLOW_V_3_0_PLUS:
+             from airflow.timetables.base import DataInterval, TimeRestriction
+
+             time_restriction = TimeRestriction(earliest=None, latest=None, catchup=True)
+             current_interval = DataInterval(start=dag_run.data_interval_start, end=dag_run.data_interval_end)
+
+             next_info = dag.timetable.next_dagrun_info(
+                 last_automated_data_interval=current_interval,
+                 restriction=time_restriction,
+             )
+
+         else:
+             next_info = dag.next_dagrun_info(dag.get_run_data_interval(dag_run), restricted=False)
+         return next_info
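
To show where this operator sits in a DAG, a minimal sketch (the schedule, dates, and task ids are illustrative): during catch-up, every run except the one whose data interval contains the current time skips its downstream tasks, while manual runs always proceed, per choose_branch above.

    import datetime

    from airflow import DAG
    from airflow.providers.standard.operators.empty import EmptyOperator
    from airflow.providers.standard.operators.latest_only import LatestOnlyOperator

    with DAG(
        dag_id="latest_only_example",
        schedule=datetime.timedelta(hours=4),
        start_date=datetime.datetime(2025, 1, 1),
        catchup=True,
    ) as dag:
        latest_only = LatestOnlyOperator(task_id="latest_only")
        notify = EmptyOperator(task_id="notify")

        # Backfilled runs fall outside the latest data interval, so "notify"
        # is skipped for them; only the most recent run (and manual runs) proceed.
        latest_only >> notify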