apache-airflow-providers-standard 0.1.0rc1__py3-none-any.whl → 1.0.0.dev1__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.

Potentially problematic release: this version of apache-airflow-providers-standard might be problematic.

Files changed (37)
  1. airflow/providers/standard/LICENSE +52 -0
  2. airflow/providers/standard/__init__.py +1 -23
  3. airflow/providers/standard/get_provider_info.py +7 -52
  4. airflow/providers/standard/operators/bash.py +28 -82
  5. airflow/providers/standard/operators/datetime.py +3 -8
  6. airflow/providers/standard/operators/weekday.py +4 -11
  7. airflow/providers/standard/sensors/bash.py +5 -11
  8. airflow/providers/standard/sensors/date_time.py +8 -32
  9. airflow/providers/standard/sensors/time.py +5 -28
  10. airflow/providers/standard/sensors/time_delta.py +10 -48
  11. airflow/providers/standard/sensors/weekday.py +2 -7
  12. {apache_airflow_providers_standard-0.1.0rc1.dist-info → apache_airflow_providers_standard-1.0.0.dev1.dist-info}/METADATA +36 -20
  13. apache_airflow_providers_standard-1.0.0.dev1.dist-info/RECORD +17 -0
  14. {apache_airflow_providers_standard-0.1.0rc1.dist-info → apache_airflow_providers_standard-1.0.0.dev1.dist-info}/WHEEL +1 -1
  15. airflow/providers/standard/hooks/__init__.py +0 -16
  16. airflow/providers/standard/hooks/filesystem.py +0 -89
  17. airflow/providers/standard/hooks/package_index.py +0 -95
  18. airflow/providers/standard/hooks/subprocess.py +0 -119
  19. airflow/providers/standard/operators/empty.py +0 -39
  20. airflow/providers/standard/operators/generic_transfer.py +0 -138
  21. airflow/providers/standard/operators/latest_only.py +0 -83
  22. airflow/providers/standard/operators/python.py +0 -1132
  23. airflow/providers/standard/operators/trigger_dagrun.py +0 -292
  24. airflow/providers/standard/sensors/external_task.py +0 -509
  25. airflow/providers/standard/sensors/filesystem.py +0 -158
  26. airflow/providers/standard/sensors/python.py +0 -85
  27. airflow/providers/standard/triggers/__init__.py +0 -16
  28. airflow/providers/standard/triggers/external_task.py +0 -211
  29. airflow/providers/standard/triggers/file.py +0 -131
  30. airflow/providers/standard/triggers/temporal.py +0 -114
  31. airflow/providers/standard/utils/__init__.py +0 -16
  32. airflow/providers/standard/utils/python_virtualenv.py +0 -209
  33. airflow/providers/standard/utils/python_virtualenv_script.jinja2 +0 -77
  34. airflow/providers/standard/utils/sensor_helper.py +0 -119
  35. airflow/providers/standard/version_compat.py +0 -36
  36. apache_airflow_providers_standard-0.1.0rc1.dist-info/RECORD +0 -38
  37. {apache_airflow_providers_standard-0.1.0rc1.dist-info → apache_airflow_providers_standard-1.0.0.dev1.dist-info}/entry_points.txt +0 -0
@@ -199,3 +199,55 @@ distributed under the License is distributed on an "AS IS" BASIS,
  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  See the License for the specific language governing permissions and
  limitations under the License.
+
+ ============================================================================
+ APACHE AIRFLOW SUBCOMPONENTS:
+
+ The Apache Airflow project contains subcomponents with separate copyright
+ notices and license terms. Your use of the source code for the these
+ subcomponents is subject to the terms and conditions of the following
+ licenses.
+
+
+ ========================================================================
+ Third party Apache 2.0 licenses
+ ========================================================================
+
+ The following components are provided under the Apache 2.0 License.
+ See project link for details. The text of each license is also included
+ at 3rd-party-licenses/LICENSE-[project].txt.
+
+ (ALv2 License) hue v4.3.0 (https://github.com/cloudera/hue/)
+ (ALv2 License) jqclock v2.3.0 (https://github.com/JohnRDOrazio/jQuery-Clock-Plugin)
+ (ALv2 License) bootstrap3-typeahead v4.0.2 (https://github.com/bassjobsen/Bootstrap-3-Typeahead)
+ (ALv2 License) connexion v2.7.0 (https://github.com/zalando/connexion)
+
+ ========================================================================
+ MIT licenses
+ ========================================================================
+
+ The following components are provided under the MIT License. See project link for details.
+ The text of each license is also included at 3rd-party-licenses/LICENSE-[project].txt.
+
+ (MIT License) jquery v3.5.1 (https://jquery.org/license/)
+ (MIT License) dagre-d3 v0.6.4 (https://github.com/cpettitt/dagre-d3)
+ (MIT License) bootstrap v3.4.1 (https://github.com/twbs/bootstrap/)
+ (MIT License) d3-tip v0.9.1 (https://github.com/Caged/d3-tip)
+ (MIT License) dataTables v1.10.25 (https://datatables.net)
+ (MIT License) normalize.css v3.0.2 (http://necolas.github.io/normalize.css/)
+ (MIT License) ElasticMock v1.3.2 (https://github.com/vrcmarcos/elasticmock)
+ (MIT License) MomentJS v2.24.0 (http://momentjs.com/)
+ (MIT License) eonasdan-bootstrap-datetimepicker v4.17.49 (https://github.com/eonasdan/bootstrap-datetimepicker/)
+
+ ========================================================================
+ BSD 3-Clause licenses
+ ========================================================================
+ The following components are provided under the BSD 3-Clause license. See project links for details.
+ The text of each license is also included at 3rd-party-licenses/LICENSE-[project].txt.
+
+ (BSD 3 License) d3 v5.16.0 (https://d3js.org)
+ (BSD 3 License) d3-shape v2.1.0 (https://github.com/d3/d3-shape)
+ (BSD 3 License) cgroupspy 0.2.1 (https://github.com/cloudsigma/cgroupspy)
+
+ ========================================================================
+ See 3rd-party-licenses/LICENSES-ui.txt for packages used in `/airflow/www`
@@ -1,3 +1,4 @@
+ #
  # Licensed to the Apache Software Foundation (ASF) under one
  # or more contributor license agreements. See the NOTICE file
  # distributed with this work for additional information
@@ -14,26 +15,3 @@
  # KIND, either express or implied. See the License for the
  # specific language governing permissions and limitations
  # under the License.
- #
- # NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE
- # OVERWRITTEN WHEN PREPARING DOCUMENTATION FOR THE PACKAGES.
- #
- # IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE
- # `PROVIDER__INIT__PY_TEMPLATE.py.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY
- #
- from __future__ import annotations
-
- import packaging.version
-
- from airflow import __version__ as airflow_version
-
- __all__ = ["__version__"]
-
- __version__ = "0.1.0"
-
- if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
- "2.9.0"
- ):
- raise RuntimeError(
- f"The package `apache-airflow-providers-standard:{__version__}` needs Apache Airflow 2.9.0+"
- )
@@ -15,7 +15,8 @@
  # specific language governing permissions and limitations
  # under the License.

- # NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE OVERWRITTEN!
+ # NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE
+ # OVERWRITTEN WHEN PREPARING PACKAGES.
  #
  # IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE
  # `get_provider_info_TEMPLATE.py.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY
@@ -26,19 +27,16 @@ def get_provider_info():
  "package-name": "apache-airflow-providers-standard",
  "name": "Standard",
  "description": "Airflow Standard Provider\n",
- "state": "ready",
- "source-date-epoch": 1739964539,
- "versions": ["0.1.0", "0.0.3", "0.0.2", "0.0.1"],
+ "state": "not-ready",
+ "source-date-epoch": 1718603992,
+ "versions": ["1.0.0"],
+ "dependencies": ["apache-airflow>=2.10.0"],
  "integrations": [
  {
  "integration-name": "Standard",
  "external-doc-url": "https://airflow.apache.org/",
  "tags": ["apache"],
- "how-to-guide": [
- "/docs/apache-airflow-providers-standard/operators/bash.rst",
- "/docs/apache-airflow-providers-standard/operators/python.rst",
- "/docs/apache-airflow-providers-standard/operators/datetime.rst",
- ],
+ "how-to-guide": ["/docs/apache-airflow-providers-standard/operators.rst"],
  }
  ],
  "operators": [
@@ -48,11 +46,6 @@ def get_provider_info():
  "airflow.providers.standard.operators.datetime",
  "airflow.providers.standard.operators.weekday",
  "airflow.providers.standard.operators.bash",
- "airflow.providers.standard.operators.python",
- "airflow.providers.standard.operators.empty",
- "airflow.providers.standard.operators.generic_transfer",
- "airflow.providers.standard.operators.trigger_dagrun",
- "airflow.providers.standard.operators.latest_only",
  ],
  }
  ],
@@ -65,45 +58,7 @@ def get_provider_info():
  "airflow.providers.standard.sensors.time",
  "airflow.providers.standard.sensors.weekday",
  "airflow.providers.standard.sensors.bash",
- "airflow.providers.standard.sensors.python",
- "airflow.providers.standard.sensors.filesystem",
- "airflow.providers.standard.sensors.external_task",
- ],
- }
- ],
- "hooks": [
- {
- "integration-name": "Standard",
- "python-modules": [
- "airflow.providers.standard.hooks.filesystem",
- "airflow.providers.standard.hooks.package_index",
- "airflow.providers.standard.hooks.subprocess",
  ],
  }
  ],
- "triggers": [
- {
- "integration-name": "Standard",
- "python-modules": [
- "airflow.providers.standard.triggers.external_task",
- "airflow.providers.standard.triggers.file",
- "airflow.providers.standard.triggers.temporal",
- ],
- }
- ],
- "config": {
- "standard": {
- "description": "Options for the standard provider operators.",
- "options": {
- "venv_install_method": {
- "description": "Which python tooling should be used to install the virtual environment.\n\nThe following options are available:\n- ``auto``: Automatically select, use ``uv`` if available, otherwise use ``pip``.\n- ``pip``: Use pip to install the virtual environment.\n- ``uv``: Use uv to install the virtual environment. Must be available in environment PATH.\n",
- "version_added": None,
- "type": "string",
- "example": "uv",
- "default": "auto",
- }
- },
- }
- },
- "dependencies": ["apache-airflow>=2.9.0", "apache-airflow-providers-common-sql>=1.20.0"],
  }
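The get_provider_info.py hunks above change the provider metadata: state, source-date-epoch, the versions list, the minimum Airflow dependency, and which operator, sensor, hook, trigger, and config entries are registered. A minimal sketch (not part of the diff) of reading that metadata at runtime; the commented values are illustrative and depend on which wheel is installed:

from airflow.providers.standard.get_provider_info import get_provider_info

info = get_provider_info()
print(info["package-name"])      # "apache-airflow-providers-standard"
print(info["versions"])          # e.g. ["1.0.0"] on the 1.0.0.dev1 side
print(info.get("dependencies"))  # e.g. ["apache-airflow>=2.10.0"]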
@@ -19,26 +19,19 @@ from __future__ import annotations

  import os
  import shutil
- import tempfile
- from collections.abc import Container, Sequence
+ import warnings
  from functools import cached_property
- from typing import TYPE_CHECKING, Any, Callable, cast
+ from typing import TYPE_CHECKING, Any, Callable, Container, Sequence, cast

  from airflow.exceptions import AirflowException, AirflowSkipException
+ from airflow.hooks.subprocess import SubprocessHook
  from airflow.models.baseoperator import BaseOperator
- from airflow.providers.standard.hooks.subprocess import SubprocessHook, SubprocessResult, working_directory
  from airflow.utils.operator_helpers import context_to_airflow_vars
- from airflow.utils.session import NEW_SESSION, provide_session
  from airflow.utils.types import ArgNotSet

  if TYPE_CHECKING:
- from sqlalchemy.orm import Session as SASession
-
- try:
- from airflow.sdk.definitions.context import Context
- except ImportError:
- # TODO: Remove once provider drops support for Airflow 2
- from airflow.utils.context import Context
+ from airflow.models.taskinstance import TaskInstance
+ from airflow.utils.context import Context


  class BashOperator(BaseOperator):
@@ -69,10 +62,7 @@ class BashOperator(BaseOperator):
  :param cwd: Working directory to execute the command in (templated).
  If None (default), the command is run in a temporary directory.
  To use current DAG folder as the working directory,
- you might set template ``{{ task.dag.folder }}``.
- When bash_command is a '.sh' or '.bash' file, Airflow must have write
- access to the working directory. The script will be rendered (Jinja
- template) into a new temporary file in this directory.
+ you might set template ``{{ dag_run.dag.folder }}``.
  :param output_processor: Function to further process the output of the bash script
  (default is lambda output: output).

@@ -103,18 +93,14 @@ class BashOperator(BaseOperator):

  .. code-block:: python

- bash_command = "set -e; python3 script.py '{{ data_interval_end }}'"
+ bash_command = "set -e; python3 script.py '{{ next_execution_date }}'"

  .. note::

- To simply execute a ``.sh`` or ``.bash`` script (without any Jinja template), add a space after the
- script name ``bash_command`` argument -- for example ``bash_command="my_script.sh "``. This
- is because Airflow tries to load this file and process it as a Jinja template when
- it ends with ``.sh`` or ``.bash``.
-
- If you have Jinja template in your script, do not put any blank space. And add the script's directory
- in the DAG's ``template_searchpath``. If you specify a ``cwd``, Airflow must have write access to
- this directory. The script will be rendered (Jinja template) into a new temporary file in this directory.
+ Add a space after the script name when directly calling a ``.sh`` script with the
+ ``bash_command`` argument -- for example ``bash_command="my_script.sh "``. This
+ is because Airflow tries to apply load this file and process it as a Jinja template to
+ it ends with ``.sh``, which will likely not be what most users want.

  .. warning::

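Both wordings of the note above describe the same trailing-space rule for ``bash_command``. A short sketch of the two spellings it distinguishes; the task_ids and script name are illustrative and not taken from the diff:

from airflow.providers.standard.operators.bash import BashOperator

# Trailing space: the string is passed to bash as-is and is not loaded as a template file.
run_script_literally = BashOperator(task_id="run_script_literally", bash_command="my_script.sh ")

# No trailing space: Airflow loads my_script.sh and renders it as a Jinja template first,
# so the script's directory must be reachable via the DAG's template_searchpath.
render_script = BashOperator(task_id="render_script", bash_command="my_script.sh")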
@@ -163,6 +149,7 @@ class BashOperator(BaseOperator):
  env: dict[str, str] | None = None,
  append_env: bool = False,
  output_encoding: str = "utf-8",
+ skip_exit_code: int | None = None,
  skip_on_exit_code: int | Container[int] | None = 99,
  cwd: str | None = None,
  output_processor: Callable[[str], Any] = lambda result: result,
@@ -172,6 +159,11 @@ class BashOperator(BaseOperator):
  self.bash_command = bash_command
  self.env = env
  self.output_encoding = output_encoding
+ if skip_exit_code is not None:
+ warnings.warn(
+ "skip_exit_code is deprecated. Please use skip_on_exit_code", DeprecationWarning, stacklevel=2
+ )
+ skip_on_exit_code = skip_exit_code
  self.skip_on_exit_code = (
  skip_on_exit_code
  if isinstance(skip_on_exit_code, Container)
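The added lines accept the deprecated ``skip_exit_code`` argument and forward it to ``skip_on_exit_code``. A sketch of the non-deprecated spelling the warning points to; the task_id and command are illustrative:

from airflow.providers.standard.operators.bash import BashOperator

# skip_on_exit_code takes an int or a container of ints; the default is 99.
# A matching exit code raises AirflowSkipException instead of failing the task.
maybe_skip = BashOperator(
    task_id="maybe_skip",
    bash_command="exit 99",
    skip_on_exit_code=99,
)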
@@ -188,20 +180,13 @@ class BashOperator(BaseOperator):
  # determine whether the bash_command value needs to re-rendered.
  self._init_bash_command_not_set = isinstance(self.bash_command, ArgNotSet)

- # Keep a copy of the original bash_command, without the Jinja template rendered.
- # This is later used to determine if the bash_command is a script or an inline string command.
- # We do this later, because the bash_command is not available in __init__ when using @task.bash.
- self._unrendered_bash_command: str | ArgNotSet = bash_command
-
  @cached_property
  def subprocess_hook(self):
  """Returns hook for running the bash command."""
  return SubprocessHook()

- # TODO: This should be replaced with Task SDK API call
  @staticmethod
- @provide_session
- def refresh_bash_command(ti, session: SASession = NEW_SESSION) -> None:
+ def refresh_bash_command(ti: TaskInstance) -> None:
  """
  Rewrite the underlying rendered bash_command value for a task instance in the metadatabase.

@@ -213,13 +198,9 @@ class BashOperator(BaseOperator):
  """
  from airflow.models.renderedtifields import RenderedTaskInstanceFields

- """Update rendered task instance fields for cases where runtime evaluated, not templated."""
-
- rtif = RenderedTaskInstanceFields(ti)
- RenderedTaskInstanceFields.write(rtif, session=session)
- RenderedTaskInstanceFields.delete_old_records(ti.task_id, ti.dag_id, session=session)
+ RenderedTaskInstanceFields._update_runtime_evaluated_template_fields(ti)

- def get_env(self, context) -> dict:
+ def get_env(self, context):
  """Build the set of environment variables to be exposed for the bash command."""
  system_env = os.environ.copy()
  env = self.env
@@ -239,7 +220,7 @@ class BashOperator(BaseOperator):
  return env

  def execute(self, context: Context):
- bash_path: str = shutil.which("bash") or "bash"
+ bash_path = shutil.which("bash") or "bash"
  if self.cwd is not None:
  if not os.path.exists(self.cwd):
  raise AirflowException(f"Can not find the cwd: {self.cwd}")
@@ -253,17 +234,15 @@ class BashOperator(BaseOperator):
  # Both will ensure the correct Bash command is executed and that the Rendered Template view in the UI
  # displays the executed command (otherwise it will display as an ArgNotSet type).
  if self._init_bash_command_not_set:
- is_inline_command = self._is_inline_command(bash_command=cast(str, self.bash_command))
- ti = context["ti"]
+ ti = cast("TaskInstance", context["ti"])
  self.refresh_bash_command(ti)
- else:
- is_inline_command = self._is_inline_command(bash_command=cast(str, self._unrendered_bash_command))
-
- if is_inline_command:
- result = self._run_inline_command(bash_path=bash_path, env=env)
- else:
- result = self._run_rendered_script_file(bash_path=bash_path, env=env)

+ result = self.subprocess_hook.run_command(
+ command=[bash_path, "-c", self.bash_command],
+ env=env,
+ output_encoding=self.output_encoding,
+ cwd=self.cwd,
+ )
  if result.exit_code in self.skip_on_exit_code:
  raise AirflowSkipException(f"Bash command returned exit code {result.exit_code}. Skipping.")
  elif result.exit_code != 0:
@@ -273,38 +252,5 @@ class BashOperator(BaseOperator):

  return self.output_processor(result.output)

- def _run_inline_command(self, bash_path: str, env: dict) -> SubprocessResult:
- """Pass the bash command as string directly in the subprocess."""
- return self.subprocess_hook.run_command(
- command=[bash_path, "-c", self.bash_command],
- env=env,
- output_encoding=self.output_encoding,
- cwd=self.cwd,
- )
-
- def _run_rendered_script_file(self, bash_path: str, env: dict) -> SubprocessResult:
- """
- Save the bash command into a file and execute this file.
-
- This allows for longer commands, and prevents "Argument list too long error".
- """
- with working_directory(cwd=self.cwd) as cwd:
- with tempfile.NamedTemporaryFile(mode="w", dir=cwd, suffix=".sh") as file:
- file.write(cast(str, self.bash_command))
- file.flush()
-
- bash_script = os.path.basename(file.name)
- return self.subprocess_hook.run_command(
- command=[bash_path, bash_script],
- env=env,
- output_encoding=self.output_encoding,
- cwd=cwd,
- )
-
- @classmethod
- def _is_inline_command(cls, bash_command: str) -> bool:
- """Return True if the bash command is an inline string. False if it's a bash script file."""
- return not bash_command.endswith(tuple(cls.template_ext))
-
  def on_kill(self) -> None:
  self.subprocess_hook.send_sigterm()
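Both sides ultimately run the command through ``SubprocessHook``: the removed helpers route script-like commands through a rendered temporary file, while the added call always passes the rendered string via ``bash -c``. A minimal standalone sketch of that hook call, using the ``airflow.hooks.subprocess`` import path from the right-hand side of the diff; the command and keyword values are illustrative:

import shutil

from airflow.hooks.subprocess import SubprocessHook

hook = SubprocessHook()
bash_path = shutil.which("bash") or "bash"
result = hook.run_command(
    command=[bash_path, "-c", "echo hello"],
    env=None,                  # None falls back to the current process environment
    output_encoding="utf-8",
    cwd=None,                  # None runs the command in a temporary directory
)
print(result.exit_code, result.output)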
@@ -17,19 +17,14 @@
  from __future__ import annotations

  import datetime
- from collections.abc import Iterable
- from typing import TYPE_CHECKING
+ from typing import TYPE_CHECKING, Iterable

  from airflow.exceptions import AirflowException
  from airflow.operators.branch import BaseBranchOperator
  from airflow.utils import timezone

  if TYPE_CHECKING:
- try:
- from airflow.sdk.definitions.context import Context
- except ImportError:
- # TODO: Remove once provider drops support for Airflow 2
- from airflow.utils.context import Context
+ from airflow.utils.context import Context


  class BranchDateTimeOperator(BaseBranchOperator):
@@ -49,7 +44,7 @@ class BranchDateTimeOperator(BaseBranchOperator):
  :param target_lower: target lower bound.
  :param target_upper: target upper bound.
  :param use_task_logical_date: If ``True``, uses task's logical date to compare with targets.
- Logical date is useful for backfilling. If ``False``, uses system's date.
+ Execution date is useful for backfilling. If ``False``, uses system's date.
  """

  def __init__(
@@ -17,19 +17,14 @@
  # under the License.
  from __future__ import annotations

- from collections.abc import Iterable
- from typing import TYPE_CHECKING
+ from typing import TYPE_CHECKING, Iterable

  from airflow.operators.branch import BaseBranchOperator
  from airflow.utils import timezone
  from airflow.utils.weekday import WeekDay

  if TYPE_CHECKING:
- try:
- from airflow.sdk.definitions.context import Context
- except ImportError:
- # TODO: Remove once provider drops support for Airflow 2
- from airflow.utils.context import Context
+ from airflow.utils.context import Context


  class BranchDayOfWeekOperator(BaseBranchOperator):
@@ -43,8 +38,7 @@ class BranchDayOfWeekOperator(BaseBranchOperator):

  .. code-block:: python

- from airflow.providers.standard.operators.empty import EmptyOperator
- from airflow.operators.weekday import BranchDayOfWeekOperator
+ from airflow.operators.empty import EmptyOperator

  monday = EmptyOperator(task_id="monday")
  other_day = EmptyOperator(task_id="other_day")
@@ -64,8 +58,7 @@ class BranchDayOfWeekOperator(BaseBranchOperator):

  # import WeekDay Enum
  from airflow.utils.weekday import WeekDay
- from airflow.providers.standard.operators.empty import EmptyOperator
- from airflow.operators.weekday import BranchDayOfWeekOperator
+ from airflow.operators.empty import EmptyOperator

  workday = EmptyOperator(task_id="workday")
  weekend = EmptyOperator(task_id="weekend")
@@ -18,20 +18,15 @@
  from __future__ import annotations

  import os
- from collections.abc import Sequence
  from subprocess import PIPE, STDOUT, Popen
  from tempfile import NamedTemporaryFile, TemporaryDirectory, gettempdir
- from typing import TYPE_CHECKING
+ from typing import TYPE_CHECKING, Sequence

  from airflow.exceptions import AirflowFailException
  from airflow.sensors.base import BaseSensorOperator

  if TYPE_CHECKING:
- try:
- from airflow.sdk.definitions.context import Context
- except ImportError:
- # TODO: Remove once provider drops support for Airflow 2
- from airflow.utils.context import Context
+ from airflow.utils.context import Context


  class BashSensor(BaseSensorOperator):
@@ -75,10 +70,9 @@ class BashSensor(BaseSensorOperator):
  """Execute the bash command in a temporary directory."""
  bash_command = self.bash_command
  self.log.info("Tmp dir root location: %s", gettempdir())
- with (
- TemporaryDirectory(prefix="airflowtmp") as tmp_dir,
- NamedTemporaryFile(dir=tmp_dir, prefix=self.task_id) as f,
- ):
+ with TemporaryDirectory(prefix="airflowtmp") as tmp_dir, NamedTemporaryFile(
+ dir=tmp_dir, prefix=self.task_id
+ ) as f:
  f.write(bytes(bash_command, "utf_8"))
  f.flush()
  fname = f.name
@@ -18,37 +18,15 @@
  from __future__ import annotations

  import datetime
- from collections.abc import Sequence
- from dataclasses import dataclass
- from typing import TYPE_CHECKING, Any, NoReturn
+ from typing import TYPE_CHECKING, Any, NoReturn, Sequence

- from airflow.providers.standard.triggers.temporal import DateTimeTrigger
- from airflow.providers.standard.version_compat import AIRFLOW_V_3_0_PLUS
  from airflow.sensors.base import BaseSensorOperator
-
- try:
- from airflow.triggers.base import StartTriggerArgs
- except ImportError:
- # TODO: Remove this when min airflow version is 2.10.0 for standard provider
- @dataclass
- class StartTriggerArgs: # type: ignore[no-redef]
- """Arguments required for start task execution from triggerer."""
-
- trigger_cls: str
- next_method: str
- trigger_kwargs: dict[str, Any] | None = None
- next_kwargs: dict[str, Any] | None = None
- timeout: datetime.timedelta | None = None
-
-
+ from airflow.triggers.base import StartTriggerArgs
+ from airflow.triggers.temporal import DateTimeTrigger
  from airflow.utils import timezone

  if TYPE_CHECKING:
- try:
- from airflow.sdk.definitions.context import Context
- except ImportError:
- # TODO: Remove once provider drops support for Airflow 2
- from airflow.utils.context import Context
+ from airflow.utils.context import Context


  class DateTimeSensor(BaseSensorOperator):
@@ -59,7 +37,7 @@ class DateTimeSensor(BaseSensorOperator):
  It handles some cases for which ``TimeSensor`` and ``TimeDeltaSensor`` are not suited.

  **Example** 1 :
- If a task needs to wait for 11am on each ``logical_date``. Using
+ If a task needs to wait for 11am on each ``execution_date``. Using
  ``TimeSensor`` or ``TimeDeltaSensor``, all backfill tasks started at
  1am have to wait for 10 hours. This is unnecessary, e.g. a backfill
  task with ``{{ ds }} = '1970-01-01'`` does not need to wait because
@@ -74,7 +52,7 @@ class DateTimeSensor(BaseSensorOperator):

  DateTimeSensor(
  task_id="wait_for_0100",
- target_time="{{ data_interval_end.tomorrow().replace(hour=1) }}",
+ target_time="{{ next_execution_date.tomorrow().replace(hour=1) }}",
  )

  :param target_time: datetime after which the job succeeds. (templated)
@@ -115,7 +93,7 @@ class DateTimeSensorAsync(DateTimeSensor):
  """

  start_trigger_args = StartTriggerArgs(
- trigger_cls="airflow.providers.standard.triggers.temporal.DateTimeTrigger",
+ trigger_cls="airflow.triggers.temporal.DateTimeTrigger",
  trigger_kwargs={"moment": "", "end_from_trigger": False},
  next_method="execute_complete",
  next_kwargs=None,
@@ -147,9 +125,7 @@ class DateTimeSensorAsync(DateTimeSensor):
  trigger=DateTimeTrigger(
  moment=timezone.parse(self.target_time),
  end_from_trigger=self.end_from_trigger,
- )
- if AIRFLOW_V_3_0_PLUS
- else DateTimeTrigger(moment=timezone.parse(self.target_time)),
+ ),
  )

  def execute_complete(self, context: Context, event: Any = None) -> None:
@@ -18,36 +18,15 @@
  from __future__ import annotations

  import datetime
- from dataclasses import dataclass
  from typing import TYPE_CHECKING, Any, NoReturn

- from airflow.providers.standard.triggers.temporal import DateTimeTrigger
- from airflow.providers.standard.version_compat import AIRFLOW_V_2_10_PLUS
  from airflow.sensors.base import BaseSensorOperator
-
- try:
- from airflow.triggers.base import StartTriggerArgs
- except ImportError:
- # TODO: Remove this when min airflow version is 2.10.0 for standard provider
- @dataclass
- class StartTriggerArgs: # type: ignore[no-redef]
- """Arguments required for start task execution from triggerer."""
-
- trigger_cls: str
- next_method: str
- trigger_kwargs: dict[str, Any] | None = None
- next_kwargs: dict[str, Any] | None = None
- timeout: datetime.timedelta | None = None
-
-
+ from airflow.triggers.base import StartTriggerArgs
+ from airflow.triggers.temporal import DateTimeTrigger
  from airflow.utils import timezone

  if TYPE_CHECKING:
- try:
- from airflow.sdk.definitions.context import Context
- except ImportError:
- # TODO: Remove once provider drops support for Airflow 2
- from airflow.utils.context import Context
+ from airflow.utils.context import Context


  class TimeSensor(BaseSensorOperator):
@@ -89,7 +68,7 @@ class TimeSensorAsync(BaseSensorOperator):
  """

  start_trigger_args = StartTriggerArgs(
- trigger_cls="airflow.providers.standard.triggers.temporal.DateTimeTrigger",
+ trigger_cls="airflow.triggers.temporal.DateTimeTrigger",
  trigger_kwargs={"moment": "", "end_from_trigger": False},
  next_method="execute_complete",
  next_kwargs=None,
@@ -123,9 +102,7 @@ class TimeSensorAsync(BaseSensorOperator):

  def execute(self, context: Context) -> NoReturn:
  self.defer(
- trigger=DateTimeTrigger(moment=self.target_datetime, end_from_trigger=self.end_from_trigger)
- if AIRFLOW_V_2_10_PLUS
- else DateTimeTrigger(moment=self.target_datetime),
+ trigger=DateTimeTrigger(moment=self.target_datetime, end_from_trigger=self.end_from_trigger),
  method_name="execute_complete",
  )