apache-airflow-providers-standard 0.0.2__py3-none-any.whl → 0.0.3__py3-none-any.whl

This diff shows the content of publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (23)
  1. airflow/providers/standard/__init__.py +3 -3
  2. airflow/providers/standard/get_provider_info.py +15 -3
  3. airflow/providers/standard/operators/bash.py +17 -10
  4. airflow/providers/standard/operators/latest_only.py +78 -0
  5. airflow/providers/standard/operators/python.py +33 -53
  6. airflow/providers/standard/operators/trigger_dagrun.py +1 -10
  7. airflow/providers/standard/sensors/date_time.py +3 -3
  8. airflow/providers/standard/sensors/external_task.py +512 -0
  9. airflow/providers/standard/sensors/filesystem.py +18 -3
  10. airflow/providers/standard/sensors/time.py +3 -3
  11. airflow/providers/standard/sensors/time_delta.py +22 -3
  12. airflow/providers/standard/{utils/version_references.py → triggers/__init__.py} +0 -10
  13. airflow/providers/standard/triggers/external_task.py +216 -0
  14. airflow/providers/standard/triggers/file.py +77 -0
  15. airflow/providers/standard/triggers/temporal.py +114 -0
  16. airflow/providers/standard/utils/python_virtualenv_script.jinja2 +2 -2
  17. airflow/providers/standard/utils/sensor_helper.py +123 -0
  18. airflow/providers/standard/version_compat.py +36 -0
  19. {apache_airflow_providers_standard-0.0.2.dist-info → apache_airflow_providers_standard-0.0.3.dist-info}/METADATA +9 -9
  20. apache_airflow_providers_standard-0.0.3.dist-info/RECORD +37 -0
  21. apache_airflow_providers_standard-0.0.2.dist-info/RECORD +0 -30
  22. {apache_airflow_providers_standard-0.0.2.dist-info → apache_airflow_providers_standard-0.0.3.dist-info}/WHEEL +0 -0
  23. {apache_airflow_providers_standard-0.0.2.dist-info → apache_airflow_providers_standard-0.0.3.dist-info}/entry_points.txt +0 -0
airflow/providers/standard/__init__.py
@@ -29,11 +29,11 @@ from airflow import __version__ as airflow_version
 
 __all__ = ["__version__"]
 
-__version__ = "0.0.2"
+__version__ = "0.0.3"
 
 if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
-    "2.8.0"
+    "2.9.0"
 ):
     raise RuntimeError(
-        f"The package `apache-airflow-providers-standard:{__version__}` needs Apache Airflow 2.8.0+"
+        f"The package `apache-airflow-providers-standard:{__version__}` needs Apache Airflow 2.9.0+"
     )
airflow/providers/standard/get_provider_info.py
@@ -28,9 +28,9 @@ def get_provider_info():
         "name": "Standard",
         "description": "Airflow Standard Provider\n",
         "state": "ready",
-        "source-date-epoch": 1732434919,
-        "versions": ["0.0.2", "0.0.1"],
-        "dependencies": ["apache-airflow>=2.8.0", "apache-airflow-providers-common-sql>=1.20.0"],
+        "source-date-epoch": 1734536895,
+        "versions": ["0.0.3", "0.0.2", "0.0.1"],
+        "dependencies": ["apache-airflow>=2.9.0", "apache-airflow-providers-common-sql>=1.20.0"],
         "integrations": [
             {
                 "integration-name": "Standard",
@@ -53,6 +53,7 @@ def get_provider_info():
                     "airflow.providers.standard.operators.python",
                     "airflow.providers.standard.operators.generic_transfer",
                     "airflow.providers.standard.operators.trigger_dagrun",
+                    "airflow.providers.standard.operators.latest_only",
                 ],
             }
         ],
@@ -67,6 +68,7 @@ def get_provider_info():
                     "airflow.providers.standard.sensors.bash",
                     "airflow.providers.standard.sensors.python",
                     "airflow.providers.standard.sensors.filesystem",
+                    "airflow.providers.standard.sensors.external_task",
                 ],
             }
         ],
@@ -80,6 +82,16 @@ def get_provider_info():
                 ],
             }
         ],
+        "triggers": [
+            {
+                "integration-name": "Standard",
+                "python-modules": [
+                    "airflow.providers.standard.triggers.external_task",
+                    "airflow.providers.standard.triggers.file",
+                    "airflow.providers.standard.triggers.temporal",
+                ],
+            }
+        ],
         "config": {
             "standard": {
                 "description": "Options for the standard provider operators.",
airflow/providers/standard/operators/bash.py
@@ -20,7 +20,6 @@ from __future__ import annotations
 import os
 import shutil
 import tempfile
-import warnings
 from collections.abc import Container, Sequence
 from functools import cached_property
 from typing import TYPE_CHECKING, Any, Callable, cast
@@ -29,9 +28,12 @@ from airflow.exceptions import AirflowException, AirflowSkipException
 from airflow.models.baseoperator import BaseOperator
 from airflow.providers.standard.hooks.subprocess import SubprocessHook, SubprocessResult, working_directory
 from airflow.utils.operator_helpers import context_to_airflow_vars
+from airflow.utils.session import NEW_SESSION, provide_session
 from airflow.utils.types import ArgNotSet
 
 if TYPE_CHECKING:
+    from sqlalchemy.orm import Session as SASession
+
     from airflow.models.taskinstance import TaskInstance
     from airflow.utils.context import Context
 
@@ -158,7 +160,6 @@ class BashOperator(BaseOperator):
         env: dict[str, str] | None = None,
         append_env: bool = False,
         output_encoding: str = "utf-8",
-        skip_exit_code: int | None = None,
         skip_on_exit_code: int | Container[int] | None = 99,
         cwd: str | None = None,
         output_processor: Callable[[str], Any] = lambda result: result,
@@ -168,11 +169,6 @@
         self.bash_command = bash_command
         self.env = env
         self.output_encoding = output_encoding
-        if skip_exit_code is not None:
-            warnings.warn(
-                "skip_exit_code is deprecated. Please use skip_on_exit_code", DeprecationWarning, stacklevel=2
-            )
-            skip_on_exit_code = skip_exit_code
         self.skip_on_exit_code = (
             skip_on_exit_code
             if isinstance(skip_on_exit_code, Container)
@@ -199,8 +195,10 @@
         """Returns hook for running the bash command."""
         return SubprocessHook()
 
+    # TODO: This should be replaced with Task SDK API call
     @staticmethod
-    def refresh_bash_command(ti: TaskInstance) -> None:
+    @provide_session
+    def refresh_bash_command(ti: TaskInstance, session: SASession = NEW_SESSION) -> None:
         """
         Rewrite the underlying rendered bash_command value for a task instance in the metadatabase.
 
@@ -212,7 +210,16 @@
         """
         from airflow.models.renderedtifields import RenderedTaskInstanceFields
 
-        RenderedTaskInstanceFields._update_runtime_evaluated_template_fields(ti)
+        """Update rendered task instance fields for cases where runtime evaluated, not templated."""
+        # Note: Need lazy import to break the partly loaded class loop
+        from airflow.models.taskinstance import TaskInstance
+
+        # If called via remote API the DAG needs to be re-loaded
+        TaskInstance.ensure_dag(ti, session=session)
+
+        rtif = RenderedTaskInstanceFields(ti)
+        RenderedTaskInstanceFields.write(rtif, session=session)
+        RenderedTaskInstanceFields.delete_old_records(ti.task_id, ti.dag_id, session=session)
 
     def get_env(self, context) -> dict:
         """Build the set of environment variables to be exposed for the bash command."""
@@ -249,7 +256,7 @@
         # displays the executed command (otherwise it will display as an ArgNotSet type).
         if self._init_bash_command_not_set:
             is_inline_command = self._is_inline_command(bash_command=cast(str, self.bash_command))
-            ti = cast("TaskInstance", context["ti"])
+            ti = context["ti"]
            self.refresh_bash_command(ti)
         else:
             is_inline_command = self._is_inline_command(bash_command=cast(str, self._unrendered_bash_command))
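Note: the deprecated skip_exit_code argument is removed from BashOperator in 0.0.3, so a DAG that still passes it will now fail with an unexpected-keyword-argument error instead of emitting a DeprecationWarning. A minimal sketch of the replacement using skip_on_exit_code, which the diff above keeps; the task id and command here are illustrative, not part of this release:

    from airflow.providers.standard.operators.bash import BashOperator

    check_file = BashOperator(
        task_id="check_file",                  # hypothetical task id
        bash_command="test -s /tmp/data.csv",  # exits 1 when the file is missing or empty
        skip_on_exit_code=1,                   # skip the task instead of failing it on exit code 1
    )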
airflow/providers/standard/operators/latest_only.py
@@ -0,0 +1,78 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""Contains an operator to run downstream tasks only for the latest scheduled DagRun."""
+
+from __future__ import annotations
+
+from collections.abc import Iterable
+from typing import TYPE_CHECKING
+
+import pendulum
+
+from airflow.operators.branch import BaseBranchOperator
+
+if TYPE_CHECKING:
+    from airflow.models import DAG, DagRun
+    from airflow.utils.context import Context
+
+
+class LatestOnlyOperator(BaseBranchOperator):
+    """
+    Skip tasks that are not running during the most recent schedule interval.
+
+    If the task is run outside the latest schedule interval (i.e. external_trigger),
+    all directly downstream tasks will be skipped.
+
+    Note that downstream tasks are never skipped if the given DAG_Run is
+    marked as externally triggered.
+    """
+
+    ui_color = "#e9ffdb"  # nyanza
+
+    def choose_branch(self, context: Context) -> str | Iterable[str]:
+        # If the DAG Run is externally triggered, then return without
+        # skipping downstream tasks
+        dag_run: DagRun = context["dag_run"]  # type: ignore[assignment]
+        if dag_run.external_trigger:
+            self.log.info("Externally triggered DAG_Run: allowing execution to proceed.")
+            return list(context["task"].get_direct_relative_ids(upstream=False))
+
+        dag: DAG = context["dag"]
+        next_info = dag.next_dagrun_info(dag.get_run_data_interval(dag_run), restricted=False)
+        now = pendulum.now("UTC")
+
+        if next_info is None:
+            self.log.info("Last scheduled execution: allowing execution to proceed.")
+            return list(context["task"].get_direct_relative_ids(upstream=False))
+
+        left_window, right_window = next_info.data_interval
+        self.log.info(
+            "Checking latest only with left_window: %s right_window: %s now: %s",
+            left_window,
+            right_window,
+            now,
+        )
+
+        if not left_window < now <= right_window:
+            self.log.info("Not latest execution, skipping downstream.")
+            # we return an empty list, thus the parent BaseBranchOperator
+            # won't exclude any downstream tasks from skipping.
+            return []
+        else:
+            self.log.info("Latest, allowing execution to proceed.")
+            return list(context["task"].get_direct_relative_ids(upstream=False))
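Note: LatestOnlyOperator now ships with the standard provider under airflow.providers.standard.operators.latest_only. A minimal usage sketch, assuming a scheduled DAG; the dag id, schedule, start date and task ids below are illustrative and not taken from this diff:

    import pendulum

    from airflow import DAG
    from airflow.providers.standard.operators.bash import BashOperator
    from airflow.providers.standard.operators.latest_only import LatestOnlyOperator

    with DAG(
        dag_id="latest_only_example",  # hypothetical dag id
        schedule="@daily",
        start_date=pendulum.datetime(2024, 1, 1, tz="UTC"),
        catchup=True,
    ):
        latest_only = LatestOnlyOperator(task_id="latest_only")
        notify = BashOperator(task_id="notify", bash_command="echo 'latest run only'")

        # notify is skipped for backfill/catch-up runs that fall outside the latest interval
        latest_only >> notify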
airflow/providers/standard/operators/python.py
@@ -26,7 +26,6 @@ import subprocess
 import sys
 import textwrap
 import types
-import warnings
 from abc import ABCMeta, abstractmethod
 from collections.abc import Collection, Container, Iterable, Mapping, Sequence
 from functools import cache
@@ -41,7 +40,6 @@ from airflow.exceptions import (
     AirflowException,
     AirflowSkipException,
     DeserializingResultError,
-    RemovedInAirflow3Warning,
 )
 from airflow.models.baseoperator import BaseOperator
 from airflow.models.skipmixin import SkipMixin
@@ -49,22 +47,22 @@ from airflow.models.taskinstance import _CURRENT_CONTEXT
 from airflow.models.variable import Variable
 from airflow.operators.branch import BranchMixIn
 from airflow.providers.standard.utils.python_virtualenv import prepare_virtualenv, write_python_script
-from airflow.providers.standard.utils.version_references import AIRFLOW_V_2_10_PLUS, AIRFLOW_V_3_0_PLUS
-from airflow.settings import _ENABLE_AIP_44
+from airflow.providers.standard.version_compat import (
+    AIRFLOW_V_2_10_PLUS,
+    AIRFLOW_V_3_0_PLUS,
+)
 from airflow.typing_compat import Literal
 from airflow.utils import hashlib_wrapper
 from airflow.utils.context import context_copy_partial, context_merge
 from airflow.utils.file import get_unique_dag_module_name
 from airflow.utils.operator_helpers import KeywordParameters
 from airflow.utils.process_utils import execute_in_subprocess, execute_in_subprocess_with_kwargs
-from airflow.utils.session import create_session
 
 log = logging.getLogger(__name__)
 
 if TYPE_CHECKING:
     from pendulum.datetime import DateTime
 
-    from airflow.serialization.enums import Encoding
     from airflow.utils.context import Context
 
 
@@ -417,7 +415,6 @@ class _BasePythonVirtualenvOperator(PythonOperator, metaclass=ABCMeta):
         skip_on_exit_code: int | Container[int] | None = None,
         env_vars: dict[str, str] | None = None,
         inherit_env: bool = True,
-        use_dill: bool = False,
         use_airflow_context: bool = False,
         **kwargs,
     ):
@@ -439,18 +436,6 @@ class _BasePythonVirtualenvOperator(PythonOperator, metaclass=ABCMeta):
         )
         self.string_args = string_args or []
 
-        if use_dill:
-            warnings.warn(
-                "`use_dill` is deprecated and will be removed in a future version. "
-                "Please provide serializer='dill' instead.",
-                RemovedInAirflow3Warning,
-                stacklevel=3,
-            )
-            if serializer:
-                raise AirflowException(
-                    "Both 'use_dill' and 'serializer' parameters are set. Please set only one of them"
-                )
-            serializer = "dill"
         serializer = serializer or "pickle"
         if serializer not in _SERIALIZERS:
             msg = (
@@ -524,10 +509,6 @@ class _BasePythonVirtualenvOperator(PythonOperator, metaclass=ABCMeta):
         self._write_args(input_path)
         self._write_string_args(string_args_path)
 
-        if self.use_airflow_context and not _ENABLE_AIP_44:
-            error_msg = "`get_current_context()` needs to be used with AIP-44 enabled."
-            raise AirflowException(error_msg)
-
         jinja_context = {
             "op_args": self.op_args,
             "op_kwargs": op_kwargs,
@@ -547,18 +528,19 @@ class _BasePythonVirtualenvOperator(PythonOperator, metaclass=ABCMeta):
             render_template_as_native_obj=self.dag.render_template_as_native_obj,
         )
         if self.use_airflow_context:
-            from airflow.serialization.serialized_objects import BaseSerialization
-
-            context = get_current_context()
-            with create_session() as session:
-                # FIXME: DetachedInstanceError
-                dag_run, task_instance = context["dag_run"], context["task_instance"]
-                session.add_all([dag_run, task_instance])
-            serializable_context: dict[Encoding, Any] = BaseSerialization.serialize(
-                context, use_pydantic_models=True
-            )
-            with airflow_context_path.open("w+") as file:
-                json.dump(serializable_context, file)
+            # TODO: replace with commented code when context serialization is implemented in AIP-72
+            raise AirflowException(
+                "The `use_airflow_context=True` is not yet implemented. "
+                "It will work in Airflow 3 after AIP-72 context "
+                "serialization is ready."
+            )
+            # context = get_current_context()
+            # with create_session() as session:
+            #     dag_run, task_instance = context["dag_run"], context["task_instance"]
+            #     session.add_all([dag_run, task_instance])
+            # serializable_context: dict[Encoding, Any] = # Get serializable context here
+            # with airflow_context_path.open("w+") as file:
+            #     json.dump(serializable_context, file)
 
         env_vars = dict(os.environ) if self.inherit_env else {}
         if self.env_vars:
@@ -669,10 +651,8 @@ class PythonVirtualenvOperator(_BasePythonVirtualenvOperator):
         environment. If set to ``True``, the virtual environment will inherit the environment variables
         of the parent process (``os.environ``). If set to ``False``, the virtual environment will be
         executed with a clean environment.
-    :param use_dill: Deprecated, use ``serializer`` instead. Whether to use dill to serialize
-        the args and result (pickle is default). This allows more complex types
-        but requires you to include dill in your requirements.
     :param use_airflow_context: Whether to provide ``get_current_context()`` to the python_callable.
+        NOT YET IMPLEMENTED - waits for AIP-72 context serialization.
     """
 
     template_fields: Sequence[str] = tuple(
@@ -700,7 +680,6 @@ class PythonVirtualenvOperator(_BasePythonVirtualenvOperator):
         venv_cache_path: None | os.PathLike[str] = None,
         env_vars: dict[str, str] | None = None,
         inherit_env: bool = True,
-        use_dill: bool = False,
         use_airflow_context: bool = False,
         **kwargs,
     ):
@@ -718,15 +697,18 @@ class PythonVirtualenvOperator(_BasePythonVirtualenvOperator):
             raise AirflowException(
                 "Passing non-string types (e.g. int or float) as python_version not supported"
             )
-        if use_airflow_context and not AIRFLOW_V_3_0_PLUS:
-            raise AirflowException(
-                "The `use_airflow_context=True` is only supported in Airflow 3.0.0 and later."
-            )
         if use_airflow_context and (not expect_airflow and not system_site_packages):
             raise AirflowException(
                 "The `use_airflow_context` parameter is set to True, but "
                 "expect_airflow and system_site_packages are set to False."
             )
+        # TODO: remove when context serialization is implemented in AIP-72
+        if use_airflow_context and not AIRFLOW_V_3_0_PLUS:
+            raise AirflowException(
+                "The `use_airflow_context=True` is not yet implemented. "
+                "It will work in Airflow 3 after AIP-72 context "
+                "serialization is ready."
+            )
         if not requirements:
             self.requirements: list[str] = []
         elif isinstance(requirements, str):
@@ -755,7 +737,6 @@ class PythonVirtualenvOperator(_BasePythonVirtualenvOperator):
             skip_on_exit_code=skip_on_exit_code,
             env_vars=env_vars,
             inherit_env=inherit_env,
-            use_dill=use_dill,
             use_airflow_context=use_airflow_context,
             **kwargs,
         )
@@ -972,10 +953,8 @@ class ExternalPythonOperator(_BasePythonVirtualenvOperator):
         environment. If set to ``True``, the virtual environment will inherit the environment variables
         of the parent process (``os.environ``). If set to ``False``, the virtual environment will be
         executed with a clean environment.
-    :param use_dill: Deprecated, use ``serializer`` instead. Whether to use dill to serialize
-        the args and result (pickle is default). This allows more complex types
-        but requires you to include dill in your requirements.
     :param use_airflow_context: Whether to provide ``get_current_context()`` to the python_callable.
+        NOT YET IMPLEMENTED - waits for AIP-72 context serialization.
     """
 
     template_fields: Sequence[str] = tuple({"python"}.union(PythonOperator.template_fields))
@@ -996,20 +975,22 @@ class ExternalPythonOperator(_BasePythonVirtualenvOperator):
         skip_on_exit_code: int | Container[int] | None = None,
         env_vars: dict[str, str] | None = None,
         inherit_env: bool = True,
-        use_dill: bool = False,
         use_airflow_context: bool = False,
         **kwargs,
     ):
         if not python:
             raise ValueError("Python Path must be defined in ExternalPythonOperator")
-        if use_airflow_context and not AIRFLOW_V_3_0_PLUS:
-            raise AirflowException(
-                "The `use_airflow_context=True` is only supported in Airflow 3.0.0 and later."
-            )
         if use_airflow_context and not expect_airflow:
             raise AirflowException(
                 "The `use_airflow_context` parameter is set to True, but expect_airflow is set to False."
             )
+        # TODO: remove when context serialization is implemented in AIP-72
+        if use_airflow_context:
+            raise AirflowException(
+                "The `use_airflow_context=True` is not yet implemented. "
+                "It will work in Airflow 3 after AIP-72 context "
+                "serialization is ready."
+            )
         self.python = python
         self.expect_pendulum = expect_pendulum
         super().__init__(
@@ -1024,7 +1005,6 @@ class ExternalPythonOperator(_BasePythonVirtualenvOperator):
             skip_on_exit_code=skip_on_exit_code,
             env_vars=env_vars,
             inherit_env=inherit_env,
-            use_dill=use_dill,
             use_airflow_context=use_airflow_context,
             **kwargs,
         )
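Note: the deprecated use_dill flag is removed from the virtualenv operators in 0.0.3; the serializer argument, which already existed in 0.0.2, is the replacement named in the old deprecation message ("Please provide serializer='dill' instead"). A minimal sketch for PythonVirtualenvOperator; the task id, callable and requirements are illustrative, not part of this diff:

    from airflow.providers.standard.operators.python import PythonVirtualenvOperator

    def summarize():
        # runs inside the virtualenv created for this task
        import pandas as pd
        return pd.DataFrame({"x": [1, 2, 3]}).describe()

    summarize_task = PythonVirtualenvOperator(
        task_id="summarize",              # hypothetical task id
        python_callable=summarize,
        requirements=["pandas", "dill"],  # dill must be installed in the venv when serializer="dill"
        serializer="dill",                # replaces the removed use_dill=True
    )

Also note that passing use_airflow_context=True now raises an AirflowException until AIP-72 context serialization lands, as the hunks above show.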
airflow/providers/standard/operators/trigger_dagrun.py
@@ -27,7 +27,6 @@ from sqlalchemy import select
 from sqlalchemy.orm.exc import NoResultFound
 
 from airflow.api.common.trigger_dag import trigger_dag
-from airflow.api_internal.internal_api_call import InternalApiConfig
 from airflow.configuration import conf
 from airflow.exceptions import (
     AirflowException,
@@ -40,7 +39,7 @@ from airflow.models.dag import DagModel
 from airflow.models.dagbag import DagBag
 from airflow.models.dagrun import DagRun
 from airflow.models.xcom import XCom
-from airflow.triggers.external_task import DagStateTrigger
+from airflow.providers.standard.triggers.external_task import DagStateTrigger
 from airflow.utils import timezone
 from airflow.utils.helpers import build_airflow_url_with_query
 from airflow.utils.session import provide_session
@@ -182,14 +181,6 @@ class TriggerDagRunOperator(BaseOperator):
         self.logical_date = logical_date
 
     def execute(self, context: Context):
-        if InternalApiConfig.get_use_internal_api():
-            if self.reset_dag_run:
-                raise AirflowException("Parameter reset_dag_run=True is broken with Database Isolation Mode.")
-            if self.wait_for_completion:
-                raise AirflowException(
-                    "Parameter wait_for_completion=True is broken with Database Isolation Mode."
-                )
-
         if isinstance(self.logical_date, datetime.datetime):
             parsed_logical_date = self.logical_date
         elif isinstance(self.logical_date, str):
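Note: TriggerDagRunOperator now defers on the provider's own copy of DagStateTrigger (added in this release as airflow/providers/standard/triggers/external_task.py) instead of the Airflow core path. Custom code that imports the trigger directly should switch paths; a sketch of the import change only:

    # provider 0.0.2 (Airflow core path)
    # from airflow.triggers.external_task import DagStateTrigger

    # provider 0.0.3 (provider-local path)
    from airflow.providers.standard.triggers.external_task import DagStateTrigger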
airflow/providers/standard/sensors/date_time.py
@@ -22,7 +22,8 @@ from collections.abc import Sequence
 from dataclasses import dataclass
 from typing import TYPE_CHECKING, Any, NoReturn
 
-from airflow.providers.standard.utils.version_references import AIRFLOW_V_3_0_PLUS
+from airflow.providers.standard.triggers.temporal import DateTimeTrigger
+from airflow.providers.standard.version_compat import AIRFLOW_V_3_0_PLUS
 from airflow.sensors.base import BaseSensorOperator
 
 try:
@@ -40,7 +41,6 @@ except ImportError:
         timeout: datetime.timedelta | None = None
 
 
-from airflow.triggers.temporal import DateTimeTrigger
 from airflow.utils import timezone
 
 if TYPE_CHECKING:
@@ -111,7 +111,7 @@ class DateTimeSensorAsync(DateTimeSensor):
     """
 
     start_trigger_args = StartTriggerArgs(
-        trigger_cls="airflow.triggers.temporal.DateTimeTrigger",
+        trigger_cls="airflow.providers.standard.triggers.temporal.DateTimeTrigger",
         trigger_kwargs={"moment": "", "end_from_trigger": False},
         next_method="execute_complete",
         next_kwargs=None,
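Note: DateTimeSensorAsync now points its start_trigger_args at the provider's own DateTimeTrigger (added in this release as airflow/providers/standard/triggers/temporal.py) instead of the core trigger path. Usage of the sensor itself should be unchanged; a minimal sketch, with an illustrative task id and target time that are not part of this diff:

    import pendulum

    from airflow.providers.standard.sensors.date_time import DateTimeSensorAsync

    wait_until_9am = DateTimeSensorAsync(
        task_id="wait_until_9am",  # hypothetical task id
        target_time=pendulum.datetime(2025, 1, 1, 9, tz="UTC"),  # a datetime (or templated string) to wait for
    )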