apache-airflow-providers-standard 1.0.0.dev1__py3-none-any.whl → 1.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of apache-airflow-providers-standard has been flagged as potentially problematic.

Files changed (50)
  1. airflow/providers/standard/LICENSE +0 -52
  2. airflow/providers/standard/__init__.py +23 -1
  3. airflow/providers/standard/decorators/__init__.py +16 -0
  4. airflow/providers/standard/decorators/bash.py +121 -0
  5. airflow/providers/standard/decorators/branch_external_python.py +63 -0
  6. airflow/providers/standard/decorators/branch_python.py +62 -0
  7. airflow/providers/standard/decorators/branch_virtualenv.py +62 -0
  8. airflow/providers/standard/decorators/external_python.py +70 -0
  9. airflow/providers/standard/decorators/python.py +86 -0
  10. airflow/providers/standard/decorators/python_virtualenv.py +67 -0
  11. airflow/providers/standard/decorators/sensor.py +83 -0
  12. airflow/providers/standard/decorators/short_circuit.py +65 -0
  13. airflow/providers/standard/get_provider_info.py +89 -7
  14. airflow/providers/standard/hooks/__init__.py +16 -0
  15. airflow/providers/standard/hooks/filesystem.py +89 -0
  16. airflow/providers/standard/hooks/package_index.py +95 -0
  17. airflow/providers/standard/hooks/subprocess.py +119 -0
  18. airflow/providers/standard/operators/bash.py +73 -56
  19. airflow/providers/standard/operators/branch.py +105 -0
  20. airflow/providers/standard/operators/datetime.py +15 -5
  21. airflow/providers/standard/operators/empty.py +39 -0
  22. airflow/providers/standard/operators/latest_only.py +127 -0
  23. airflow/providers/standard/operators/python.py +1143 -0
  24. airflow/providers/standard/operators/smooth.py +38 -0
  25. airflow/providers/standard/operators/trigger_dagrun.py +391 -0
  26. airflow/providers/standard/operators/weekday.py +19 -9
  27. airflow/providers/standard/sensors/bash.py +15 -11
  28. airflow/providers/standard/sensors/date_time.py +32 -8
  29. airflow/providers/standard/sensors/external_task.py +593 -0
  30. airflow/providers/standard/sensors/filesystem.py +158 -0
  31. airflow/providers/standard/sensors/python.py +84 -0
  32. airflow/providers/standard/sensors/time.py +28 -5
  33. airflow/providers/standard/sensors/time_delta.py +68 -15
  34. airflow/providers/standard/sensors/weekday.py +25 -7
  35. airflow/providers/standard/triggers/__init__.py +16 -0
  36. airflow/providers/standard/triggers/external_task.py +288 -0
  37. airflow/providers/standard/triggers/file.py +131 -0
  38. airflow/providers/standard/triggers/temporal.py +113 -0
  39. airflow/providers/standard/utils/__init__.py +16 -0
  40. airflow/providers/standard/utils/python_virtualenv.py +209 -0
  41. airflow/providers/standard/utils/python_virtualenv_script.jinja2 +82 -0
  42. airflow/providers/standard/utils/sensor_helper.py +137 -0
  43. airflow/providers/standard/utils/skipmixin.py +192 -0
  44. airflow/providers/standard/utils/weekday.py +77 -0
  45. airflow/providers/standard/version_compat.py +36 -0
  46. {apache_airflow_providers_standard-1.0.0.dev1.dist-info → apache_airflow_providers_standard-1.1.0.dist-info}/METADATA +16 -35
  47. apache_airflow_providers_standard-1.1.0.dist-info/RECORD +51 -0
  48. {apache_airflow_providers_standard-1.0.0.dev1.dist-info → apache_airflow_providers_standard-1.1.0.dist-info}/WHEEL +1 -1
  49. apache_airflow_providers_standard-1.0.0.dev1.dist-info/RECORD +0 -17
  50. {apache_airflow_providers_standard-1.0.0.dev1.dist-info → apache_airflow_providers_standard-1.1.0.dist-info}/entry_points.txt +0 -0
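Most of the churn in this release comes from migrating operators, sensors, hooks, and decorators out of Airflow core and into this provider package. As a minimal sketch of what that means for DAG authors (the two classes below are examples; the other modules listed above follow the same pattern):

    # Airflow 2 core import paths (superseded for these classes):
    # from airflow.operators.bash import BashOperator
    # from airflow.operators.empty import EmptyOperator

    # Provider-package paths shipped by apache-airflow-providers-standard:
    from airflow.providers.standard.operators.bash import BashOperator
    from airflow.providers.standard.operators.empty import EmptyOperator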
airflow/providers/standard/operators/smooth.py (new file)
@@ -0,0 +1,38 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from airflow.models.baseoperator import BaseOperator
+
+if TYPE_CHECKING:
+    from airflow.sdk.definitions.context import Context
+
+
+class SmoothOperator(BaseOperator):
+    """Operator that logs a YouTube link to Sade song "Smooth Operator"."""
+
+    ui_color = "#e8f7e4"
+    yt_link: str = "https://www.youtube.com/watch?v=4TYv2PhG89A"
+
+    def __init__(self, **kwargs) -> None:
+        super().__init__(**kwargs)
+
+    def execute(self, context: Context):
+        self.log.info("Enjoy Sade - Smooth Operator: %s", self.yt_link)
airflow/providers/standard/operators/trigger_dagrun.py (new file)
@@ -0,0 +1,391 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+import datetime
+import json
+import time
+from collections.abc import Sequence
+from typing import TYPE_CHECKING, Any
+
+from sqlalchemy import select
+from sqlalchemy.orm.exc import NoResultFound
+
+from airflow.api.common.trigger_dag import trigger_dag
+from airflow.configuration import conf
+from airflow.exceptions import (
+    AirflowException,
+    AirflowSkipException,
+    DagNotFound,
+    DagRunAlreadyExists,
+)
+from airflow.models import BaseOperator
+from airflow.models.dag import DagModel
+from airflow.models.dagbag import DagBag
+from airflow.models.dagrun import DagRun
+from airflow.providers.standard.triggers.external_task import DagStateTrigger
+from airflow.providers.standard.version_compat import AIRFLOW_V_3_0_PLUS
+from airflow.utils import timezone
+from airflow.utils.state import DagRunState
+from airflow.utils.types import NOTSET, ArgNotSet, DagRunType
+
+XCOM_LOGICAL_DATE_ISO = "trigger_logical_date_iso"
+XCOM_RUN_ID = "trigger_run_id"
+
+
+if TYPE_CHECKING:
+    from sqlalchemy.orm.session import Session
+
+    from airflow.models.taskinstancekey import TaskInstanceKey
+
+    try:
+        from airflow.sdk.definitions.context import Context
+    except ImportError:
+        # TODO: Remove once provider drops support for Airflow 2
+        from airflow.utils.context import Context
+
+if AIRFLOW_V_3_0_PLUS:
+    from airflow.sdk import BaseOperatorLink
+    from airflow.sdk.execution_time.xcom import XCom
+else:
+    from airflow.models import XCom  # type: ignore[no-redef]
+    from airflow.models.baseoperatorlink import BaseOperatorLink  # type: ignore[no-redef]
+
+
+class DagIsPaused(AirflowException):
+    """Raise when a dag is paused and something tries to run it."""
+
+    def __init__(self, dag_id: str) -> None:
+        super().__init__(dag_id)
+        self.dag_id = dag_id
+
+    def __str__(self) -> str:
+        return f"Dag {self.dag_id} is paused"
+
+
+class TriggerDagRunLink(BaseOperatorLink):
+    """
+    Operator link for TriggerDagRunOperator.
+
+    It allows users to access DAG triggered by task using TriggerDagRunOperator.
+    """
+
+    name = "Triggered DAG"
+
+    def get_link(self, operator: BaseOperator, *, ti_key: TaskInstanceKey) -> str:
+        if TYPE_CHECKING:
+            assert isinstance(operator, TriggerDagRunOperator)
+
+        trigger_dag_id = operator.trigger_dag_id
+        if not AIRFLOW_V_3_0_PLUS:
+            from airflow.models.renderedtifields import RenderedTaskInstanceFields
+
+            if template_fields := RenderedTaskInstanceFields.get_templated_fields(ti_key):
+                trigger_dag_id: str = template_fields.get("trigger_dag_id", operator.trigger_dag_id)  # type: ignore[no-redef]
+
+        # Fetch the correct dag_run_id for the triggerED dag which is
+        # stored in xcom during execution of the triggerING task.
+        triggered_dag_run_id = XCom.get_value(ti_key=ti_key, key=XCOM_RUN_ID)
+
+        if AIRFLOW_V_3_0_PLUS:
+            from airflow.utils.helpers import build_airflow_dagrun_url
+
+            return build_airflow_dagrun_url(dag_id=trigger_dag_id, run_id=triggered_dag_run_id)
+        from airflow.utils.helpers import build_airflow_url_with_query  # type:ignore[attr-defined]
+
+        query = {"dag_id": trigger_dag_id, "dag_run_id": triggered_dag_run_id}
+        return build_airflow_url_with_query(query)
+
+
+class TriggerDagRunOperator(BaseOperator):
+    """
+    Triggers a DAG run for a specified DAG ID.
+
+    Note that if database isolation mode is enabled, not all features are supported.
+
+    :param trigger_dag_id: The ``dag_id`` of the DAG to trigger (templated).
+    :param trigger_run_id: The run ID to use for the triggered DAG run (templated).
+        If not provided, a run ID will be automatically generated.
+    :param conf: Configuration for the DAG run (templated).
+    :param logical_date: Logical date for the triggered DAG (templated).
+    :param reset_dag_run: Whether clear existing DAG run if already exists.
+        This is useful when backfill or rerun an existing DAG run.
+        This only resets (not recreates) the DAG run.
+        DAG run conf is immutable and will not be reset on rerun of an existing DAG run.
+        When reset_dag_run=False and dag run exists, DagRunAlreadyExists will be raised.
+        When reset_dag_run=True and dag run exists, existing DAG run will be cleared to rerun.
+    :param wait_for_completion: Whether or not wait for DAG run completion. (default: False)
+    :param poke_interval: Poke interval to check DAG run status when wait_for_completion=True.
+        (default: 60)
+    :param allowed_states: Optional list of allowed DAG run states of the triggered DAG. This is useful when
+        setting ``wait_for_completion`` to True. Must be a valid DagRunState.
+        Default is ``[DagRunState.SUCCESS]``.
+    :param failed_states: Optional list of failed or disallowed DAG run states of the triggered DAG. This is
+        useful when setting ``wait_for_completion`` to True. Must be a valid DagRunState.
+        Default is ``[DagRunState.FAILED]``.
+    :param skip_when_already_exists: Set to true to mark the task as SKIPPED if a DAG run of the triggered
+        DAG for the same logical date already exists.
+    :param fail_when_dag_is_paused: If the dag to trigger is paused, DagIsPaused will be raised.
+    :param deferrable: If waiting for completion, whether or not to defer the task until done,
+        default is ``False``.
+    """
+
+    template_fields: Sequence[str] = (
+        "trigger_dag_id",
+        "trigger_run_id",
+        "logical_date",
+        "conf",
+        "wait_for_completion",
+        "skip_when_already_exists",
+    )
+    template_fields_renderers = {"conf": "py"}
+    ui_color = "#ffefeb"
+    operator_extra_links = [TriggerDagRunLink()]
+
+    def __init__(
+        self,
+        *,
+        trigger_dag_id: str,
+        trigger_run_id: str | None = None,
+        conf: dict | None = None,
+        logical_date: str | datetime.datetime | None | ArgNotSet = NOTSET,
+        reset_dag_run: bool = False,
+        wait_for_completion: bool = False,
+        poke_interval: int = 60,
+        allowed_states: list[str | DagRunState] | None = None,
+        failed_states: list[str | DagRunState] | None = None,
+        skip_when_already_exists: bool = False,
+        fail_when_dag_is_paused: bool = False,
+        deferrable: bool = conf.getboolean("operators", "default_deferrable", fallback=False),
+        **kwargs,
+    ) -> None:
+        super().__init__(**kwargs)
+        self.trigger_dag_id = trigger_dag_id
+        self.trigger_run_id = trigger_run_id
+        self.conf = conf
+        self.reset_dag_run = reset_dag_run
+        self.wait_for_completion = wait_for_completion
+        self.poke_interval = poke_interval
+        if allowed_states:
+            self.allowed_states = [DagRunState(s) for s in allowed_states]
+        else:
+            self.allowed_states = [DagRunState.SUCCESS]
+        if failed_states is not None:
+            self.failed_states = [DagRunState(s) for s in failed_states]
+        else:
+            self.failed_states = [DagRunState.FAILED]
+        self.skip_when_already_exists = skip_when_already_exists
+        self.fail_when_dag_is_paused = fail_when_dag_is_paused
+        self._defer = deferrable
+        self.logical_date = logical_date
+        if logical_date is NOTSET:
+            self.logical_date = NOTSET
+        elif logical_date is None or isinstance(logical_date, (str, datetime.datetime)):
+            self.logical_date = logical_date
+        else:
+            raise TypeError(
+                f"Expected str, datetime.datetime, or None for parameter 'logical_date'. Got {type(logical_date).__name__}"
+            )
+
+    def execute(self, context: Context):
+        if self.logical_date is NOTSET:
+            # If no logical_date is provided we will set utcnow()
+            parsed_logical_date = timezone.utcnow()
+        elif self.logical_date is None or isinstance(self.logical_date, datetime.datetime):
+            parsed_logical_date = self.logical_date  # type: ignore
+        elif isinstance(self.logical_date, str):
+            parsed_logical_date = timezone.parse(self.logical_date)
+
+        try:
+            json.dumps(self.conf)
+        except TypeError:
+            raise ValueError("conf parameter should be JSON Serializable")
+
+        if self.trigger_run_id:
+            run_id = str(self.trigger_run_id)
+        else:
+            if AIRFLOW_V_3_0_PLUS:
+                run_id = DagRun.generate_run_id(
+                    run_type=DagRunType.MANUAL,
+                    logical_date=parsed_logical_date,
+                    run_after=parsed_logical_date or timezone.utcnow(),
+                )
+            else:
+                run_id = DagRun.generate_run_id(DagRunType.MANUAL, parsed_logical_date or timezone.utcnow())  # type: ignore[misc,call-arg]
+
+        if self.fail_when_dag_is_paused:
+            dag_model = DagModel.get_current(self.trigger_dag_id)
+            if dag_model.is_paused:
+                if AIRFLOW_V_3_0_PLUS:
+                    raise DagIsPaused(dag_id=self.trigger_dag_id)
+                raise AirflowException(f"Dag {self.trigger_dag_id} is paused")
+
+        if AIRFLOW_V_3_0_PLUS:
+            self._trigger_dag_af_3(context=context, run_id=run_id, parsed_logical_date=parsed_logical_date)
+        else:
+            self._trigger_dag_af_2(context=context, run_id=run_id, parsed_logical_date=parsed_logical_date)
+
+    def _trigger_dag_af_3(self, context, run_id, parsed_logical_date):
+        from airflow.exceptions import DagRunTriggerException
+
+        raise DagRunTriggerException(
+            trigger_dag_id=self.trigger_dag_id,
+            dag_run_id=run_id,
+            conf=self.conf,
+            logical_date=parsed_logical_date,
+            reset_dag_run=self.reset_dag_run,
+            skip_when_already_exists=self.skip_when_already_exists,
+            wait_for_completion=self.wait_for_completion,
+            allowed_states=self.allowed_states,
+            failed_states=self.failed_states,
+            poke_interval=self.poke_interval,
+            deferrable=self._defer,
+        )
+
+    def _trigger_dag_af_2(self, context, run_id, parsed_logical_date):
+        try:
+            dag_run = trigger_dag(
+                dag_id=self.trigger_dag_id,
+                run_id=run_id,
+                conf=self.conf,
+                execution_date=parsed_logical_date,
+                replace_microseconds=False,
+            )
+
+        except DagRunAlreadyExists as e:
+            if self.reset_dag_run:
+                dag_run = e.dag_run
+                self.log.info("Clearing %s on %s", self.trigger_dag_id, dag_run.run_id)
+
+                # Get target dag object and call clear()
+                dag_model = DagModel.get_current(self.trigger_dag_id)
+                if dag_model is None:
+                    raise DagNotFound(f"Dag id {self.trigger_dag_id} not found in DagModel")
+
+                # Note: here execution fails on database isolation mode. Needs structural changes for AIP-72
+                dag_bag = DagBag(dag_folder=dag_model.fileloc, read_dags_from_db=True)
+                dag = dag_bag.get_dag(self.trigger_dag_id)
+                dag.clear(start_date=dag_run.logical_date, end_date=dag_run.logical_date)
+            else:
+                if self.skip_when_already_exists:
+                    raise AirflowSkipException(
+                        "Skipping due to skip_when_already_exists is set to True and DagRunAlreadyExists"
+                    )
+                raise e
+        if dag_run is None:
+            raise RuntimeError("The dag_run should be set here!")
+        # Store the run id from the dag run (either created or found above) to
+        # be used when creating the extra link on the webserver.
+        ti = context["task_instance"]
+        ti.xcom_push(key=XCOM_RUN_ID, value=dag_run.run_id)
+
+        if self.wait_for_completion:
+            # Kick off the deferral process
+            if self._defer:
+                self.defer(
+                    trigger=DagStateTrigger(
+                        dag_id=self.trigger_dag_id,
+                        states=self.allowed_states + self.failed_states,
+                        execution_dates=[dag_run.logical_date],
+                        run_ids=[run_id],
+                        poll_interval=self.poke_interval,
+                    ),
+                    method_name="execute_complete",
+                )
+            # wait for dag to complete
+            while True:
+                self.log.info(
+                    "Waiting for %s on %s to become allowed state %s ...",
+                    self.trigger_dag_id,
+                    run_id,
+                    self.allowed_states,
+                )
+                time.sleep(self.poke_interval)
+
+                # Note: here execution fails on database isolation mode. Needs structural changes for AIP-72
+                dag_run.refresh_from_db()
+                state = dag_run.state
+                if state in self.failed_states:
+                    raise AirflowException(f"{self.trigger_dag_id} failed with failed states {state}")
+                if state in self.allowed_states:
+                    self.log.info("%s finished with allowed state %s", self.trigger_dag_id, state)
+                    return
+
+    def execute_complete(self, context: Context, event: tuple[str, dict[str, Any]]):
+        if AIRFLOW_V_3_0_PLUS:
+            self._trigger_dag_run_af_3_execute_complete(event=event)
+        else:
+            self._trigger_dag_run_af_2_execute_complete(event=event)
+
+    def _trigger_dag_run_af_3_execute_complete(self, event: tuple[str, dict[str, Any]]):
+        run_ids = event[1]["run_ids"]
+        event_data = event[1]
+        failed_run_id_conditions = []
+
+        for run_id in run_ids:
+            state = event_data.get(run_id)
+            if state in self.failed_states:
+                failed_run_id_conditions.append(run_id)
+                continue
+            if state in self.allowed_states:
+                self.log.info(
+                    "%s finished with allowed state %s for run_id %s",
+                    self.trigger_dag_id,
+                    state,
+                    run_id,
+                )
+
+        if failed_run_id_conditions:
+            raise AirflowException(
+                f"{self.trigger_dag_id} failed with failed states {self.failed_states} for run_ids"
+                f" {failed_run_id_conditions}"
+            )
+
+    if not AIRFLOW_V_3_0_PLUS:
+        from airflow.utils.session import NEW_SESSION, provide_session  # type: ignore[misc]
+
+        @provide_session
+        def _trigger_dag_run_af_2_execute_complete(
+            self, event: tuple[str, dict[str, Any]], session: Session = NEW_SESSION
+        ):
+            # This logical_date is parsed from the return trigger event
+            provided_logical_date = event[1]["execution_dates"][0]
+            try:
+                # Note: here execution fails on database isolation mode. Needs structural changes for AIP-72
+                dag_run = session.execute(
+                    select(DagRun).where(
+                        DagRun.dag_id == self.trigger_dag_id, DagRun.execution_date == provided_logical_date
+                    )
+                ).scalar_one()
+            except NoResultFound:
+                raise AirflowException(
+                    f"No DAG run found for DAG {self.trigger_dag_id} and logical date {self.logical_date}"
+                )
+
+            state = dag_run.state
+
+            if state in self.failed_states:
+                raise AirflowException(f"{self.trigger_dag_id} failed with failed state {state}")
+            if state in self.allowed_states:
+                self.log.info("%s finished with allowed state %s", self.trigger_dag_id, state)
+                return
+
+            raise AirflowException(
+                f"{self.trigger_dag_id} return {state} which is not in {self.failed_states}"
+                f" or {self.allowed_states}"
+            )
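A usage sketch for the operator above, wiring together the constructor parameters from its signature; the DAG ids and conf payload are illustrative:

    from airflow.providers.standard.operators.trigger_dagrun import TriggerDagRunOperator
    from airflow.utils.state import DagRunState

    trigger = TriggerDagRunOperator(
        task_id="trigger_downstream",
        trigger_dag_id="downstream_dag",      # hypothetical target DAG
        conf={"source": "upstream"},          # must be JSON-serializable (validated in execute())
        reset_dag_run=True,                   # clear and rerun an existing run instead of raising
        wait_for_completion=True,
        deferrable=True,                      # wait via DagStateTrigger instead of blocking polls
        allowed_states=[DagRunState.SUCCESS],
        failed_states=[DagRunState.FAILED],
        poke_interval=30,
    )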
airflow/providers/standard/operators/weekday.py
@@ -17,14 +17,19 @@
 # under the License.
 from __future__ import annotations
 
-from typing import TYPE_CHECKING, Iterable
+from collections.abc import Iterable
+from typing import TYPE_CHECKING
 
-from airflow.operators.branch import BaseBranchOperator
+from airflow.providers.standard.operators.branch import BaseBranchOperator
+from airflow.providers.standard.utils.weekday import WeekDay
 from airflow.utils import timezone
-from airflow.utils.weekday import WeekDay
 
 
 if TYPE_CHECKING:
-    from airflow.utils.context import Context
+    try:
+        from airflow.sdk.definitions.context import Context
+    except ImportError:
+        # TODO: Remove once provider drops support for Airflow 2
+        from airflow.utils.context import Context
 
 
 class BranchDayOfWeekOperator(BaseBranchOperator):
@@ -38,7 +43,8 @@ class BranchDayOfWeekOperator(BaseBranchOperator):
 
     .. code-block:: python
 
-        from airflow.operators.empty import EmptyOperator
+        from airflow.providers.standard.operators.empty import EmptyOperator
+        from airflow.operators.weekday import BranchDayOfWeekOperator
 
         monday = EmptyOperator(task_id="monday")
         other_day = EmptyOperator(task_id="other_day")
@@ -57,8 +63,9 @@ class BranchDayOfWeekOperator(BaseBranchOperator):
    .. code-block:: python
 
        # import WeekDay Enum
-        from airflow.utils.weekday import WeekDay
-        from airflow.operators.empty import EmptyOperator
+        from airflow.providers.standard.utils.weekday import WeekDay
+        from airflow.providers.standard.operators.empty import EmptyOperator
+        from airflow.operators.weekday import BranchDayOfWeekOperator
 
        workday = EmptyOperator(task_id="workday")
        weekend = EmptyOperator(task_id="weekend")
@@ -109,10 +116,13 @@ class BranchDayOfWeekOperator(BaseBranchOperator):
 
     def choose_branch(self, context: Context) -> str | Iterable[str]:
         if self.use_task_logical_date:
-            now = context["logical_date"]
+            now = context.get("logical_date")
+            if not now:
+                dag_run = context.get("dag_run")
+                now = dag_run.run_after  # type: ignore[union-attr, assignment]
         else:
             now = timezone.make_naive(timezone.utcnow(), self.dag.timezone)
 
-        if now.isoweekday() in self._week_day_num:
+        if now.isoweekday() in self._week_day_num:  # type: ignore[union-attr]
             return self.follow_task_ids_if_true
         return self.follow_task_ids_if_false
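The choose_branch change above makes use_task_logical_date fall back to dag_run.run_after when no logical_date is present (for example, manually triggered runs on Airflow 3). A sketch of the operator in use; the week_day parameter name is assumed from the docstring examples, and the task ids are illustrative:

    from airflow.providers.standard.operators.empty import EmptyOperator
    from airflow.providers.standard.operators.weekday import BranchDayOfWeekOperator
    from airflow.providers.standard.utils.weekday import WeekDay

    branch = BranchDayOfWeekOperator(
        task_id="branch_on_weekday",
        follow_task_ids_if_true="weekend",
        follow_task_ids_if_false="workday",
        week_day={WeekDay.SATURDAY, WeekDay.SUNDAY},  # assumed parameter, per the docstring examples
        use_task_logical_date=True,  # falls back to dag_run.run_after when logical_date is absent
    )
    workday = EmptyOperator(task_id="workday")
    weekend = EmptyOperator(task_id="weekend")
    branch >> [workday, weekend]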
airflow/providers/standard/sensors/bash.py
@@ -18,15 +18,20 @@
 from __future__ import annotations
 
 import os
+from collections.abc import Sequence
 from subprocess import PIPE, STDOUT, Popen
 from tempfile import NamedTemporaryFile, TemporaryDirectory, gettempdir
-from typing import TYPE_CHECKING, Sequence
+from typing import TYPE_CHECKING
 
 from airflow.exceptions import AirflowFailException
 from airflow.sensors.base import BaseSensorOperator
 
 if TYPE_CHECKING:
-    from airflow.utils.context import Context
+    try:
+        from airflow.sdk.definitions.context import Context
+    except ImportError:
+        # TODO: Remove once provider drops support for Airflow 2
+        from airflow.utils.context import Context
 
 
 class BashSensor(BaseSensorOperator):
@@ -70,9 +75,10 @@ class BashSensor(BaseSensorOperator):
         """Execute the bash command in a temporary directory."""
         bash_command = self.bash_command
         self.log.info("Tmp dir root location: %s", gettempdir())
-        with TemporaryDirectory(prefix="airflowtmp") as tmp_dir, NamedTemporaryFile(
-            dir=tmp_dir, prefix=self.task_id
-        ) as f:
+        with (
+            TemporaryDirectory(prefix="airflowtmp") as tmp_dir,
+            NamedTemporaryFile(dir=tmp_dir, prefix=self.task_id) as f,
+        ):
             f.write(bytes(bash_command, "utf_8"))
             f.flush()
             fname = f.name
@@ -101,14 +107,12 @@ class BashSensor(BaseSensorOperator):
             return True
 
         # we have a retry exit code, sensor retries if return code matches, otherwise error
-        elif self.retry_exit_code is not None:
+        if self.retry_exit_code is not None:
             if resp.returncode == self.retry_exit_code:
                 self.log.info("Return code matches retry code, will retry later")
                 return False
-            else:
-                raise AirflowFailException(f"Command exited with return code {resp.returncode}")
+            raise AirflowFailException(f"Command exited with return code {resp.returncode}")
 
         # backwards compatibility: sensor retries no matter the error code
-        else:
-            self.log.info("Non-zero return code and no retry code set, will retry later")
-            return False
+        self.log.info("Non-zero return code and no retry code set, will retry later")
+        return False
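The poke() refactor above flattens the if/elif/else chain without changing behavior: exit code 0 succeeds, a matching retry_exit_code reschedules the poke, any other non-zero code fails hard, and with no retry_exit_code set every non-zero code reschedules. A sketch of the sensor configured for the strict mode; the command is illustrative:

    from airflow.providers.standard.sensors.bash import BashSensor

    wait_for_marker = BashSensor(
        task_id="wait_for_marker",
        bash_command="test -f /tmp/marker",  # hypothetical readiness check
        retry_exit_code=1,  # only exit code 1 retries; other non-zero codes raise AirflowFailException
        poke_interval=30,
    )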
airflow/providers/standard/sensors/date_time.py
@@ -18,15 +18,37 @@
 from __future__ import annotations
 
 import datetime
-from typing import TYPE_CHECKING, Any, NoReturn, Sequence
+from collections.abc import Sequence
+from dataclasses import dataclass
+from typing import TYPE_CHECKING, Any, NoReturn
 
+from airflow.providers.standard.triggers.temporal import DateTimeTrigger
+from airflow.providers.standard.version_compat import AIRFLOW_V_3_0_PLUS
 from airflow.sensors.base import BaseSensorOperator
-from airflow.triggers.base import StartTriggerArgs
-from airflow.triggers.temporal import DateTimeTrigger
+
+try:
+    from airflow.triggers.base import StartTriggerArgs
+except ImportError:
+    # TODO: Remove this when min airflow version is 2.10.0 for standard provider
+    @dataclass
+    class StartTriggerArgs:  # type: ignore[no-redef]
+        """Arguments required for start task execution from triggerer."""
+
+        trigger_cls: str
+        next_method: str
+        trigger_kwargs: dict[str, Any] | None = None
+        next_kwargs: dict[str, Any] | None = None
+        timeout: datetime.timedelta | None = None
+
+
 from airflow.utils import timezone
 
 if TYPE_CHECKING:
-    from airflow.utils.context import Context
+    try:
+        from airflow.sdk.definitions.context import Context
+    except ImportError:
+        # TODO: Remove once provider drops support for Airflow 2
+        from airflow.utils.context import Context
 
 
 class DateTimeSensor(BaseSensorOperator):
@@ -37,7 +59,7 @@ class DateTimeSensor(BaseSensorOperator):
     It handles some cases for which ``TimeSensor`` and ``TimeDeltaSensor`` are not suited.
 
     **Example** 1 :
-        If a task needs to wait for 11am on each ``execution_date``. Using
+        If a task needs to wait for 11am on each ``logical_date``. Using
         ``TimeSensor`` or ``TimeDeltaSensor``, all backfill tasks started at
         1am have to wait for 10 hours. This is unnecessary, e.g. a backfill
         task with ``{{ ds }} = '1970-01-01'`` does not need to wait because
@@ -52,7 +74,7 @@ class DateTimeSensor(BaseSensorOperator):
 
         DateTimeSensor(
             task_id="wait_for_0100",
-            target_time="{{ next_execution_date.tomorrow().replace(hour=1) }}",
+            target_time="{{ data_interval_end.tomorrow().replace(hour=1) }}",
         )
 
     :param target_time: datetime after which the job succeeds. (templated)
@@ -93,7 +115,7 @@ class DateTimeSensorAsync(DateTimeSensor):
     """
 
     start_trigger_args = StartTriggerArgs(
-        trigger_cls="airflow.triggers.temporal.DateTimeTrigger",
+        trigger_cls="airflow.providers.standard.triggers.temporal.DateTimeTrigger",
         trigger_kwargs={"moment": "", "end_from_trigger": False},
         next_method="execute_complete",
         next_kwargs=None,
@@ -125,7 +147,9 @@ class DateTimeSensorAsync(DateTimeSensor):
             trigger=DateTimeTrigger(
                 moment=timezone.parse(self.target_time),
                 end_from_trigger=self.end_from_trigger,
-            ),
+            )
+            if AIRFLOW_V_3_0_PLUS
+            else DateTimeTrigger(moment=timezone.parse(self.target_time)),
         )
 
     def execute_complete(self, context: Context, event: Any = None) -> None:
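Finally, a sketch of the async sensor above; the target_time template is taken from the DateTimeSensor docstring in this diff, and the task_id is illustrative:

    from airflow.providers.standard.sensors.date_time import DateTimeSensorAsync

    # Defers to the triggerer using the provider's DateTimeTrigger; on Airflow 3
    # (AIRFLOW_V_3_0_PLUS) the trigger is created with end_from_trigger, per the diff above.
    wait = DateTimeSensorAsync(
        task_id="wait_for_0100",
        target_time="{{ data_interval_end.tomorrow().replace(hour=1) }}",
    )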