apache-airflow-providers-standard 1.0.0.dev1__py3-none-any.whl → 1.1.0rc1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of apache-airflow-providers-standard might be problematic.
Files changed (50)
  1. airflow/providers/standard/LICENSE +0 -52
  2. airflow/providers/standard/__init__.py +23 -1
  3. airflow/providers/standard/decorators/__init__.py +16 -0
  4. airflow/providers/standard/decorators/bash.py +121 -0
  5. airflow/providers/standard/decorators/branch_external_python.py +63 -0
  6. airflow/providers/standard/decorators/branch_python.py +62 -0
  7. airflow/providers/standard/decorators/branch_virtualenv.py +62 -0
  8. airflow/providers/standard/decorators/external_python.py +70 -0
  9. airflow/providers/standard/decorators/python.py +86 -0
  10. airflow/providers/standard/decorators/python_virtualenv.py +67 -0
  11. airflow/providers/standard/decorators/sensor.py +83 -0
  12. airflow/providers/standard/decorators/short_circuit.py +65 -0
  13. airflow/providers/standard/get_provider_info.py +89 -7
  14. airflow/providers/standard/hooks/__init__.py +16 -0
  15. airflow/providers/standard/hooks/filesystem.py +89 -0
  16. airflow/providers/standard/hooks/package_index.py +95 -0
  17. airflow/providers/standard/hooks/subprocess.py +119 -0
  18. airflow/providers/standard/operators/bash.py +73 -56
  19. airflow/providers/standard/operators/branch.py +105 -0
  20. airflow/providers/standard/operators/datetime.py +15 -5
  21. airflow/providers/standard/operators/empty.py +39 -0
  22. airflow/providers/standard/operators/latest_only.py +127 -0
  23. airflow/providers/standard/operators/python.py +1143 -0
  24. airflow/providers/standard/operators/smooth.py +38 -0
  25. airflow/providers/standard/operators/trigger_dagrun.py +391 -0
  26. airflow/providers/standard/operators/weekday.py +19 -9
  27. airflow/providers/standard/sensors/bash.py +15 -11
  28. airflow/providers/standard/sensors/date_time.py +32 -8
  29. airflow/providers/standard/sensors/external_task.py +593 -0
  30. airflow/providers/standard/sensors/filesystem.py +158 -0
  31. airflow/providers/standard/sensors/python.py +84 -0
  32. airflow/providers/standard/sensors/time.py +28 -5
  33. airflow/providers/standard/sensors/time_delta.py +68 -15
  34. airflow/providers/standard/sensors/weekday.py +25 -7
  35. airflow/providers/standard/triggers/__init__.py +16 -0
  36. airflow/providers/standard/triggers/external_task.py +288 -0
  37. airflow/providers/standard/triggers/file.py +131 -0
  38. airflow/providers/standard/triggers/temporal.py +113 -0
  39. airflow/providers/standard/utils/__init__.py +16 -0
  40. airflow/providers/standard/utils/python_virtualenv.py +209 -0
  41. airflow/providers/standard/utils/python_virtualenv_script.jinja2 +82 -0
  42. airflow/providers/standard/utils/sensor_helper.py +137 -0
  43. airflow/providers/standard/utils/skipmixin.py +192 -0
  44. airflow/providers/standard/utils/weekday.py +77 -0
  45. airflow/providers/standard/version_compat.py +36 -0
  46. {apache_airflow_providers_standard-1.0.0.dev1.dist-info → apache_airflow_providers_standard-1.1.0rc1.dist-info}/METADATA +16 -35
  47. apache_airflow_providers_standard-1.1.0rc1.dist-info/RECORD +51 -0
  48. {apache_airflow_providers_standard-1.0.0.dev1.dist-info → apache_airflow_providers_standard-1.1.0rc1.dist-info}/WHEEL +1 -1
  49. apache_airflow_providers_standard-1.0.0.dev1.dist-info/RECORD +0 -17
  50. {apache_airflow_providers_standard-1.0.0.dev1.dist-info → apache_airflow_providers_standard-1.1.0rc1.dist-info}/entry_points.txt +0 -0
airflow/providers/standard/sensors/filesystem.py
@@ -0,0 +1,158 @@
+ #
+ # Licensed to the Apache Software Foundation (ASF) under one
+ # or more contributor license agreements. See the NOTICE file
+ # distributed with this work for additional information
+ # regarding copyright ownership. The ASF licenses this file
+ # to you under the Apache License, Version 2.0 (the
+ # "License"); you may not use this file except in compliance
+ # with the License. You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing,
+ # software distributed under the License is distributed on an
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ # KIND, either express or implied. See the License for the
+ # specific language governing permissions and limitations
+ # under the License.
+ from __future__ import annotations
+
+ import datetime
+ import os
+ from collections.abc import Sequence
+ from dataclasses import dataclass
+ from functools import cached_property
+ from glob import glob
+ from typing import TYPE_CHECKING, Any
+
+ from airflow.configuration import conf
+ from airflow.exceptions import AirflowException
+ from airflow.providers.standard.hooks.filesystem import FSHook
+ from airflow.providers.standard.triggers.file import FileTrigger
+ from airflow.sensors.base import BaseSensorOperator
+
+ try:
+     from airflow.triggers.base import StartTriggerArgs
+ except ImportError:
+     # TODO: Remove this when min airflow version is 2.10.0 for standard provider
+     @dataclass
+     class StartTriggerArgs:  # type: ignore[no-redef]
+         """Arguments required for start task execution from triggerer."""
+
+         trigger_cls: str
+         next_method: str
+         trigger_kwargs: dict[str, Any] | None = None
+         next_kwargs: dict[str, Any] | None = None
+         timeout: datetime.timedelta | None = None
+
+
+ if TYPE_CHECKING:
+     try:
+         from airflow.sdk.definitions.context import Context
+     except ImportError:
+         # TODO: Remove once provider drops support for Airflow 2
+         from airflow.utils.context import Context
+
+
+ class FileSensor(BaseSensorOperator):
+     """
+     Waits for a file or folder to land in a filesystem.
+
+     If the path given is a directory then this sensor will only return true if
+     any files exist inside it (either directly, or within a subdirectory)
+
+     :param fs_conn_id: reference to the File (path)
+         connection id
+     :param filepath: File or folder name (relative to
+         the base path set within the connection), can be a glob.
+     :param recursive: when set to ``True``, enables recursive directory matching behavior of
+         ``**`` in glob filepath parameter. Defaults to ``False``.
+     :param deferrable: If waiting for completion, whether to defer the task until done,
+         default is ``False``.
+     :param start_from_trigger: Start the task directly from the triggerer without going into the worker.
+     :param trigger_kwargs: The keyword arguments passed to the trigger when start_from_trigger is set to True
+         during dynamic task mapping. This argument is not used in standard usage.
+
+     .. seealso::
+         For more information on how to use this sensor, take a look at the guide:
+         :ref:`howto/operator:FileSensor`
+     """
+
+     template_fields: Sequence[str] = ("filepath",)
+     ui_color = "#91818a"
+     start_trigger_args = StartTriggerArgs(
+         trigger_cls="airflow.providers.standard.triggers.file.FileTrigger",
+         trigger_kwargs={},
+         next_method="execute_complete",
+         next_kwargs=None,
+         timeout=None,
+     )
+     start_from_trigger = False
+
+     def __init__(
+         self,
+         *,
+         filepath,
+         fs_conn_id="fs_default",
+         recursive=False,
+         deferrable: bool = conf.getboolean("operators", "default_deferrable", fallback=False),
+         start_from_trigger: bool = False,
+         trigger_kwargs: dict[str, Any] | None = None,
+         **kwargs,
+     ):
+         super().__init__(**kwargs)
+         self.filepath = filepath
+         self.fs_conn_id = fs_conn_id
+         self.recursive = recursive
+         self.deferrable = deferrable
+
+         self.start_from_trigger = start_from_trigger
+
+         if self.deferrable and self.start_from_trigger:
+             self.start_trigger_args.timeout = datetime.timedelta(seconds=self.timeout)
+             self.start_trigger_args.trigger_kwargs = dict(
+                 filepath=self.path,
+                 recursive=self.recursive,
+                 poke_interval=self.poke_interval,
+             )
+
+     @cached_property
+     def path(self) -> str:
+         hook = FSHook(self.fs_conn_id)
+         basepath = hook.get_path()
+         full_path = os.path.join(basepath, self.filepath)
+         return full_path
+
+     def poke(self, context: Context) -> bool:
+         self.log.info("Poking for file %s", self.path)
+         for path in glob(self.path, recursive=self.recursive):
+             if os.path.isfile(path):
+                 mod_time = datetime.datetime.fromtimestamp(os.path.getmtime(path)).strftime("%Y%m%d%H%M%S")
+                 self.log.info("Found File %s last modified: %s", path, mod_time)
+                 return True
+
+             for _, _, files in os.walk(path):
+                 if files:
+                     return True
+         return False
+
+     def execute(self, context: Context) -> None:
+         if not self.deferrable:
+             super().execute(context=context)
+         if not self.poke(context=context):
+             self.defer(
+                 timeout=datetime.timedelta(seconds=self.timeout),
+                 trigger=FileTrigger(
+                     filepath=self.path,
+                     recursive=self.recursive,
+                     poke_interval=self.poke_interval,
+                 ),
+                 method_name="execute_complete",
+             )
+
+     def execute_complete(self, context: Context, event: bool | None = None) -> None:
+         if not event:
+             raise AirflowException(f"{self.task_id} task failed as {self.filepath} not found.")
+         self.log.info("%s completed successfully as %s found.", self.task_id, self.filepath)
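For orientation, a minimal usage sketch of the sensor added above, assuming a standard DAG file (the dag_id, filepath, and intervals are illustrative placeholders, not values from this release):

import datetime

from airflow import DAG
from airflow.providers.standard.sensors.filesystem import FileSensor

with DAG(
    dag_id="example_file_sensor",  # hypothetical dag_id for this sketch
    start_date=datetime.datetime(2025, 1, 1),
    schedule=None,
):
    # Classic worker-based poking against a glob under the fs_default base path.
    wait_for_csv = FileSensor(
        task_id="wait_for_csv",
        filepath="incoming/*.csv",
        fs_conn_id="fs_default",
        poke_interval=60,
        timeout=60 * 60,
    )

    # Deferrable variant: hands the wait to FileTrigger on the triggerer, and
    # start_from_trigger skips the worker entirely (per the docstring above).
    wait_for_csv_deferred = FileSensor(
        task_id="wait_for_csv_deferred",
        filepath="incoming/*.csv",
        deferrable=True,
        start_from_trigger=True,
    )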
airflow/providers/standard/sensors/python.py
@@ -0,0 +1,84 @@
+ #
+ # Licensed to the Apache Software Foundation (ASF) under one
+ # or more contributor license agreements. See the NOTICE file
+ # distributed with this work for additional information
+ # regarding copyright ownership. The ASF licenses this file
+ # to you under the Apache License, Version 2.0 (the
+ # "License"); you may not use this file except in compliance
+ # with the License. You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing,
+ # software distributed under the License is distributed on an
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ # KIND, either express or implied. See the License for the
+ # specific language governing permissions and limitations
+ # under the License.
+ from __future__ import annotations
+
+ from collections.abc import Mapping, Sequence
+ from typing import TYPE_CHECKING, Any, Callable
+
+ from airflow.sensors.base import BaseSensorOperator, PokeReturnValue
+ from airflow.utils.context import context_merge
+ from airflow.utils.operator_helpers import determine_kwargs
+
+ if TYPE_CHECKING:
+     try:
+         from airflow.sdk.definitions.context import Context
+     except ImportError:
+         # TODO: Remove once provider drops support for Airflow 2
+         from airflow.utils.context import Context
+
+
+ class PythonSensor(BaseSensorOperator):
+     """
+     Waits for a Python callable to return True.
+
+     User could put input argument in templates_dict
+     e.g ``templates_dict = {'start_ds': 1970}``
+     and access the argument by calling ``kwargs['templates_dict']['start_ds']``
+     in the callable
+
+     :param python_callable: A reference to an object that is callable
+     :param op_kwargs: a dictionary of keyword arguments that will get unpacked
+         in your function
+     :param op_args: a list of positional arguments that will get unpacked when
+         calling your callable
+     :param templates_dict: a dictionary where the values are templates that
+         will get templated by the Airflow engine sometime between
+         ``__init__`` and ``execute`` takes place and are made available
+         in your callable's context after the template has been applied.
+
+     .. seealso::
+         For more information on how to use this sensor, take a look at the guide:
+         :ref:`howto/operator:PythonSensor`
+     """
+
+     template_fields: Sequence[str] = ("templates_dict", "op_args", "op_kwargs")
+
+     def __init__(
+         self,
+         *,
+         python_callable: Callable,
+         op_args: list | None = None,
+         op_kwargs: Mapping[str, Any] | None = None,
+         templates_dict: dict | None = None,
+         **kwargs,
+     ):
+         super().__init__(**kwargs)
+         self.python_callable = python_callable
+         self.op_args = op_args or []
+         self.op_kwargs = op_kwargs or {}
+         self.templates_dict = templates_dict
+
+     def poke(self, context: Context) -> PokeReturnValue | bool:
+         context_merge(context, self.op_kwargs, templates_dict=self.templates_dict)
+         self.op_kwargs = determine_kwargs(self.python_callable, self.op_args, context)
+
+         self.log.info("Poking callable: %s", str(self.python_callable))
+         return_value = self.python_callable(*self.op_args, **self.op_kwargs)
+         if isinstance(return_value, PokeReturnValue):
+             return return_value
+         return PokeReturnValue(bool(return_value))
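A hedged usage sketch of PythonSensor showing the PokeReturnValue path (the callable, threshold, and dag_id are made up for illustration):

import datetime

from airflow import DAG
from airflow.providers.standard.sensors.python import PythonSensor
from airflow.sensors.base import PokeReturnValue


def _check_ready(threshold, **context):
    # Stand-in readiness check; a real callable might query an API or a table.
    observed = 5
    # Returning PokeReturnValue lets the sensor succeed and push an XCom in one go.
    return PokeReturnValue(is_done=observed >= threshold, xcom_value=observed)


with DAG(
    dag_id="example_python_sensor",  # hypothetical
    start_date=datetime.datetime(2025, 1, 1),
    schedule=None,
):
    wait_ready = PythonSensor(
        task_id="wait_ready",
        python_callable=_check_ready,
        op_kwargs={"threshold": 3},
        poke_interval=30,
    )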
airflow/providers/standard/sensors/time.py
@@ -18,15 +18,36 @@
  from __future__ import annotations

  import datetime
+ from dataclasses import dataclass
  from typing import TYPE_CHECKING, Any, NoReturn

+ from airflow.providers.standard.triggers.temporal import DateTimeTrigger
+ from airflow.providers.standard.version_compat import AIRFLOW_V_2_10_PLUS
  from airflow.sensors.base import BaseSensorOperator
- from airflow.triggers.base import StartTriggerArgs
- from airflow.triggers.temporal import DateTimeTrigger
+
+ try:
+     from airflow.triggers.base import StartTriggerArgs
+ except ImportError:
+     # TODO: Remove this when min airflow version is 2.10.0 for standard provider
+     @dataclass
+     class StartTriggerArgs:  # type: ignore[no-redef]
+         """Arguments required for start task execution from triggerer."""
+
+         trigger_cls: str
+         next_method: str
+         trigger_kwargs: dict[str, Any] | None = None
+         next_kwargs: dict[str, Any] | None = None
+         timeout: datetime.timedelta | None = None
+
+
  from airflow.utils import timezone

  if TYPE_CHECKING:
-     from airflow.utils.context import Context
+     try:
+         from airflow.sdk.definitions.context import Context
+     except ImportError:
+         # TODO: Remove once provider drops support for Airflow 2
+         from airflow.utils.context import Context


  class TimeSensor(BaseSensorOperator):
@@ -68,7 +89,7 @@ class TimeSensorAsync(BaseSensorOperator):
      """

      start_trigger_args = StartTriggerArgs(
-         trigger_cls="airflow.triggers.temporal.DateTimeTrigger",
+         trigger_cls="airflow.providers.standard.triggers.temporal.DateTimeTrigger",
          trigger_kwargs={"moment": "", "end_from_trigger": False},
          next_method="execute_complete",
          next_kwargs=None,
@@ -102,7 +123,9 @@ class TimeSensorAsync(BaseSensorOperator):

      def execute(self, context: Context) -> NoReturn:
          self.defer(
-             trigger=DateTimeTrigger(moment=self.target_datetime, end_from_trigger=self.end_from_trigger),
+             trigger=DateTimeTrigger(moment=self.target_datetime, end_from_trigger=self.end_from_trigger)
+             if AIRFLOW_V_2_10_PLUS
+             else DateTimeTrigger(moment=self.target_datetime),
              method_name="execute_complete",
          )

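The net effect of the two hunks above is that TimeSensorAsync now defers to the provider-packaged DateTimeTrigger and only forwards end_from_trigger on Airflow 2.10+. A minimal sketch (dag_id and target time are placeholders):

import datetime

from airflow import DAG
from airflow.providers.standard.sensors.time import TimeSensorAsync

with DAG(
    dag_id="example_time_sensor_async",  # hypothetical
    start_date=datetime.datetime(2025, 1, 1),
    schedule=None,
):
    # Releases its worker slot and resumes via the triggerer at 09:00.
    wait_until_nine = TimeSensorAsync(
        task_id="wait_until_nine",
        target_time=datetime.time(9, 0),
    )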
airflow/providers/standard/sensors/time_delta.py
@@ -17,41 +17,77 @@
  # under the License.
  from __future__ import annotations

- from datetime import timedelta
+ from datetime import datetime, timedelta
  from time import sleep
  from typing import TYPE_CHECKING, Any, NoReturn

+ from packaging.version import Version
+
  from airflow.configuration import conf
  from airflow.exceptions import AirflowSkipException
+ from airflow.providers.standard.triggers.temporal import DateTimeTrigger, TimeDeltaTrigger
+ from airflow.providers.standard.version_compat import AIRFLOW_V_3_0_PLUS
  from airflow.sensors.base import BaseSensorOperator
- from airflow.triggers.temporal import DateTimeTrigger, TimeDeltaTrigger
  from airflow.utils import timezone

  if TYPE_CHECKING:
-     from airflow.utils.context import Context
+     try:
+         from airflow.sdk.definitions.context import Context
+     except ImportError:
+         # TODO: Remove once provider drops support for Airflow 2
+         from airflow.utils.context import Context
+
+
+ def _get_airflow_version():
+     from airflow import __version__ as airflow_version
+
+     return Version(Version(airflow_version).base_version)


  class TimeDeltaSensor(BaseSensorOperator):
      """
-     Waits for a timedelta after the run's data interval.
+     Waits for a timedelta.

-     :param delta: time length to wait after the data interval before succeeding.
+     The delta will be evaluated against data_interval_end if present for the dag run,
+     otherwise run_after will be used.
+
+     :param delta: time to wait before succeeding.

      .. seealso::
          For more information on how to use this sensor, take a look at the guide:
          :ref:`howto/operator:TimeDeltaSensor`

-
      """

      def __init__(self, *, delta, **kwargs):
          super().__init__(**kwargs)
          self.delta = delta

-     def poke(self, context: Context):
-         target_dttm = context["data_interval_end"]
-         target_dttm += self.delta
-         self.log.info("Checking if the time (%s) has come", target_dttm)
+     def _derive_base_time(self, context: Context) -> datetime:
+         """
+         Get the "base time" against which the delta should be calculated.
+
+         If data_interval_end is populated, use it; else use run_after.
+         """
+         data_interval_end = context.get("data_interval_end")
+         if data_interval_end:
+             if not isinstance(data_interval_end, datetime):
+                 raise ValueError("`data_interval_end` returned non-datetime object")
+
+             return data_interval_end
+
+         if not data_interval_end and not AIRFLOW_V_3_0_PLUS:
+             raise ValueError("`data_interval_end` not found in task context.")
+
+         dag_run = context.get("dag_run")
+         if not dag_run:
+             raise ValueError("`dag_run` not found in task context")
+         return dag_run.run_after
+
+     def poke(self, context: Context) -> bool:
+         base_time = self._derive_base_time(context=context)
+         target_dttm = base_time + self.delta
+         self.log.info("Checking if the delta has elapsed base_time=%s, delta=%s", base_time, self.delta)
          return timezone.utcnow() > target_dttm

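The _derive_base_time fallback can be illustrated outside Airflow; this standalone sketch fakes the task context (SimpleNamespace stands in for the real DagRun model, and the version flag is assumed True):

import datetime
from types import SimpleNamespace

AIRFLOW_V_3_0_PLUS = True  # assumed for this sketch


def derive_base_time(context: dict) -> datetime.datetime:
    # Mirrors the method above: prefer data_interval_end, else run_after.
    data_interval_end = context.get("data_interval_end")
    if data_interval_end:
        return data_interval_end
    if not AIRFLOW_V_3_0_PLUS:
        raise ValueError("`data_interval_end` not found in task context.")
    dag_run = context.get("dag_run")
    if not dag_run:
        raise ValueError("`dag_run` not found in task context")
    return dag_run.run_after


# A scheduled run with a data interval uses its end...
print(derive_base_time({"data_interval_end": datetime.datetime(2025, 1, 1, tzinfo=datetime.timezone.utc)}))
# ...while a run without one (e.g. asset-triggered on Airflow 3) falls back to run_after.
print(derive_base_time({"dag_run": SimpleNamespace(run_after=datetime.datetime(2025, 1, 2, tzinfo=datetime.timezone.utc))}))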
@@ -75,19 +111,34 @@ class TimeDeltaSensorAsync(TimeDeltaSensor):
          self.end_from_trigger = end_from_trigger

      def execute(self, context: Context) -> bool | NoReturn:
-         target_dttm = context["data_interval_end"]
-         target_dttm += self.delta
+         base_time = self._derive_base_time(context=context)
+         target_dttm: datetime = base_time + self.delta
+
          if timezone.utcnow() > target_dttm:
              # If the target datetime is in the past, return immediately
              return True
          try:
-             trigger = DateTimeTrigger(moment=target_dttm, end_from_trigger=self.end_from_trigger)
+             if AIRFLOW_V_3_0_PLUS:
+                 trigger = DateTimeTrigger(moment=target_dttm, end_from_trigger=self.end_from_trigger)
+             else:
+                 trigger = DateTimeTrigger(moment=target_dttm)
          except (TypeError, ValueError) as e:
              if self.soft_fail:
                  raise AirflowSkipException("Skipping due to soft_fail is set to True.") from e
              raise

-         self.defer(trigger=trigger, method_name="execute_complete")
+         # todo: remove backcompat when min airflow version greater than 2.11
+         timeout: int | float | timedelta
+         if _get_airflow_version() >= Version("2.11.0"):
+             timeout = self.timeout
+         else:
+             timeout = timedelta(seconds=self.timeout)
+
+         self.defer(
+             trigger=trigger,
+             method_name="execute_complete",
+             timeout=timeout,
+         )

      def execute_complete(self, context: Context, event: Any = None) -> None:
          """Handle the event when the trigger fires and return immediately."""
@@ -121,7 +172,9 @@ class WaitSensor(BaseSensorOperator):
      def execute(self, context: Context) -> None:
          if self.deferrable:
              self.defer(
-                 trigger=TimeDeltaTrigger(self.time_to_wait, end_from_trigger=True),
+                 trigger=TimeDeltaTrigger(self.time_to_wait, end_from_trigger=True)
+                 if AIRFLOW_V_3_0_PLUS
+                 else TimeDeltaTrigger(self.time_to_wait),
                  method_name="execute_complete",
              )
          else:
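Taken together, the time_delta.py changes make the deferrable sensors usable on runs without a data interval and forward the sensor timeout to defer(). A hedged usage sketch (ids and durations are placeholders):

import datetime

from airflow import DAG
from airflow.providers.standard.sensors.time_delta import TimeDeltaSensorAsync, WaitSensor

with DAG(
    dag_id="example_time_delta",  # hypothetical
    start_date=datetime.datetime(2025, 1, 1),
    schedule=None,
):
    # Defers until base_time + delta; per the backcompat branch above, the timeout
    # is passed as-is on Airflow >= 2.11 and wrapped in a timedelta before that.
    wait_an_hour = TimeDeltaSensorAsync(
        task_id="wait_an_hour",
        delta=datetime.timedelta(hours=1),
        timeout=2 * 60 * 60,
    )

    # Fixed wall-clock wait; deferrable=True hands it to TimeDeltaTrigger.
    cool_down = WaitSensor(
        task_id="cool_down",
        time_to_wait=datetime.timedelta(minutes=10),
        deferrable=True,
    )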
airflow/providers/standard/sensors/weekday.py
@@ -17,14 +17,19 @@
  # under the License.
  from __future__ import annotations

- from typing import TYPE_CHECKING, Iterable
+ from collections.abc import Iterable
+ from typing import TYPE_CHECKING

+ from airflow.providers.standard.utils.weekday import WeekDay
  from airflow.sensors.base import BaseSensorOperator
  from airflow.utils import timezone
- from airflow.utils.weekday import WeekDay

  if TYPE_CHECKING:
-     from airflow.utils.context import Context
+     try:
+         from airflow.sdk.definitions.context import Context
+     except ImportError:
+         # TODO: Remove once provider drops support for Airflow 2
+         from airflow.utils.context import Context


  class DayOfWeekSensor(BaseSensorOperator):
@@ -49,7 +54,7 @@ class DayOfWeekSensor(BaseSensorOperator):
      **Example** (with :class:`~airflow.utils.weekday.WeekDay` enum): ::

          # import WeekDay Enum
-         from airflow.utils.weekday import WeekDay
+         from airflow.providers.standard.utils.weekday import WeekDay

          weekend_check = DayOfWeekSensor(
              task_id="weekend_check",
@@ -98,7 +103,20 @@ class DayOfWeekSensor(BaseSensorOperator):
              self.week_day,
              WeekDay(timezone.utcnow().isoweekday()).name,
          )
+
          if self.use_task_logical_date:
-             return context["logical_date"].isoweekday() in self._week_day_num
-         else:
-             return timezone.utcnow().isoweekday() in self._week_day_num
+             logical_date = context.get("logical_date")
+             dag_run = context.get("dag_run")
+
+             if not (logical_date or (dag_run and dag_run.run_after)):
+                 raise ValueError(
+                     "Either `logical_date` or `run_after` should be provided in the task context when "
+                     "`use_task_logical_date` is True"
+                 )
+
+             determined_weekday_num = (
+                 logical_date.isoweekday() if logical_date else dag_run.run_after.isoweekday()  # type: ignore[union-attr]
+             )
+
+             return determined_weekday_num in self._week_day_num
+         return timezone.utcnow().isoweekday() in self._week_day_num
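A short sketch of the sensor with the relocated WeekDay enum, mirroring the docstring example above (the dag_id is a placeholder):

import datetime

from airflow import DAG
from airflow.providers.standard.sensors.weekday import DayOfWeekSensor
from airflow.providers.standard.utils.weekday import WeekDay

with DAG(
    dag_id="example_weekend_check",  # hypothetical
    start_date=datetime.datetime(2025, 1, 1),
    schedule=None,
):
    # With use_task_logical_date=True, the check now falls back to
    # dag_run.run_after when the run has no logical_date (see hunk above).
    weekend_check = DayOfWeekSensor(
        task_id="weekend_check",
        week_day={WeekDay.SATURDAY, WeekDay.SUNDAY},
        use_task_logical_date=True,
        poke_interval=60 * 60,
    )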
airflow/providers/standard/triggers/__init__.py
@@ -0,0 +1,16 @@
+ # Licensed to the Apache Software Foundation (ASF) under one
+ # or more contributor license agreements. See the NOTICE file
+ # distributed with this work for additional information
+ # regarding copyright ownership. The ASF licenses this file
+ # to you under the Apache License, Version 2.0 (the
+ # "License"); you may not use this file except in compliance
+ # with the License. You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing,
+ # software distributed under the License is distributed on an
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ # KIND, either express or implied. See the License for the
+ # specific language governing permissions and limitations
+ # under the License.