apache-airflow-providers-standard 1.6.0rc2__py3-none-any.whl → 1.7.0rc1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (24)
  1. airflow/providers/standard/__init__.py +1 -1
  2. airflow/providers/standard/example_dags/example_bash_decorator.py +6 -1
  3. airflow/providers/standard/example_dags/example_branch_operator.py +6 -1
  4. airflow/providers/standard/example_dags/example_branch_operator_decorator.py +6 -1
  5. airflow/providers/standard/example_dags/example_hitl_operator.py +11 -6
  6. airflow/providers/standard/example_dags/example_sensors.py +6 -1
  7. airflow/providers/standard/example_dags/example_short_circuit_decorator.py +6 -1
  8. airflow/providers/standard/example_dags/example_short_circuit_operator.py +6 -1
  9. airflow/providers/standard/exceptions.py +1 -1
  10. airflow/providers/standard/operators/branch.py +11 -5
  11. airflow/providers/standard/operators/hitl.py +161 -2
  12. airflow/providers/standard/operators/python.py +1 -1
  13. airflow/providers/standard/operators/trigger_dagrun.py +8 -5
  14. airflow/providers/standard/sensors/date_time.py +9 -9
  15. airflow/providers/standard/sensors/filesystem.py +4 -8
  16. airflow/providers/standard/sensors/time.py +4 -8
  17. airflow/providers/standard/triggers/hitl.py +8 -4
  18. airflow/providers/standard/utils/sensor_helper.py +3 -2
  19. airflow/providers/standard/utils/skipmixin.py +6 -3
  20. airflow/providers/standard/version_compat.py +3 -1
  21. {apache_airflow_providers_standard-1.6.0rc2.dist-info → apache_airflow_providers_standard-1.7.0rc1.dist-info}/METADATA +7 -8
  22. {apache_airflow_providers_standard-1.6.0rc2.dist-info → apache_airflow_providers_standard-1.7.0rc1.dist-info}/RECORD +24 -24
  23. {apache_airflow_providers_standard-1.6.0rc2.dist-info → apache_airflow_providers_standard-1.7.0rc1.dist-info}/WHEEL +0 -0
  24. {apache_airflow_providers_standard-1.6.0rc2.dist-info → apache_airflow_providers_standard-1.7.0rc1.dist-info}/entry_points.txt +0 -0
airflow/providers/standard/__init__.py
@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version
 
 __all__ = ["__version__"]
 
-__version__ = "1.6.0"
+__version__ = "1.7.0"
 
 if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
     "2.10.0"
airflow/providers/standard/example_dags/example_bash_decorator.py
@@ -23,7 +23,12 @@ from airflow.exceptions import AirflowSkipException
 from airflow.providers.standard.operators.empty import EmptyOperator
 from airflow.providers.standard.utils.weekday import WeekDay
 from airflow.sdk import chain, dag, task
-from airflow.utils.trigger_rule import TriggerRule
+
+try:
+    from airflow.sdk import TriggerRule
+except ImportError:
+    # Compatibility for Airflow < 3.1
+    from airflow.utils.trigger_rule import TriggerRule  # type: ignore[no-redef,attr-defined]
 
 
 @dag(schedule=None, start_date=pendulum.datetime(2023, 1, 1, tz="UTC"), catchup=False)
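Note: the same TriggerRule import shim recurs in the other example DAGs below (example_branch_operator.py, example_branch_operator_decorator.py, example_sensors.py, example_short_circuit_decorator.py, example_short_circuit_operator.py). A minimal sketch of how the pattern behaves in user code, assuming a hypothetical DAG (the dag_id and the `join` task are illustrations, not part of the diff):

    import pendulum

    from airflow.providers.standard.operators.empty import EmptyOperator
    from airflow.sdk import DAG

    try:
        from airflow.sdk import TriggerRule  # Airflow >= 3.1
    except ImportError:
        # Compatibility for Airflow < 3.1
        from airflow.utils.trigger_rule import TriggerRule

    with DAG(dag_id="trigger_rule_shim_demo", start_date=pendulum.datetime(2025, 1, 1, tz="UTC"), schedule=None):
        # The enum members are identical under both import paths
        join = EmptyOperator(task_id="join", trigger_rule=TriggerRule.NONE_FAILED_MIN_ONE_SUCCESS)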
airflow/providers/standard/example_dags/example_branch_operator.py
@@ -39,7 +39,12 @@ from airflow.providers.standard.operators.python import (
     PythonVirtualenvOperator,
 )
 from airflow.sdk import DAG, Label
-from airflow.utils.trigger_rule import TriggerRule
+
+try:
+    from airflow.sdk import TriggerRule
+except ImportError:
+    # Compatibility for Airflow < 3.1
+    from airflow.utils.trigger_rule import TriggerRule  # type: ignore[no-redef,attr-defined]
 
 PATH_TO_PYTHON_BINARY = sys.executable
 
airflow/providers/standard/example_dags/example_branch_operator_decorator.py
@@ -32,7 +32,12 @@ import pendulum
 
 from airflow.providers.standard.operators.empty import EmptyOperator
 from airflow.sdk import DAG, Label, task
-from airflow.utils.trigger_rule import TriggerRule
+
+try:
+    from airflow.sdk import TriggerRule
+except ImportError:
+    # Compatibility for Airflow < 3.1
+    from airflow.utils.trigger_rule import TriggerRule  # type: ignore[no-redef,attr-defined]
 
 PATH_TO_PYTHON_BINARY = sys.executable
 
airflow/providers/standard/example_dags/example_hitl_operator.py
@@ -37,6 +37,7 @@ if TYPE_CHECKING:
 # [START hitl_tutorial]
 
 
+# [START hitl_notifier]
 class LocalLogNotifier(BaseNotifier):
     """Simple notifier to demonstrate HITL notification without setup any connection."""
 
@@ -46,10 +47,14 @@ class LocalLogNotifier(BaseNotifier):
         self.message = message
 
     def notify(self, context: Context) -> None:
+        url = HITLOperator.generate_link_to_ui_from_context(
+            context=context,
+            base_url="http://localhost:28080",
+        )
         self.log.info(self.message)
+        self.log.info("Url to respond %s", url)
 
 
-# [START htil_notifer]
 hitl_request_callback = LocalLogNotifier(
     message="""
     [HITL]
@@ -65,7 +70,7 @@ hitl_success_callback = LocalLogNotifier(
     message="{% set task_id = task.task_id -%}{{ ti.xcom_pull(task_ids=task_id) }}"
 )
 hitl_failure_callback = LocalLogNotifier(message="Request to response to '{{ task.subject }}' failed")
-# [END htil_notifer]
+# [END hitl_notifier]
 
 with DAG(
     dag_id="example_hitl_operator",
@@ -95,7 +100,7 @@ with DAG(
     )
     # [END howto_hitl_operator]
 
-    # [START howto_hitl_operator_muliple]
+    # [START howto_hitl_operator_multiple]
     wait_for_multiple_options = HITLOperator(
         task_id="wait_for_multiple_options",
         subject="Please choose option to proceed: ",
@@ -105,7 +110,7 @@
         on_success_callback=hitl_success_callback,
         on_failure_callback=hitl_failure_callback,
     )
-    # [END howto_hitl_operator_muliple]
+    # [END howto_hitl_operator_multiple]
 
     # [START howto_hitl_operator_timeout]
     wait_for_default_option = HITLOperator(
@@ -127,8 +132,8 @@
         body="""
         Input: {{ ti.xcom_pull(task_ids='wait_for_input')["params_input"]["information"] }}
         Option: {{ ti.xcom_pull(task_ids='wait_for_option')["chosen_options"] }}
-        Multiple Options: {{ ti.xcom_pull(task_ids='wait_for_option')["chosen_options"] }}
-        Timeout Option: {{ ti.xcom_pull(task_ids='wait_for_option')["chosen_options"] }}
+        Multiple Options: {{ ti.xcom_pull(task_ids='wait_for_multiple_options')["chosen_options"] }}
+        Timeout Option: {{ ti.xcom_pull(task_ids='wait_for_default_option')["chosen_options"] }}
         """,
         defaults="Reject",
         execution_timeout=datetime.timedelta(minutes=1),
airflow/providers/standard/example_dags/example_sensors.py
@@ -30,7 +30,12 @@ from airflow.providers.standard.sensors.time_delta import TimeDeltaSensor
 from airflow.providers.standard.sensors.weekday import DayOfWeekSensor
 from airflow.providers.standard.utils.weekday import WeekDay
 from airflow.sdk import DAG
-from airflow.utils.trigger_rule import TriggerRule
+
+try:
+    from airflow.sdk import TriggerRule
+except ImportError:
+    # Compatibility for Airflow < 3.1
+    from airflow.utils.trigger_rule import TriggerRule  # type: ignore[no-redef,attr-defined]
 
 
 # [START example_callables]
airflow/providers/standard/example_dags/example_short_circuit_decorator.py
@@ -22,7 +22,12 @@ import pendulum
 
 from airflow.providers.standard.operators.empty import EmptyOperator
 from airflow.sdk import chain, dag, task
-from airflow.utils.trigger_rule import TriggerRule
+
+try:
+    from airflow.sdk import TriggerRule
+except ImportError:
+    # Compatibility for Airflow < 3.1
+    from airflow.utils.trigger_rule import TriggerRule  # type: ignore[no-redef,attr-defined]
 
 
 @dag(schedule=None, start_date=pendulum.datetime(2021, 1, 1, tz="UTC"), catchup=False, tags=["example"])
airflow/providers/standard/example_dags/example_short_circuit_operator.py
@@ -24,7 +24,12 @@ import pendulum
 from airflow.providers.standard.operators.empty import EmptyOperator
 from airflow.providers.standard.operators.python import ShortCircuitOperator
 from airflow.sdk import DAG, chain
-from airflow.utils.trigger_rule import TriggerRule
+
+try:
+    from airflow.sdk import TriggerRule
+except ImportError:
+    # Compatibility for Airflow < 3.1
+    from airflow.utils.trigger_rule import TriggerRule  # type: ignore[no-redef,attr-defined]
 
 with DAG(
     dag_id="example_short_circuit_operator",
airflow/providers/standard/exceptions.py
@@ -62,4 +62,4 @@ class HITLTriggerEventError(AirflowException):
 
 
 class HITLTimeoutError(HITLTriggerEventError):
-    """Raised when HILTOperator timeouts."""
+    """Raised when HITLOperator timeouts."""
airflow/providers/standard/operators/branch.py
@@ -37,11 +37,17 @@ if TYPE_CHECKING:
 class BranchMixIn(SkipMixin):
     """Utility helper which handles the branching as one-liner."""
 
-    def do_branch(self, context: Context, branches_to_execute: str | Iterable[str]) -> str | Iterable[str]:
+    def do_branch(
+        self, context: Context, branches_to_execute: str | Iterable[str] | None
+    ) -> str | Iterable[str] | None:
         """Implement the handling of branching including logging."""
         self.log.info("Branch into %s", branches_to_execute)
-        branch_task_ids = self._expand_task_group_roots(context["ti"], branches_to_execute)
-        self.skip_all_except(context["ti"], branch_task_ids)
+        if branches_to_execute is None:
+            # When None is returned, skip all downstream tasks
+            self.skip_all_except(context["ti"], None)
+        else:
+            branch_task_ids = self._expand_task_group_roots(context["ti"], branches_to_execute)
+            self.skip_all_except(context["ti"], branch_task_ids)
         return branches_to_execute
 
     def _expand_task_group_roots(
@@ -86,13 +92,13 @@ class BaseBranchOperator(BaseOperator, BranchMixIn):
 
     inherits_from_skipmixin = True
 
-    def choose_branch(self, context: Context) -> str | Iterable[str]:
+    def choose_branch(self, context: Context) -> str | Iterable[str] | None:
         """
         Abstract method to choose which branch to run.
 
         Subclasses should implement this, running whatever logic is
         necessary to choose a branch and returning a task_id or list of
-        task_ids.
+        task_ids. If None is returned, all downstream tasks will be skipped.
 
         :param context: Context dictionary as passed to execute()
         """
airflow/providers/standard/operators/hitl.py
@@ -20,26 +20,28 @@ import logging
 
 from airflow.exceptions import AirflowOptionalProviderFeatureException
 from airflow.providers.standard.version_compat import AIRFLOW_V_3_1_PLUS
-from airflow.sdk.bases.notifier import BaseNotifier
 
 if not AIRFLOW_V_3_1_PLUS:
     raise AirflowOptionalProviderFeatureException("Human in the loop functionality needs Airflow 3.1+.")
 
-
 from collections.abc import Collection, Mapping, Sequence
 from typing import TYPE_CHECKING, Any
+from urllib.parse import ParseResult, urlencode, urlparse, urlunparse
 
+from airflow.configuration import conf
 from airflow.providers.standard.exceptions import HITLTimeoutError, HITLTriggerEventError
 from airflow.providers.standard.operators.branch import BranchMixIn
 from airflow.providers.standard.triggers.hitl import HITLTrigger, HITLTriggerEventSuccessPayload
 from airflow.providers.standard.utils.skipmixin import SkipMixin
 from airflow.providers.standard.version_compat import BaseOperator
+from airflow.sdk.bases.notifier import BaseNotifier
 from airflow.sdk.definitions.param import ParamsDict
 from airflow.sdk.execution_time.hitl import upsert_hitl_detail
 from airflow.sdk.timezone import utcnow
 
 if TYPE_CHECKING:
     from airflow.sdk.definitions.context import Context
+    from airflow.sdk.types import RuntimeTaskInstanceProtocol
 
 
 class HITLOperator(BaseOperator):
@@ -68,6 +70,7 @@ class HITLOperator(BaseOperator):
         multiple: bool = False,
         params: ParamsDict | dict[str, Any] | None = None,
         notifiers: Sequence[BaseNotifier] | BaseNotifier | None = None,
+        respondents: str | list[str] | None = None,
         **kwargs,
     ) -> None:
         super().__init__(**kwargs)
@@ -83,14 +86,32 @@
         self.notifiers: Sequence[BaseNotifier] = (
             [notifiers] if isinstance(notifiers, BaseNotifier) else notifiers or []
         )
+        self.respondents = [respondents] if isinstance(respondents, str) else respondents
 
         self.validate_options()
+        self.validate_params()
         self.validate_defaults()
 
     def validate_options(self) -> None:
+        """
+        Validate the `options` attribute of the instance.
+
+        Raises:
+            ValueError: If `options` is empty.
+        """
         if not self.options:
             raise ValueError('"options" cannot be empty.')
 
+    def validate_params(self) -> None:
+        """
+        Validate the `params` attribute of the instance.
+
+        Raises:
+            ValueError: If `"_options"` key is present in `params`, which is not allowed.
+        """
+        if "_options" in self.params:
+            raise ValueError('"_options" is not allowed in params')
+
     def validate_defaults(self) -> None:
         """
         Validate whether the given defaults pass the following criteria.
@@ -117,6 +138,7 @@
             defaults=self.defaults,
             multiple=self.multiple,
             params=self.serialized_params,
+            respondents=self.respondents,
         )
 
         if self.execution_timeout:
@@ -178,6 +200,95 @@
         ):
             raise ValueError(f"params_input {params_input} does not match params {self.params}")
 
+    def generate_link_to_ui(
+        self,
+        *,
+        task_instance: RuntimeTaskInstanceProtocol,
+        base_url: str | None = None,
+        options: str | list[str] | None = None,
+        params_input: dict[str, Any] | None = None,
+    ) -> str:
+        """
+        Generate a URL link to the "required actions" page for a specific task instance.
+
+        This URL includes query parameters based on allowed options and parameters.
+
+        Args:
+            task_instance: The task instance to generate the link for.
+            base_url: Optional base URL to use. Defaults to ``api.base_url`` from config.
+            options: Optional subset of allowed options to include in the URL.
+            params_input: Optional subset of allowed params to include in the URL.
+
+        Raises:
+            ValueError: If any provided option or parameter is invalid.
+            ValueError: If no base_url can be determined.
+
+        Returns:
+            The full URL pointing to the required actions page with query parameters.
+        """
+        query_param: dict[str, Any] = {}
+        options = [options] if isinstance(options, str) else options
+        if options:
+            if diff := set(options) - set(self.options):
+                raise ValueError(f"options {diff} are not valid options")
+            query_param["_options"] = options
+
+        if params_input:
+            if diff := set(params_input.keys()) - set(self.params.keys()):
+                raise ValueError(f"params {diff} are not valid params")
+            query_param.update(params_input)
+
+        if not (base_url := base_url or conf.get("api", "base_url", fallback=None)):
+            raise ValueError("Not able to retrieve base_url")
+
+        query_param["map_index"] = task_instance.map_index
+
+        parsed_base_url: ParseResult = urlparse(base_url)
+        return urlunparse(
+            (
+                parsed_base_url.scheme,
+                parsed_base_url.netloc,
+                f"/dags/{task_instance.dag_id}/runs/{task_instance.run_id}/tasks/{task_instance.task_id}/required_actions",
+                "",
+                urlencode(query_param) if query_param else "",
+                "",
+            )
+        )
+
+    @staticmethod
+    def generate_link_to_ui_from_context(
+        *,
+        context: Context,
+        base_url: str | None = None,
+        options: list[str] | None = None,
+        params_input: dict[str, Any] | None = None,
+    ) -> str:
+        """
+        Generate a "required actions" page URL from a task context.
+
+        Delegates to ``generate_link_to_ui`` using the task and task_instance extracted from
+        the provided context.
+
+        Args:
+            context: The Airflow task context containing 'task' and 'task_instance'.
+            base_url: Optional base URL to use.
+            options: Optional list of allowed options to include.
+            params_input: Optional dictionary of allowed parameters to include.
+
+        Returns:
+            The full URL pointing to the required actions page with query parameters.
+        """
+        hitl_op = context["task"]
+        if not isinstance(hitl_op, HITLOperator):
+            raise ValueError("This method only supports HITLOperator")
+
+        return hitl_op.generate_link_to_ui(
+            task_instance=context["task_instance"],
+            base_url=base_url,
+            options=options,
+            params_input=params_input,
+        )
+
 
 class ApprovalOperator(HITLOperator, SkipMixin):
     """Human-in-the-loop Operator that has only 'Approval' and 'Reject' options."""
@@ -239,9 +350,57 @@ class HITLBranchOperator(HITLOperator, BranchMixIn):
 
     inherits_from_skipmixin = True
 
+    def __init__(self, *, options_mapping: dict[str, str] | None = None, **kwargs) -> None:
+        """
+        Initialize HITLBranchOperator.
+
+        Args:
+            options_mapping:
+                A dictionary mapping option labels (must match entries in `self.options`)
+                to string values (e.g., task IDs). Defaults to an empty dict if not provided.
+
+        Raises:
+            ValueError:
+                - If `options_mapping` contains keys not present in `self.options`.
+                - If any value in `options_mapping` is not a string.
+        """
+        super().__init__(**kwargs)
+        self.options_mapping = options_mapping or {}
+        self.validate_options_mapping()
+
+    def validate_options_mapping(self) -> None:
+        """
+        Validate that `options_mapping` keys match `self.options` and all values are strings.
+
+        Raises:
+            ValueError: If any key is not in `self.options` or any value is not a string.
+        """
+        if not self.options_mapping:
+            return
+
+        # Validate that the mapping keys are all among the configured options
+        invalid_keys = set(self.options_mapping.keys()) - set(self.options)
+        if invalid_keys:
+            raise ValueError(
+                f"`options_mapping` contains keys that are not in `options`: {sorted(invalid_keys)}"
+            )
+
+        # Validate that all values are strings
+        invalid_entries = {
+            k: (v, type(v).__name__) for k, v in self.options_mapping.items() if not isinstance(v, str)
+        }
+        if invalid_entries:
+            raise ValueError(
+                f"`options_mapping` values must be strings (task_ids).\nInvalid entries: {invalid_entries}"
+            )
+
     def execute_complete(self, context: Context, event: dict[str, Any]) -> Any:
+        """Execute the operator and branch based on chosen options."""
         ret = super().execute_complete(context=context, event=event)
         chosen_options = ret["chosen_options"]
+
+        # Map options to task IDs using the mapping, fallback to original option
+        chosen_options = [self.options_mapping.get(option, option) for option in chosen_options]
         return self.do_branch(context=context, branches_to_execute=chosen_options)
 
 
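Taken together with the `execute_complete` change, `options_mapping` lets human-readable option labels route to differently named task ids, while unmapped labels still fall back to the label itself. A hedged usage sketch (the task ids are hypothetical; the parameter names come from the `__init__` added above):

    from airflow.providers.standard.operators.hitl import HITLBranchOperator

    route = HITLBranchOperator(
        task_id="route_on_review",
        subject="Please choose how to proceed",
        options=["Approve", "Reject"],
        options_mapping={
            "Approve": "publish_results",  # option label -> downstream task_id
            "Reject": "notify_author",
        },
    )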
airflow/providers/standard/operators/python.py
@@ -1076,7 +1076,7 @@ class ExternalPythonOperator(_BasePythonVirtualenvOperator):
 
     def _iter_serializable_context_keys(self):
         yield from self.BASE_SERIALIZABLE_CONTEXT_KEYS
-        if self._get_airflow_version_from_target_env():
+        if self.expect_airflow and self._get_airflow_version_from_target_env():
             yield from self.AIRFLOW_SERIALIZABLE_CONTEXT_KEYS
             yield from self.PENDULUM_SERIALIZABLE_CONTEXT_KEYS
         elif self._is_pendulum_installed_in_target_env():
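The one-line guard above makes `ExternalPythonOperator` respect `expect_airflow=False` instead of always probing the target interpreter for an Airflow installation. A hedged sketch of the case it helps (the venv path and callable are hypothetical):

    from airflow.providers.standard.operators.python import ExternalPythonOperator


    def crunch_numbers():
        return sum(range(10))


    run_plain = ExternalPythonOperator(
        task_id="run_plain",
        python="/opt/venvs/plain/bin/python",  # a venv without Airflow installed
        expect_airflow=False,  # Airflow context keys are no longer yielded for this env
        python_callable=crunch_numbers,
    )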
airflow/providers/standard/operators/trigger_dagrun.py
@@ -35,11 +35,15 @@ from airflow.exceptions import (
     DagRunAlreadyExists,
 )
 from airflow.models.dag import DagModel
-from airflow.models.dagbag import DagBag
 from airflow.models.dagrun import DagRun
+from airflow.models.serialized_dag import SerializedDagModel
 from airflow.providers.standard.triggers.external_task import DagStateTrigger
-from airflow.providers.standard.version_compat import AIRFLOW_V_3_0_PLUS, BaseOperator, BaseOperatorLink
-from airflow.utils import timezone
+from airflow.providers.standard.version_compat import (
+    AIRFLOW_V_3_0_PLUS,
+    BaseOperator,
+    BaseOperatorLink,
+    timezone,
+)
 from airflow.utils.state import DagRunState
 from airflow.utils.types import NOTSET, ArgNotSet, DagRunType
 
@@ -275,8 +279,7 @@ class TriggerDagRunOperator(BaseOperator):
                 raise DagNotFound(f"Dag id {self.trigger_dag_id} not found in DagModel")
 
             # Note: here execution fails on database isolation mode. Needs structural changes for AIP-72
-            dag_bag = DagBag(dag_folder=dag_model.fileloc, read_dags_from_db=True)
-            dag = dag_bag.get_dag(self.trigger_dag_id)
+            dag = SerializedDagModel.get_dag(self.trigger_dag_id)
             dag.clear(start_date=dag_run.logical_date, end_date=dag_run.logical_date)
         else:
             if self.skip_when_already_exists:
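Both this file and utils/sensor_helper.py (below) swap `DagBag(read_dags_from_db=True)` for a direct read of the serialized DAG. A minimal sketch of the replacement call, assuming access to the Airflow metadata database (the dag_id is hypothetical):

    from airflow.models.serialized_dag import SerializedDagModel

    # Reads the DAG from the serialized_dag table; no DAG-folder parsing or DagBag construction
    dag = SerializedDagModel.get_dag("target_dag_id")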
airflow/providers/standard/sensors/date_time.py
@@ -24,12 +24,16 @@ from typing import TYPE_CHECKING, Any, NoReturn
 
 from airflow.providers.standard.triggers.temporal import DateTimeTrigger
 from airflow.providers.standard.version_compat import AIRFLOW_V_3_0_PLUS, BaseSensorOperator
-from airflow.utils import timezone
 
 try:
-    from airflow.triggers.base import StartTriggerArgs
-except ImportError:
-    # TODO: Remove this when min airflow version is 2.10.0 for standard provider
+    from airflow.sdk import timezone
+except ImportError:  # TODO: Remove this when min airflow version is 3.1.0 for standard provider
+    from airflow.utils import timezone  # type: ignore[attr-defined,no-redef]
+
+try:
+    from airflow.triggers.base import StartTriggerArgs  # type: ignore[no-redef]
+except ImportError:  # TODO: Remove this when min airflow version is 2.10.0 for standard provider
+
     @dataclass
     class StartTriggerArgs:  # type: ignore[no-redef]
         """Arguments required for start task execution from triggerer."""
@@ -42,11 +46,7 @@ except ImportError:
 
 
 if TYPE_CHECKING:
-    try:
-        from airflow.sdk.definitions.context import Context
-    except ImportError:
-        # TODO: Remove once provider drops support for Airflow 2
-        from airflow.utils.context import Context
+    from airflow.sdk import Context
 
 
 class DateTimeSensor(BaseSensorOperator):
airflow/providers/standard/sensors/filesystem.py
@@ -32,9 +32,9 @@ from airflow.providers.standard.triggers.file import FileTrigger
 from airflow.providers.standard.version_compat import BaseSensorOperator
 
 try:
-    from airflow.triggers.base import StartTriggerArgs
-except ImportError:
-    # TODO: Remove this when min airflow version is 2.10.0 for standard provider
+    from airflow.triggers.base import StartTriggerArgs  # type: ignore[no-redef]
+except ImportError:  # TODO: Remove this when min airflow version is 2.10.0 for standard provider
+
     @dataclass
     class StartTriggerArgs:  # type: ignore[no-redef]
         """Arguments required for start task execution from triggerer."""
@@ -47,11 +47,7 @@ except ImportError:
 
 
 if TYPE_CHECKING:
-    try:
-        from airflow.sdk.definitions.context import Context
-    except ImportError:
-        # TODO: Remove once provider drops support for Airflow 2
-        from airflow.utils.context import Context
+    from airflow.sdk import Context
 
 
 class FileSensor(BaseSensorOperator):
airflow/providers/standard/sensors/time.py
@@ -28,9 +28,9 @@ from airflow.providers.standard.triggers.temporal import DateTimeTrigger
 from airflow.providers.standard.version_compat import BaseSensorOperator
 
 try:
-    from airflow.triggers.base import StartTriggerArgs
-except ImportError:
-    # TODO: Remove this when min airflow version is 2.10.0 for standard provider
+    from airflow.triggers.base import StartTriggerArgs  # type: ignore[no-redef]
+except ImportError:  # TODO: Remove this when min airflow version is 2.10.0 for standard provider
+
     @dataclass
     class StartTriggerArgs:  # type: ignore[no-redef]
         """Arguments required for start task execution from triggerer."""
@@ -48,11 +48,7 @@ except ImportError:
     from airflow.utils import timezone  # type: ignore[attr-defined,no-redef]
 
 if TYPE_CHECKING:
-    try:
-        from airflow.sdk.definitions.context import Context
-    except ImportError:
-        # TODO: Remove once provider drops support for Airflow 2
-        from airflow.utils.context import Context
+    from airflow.sdk import Context
 
 
 class TimeSensor(BaseSensorOperator):
airflow/providers/standard/triggers/hitl.py
@@ -32,7 +32,7 @@ from asgiref.sync import sync_to_async
 
 from airflow.sdk.execution_time.hitl import (
     get_hitl_detail_content_detail,
-    update_htil_detail_response,
+    update_hitl_detail_response,
 )
 from airflow.sdk.timezone import utcnow
 from airflow.triggers.base import BaseTrigger, TriggerEvent
@@ -107,7 +107,7 @@ class HITLTrigger(BaseTrigger):
             )
             return
 
-        await sync_to_async(update_htil_detail_response)(
+        await sync_to_async(update_hitl_detail_response)(
             ti_id=self.ti_id,
             chosen_options=self.defaults,
             params_input=self.params,
@@ -127,12 +127,16 @@
             resp = await sync_to_async(get_hitl_detail_content_detail)(ti_id=self.ti_id)
             if resp.response_received and resp.chosen_options:
                 self.log.info(
-                    "[HITL] user=%s options=%s at %s", resp.user_id, resp.chosen_options, resp.response_at
+                    "[HITL] responded_by=%s (id=%s) options=%s at %s",
+                    resp.responded_user_name,
+                    resp.responded_user_id,
+                    resp.chosen_options,
+                    resp.response_at,
                 )
                 yield TriggerEvent(
                     HITLTriggerEventSuccessPayload(
                         chosen_options=resp.chosen_options,
-                        params_input=resp.params_input,
+                        params_input=resp.params_input or {},
                         timedout=False,
                     )
                 )
airflow/providers/standard/utils/sensor_helper.py
@@ -20,7 +20,8 @@ from typing import TYPE_CHECKING, Any, cast
 
 from sqlalchemy import func, select, tuple_
 
-from airflow.models import DagBag, DagRun, TaskInstance
+from airflow.models import DagRun, TaskInstance
+from airflow.models.serialized_dag import SerializedDagModel
 from airflow.providers.standard.version_compat import AIRFLOW_V_3_0_PLUS
 from airflow.utils.session import NEW_SESSION, provide_session
 
@@ -105,7 +106,7 @@ def _get_external_task_group_task_ids(dttm_filter, external_task_group_id, exter
     :param external_dag_id: The ID of the external DAG.
     :param session: airflow session object
     """
-    refreshed_dag_info = DagBag(read_dags_from_db=True).get_dag(external_dag_id, session)
+    refreshed_dag_info = SerializedDagModel.get_dag(external_dag_id, session=session)
     task_group = refreshed_dag_info.task_group_dict.get(external_task_group_id)
 
     if task_group:
airflow/providers/standard/utils/skipmixin.py
@@ -45,7 +45,7 @@ def _ensure_tasks(nodes: Iterable[DAGNode]) -> Sequence[Operator]:
         from airflow.sdk.definitions.mappedoperator import MappedOperator
     else:
         from airflow.models.baseoperator import BaseOperator  # type: ignore[no-redef]
-        from airflow.models.mappedoperator import MappedOperator
+        from airflow.models.mappedoperator import MappedOperator  # type: ignore[assignment,no-redef]
 
     return [n for n in nodes if isinstance(n, (BaseOperator, MappedOperator))]
 
@@ -129,14 +129,17 @@ class SkipMixin(LoggingMixin):
         if isinstance(branch_task_ids, str):
             branch_task_id_set = {branch_task_ids}
         elif isinstance(branch_task_ids, Iterable):
+            # Handle the case where invalid values are passed as elements of an Iterable
+            # Non-string values are considered invalid elements
            branch_task_id_set = set(branch_task_ids)
             invalid_task_ids_type = {
                 (bti, type(bti).__name__) for bti in branch_task_id_set if not isinstance(bti, str)
             }
             if invalid_task_ids_type:
                 raise AirflowException(
-                    f"'branch_task_ids' expected all task IDs are strings. "
-                    f"Invalid tasks found: {invalid_task_ids_type}."
+                    f"Unable to branch to the specified tasks. "
+                    f"The branching function returned invalid 'branch_task_ids': {invalid_task_ids_type}. "
+                    f"Please check that your function returns an Iterable of valid task IDs that exist in your DAG."
                 )
         elif branch_task_ids is None:
             branch_task_id_set = set()
airflow/providers/standard/version_compat.py
@@ -39,11 +39,12 @@ AIRFLOW_V_3_1_PLUS: bool = get_base_airflow_version_tuple() >= (3, 1, 0)
 # DecoratedOperator -- where `DecoratedOperator._handle_output` needed `xcom_push` to exist on `BaseOperator`
 # even though it wasn't used.
 if AIRFLOW_V_3_1_PLUS:
-    from airflow.sdk import BaseHook, BaseOperator
+    from airflow.sdk import BaseHook, BaseOperator, timezone
     from airflow.sdk.definitions.context import context_merge
 else:
     from airflow.hooks.base import BaseHook  # type: ignore[attr-defined,no-redef]
     from airflow.models.baseoperator import BaseOperator  # type: ignore[no-redef]
+    from airflow.utils import timezone  # type: ignore[attr-defined,no-redef]
     from airflow.utils.context import context_merge  # type: ignore[no-redef, attr-defined]
 
 if AIRFLOW_V_3_0_PLUS:
@@ -62,4 +63,5 @@ __all__ = [
     "BaseSensorOperator",
     "PokeReturnValue",
     "context_merge",
+    "timezone",
 ]
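With `timezone` re-exported here, callers such as trigger_dagrun.py above need only a single version-agnostic import. A minimal sketch of consuming the re-export:

    from airflow.providers.standard.version_compat import timezone

    # Resolves to airflow.sdk timezone on Airflow >= 3.1 and airflow.utils.timezone otherwise
    now = timezone.utcnow()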
{apache_airflow_providers_standard-1.6.0rc2.dist-info → apache_airflow_providers_standard-1.7.0rc1.dist-info}/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: apache-airflow-providers-standard
-Version: 1.6.0rc2
+Version: 1.7.0rc1
 Summary: Provider package apache-airflow-providers-standard for Apache Airflow
 Keywords: airflow-provider,standard,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -22,8 +22,8 @@ Classifier: Programming Language :: Python :: 3.13
 Classifier: Topic :: System :: Monitoring
 Requires-Dist: apache-airflow>=2.10.0rc1
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.staged.apache.org/docs/apache-airflow-providers-standard/1.6.0/changelog.html
-Project-URL: Documentation, https://airflow.staged.apache.org/docs/apache-airflow-providers-standard/1.6.0
+Project-URL: Changelog, https://airflow.staged.apache.org/docs/apache-airflow-providers-standard/1.7.0/changelog.html
+Project-URL: Documentation, https://airflow.staged.apache.org/docs/apache-airflow-providers-standard/1.7.0
 Project-URL: Mastodon, https://fosstodon.org/@airflow
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
@@ -54,9 +54,8 @@ Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/
 
 Package ``apache-airflow-providers-standard``
 
-Release: ``1.6.0``
+Release: ``1.7.0``
 
-Release Date: ``|PypiReleaseDate|``
 
 Airflow Standard Provider
 
@@ -68,12 +67,12 @@ This is a provider package for ``standard`` provider. All classes for this provi
 are in ``airflow.providers.standard`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-standard/1.6.0/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-standard/1.7.0/>`_.
 
 Installation
 ------------
 
-You can install this package on top of an existing Airflow 2 installation (see ``Requirements`` below
+You can install this package on top of an existing Airflow installation (see ``Requirements`` below
 for the minimum Airflow version supported) via
 ``pip install apache-airflow-providers-standard``
 
@@ -89,5 +88,5 @@ PIP package Version required
 ================== ==================
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-standard/1.6.0/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-standard/1.7.0/changelog.html>`_.
 
{apache_airflow_providers_standard-1.6.0rc2.dist-info → apache_airflow_providers_standard-1.7.0rc1.dist-info}/RECORD
@@ -1,8 +1,8 @@
 airflow/providers/standard/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
-airflow/providers/standard/__init__.py,sha256=HRU9nnD6x9Bv6Wqk4LgRJeoJvuUNVQQMsRz1FiSBuSc,1497
-airflow/providers/standard/exceptions.py,sha256=8CTMCs1xVk_06piBoyP3pKX6j29riukL8V2V7miPgEU,2269
+airflow/providers/standard/__init__.py,sha256=078RBvCMj07zXJOm392G_z8EFWs_t7BgZmXNmw-tdow,1497
+airflow/providers/standard/exceptions.py,sha256=866ohBix8CtwCv0Dzr1p1tMHq0tNqDH1tMEAmC6BdwE,2269
 airflow/providers/standard/get_provider_info.py,sha256=jhENLvqCXj0mzBPmJeAvPj7XWaaNuxPLhHVVA-9rqzs,7132
-airflow/providers/standard/version_compat.py,sha256=zXebxsadQSmoPBb2ypNHwJO65GY_E_mnNDmPNu2PPKI,2787
+airflow/providers/standard/version_compat.py,sha256=pWRfOwwGUd_uIfch0eiT5vOgwaA5rvohtW5Q46orG98,2891
 airflow/providers/standard/decorators/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/standard/decorators/bash.py,sha256=NuWB6q8AeTzWPeIIX-y3M-qN819BEcjNQwS32XTF6cs,4416
 airflow/providers/standard/decorators/branch_external_python.py,sha256=G1l3_sYM38wEWMRnzRuVGmAbd4uMN5D07zm3EM3-ZWo,2584
@@ -14,23 +14,23 @@ airflow/providers/standard/decorators/python_virtualenv.py,sha256=_8Ir-9tt1Ru5rH
 airflow/providers/standard/decorators/sensor.py,sha256=BEcTdFUrfiFnLLZN6Uqtiqqq6ScGRVg9JkZYZk4sc-U,3230
 airflow/providers/standard/decorators/short_circuit.py,sha256=zUZgcVDrZXXU_cOhvkxGJrSUsC0oD1HAGPNVDD3U2QM,2533
 airflow/providers/standard/example_dags/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/standard/example_dags/example_bash_decorator.py,sha256=6tJyRuHso9ITzTrKWK_kvMLKQFVzajkuWfkvaP-fMp4,3878
+airflow/providers/standard/example_dags/example_bash_decorator.py,sha256=uNnzOlKqHcpFQcFjiIh6VgZR60dtvs__4I2PIt6veCg,4025
 airflow/providers/standard/example_dags/example_bash_operator.py,sha256=tAS4cBsKW9B1nUukmYTpUw5Vf63476v_-tYjffyAtd4,2352
 airflow/providers/standard/example_dags/example_branch_datetime_operator.py,sha256=6sGzn1xlMaF3I-HMI7bvx78oyxZUw5WAF_Gja_ZUch0,3765
 airflow/providers/standard/example_dags/example_branch_day_of_week_operator.py,sha256=75ncMaGfkjxN0ULszqeXrSL5rHauUTNOhGiGAGPm3pw,2362
-airflow/providers/standard/example_dags/example_branch_operator.py,sha256=c4dr2drDHgYEN5ZdtlN_KH6sdXEHdxqEbsQDCHbPku4,5269
-airflow/providers/standard/example_dags/example_branch_operator_decorator.py,sha256=mF_87Kqxhui6sCsWvBXJ6m_w9bUoeqrA_rUVJuyIeq4,4801
+airflow/providers/standard/example_dags/example_branch_operator.py,sha256=YJCQ15x4g-fz3igQvqCYEqDStqXrgTIeLB4T_OykwPM,5416
+airflow/providers/standard/example_dags/example_branch_operator_decorator.py,sha256=5tuPNOq_Ufj9UZsATxQMIWaQbIJlkDETR8uz4Io-otw,4948
 airflow/providers/standard/example_dags/example_external_task_child_deferrable.py,sha256=o-ji3leJTBjiChEWoqVu4ykz1YVYUd8-ApmZwHFcNc8,1233
 airflow/providers/standard/example_dags/example_external_task_marker_dag.py,sha256=gssBjlfrGMDLZxTYOxo8ihXLbJ-3Uu31QodINGFWYNU,3650
 airflow/providers/standard/example_dags/example_external_task_parent_deferrable.py,sha256=8R1EqL0x1SgxcJtOb2EO4_NXmFgxiM-a0wPilDf7fis,2172
-airflow/providers/standard/example_dags/example_hitl_operator.py,sha256=i6UC0IX8s5anBx4AStd3-lPYGoguOxscIXl94Q4BdRw,5745
+airflow/providers/standard/example_dags/example_hitl_operator.py,sha256=izEvpOU7f_I63e46rakmk8KnoAQpgSeKDaD3LZ2fgwU,5962
 airflow/providers/standard/example_dags/example_latest_only.py,sha256=ac9WpLMWLzyuxZks74t3HojS7vRG2gynmQfGm13gwOI,1456
 airflow/providers/standard/example_dags/example_python_decorator.py,sha256=jveqPOw1GZzD3Z37_rYc8Q8hcyx8vCNjgetpO_P6qmg,4281
 airflow/providers/standard/example_dags/example_python_operator.py,sha256=3L6CZHK2Fb7zmA9tDhZ5QaEe38WJYlS4l35Gc7xJAoE,4761
 airflow/providers/standard/example_dags/example_sensor_decorator.py,sha256=zEZUh2YD17T8-bX-HGjar5cCqgi6qHIB0fXY29lboJA,1873
-airflow/providers/standard/example_dags/example_sensors.py,sha256=6gLWMyRA5GS71a7IvO_--Dd12aML9GyLYMOmjgyLg30,4658
-airflow/providers/standard/example_dags/example_short_circuit_decorator.py,sha256=Fo8ip4wUqFcfdKffMu40SvDfdIfgZ0Bmq_4S7aA8s7o,2420
-airflow/providers/standard/example_dags/example_short_circuit_operator.py,sha256=WatudzQM8K_6SJ8ndgGrhpcR89AJboOpPIK37U1mOAk,2414
+airflow/providers/standard/example_dags/example_sensors.py,sha256=Uwpgg5xC3j5O8L5rsEnwhHslTi6fASs2Nflvq9ATQ9o,4805
+airflow/providers/standard/example_dags/example_short_circuit_decorator.py,sha256=sLoVrNcJwrzGL4HnbAS2dSDWCeIGLeuQ3ivDwpdsrvM,2567
+airflow/providers/standard/example_dags/example_short_circuit_operator.py,sha256=70bIcTaVc8BX-iqqdkxxMbg8SigoRJGtNdHikglIYy4,2561
 airflow/providers/standard/example_dags/example_trigger_controller_dag.py,sha256=lHgCZC5Zkv4E3couxAYlpb1EazSmOF_qKqunIAfjskM,1752
 airflow/providers/standard/example_dags/sql/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/standard/example_dags/sql/sample.sql,sha256=OVk1qozBY58lp_tFtnyQiLSbKRdqKn4zbxJHH_Umdek,866
@@ -41,36 +41,36 @@ airflow/providers/standard/hooks/subprocess.py,sha256=Dte_ysI0kAx5wPUT96AkIFyEgS
 airflow/providers/standard/models/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/standard/operators/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/standard/operators/bash.py,sha256=yzNbi20dp6PWJzXZqJ05gyMsdb6-UsNuNrPGp2hiu1E,11366
-airflow/providers/standard/operators/branch.py,sha256=No3JoaqPjEAu7QlVf-20nzli0yYKbYorjcx885i5eeg,3970
+airflow/providers/standard/operators/branch.py,sha256=hiaTvIvHuN6-6dhdxJEot1FOTUmld1fr0c9lUVoCouo,4243
 airflow/providers/standard/operators/datetime.py,sha256=bYDdbfAyAlEXRRHjOgB06UhgDum6SPdd5I3u-ylPSaw,5005
 airflow/providers/standard/operators/empty.py,sha256=BTeZ4KRykaEHLZigSBkevcStCrbPdQpWDMnO3ZdtZqw,1338
-airflow/providers/standard/operators/hitl.py,sha256=zvMU5FPoaTmSPARhmKsr8qaQ0ajctdkE4V0GNaPHS3M,10054
+airflow/providers/standard/operators/hitl.py,sha256=MZ-61THumncfT6BcJ0xUUOW7sVqYsvRrHwgaNYlMbtI,16329
 airflow/providers/standard/operators/latest_only.py,sha256=VkU-nAI8QbIrmeiv4wYXBcZF0yKMkcFapormg0J5-As,5110
-airflow/providers/standard/operators/python.py,sha256=nPRQV6OweIkf976i7AJpZn9vQziqugReJ7xtDu27s8Q,53741
+airflow/providers/standard/operators/python.py,sha256=zRtonsvdBdUNIGjYQjrMkvz6eh0IZFwcQoFEekBe6Ys,53765
 airflow/providers/standard/operators/smooth.py,sha256=IMs5GjM42XEiroksIZ5flGQgxfRUbXZXCWxpshVinYQ,1396
-airflow/providers/standard/operators/trigger_dagrun.py,sha256=AgJaiB4u-X-HDvdYLdseFQO_zBYb6_UijV-qmDqwqo0,16576
+airflow/providers/standard/operators/trigger_dagrun.py,sha256=PngOqCvJqHGLU8nZLYXMjWHcfy7mf1h_iiTXAADUkGE,16516
 airflow/providers/standard/operators/weekday.py,sha256=Qg7LhXYtybVSGZn8uQqF-r7RB7zOXfe3R6vSGVa_rJk,5083
 airflow/providers/standard/sensors/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/standard/sensors/bash.py,sha256=jiysK84IwnVpQj1_lE65E_pSPE0FO82GGtqXf_a8STA,4984
-airflow/providers/standard/sensors/date_time.py,sha256=W4lN-EXCIiJkPf6FKvJ4yx7X9vSCfKT7YjVbnjtmkrM,6481
+airflow/providers/standard/sensors/date_time.py,sha256=h1Y1E6AqDxvAFJZw56ni3R8TEeUgn4OBI04bd5L-vt0,6516
 airflow/providers/standard/sensors/external_task.py,sha256=lDEg2Zbwp79f6VV6uH3PXI-NiHbL4IMAO4z-1VDl4gA,28695
-airflow/providers/standard/sensors/filesystem.py,sha256=jDgxZQ4WXRv1PSjc2o4K0Iq_AxnaPw7yIUnafK_VpaM,6050
+airflow/providers/standard/sensors/filesystem.py,sha256=Z6Fiism8rMA8LzKUBnxOWL3ErUTtkKQBPp7_ipbGPAU,5902
 airflow/providers/standard/sensors/python.py,sha256=gFlZf3h60UtgJCarG9FN30cnPxKyLNVn46nNxu6O9aQ,3415
-airflow/providers/standard/sensors/time.py,sha256=9kfamlG4mNJ1T39rOrhuAW41NiXFPP6RxU1ytiTIsLs,5271
+airflow/providers/standard/sensors/time.py,sha256=tUDXe8-oOIoq7R_FXE62GPYTGngJqRBoS1X44Y6HR4E,5123
 airflow/providers/standard/sensors/time_delta.py,sha256=ggDSna-m_scLFks9zx1LoC64jQBjw7ZQqH7n96UU2BQ,7579
 airflow/providers/standard/sensors/weekday.py,sha256=sKDQ7xC9c32DZxaGNIjqmW6HXE4hIvKC71Kt-_d9SG8,4470
 airflow/providers/standard/triggers/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/standard/triggers/external_task.py,sha256=R2Wsd21pw9_gGTs9XuHafylt65hMVPisz2g6vnpLJ4o,11521
 airflow/providers/standard/triggers/file.py,sha256=2i8-RwSjEgdOwQNcHCqLmSdpE3Ehqg4GQJ8nE3-fHxo,4886
-airflow/providers/standard/triggers/hitl.py,sha256=39xqF8om6hLJPhcQ6t4q_ZYhrQTAfC9bwitu1cjbb7k,5127
+airflow/providers/standard/triggers/hitl.py,sha256=w4j91SsXOXBdWI1iDIQHAio7MzBjf8vlMovot9IvW3s,5266
 airflow/providers/standard/triggers/temporal.py,sha256=rqUMRsKTO46JvUbIXgReei5p9cnEd8PV4zJkLZWWOUI,4552
 airflow/providers/standard/utils/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/standard/utils/python_virtualenv.py,sha256=logUzODR5qnUZYyew-ZEbL7rudrkTEtmnU9qTQhg0-0,8532
 airflow/providers/standard/utils/python_virtualenv_script.jinja2,sha256=3Z334hVq6hQ9EHkOoGnAHc2_XNkZQkOJGxZArDKLc-c,2770
-airflow/providers/standard/utils/sensor_helper.py,sha256=PNIETsl_a4BkmOypFfHdpP0VuTkC6eWKUDuwnNVaWsA,5000
-airflow/providers/standard/utils/skipmixin.py,sha256=PMrP2vtr5Sn6eCVslAqmEpY6Rgo6ZyfR73LPXS5NGVA,8015
+airflow/providers/standard/utils/sensor_helper.py,sha256=FwI36sJK-s3Wz0slypF1_tAikQpiXovtTiN__Md00Aw,5049
+airflow/providers/standard/utils/skipmixin.py,sha256=kHpui_Ag-bwbbwjQOfXEmE4zAWYrhO9H_qKGRkbtqdE,8349
 airflow/providers/standard/utils/weekday.py,sha256=ySDrIkWv-lqqxURo9E98IGInDqERec2O4y9o2hQTGiQ,2685
-apache_airflow_providers_standard-1.6.0rc2.dist-info/entry_points.txt,sha256=mW2YRh3mVdZdaP5-iGSNgmcCh3YYdALIn28BCLBZZ40,104
-apache_airflow_providers_standard-1.6.0rc2.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
-apache_airflow_providers_standard-1.6.0rc2.dist-info/METADATA,sha256=J3y9RNYBTZdUCuGKTKHv5lG5E8_EFDDNl_PYVEokMfY,3847
-apache_airflow_providers_standard-1.6.0rc2.dist-info/RECORD,,
+apache_airflow_providers_standard-1.7.0rc1.dist-info/entry_points.txt,sha256=mW2YRh3mVdZdaP5-iGSNgmcCh3YYdALIn28BCLBZZ40,104
+apache_airflow_providers_standard-1.7.0rc1.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
+apache_airflow_providers_standard-1.7.0rc1.dist-info/METADATA,sha256=8FvnxMusKWW9rm3_jn1R6rQ_xIa2WJdKy6C5ykBjWdE,3809
+apache_airflow_providers_standard-1.7.0rc1.dist-info/RECORD,,