apache-airflow-providers-standard 1.9.1rc1__py3-none-any.whl → 1.9.2rc1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
airflow/providers/standard/__init__.py
@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version
 
 __all__ = ["__version__"]
 
- __version__ = "1.9.1"
+ __version__ = "1.9.2"
 
 if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
 "2.10.0"
airflow/providers/standard/hooks/subprocess.py
@@ -77,14 +77,6 @@ class SubprocessHook(BaseHook):
 """
 self.log.info("Tmp dir root location: %s", gettempdir())
 with working_directory(cwd=cwd) as cwd:
-
- def pre_exec():
- # Restore default signal disposition and invoke setsid
- for sig in ("SIGPIPE", "SIGXFZ", "SIGXFSZ"):
- if hasattr(signal, sig):
- signal.signal(getattr(signal, sig), signal.SIG_DFL)
- os.setsid()
-
 self.log.info("Running command: %s", command)
 
 self.sub_process = Popen(
@@ -93,7 +85,8 @@ class SubprocessHook(BaseHook):
 stderr=STDOUT,
 cwd=cwd,
 env=env if env or env == {} else os.environ,
- preexec_fn=pre_exec,
+ start_new_session=True,
+ restore_signals=True,
 )
 
 self.log.info("Output:")
airflow/providers/standard/operators/hitl.py
@@ -19,7 +19,7 @@ from __future__ import annotations
 import logging
 
 from airflow.exceptions import AirflowOptionalProviderFeatureException
- from airflow.providers.standard.version_compat import AIRFLOW_V_3_1_PLUS
+ from airflow.providers.standard.version_compat import AIRFLOW_V_3_1_3_PLUS, AIRFLOW_V_3_1_PLUS
 
 if not AIRFLOW_V_3_1_PLUS:
 raise AirflowOptionalProviderFeatureException("Human in the loop functionality needs Airflow 3.1+.")
@@ -84,6 +84,7 @@ class HITLOperator(BaseOperator):
 self.multiple = multiple
 
 self.params: ParamsDict = params if isinstance(params, ParamsDict) else ParamsDict(params or {})
+
 self.notifiers: Sequence[BaseNotifier] = (
 [notifiers] if isinstance(notifiers, BaseNotifier) else notifiers or []
 )
@@ -110,6 +111,7 @@ class HITLOperator(BaseOperator):
 Raises:
 ValueError: If `"_options"` key is present in `params`, which is not allowed.
 """
+ self.params.validate()
 if "_options" in self.params:
 raise ValueError('"_options" is not allowed in params')
 
@@ -165,8 +167,10 @@ class HITLOperator(BaseOperator):
 )
 
 @property
- def serialized_params(self) -> dict[str, Any]:
- return self.params.dump() if isinstance(self.params, ParamsDict) else self.params
+ def serialized_params(self) -> dict[str, dict[str, Any]]:
+ if not AIRFLOW_V_3_1_3_PLUS:
+ return self.params.dump() if isinstance(self.params, ParamsDict) else self.params
+ return {k: self.params.get_param(k).serialize() for k in self.params}
 
 def execute_complete(self, context: Context, event: dict[str, Any]) -> Any:
 if "error" in event:
@@ -196,13 +200,12 @@ class HITLOperator(BaseOperator):
 
 def validate_params_input(self, params_input: Mapping) -> None:
 """Check whether user provide valid params input."""
- if (
- self.serialized_params is not None
- and params_input is not None
- and set(self.serialized_params.keys()) ^ set(params_input)
- ):
+ if self.params and params_input and set(self.serialized_params.keys()) ^ set(params_input):
 raise ValueError(f"params_input {params_input} does not match params {self.params}")
 
+ for key, value in params_input.items():
+ self.params[key] = value
+
 def generate_link_to_ui(
 self,
 *,
airflow/providers/standard/operators/latest_only.py
@@ -88,9 +88,9 @@ class LatestOnlyOperator(BaseBranchOperator):
 def _get_compare_dates(self, dag_run: DagRun) -> tuple[DateTime, DateTime] | None:
 dagrun_date: DateTime
 if AIRFLOW_V_3_0_PLUS:
- dagrun_date = dag_run.logical_date or dag_run.run_after
+ dagrun_date = dag_run.logical_date or dag_run.run_after  # type: ignore[assignment]
 else:
- dagrun_date = dag_run.logical_date
+ dagrun_date = dag_run.logical_date  # type: ignore[assignment]
 
 from airflow.timetables.base import DataInterval, TimeRestriction
 
airflow/providers/standard/operators/python.py
@@ -51,12 +51,15 @@ from airflow.exceptions import (
 from airflow.models.variable import Variable
 from airflow.providers.common.compat.sdk import context_merge
 from airflow.providers.standard.hooks.package_index import PackageIndexHook
- from airflow.providers.standard.utils.python_virtualenv import prepare_virtualenv, write_python_script
+ from airflow.providers.standard.utils.python_virtualenv import (
+ _execute_in_subprocess,
+ prepare_virtualenv,
+ write_python_script,
+ )
 from airflow.providers.standard.version_compat import AIRFLOW_V_3_0_PLUS, BaseOperator
 from airflow.utils import hashlib_wrapper
 from airflow.utils.file import get_unique_dag_module_name
 from airflow.utils.operator_helpers import KeywordParameters
- from airflow.utils.process_utils import execute_in_subprocess
 
 if AIRFLOW_V_3_0_PLUS:
 from airflow.providers.standard.operators.branch import BaseBranchOperator
@@ -572,7 +575,7 @@ class _BasePythonVirtualenvOperator(PythonOperator, metaclass=ABCMeta):
 os.fspath(termination_log_path),
 os.fspath(airflow_context_path),
 ]
- execute_in_subprocess(
+ _execute_in_subprocess(
 cmd=cmd,
 env=env_vars,
 )
airflow/providers/standard/operators/trigger_dagrun.py
@@ -21,6 +21,7 @@ import datetime
 import json
 import time
 from collections.abc import Sequence
+ from json import JSONDecodeError
 from typing import TYPE_CHECKING, Any
 
 from sqlalchemy import select
@@ -202,9 +203,11 @@ class TriggerDagRunOperator(BaseOperator):
 parsed_logical_date = timezone.parse(self.logical_date)
 
 try:
+ if self.conf and isinstance(self.conf, str):
+ self.conf = json.loads(self.conf)
 json.dumps(self.conf)
- except TypeError:
- raise ValueError("conf parameter should be JSON Serializable")
+ except (TypeError, JSONDecodeError):
+ raise ValueError("conf parameter should be JSON Serializable %s", self.conf)
 
 if self.trigger_run_id:
 run_id = str(self.trigger_run_id)
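
The hunk above now accepts conf passed as a JSON string (as can happen when the value is rendered from a template): the string is decoded before the usual serializability check, and JSONDecodeError is treated the same way as TypeError. A standalone sketch of that validation pattern, with hypothetical values rather than the operator's attributes:

    import json
    from json import JSONDecodeError

    def normalize_conf(conf):
        """Decode a JSON string if one is given, then require the result to be JSON-serializable."""
        try:
            if conf and isinstance(conf, str):
                conf = json.loads(conf)
            json.dumps(conf)
        except (TypeError, JSONDecodeError):
            raise ValueError(f"conf parameter should be JSON serializable: {conf!r}")
        return conf

    print(normalize_conf('{"retries": 2}'))  # -> {'retries': 2}
    print(normalize_conf({"key": "value"}))  # dicts pass through unchanged
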
@@ -220,6 +223,8 @@ class TriggerDagRunOperator(BaseOperator):
 
 if self.fail_when_dag_is_paused:
 dag_model = DagModel.get_current(self.trigger_dag_id)
+ if not dag_model:
+ raise ValueError(f"Dag {self.trigger_dag_id} is not found")
 if dag_model.is_paused:
 # TODO: enable this when dag state endpoint available from task sdk
 # if AIRFLOW_V_3_0_PLUS:
airflow/providers/standard/sensors/bash.py
@@ -17,7 +17,6 @@
 # under the License.
 from __future__ import annotations
 
- import os
 from collections.abc import Sequence
 from subprocess import PIPE, STDOUT, Popen
 from tempfile import NamedTemporaryFile, TemporaryDirectory, gettempdir
@@ -89,7 +88,7 @@ class BashSensor(BaseSensorOperator):
 close_fds=True,
 cwd=tmp_dir,
 env=self.env,
- preexec_fn=os.setsid,
+ start_new_session=True,
 ) as resp:
 if resp.stdout:
 self.log.info("Output:")
airflow/providers/standard/sensors/external_task.py
@@ -18,9 +18,10 @@ from __future__ import annotations
 
 import datetime
 import os
+ import typing
 import warnings
- from collections.abc import Callable, Collection, Iterable
- from typing import TYPE_CHECKING, Any, ClassVar
+ from collections.abc import Callable, Collection, Iterable, Sequence
+ from typing import TYPE_CHECKING, ClassVar
 
 from airflow.configuration import conf
 from airflow.exceptions import AirflowSkipException
@@ -251,17 +252,15 @@ class ExternalTaskSensor(BaseSensorOperator):
 self.deferrable = deferrable
 self.poll_interval = poll_interval
 
- def _get_dttm_filter(self, context):
+ def _get_dttm_filter(self, context: Context) -> Sequence[datetime.datetime]:
 logical_date = self._get_logical_date(context)
 
 if self.execution_delta:
- dttm = logical_date - self.execution_delta
- elif self.execution_date_fn:
- dttm = self._handle_execution_date_fn(context=context)
- else:
- dttm = logical_date
-
- return dttm if isinstance(dttm, list) else [dttm]
+ return [logical_date - self.execution_delta]
+ if self.execution_date_fn:
+ result = self._handle_execution_date_fn(context=context)
+ return result if isinstance(result, list) else [result]
+ return [logical_date]
 
 def poke(self, context: Context) -> bool:
 # delay check to poke rather than __init__ in case it was supplied as XComArgs
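
The rewritten _get_dttm_filter above now always returns a sequence of datetimes: one shifted date when execution_delta is set, the (possibly list-valued) result of execution_date_fn, or the logical date itself. A minimal standalone sketch of that branching, using stand-in values rather than the sensor's context handling:

    import datetime

    def dttm_filter(logical_date, execution_delta=None, execution_date_fn=None):
        """Illustrative only: mirror the branching, always returning a list."""
        if execution_delta:
            return [logical_date - execution_delta]
        if execution_date_fn:
            result = execution_date_fn(logical_date)
            return result if isinstance(result, list) else [result]
        return [logical_date]

    now = datetime.datetime(2025, 1, 1, tzinfo=datetime.timezone.utc)
    print(dttm_filter(now, execution_delta=datetime.timedelta(hours=1)))
    print(dttm_filter(now, execution_date_fn=lambda d: [d, d - datetime.timedelta(days=1)]))
    print(dttm_filter(now))
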
@@ -298,7 +297,7 @@ class ExternalTaskSensor(BaseSensorOperator):
 return self._poke_af3(context, dttm_filter)
 return self._poke_af2(dttm_filter)
 
- def _poke_af3(self, context: Context, dttm_filter: list[datetime.datetime]) -> bool:
+ def _poke_af3(self, context: Context, dttm_filter: Sequence[datetime.datetime]) -> bool:
 from airflow.providers.standard.utils.sensor_helper import _get_count_by_matched_states
 
 self._has_checked_existence = True
@@ -308,20 +307,20 @@ class ExternalTaskSensor(BaseSensorOperator):
 if self.external_task_ids:
 return ti.get_ti_count(
 dag_id=self.external_dag_id,
- task_ids=self.external_task_ids, # type: ignore[arg-type]
- logical_dates=dttm_filter,
+ task_ids=list(self.external_task_ids),
+ logical_dates=list(dttm_filter),
 states=states,
 )
 if self.external_task_group_id:
 run_id_task_state_map = ti.get_task_states(
 dag_id=self.external_dag_id,
 task_group_id=self.external_task_group_id,
- logical_dates=dttm_filter,
+ logical_dates=list(dttm_filter),
 )
 return _get_count_by_matched_states(run_id_task_state_map, states)
 return ti.get_dr_count(
 dag_id=self.external_dag_id,
- logical_dates=dttm_filter,
+ logical_dates=list(dttm_filter),
 states=states,
 )
 
@@ -339,7 +338,7 @@ class ExternalTaskSensor(BaseSensorOperator):
 count_allowed = self._calculate_count(count, dttm_filter)
 return count_allowed == len(dttm_filter)
 
- def _calculate_count(self, count: int, dttm_filter: list[datetime.datetime]) -> float | int:
+ def _calculate_count(self, count: int, dttm_filter: Sequence[datetime.datetime]) -> float | int:
 """Calculate the normalized count based on the type of check."""
 if self.external_task_ids:
 return count / len(self.external_task_ids)
@@ -395,7 +394,7 @@ class ExternalTaskSensor(BaseSensorOperator):
 if not AIRFLOW_V_3_0_PLUS:
 
 @provide_session
- def _poke_af2(self, dttm_filter: list[datetime.datetime], session: Session = NEW_SESSION) -> bool:
+ def _poke_af2(self, dttm_filter: Sequence[datetime.datetime], session: Session = NEW_SESSION) -> bool:
 if self.check_existence and not self._has_checked_existence:
 self._check_for_existence(session=session)
 
@@ -416,27 +415,48 @@ class ExternalTaskSensor(BaseSensorOperator):
 super().execute(context)
 else:
 dttm_filter = self._get_dttm_filter(context)
- logical_or_execution_dates = (
- {"logical_dates": dttm_filter} if AIRFLOW_V_3_0_PLUS else {"execution_dates": dttm_filter}
- )
- self.defer(
- timeout=self.execution_timeout,
- trigger=WorkflowTrigger(
- external_dag_id=self.external_dag_id,
- external_task_group_id=self.external_task_group_id,
- external_task_ids=self.external_task_ids,
- allowed_states=self.allowed_states,
- failed_states=self.failed_states,
- skipped_states=self.skipped_states,
- poke_interval=self.poll_interval,
- soft_fail=self.soft_fail,
- **logical_or_execution_dates,
- ),
- method_name="execute_complete",
- )
+ if AIRFLOW_V_3_0_PLUS:
+ self.defer(
+ timeout=self.execution_timeout,
+ trigger=WorkflowTrigger(
+ external_dag_id=self.external_dag_id,
+ external_task_group_id=self.external_task_group_id,
+ external_task_ids=self.external_task_ids,
+ allowed_states=self.allowed_states,
+ failed_states=self.failed_states,
+ skipped_states=self.skipped_states,
+ poke_interval=self.poll_interval,
+ soft_fail=self.soft_fail,
+ logical_dates=list(dttm_filter),
+ run_ids=None,
+ execution_dates=None,
+ ),
+ method_name="execute_complete",
+ )
+ else:
+ self.defer(
+ timeout=self.execution_timeout,
+ trigger=WorkflowTrigger(
+ external_dag_id=self.external_dag_id,
+ external_task_group_id=self.external_task_group_id,
+ external_task_ids=self.external_task_ids,
+ allowed_states=self.allowed_states,
+ failed_states=self.failed_states,
+ skipped_states=self.skipped_states,
+ poke_interval=self.poll_interval,
+ soft_fail=self.soft_fail,
+ execution_dates=list(dttm_filter),
+ logical_dates=None,
+ run_ids=None,
+ ),
+ method_name="execute_complete",
+ )
 
- def execute_complete(self, context, event=None):
+ def execute_complete(self, context: Context, event: dict[str, typing.Any] | None = None) -> None:
 """Execute when the trigger fires - return immediately."""
+ if event is None:
+ raise ExternalTaskNotFoundError("No event received from trigger")
+
 if event["status"] == "success":
 self.log.info("External tasks %s has executed successfully.", self.external_task_ids)
 elif event["status"] == "skipped":
@@ -453,13 +473,14 @@ class ExternalTaskSensor(BaseSensorOperator):
 "name of executed task and Dag."
 )
 
- def _check_for_existence(self, session) -> None:
+ def _check_for_existence(self, session: Session) -> None:
 dag_to_wait = DagModel.get_current(self.external_dag_id, session)
 
 if not dag_to_wait:
 raise ExternalDagNotFoundError(f"The external DAG {self.external_dag_id} does not exist.")
 
- if not os.path.exists(correct_maybe_zipped(dag_to_wait.fileloc)):
+ path = correct_maybe_zipped(dag_to_wait.fileloc)
+ if not path or not os.path.exists(path):
 raise ExternalDagDeletedError(f"The external DAG {self.external_dag_id} was deleted.")
 
 if self.external_task_ids:
@@ -488,7 +509,7 @@ class ExternalTaskSensor(BaseSensorOperator):
 
 self._has_checked_existence = True
 
- def get_count(self, dttm_filter, session, states) -> int:
+ def get_count(self, dttm_filter: Sequence[datetime.datetime], session: Session, states: list[str]) -> int:
 """
 Get the count of records against dttm filter and states.
 
@@ -509,15 +530,19 @@ class ExternalTaskSensor(BaseSensorOperator):
 session,
 )
 
- def get_external_task_group_task_ids(self, session, dttm_filter):
+ def get_external_task_group_task_ids(
+ self, session: Session, dttm_filter: Sequence[datetime.datetime]
+ ) -> list[tuple[str, int]]:
 warnings.warn(
 "This method is deprecated and will be removed in future.", DeprecationWarning, stacklevel=2
 )
+ if self.external_task_group_id is None:
+ return []
 return _get_external_task_group_task_ids(
- dttm_filter, self.external_task_group_id, self.external_dag_id, session
+ list(dttm_filter), self.external_task_group_id, self.external_dag_id, session
 )
 
- def _get_logical_date(self, context) -> datetime.datetime:
+ def _get_logical_date(self, context: Context) -> datetime.datetime:
 """
 Handle backwards- and forwards-compatible retrieval of the date.
 
@@ -527,19 +552,21 @@ class ExternalTaskSensor(BaseSensorOperator):
 if AIRFLOW_V_3_0_PLUS:
 logical_date = context.get("logical_date")
 dag_run = context.get("dag_run")
- if not (logical_date or (dag_run and dag_run.run_after)):
- raise ValueError(
- "Either `logical_date` or `dag_run.run_after` must be provided in the context"
- )
- return logical_date or dag_run.run_after
+ if logical_date:
+ return logical_date
+ if dag_run and hasattr(dag_run, "run_after") and dag_run.run_after:
+ return dag_run.run_after
+ raise ValueError("Either `logical_date` or `dag_run.run_after` must be provided in the context")
 
 # Airflow 2.x and earlier: contexts used "execution_date"
 execution_date = context.get("execution_date")
 if not execution_date:
 raise ValueError("Either `execution_date` must be provided in the context`")
+ if not isinstance(execution_date, datetime.datetime):
+ raise ValueError("execution_date must be a datetime object")
 return execution_date
 
- def _handle_execution_date_fn(self, context) -> Any:
+ def _handle_execution_date_fn(self, context: Context) -> datetime.datetime | list[datetime.datetime]:
 """
 Handle backward compatibility.
 
airflow/providers/standard/triggers/external_task.py
@@ -18,10 +18,11 @@ from __future__ import annotations
 
 import asyncio
 import typing
+ from collections.abc import Collection
 from typing import Any
 
 from asgiref.sync import sync_to_async
- from sqlalchemy import func
+ from sqlalchemy import func, select
 
 from airflow.models import DagRun
 from airflow.providers.standard.utils.sensor_helper import _get_count
@@ -60,9 +61,9 @@ class WorkflowTrigger(BaseTrigger):
 logical_dates: list[datetime] | None = None,
 external_task_ids: typing.Collection[str] | None = None,
 external_task_group_id: str | None = None,
- failed_states: typing.Iterable[str] | None = None,
- skipped_states: typing.Iterable[str] | None = None,
- allowed_states: typing.Iterable[str] | None = None,
+ failed_states: Collection[str] | None = None,
+ skipped_states: Collection[str] | None = None,
+ allowed_states: Collection[str] | None = None,
 poke_interval: float = 2.0,
 soft_fail: bool = False,
 **kwargs,
@@ -129,43 +130,41 @@ class WorkflowTrigger(BaseTrigger):
 self.log.info("Sleeping for %s seconds", self.poke_interval)
 await asyncio.sleep(self.poke_interval)
 
- async def _get_count_af_3(self, states):
+ async def _get_count_af_3(self, states: Collection[str] | None) -> int:
 from airflow.providers.standard.utils.sensor_helper import _get_count_by_matched_states
 from airflow.sdk.execution_time.task_runner import RuntimeTaskInstance
 
- params = {
- "dag_id": self.external_dag_id,
- "logical_dates": self.logical_dates,
- "run_ids": self.run_ids,
- }
 if self.external_task_ids:
 count = await sync_to_async(RuntimeTaskInstance.get_ti_count)(
- task_ids=self.external_task_ids,
- states=states,
- **params,
+ dag_id=self.external_dag_id,
+ task_ids=list(self.external_task_ids),
+ logical_dates=self.logical_dates,
+ run_ids=self.run_ids,
+ states=list(states) if states else None,
 )
- elif self.external_task_group_id:
+ return int(count / len(self.external_task_ids))
+ if self.external_task_group_id:
 run_id_task_state_map = await sync_to_async(RuntimeTaskInstance.get_task_states)(
+ dag_id=self.external_dag_id,
 task_group_id=self.external_task_group_id,
- **params,
+ logical_dates=self.logical_dates,
+ run_ids=self.run_ids,
 )
 count = await sync_to_async(_get_count_by_matched_states)(
 run_id_task_state_map=run_id_task_state_map,
- states=states,
+ states=states or [],
 )
- else:
- count = await sync_to_async(RuntimeTaskInstance.get_dr_count)(
- dag_id=self.external_dag_id,
- logical_dates=self.logical_dates,
- run_ids=self.run_ids,
- states=states,
- )
- if self.external_task_ids:
- return count / len(self.external_task_ids)
+ return count
+ count = await sync_to_async(RuntimeTaskInstance.get_dr_count)(
+ dag_id=self.external_dag_id,
+ logical_dates=self.logical_dates,
+ run_ids=self.run_ids,
+ states=list(states) if states else None,
+ )
 return count
 
 @sync_to_async
- def _get_count(self, states: typing.Iterable[str] | None) -> int:
+ def _get_count(self, states: Collection[str] | None) -> int:
 """
 Get the count of records against dttm filter and states. Async wrapper for _get_count.
 
@@ -228,8 +227,8 @@ class DagStateTrigger(BaseTrigger):
 runs_ids_or_dates = len(self.execution_dates)
 
 if AIRFLOW_V_3_0_PLUS:
- event = await self.validate_count_dags_af_3(runs_ids_or_dates_len=runs_ids_or_dates)
- yield TriggerEvent(event)
+ data = await self.validate_count_dags_af_3(runs_ids_or_dates_len=runs_ids_or_dates)
+ yield TriggerEvent(data)
 return
 else:
 while True:
@@ -239,7 +238,7 @@ class DagStateTrigger(BaseTrigger):
 return
 await asyncio.sleep(self.poll_interval)
 
- async def validate_count_dags_af_3(self, runs_ids_or_dates_len: int = 0) -> tuple[str, dict[str, Any]]:
+ async def validate_count_dags_af_3(self, runs_ids_or_dates_len: int = 0) -> dict[str, typing.Any]:
 from airflow.sdk.execution_time.task_runner import RuntimeTaskInstance
 
 cls_path, data = self.serialize()
@@ -259,7 +258,7 @@ class DagStateTrigger(BaseTrigger):
 run_id=run_id,
 )
 data[run_id] = state
- return cls_path, data
+ return data
 await asyncio.sleep(self.poll_interval)
 
 if not AIRFLOW_V_3_0_PLUS:
@@ -270,17 +269,18 @@ class DagStateTrigger(BaseTrigger):
 def count_dags(self, *, session: Session = NEW_SESSION) -> int:
 """Count how many dag runs in the database match our criteria."""
 _dag_run_date_condition = (
- DagRun.run_id.in_(self.run_ids)
+ DagRun.run_id.in_(self.run_ids or [])
 if AIRFLOW_V_3_0_PLUS
 else DagRun.execution_date.in_(self.execution_dates)
 )
- count = (
- session.query(func.count("*")) # .count() is inefficient
- .filter(
+ stmt = (
+ select(func.count())
+ .select_from(DagRun)
+ .where(
 DagRun.dag_id == self.dag_id,
 DagRun.state.in_(self.states),
 _dag_run_date_condition,
 )
- .scalar()
 )
- return typing.cast("int", count)
+ result = session.execute(stmt).scalar()
+ return result or 0
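
The count_dags rewrite above replaces the legacy session.query(func.count("*")).filter(...).scalar() chain with a SQLAlchemy 2.0-style select() statement and guards against a None scalar. A self-contained sketch of the same counting pattern against a throwaway SQLite table (the Run model and the values are purely illustrative, not the Airflow DagRun model):

    from sqlalchemy import Column, Integer, String, create_engine, func, select
    from sqlalchemy.orm import Session, declarative_base

    Base = declarative_base()

    class Run(Base):  # stand-in for DagRun, illustration only
        __tablename__ = "run"
        id = Column(Integer, primary_key=True)
        dag_id = Column(String)
        state = Column(String)

    engine = create_engine("sqlite://")
    Base.metadata.create_all(engine)

    with Session(engine) as session:
        session.add_all([Run(dag_id="demo", state="success"), Run(dag_id="demo", state="running")])
        session.commit()

        # 2.0-style count: select(func.count()) with an explicit select_from and where clauses.
        stmt = (
            select(func.count())
            .select_from(Run)
            .where(Run.dag_id == "demo", Run.state.in_(["success"]))
        )
        print(session.execute(stmt).scalar() or 0)  # -> 1
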
airflow/providers/standard/triggers/file.py
@@ -79,7 +79,7 @@ class FileTrigger(BaseTrigger):
 self.log.info("Found File %s last modified: %s", path, mod_time)
 yield TriggerEvent(True)
 return
- for _, _, files in os.walk(self.filepath):
+ for _, _, files in os.walk(path):
 if files:
 yield TriggerEvent(True)
 return
airflow/providers/standard/triggers/hitl.py
@@ -30,6 +30,9 @@ from uuid import UUID
 
 from asgiref.sync import sync_to_async
 
+ from airflow.exceptions import ParamValidationError
+ from airflow.sdk import Param
+ from airflow.sdk.definitions.param import ParamsDict
 from airflow.sdk.execution_time.hitl import (
 HITLUser,
 get_hitl_detail_content_detail,
@@ -43,7 +46,7 @@ class HITLTriggerEventSuccessPayload(TypedDict, total=False):
 """Minimum required keys for a success Human-in-the-loop TriggerEvent."""
 
 chosen_options: list[str]
- params_input: dict[str, Any]
+ params_input: dict[str, dict[str, Any]]
 responded_by_user: HITLUser | None
 responded_at: datetime
 timedout: bool
@@ -53,7 +56,7 @@ class HITLTriggerEventFailurePayload(TypedDict):
 """Minimum required keys for a failed Human-in-the-loop TriggerEvent."""
 
 error: str
- error_type: Literal["timeout", "unknown"]
+ error_type: Literal["timeout", "unknown", "validation"]
 
 
 class HITLTrigger(BaseTrigger):
@@ -64,7 +67,7 @@ class HITLTrigger(BaseTrigger):
 *,
 ti_id: UUID,
 options: list[str],
- params: dict[str, Any],
+ params: dict[str, dict[str, Any]],
 defaults: list[str] | None = None,
 multiple: bool = False,
 timeout_datetime: datetime | None,
@@ -80,7 +83,21 @@ class HITLTrigger(BaseTrigger):
 self.defaults = defaults
 self.timeout_datetime = timeout_datetime
 
- self.params = params
+ self.params = ParamsDict(
+ {
+ k: Param(
+ v.pop("value"),
+ **v,
+ )
+ if HITLTrigger._is_param(v)
+ else Param(v)
+ for k, v in params.items()
+ },
+ )
+
+ @staticmethod
+ def _is_param(value: Any) -> bool:
+ return isinstance(value, dict) and all(key in value for key in ("description", "schema", "value"))
 
 def serialize(self) -> tuple[str, dict[str, Any]]:
 """Serialize HITLTrigger arguments and classpath."""
@@ -90,99 +107,131 @@ class HITLTrigger(BaseTrigger):
 "ti_id": self.ti_id,
 "options": self.options,
 "defaults": self.defaults,
- "params": self.params,
+ "params": {k: self.params.get_param(k).serialize() for k in self.params},
 "multiple": self.multiple,
 "timeout_datetime": self.timeout_datetime,
 "poke_interval": self.poke_interval,
 },
 )
 
- async def run(self) -> AsyncIterator[TriggerEvent]:
- """Loop until the Human-in-the-loop response received or timeout reached."""
- while True:
- if self.timeout_datetime and self.timeout_datetime < utcnow():
- # Fetch latest HITL detail before fallback
- resp = await sync_to_async(get_hitl_detail_content_detail)(ti_id=self.ti_id)
- # Response already received, yield success and exit
- if resp.response_received and resp.chosen_options:
- if TYPE_CHECKING:
- assert resp.responded_by_user is not None
- assert resp.responded_at is not None
-
- self.log.info(
- "[HITL] responded_by=%s (id=%s) options=%s at %s (timeout fallback skipped)",
- resp.responded_by_user.name,
- resp.responded_by_user.id,
- resp.chosen_options,
- resp.responded_at,
- )
- yield TriggerEvent(
- HITLTriggerEventSuccessPayload(
- chosen_options=resp.chosen_options,
- params_input=resp.params_input or {},
- responded_at=resp.responded_at,
- responded_by_user=HITLUser(
- id=resp.responded_by_user.id,
- name=resp.responded_by_user.name,
- ),
- timedout=False,
- )
- )
- return
-
- if self.defaults is None:
- yield TriggerEvent(
- HITLTriggerEventFailurePayload(
- error="The timeout has passed, and the response has not yet been received.",
- error_type="timeout",
- )
- )
- return
-
- resp = await sync_to_async(update_hitl_detail_response)(
- ti_id=self.ti_id,
- chosen_options=self.defaults,
- params_input=self.params,
+ async def _handle_timeout(self) -> TriggerEvent:
+ """Handle HITL timeout logic and yield appropriate event."""
+ resp = await sync_to_async(get_hitl_detail_content_detail)(ti_id=self.ti_id)
+
+ # Case 1: Response arrived just before timeout
+ if resp.response_received and resp.chosen_options:
+ if TYPE_CHECKING:
+ assert resp.responded_by_user is not None
+ assert resp.responded_at is not None
+
+ chosen_options_list = list(resp.chosen_options or [])
+ self.log.info(
+ "[HITL] responded_by=%s (id=%s) options=%s at %s (timeout fallback skipped)",
+ resp.responded_by_user.name,
+ resp.responded_by_user.id,
+ chosen_options_list,
+ resp.responded_at,
+ )
+ return TriggerEvent(
+ HITLTriggerEventSuccessPayload(
+ chosen_options=chosen_options_list,
+ params_input=resp.params_input or {},
+ responded_at=resp.responded_at,
+ responded_by_user=HITLUser(
+ id=resp.responded_by_user.id,
+ name=resp.responded_by_user.name,
+ ),
+ timedout=False,
 )
- if TYPE_CHECKING:
- assert resp.responded_at is not None
- self.log.info(
- "[HITL] timeout reached before receiving response, fallback to default %s", self.defaults
+ )
+
+ # Case 2: No defaults defined → failure
+ if self.defaults is None:
+ return TriggerEvent(
+ HITLTriggerEventFailurePayload(
+ error="The timeout has passed, and the response has not yet been received.",
+ error_type="timeout",
 )
- yield TriggerEvent(
- HITLTriggerEventSuccessPayload(
- chosen_options=self.defaults,
- params_input=self.params,
- responded_by_user=None,
- responded_at=resp.responded_at,
- timedout=True,
+ )
+
+ # Case 3: Timeout fallback to default
+ resp = await sync_to_async(update_hitl_detail_response)(
+ ti_id=self.ti_id,
+ chosen_options=self.defaults,
+ params_input=self.params.dump(),
+ )
+ if TYPE_CHECKING:
+ assert resp.responded_at is not None
+
+ self.log.info(
+ "[HITL] timeout reached before receiving response, fallback to default %s",
+ self.defaults,
+ )
+ return TriggerEvent(
+ HITLTriggerEventSuccessPayload(
+ chosen_options=self.defaults,
+ params_input=self.params.dump(),
+ responded_by_user=None,
+ responded_at=resp.responded_at,
+ timedout=True,
+ )
+ )
+
+ async def _handle_response(self):
+ """Check if HITL response is ready and yield success if so."""
+ resp = await sync_to_async(get_hitl_detail_content_detail)(ti_id=self.ti_id)
+ if TYPE_CHECKING:
+ assert resp.responded_by_user is not None
+ assert resp.responded_at is not None
+
+ if not (resp.response_received and resp.chosen_options):
+ return None
+
+ # validate input
+ if params_input := resp.params_input:
+ try:
+ for key, value in params_input.items():
+ self.params[key] = value
+ except ParamValidationError as err:
+ return TriggerEvent(
+ HITLTriggerEventFailurePayload(
+ error=str(err),
+ error_type="validation",
 )
 )
+
+ chosen_options_list = list(resp.chosen_options or [])
+ self.log.info(
+ "[HITL] responded_by=%s (id=%s) options=%s at %s",
+ resp.responded_by_user.name,
+ resp.responded_by_user.id,
+ chosen_options_list,
+ resp.responded_at,
+ )
+ return TriggerEvent(
+ HITLTriggerEventSuccessPayload(
+ chosen_options=chosen_options_list,
+ params_input=params_input or {},
+ responded_at=resp.responded_at,
+ responded_by_user=HITLUser(
+ id=resp.responded_by_user.id,
+ name=resp.responded_by_user.name,
+ ),
+ timedout=False,
+ )
+ )
+
+ async def run(self) -> AsyncIterator[TriggerEvent]:
+ """Loop until the Human-in-the-loop response received or timeout reached."""
+ while True:
+ if self.timeout_datetime and self.timeout_datetime < utcnow():
+ event = await self._handle_timeout()
+ yield event
 return
 
- resp = await sync_to_async(get_hitl_detail_content_detail)(ti_id=self.ti_id)
- if resp.response_received and resp.chosen_options:
- if TYPE_CHECKING:
- assert resp.responded_by_user is not None
- assert resp.responded_at is not None
- self.log.info(
- "[HITL] responded_by=%s (id=%s) options=%s at %s",
- resp.responded_by_user.name,
- resp.responded_by_user.id,
- resp.chosen_options,
- resp.responded_at,
- )
- yield TriggerEvent(
- HITLTriggerEventSuccessPayload(
- chosen_options=resp.chosen_options,
- params_input=resp.params_input or {},
- responded_at=resp.responded_at,
- responded_by_user=HITLUser(
- id=resp.responded_by_user.id,
- name=resp.responded_by_user.name,
- ),
- timedout=False,
- )
- )
+ event = await self._handle_response()
+ if event:
+ yield event
 return
+
 await asyncio.sleep(self.poke_interval)
airflow/providers/standard/utils/python_virtualenv.py
@@ -19,8 +19,11 @@
 
 from __future__ import annotations
 
+ import logging
 import os
+ import shlex
 import shutil
+ import subprocess
 import warnings
 from pathlib import Path
 
@@ -28,7 +31,6 @@ import jinja2
 from jinja2 import select_autoescape
 
 from airflow.configuration import conf
- from airflow.utils.process_utils import execute_in_subprocess
 
 
 def _is_uv_installed() -> bool:
@@ -132,6 +134,37 @@ def _index_urls_to_uv_env_vars(index_urls: list[str] | None = None) -> dict[str,
 return uv_index_env_vars
 
 
+ def _execute_in_subprocess(cmd: list[str], cwd: str | None = None, env: dict[str, str] | None = None) -> None:
+ """
+ Execute a process and stream output to logger.
+
+ :param cmd: command and arguments to run
+ :param cwd: Current working directory passed to the Popen constructor
+ :param env: Additional environment variables to set for the subprocess.
+ """
+ log = logging.getLogger(__name__)
+
+ log.info("Executing cmd: %s", " ".join(shlex.quote(c) for c in cmd))
+ with subprocess.Popen(
+ cmd,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT,
+ bufsize=0,
+ close_fds=True,
+ cwd=cwd,
+ env=env,
+ ) as proc:
+ log.info("Output:")
+ if proc.stdout:
+ with proc.stdout:
+ for line in iter(proc.stdout.readline, b""):
+ log.info("%s", line.decode().rstrip())
+
+ exit_code = proc.wait()
+ if exit_code != 0:
+ raise subprocess.CalledProcessError(exit_code, cmd)
+
+
 def prepare_virtualenv(
 venv_directory: str,
 python_bin: str,
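
A hedged usage sketch of the vendored helper defined above (the import path is the module shown in this diff; the commands are illustrative). It streams each output line to the module logger and raises CalledProcessError on a non-zero exit, which is the contract the prepare_virtualenv call sites below rely on:

    import subprocess

    from airflow.providers.standard.utils.python_virtualenv import _execute_in_subprocess

    _execute_in_subprocess(["python", "-c", "print('creating venv...')"])  # output logged line by line

    try:
        _execute_in_subprocess(["python", "-c", "import sys; sys.exit(3)"])
    except subprocess.CalledProcessError as err:
        print(err.returncode)  # -> 3
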
@@ -169,7 +202,7 @@ def prepare_virtualenv(
 venv_cmd = _generate_uv_cmd(venv_directory, python_bin, system_site_packages)
 else:
 venv_cmd = _generate_venv_cmd(venv_directory, python_bin, system_site_packages)
- execute_in_subprocess(venv_cmd)
+ _execute_in_subprocess(venv_cmd)
 
 pip_cmd = None
 if requirements is not None and len(requirements) != 0:
@@ -188,7 +221,7 @@ def prepare_virtualenv(
 )
 
 if pip_cmd:
- execute_in_subprocess(pip_cmd, env={**os.environ, **_index_urls_to_uv_env_vars(index_urls)})
+ _execute_in_subprocess(pip_cmd, env={**os.environ, **_index_urls_to_uv_env_vars(index_urls)})
 
 return f"{venv_directory}/bin/python"
 
airflow/providers/standard/utils/sensor_helper.py
@@ -16,6 +16,7 @@
 # under the License.
 from __future__ import annotations
 
+ from collections.abc import Collection
 from typing import TYPE_CHECKING, Any, cast
 
 from sqlalchemy import func, select, tuple_
@@ -27,7 +28,7 @@ from airflow.utils.session import NEW_SESSION, provide_session
 
 if TYPE_CHECKING:
 from sqlalchemy.orm import Session
- from sqlalchemy.sql import Executable
+ from sqlalchemy.sql import Select
 
 
 @provide_session
@@ -59,6 +60,7 @@ def _get_count(
 session.scalar(
 _count_stmt(TI, states, dttm_filter, external_dag_id).where(TI.task_id.in_(external_task_ids))
 )
+ or 0
 ) / len(external_task_ids)
 elif external_task_group_id:
 external_task_group_task_ids = _get_external_task_group_task_ids(
@@ -68,20 +70,25 @@
 count = 0
 else:
 count = (
- session.scalar(
- _count_stmt(TI, states, dttm_filter, external_dag_id).where(
- tuple_(TI.task_id, TI.map_index).in_(external_task_group_task_ids)
+ (
+ session.scalar(
+ _count_stmt(TI, states, dttm_filter, external_dag_id).where(
+ tuple_(TI.task_id, TI.map_index).in_(external_task_group_task_ids)
+ )
 )
+ or 0
 )
 / len(external_task_group_task_ids)
 * len(dttm_filter)
 )
 else:
- count = session.scalar(_count_stmt(DR, states, dttm_filter, external_dag_id))
+ count = session.scalar(_count_stmt(DR, states, dttm_filter, external_dag_id)) or 0
 return cast("int", count)
 
 
- def _count_stmt(model, states, dttm_filter, external_dag_id) -> Executable:
+ def _count_stmt(
+ model: type[DagRun] | type[TaskInstance], states: list[str], dttm_filter: list[Any], external_dag_id: str
+ ) -> Select[tuple[int]]:
 """
 Get the count of records against dttm filter and states.
 
@@ -97,7 +104,9 @@ def _count_stmt(model, states, dttm_filter, external_dag_id) -> Executable:
 )
 
 
- def _get_external_task_group_task_ids(dttm_filter, external_task_group_id, external_dag_id, session):
+ def _get_external_task_group_task_ids(
+ dttm_filter: list[Any], external_task_group_id: str, external_dag_id: str, session: Session
+ ) -> list[tuple[str, int]]:
 """
 Get the count of records against dttm filter and states.
 
@@ -107,6 +116,8 @@ def _get_external_task_group_task_ids(dttm_filter, external_task_group_id, exter
 :param session: airflow session object
 """
 refreshed_dag_info = SerializedDagModel.get_dag(external_dag_id, session=session)
+ if not refreshed_dag_info:
+ return [(external_task_group_id, -1)]
 task_group = refreshed_dag_info.task_group_dict.get(external_task_group_id)
 
 if task_group:
@@ -129,7 +140,7 @@
 
 def _get_count_by_matched_states(
 run_id_task_state_map: dict[str, dict[str, Any]],
- states: list[str],
+ states: Collection[str],
 ):
 count = 0
 for _, task_states in run_id_task_state_map.items():
airflow/providers/standard/version_compat.py
@@ -34,6 +34,7 @@ def get_base_airflow_version_tuple() -> tuple[int, int, int]:
 
 AIRFLOW_V_3_0_PLUS: bool = get_base_airflow_version_tuple() >= (3, 0, 0)
 AIRFLOW_V_3_1_PLUS: bool = get_base_airflow_version_tuple() >= (3, 1, 0)
+ AIRFLOW_V_3_1_3_PLUS: bool = get_base_airflow_version_tuple() >= (3, 1, 3)
 AIRFLOW_V_3_2_PLUS: bool = get_base_airflow_version_tuple() >= (3, 2, 0)
 
 # BaseOperator: Use 3.1+ due to xcom_push method missing in SDK BaseOperator 3.0.x
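
The new AIRFLOW_V_3_1_3_PLUS flag follows the module's existing pattern: parse the running Airflow version, strip any pre-release suffix, and compare the result as a tuple. A standalone sketch of that comparison using packaging directly (the helper name here is illustrative, not the provider's):

    from packaging.version import parse

    def base_version_tuple(version: str) -> tuple[int, int, int]:
        """Drop rc/dev suffixes and return (major, minor, micro)."""
        v = parse(parse(version).base_version)
        return v.major, v.minor, v.micro

    print(base_version_tuple("3.1.3rc1") >= (3, 1, 3))  # -> True
    print(base_version_tuple("3.1.2") >= (3, 1, 3))     # -> False
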
apache_airflow_providers_standard-1.9.2rc1.dist-info/METADATA
@@ -1,12 +1,13 @@
 Metadata-Version: 2.4
 Name: apache-airflow-providers-standard
- Version: 1.9.1rc1
+ Version: 1.9.2rc1
 Summary: Provider package apache-airflow-providers-standard for Apache Airflow
 Keywords: airflow-provider,standard,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
 Maintainer-email: Apache Software Foundation <dev@airflow.apache.org>
 Requires-Python: >=3.10
 Description-Content-Type: text/x-rst
+ License-Expression: Apache-2.0
 Classifier: Development Status :: 5 - Production/Stable
 Classifier: Environment :: Console
 Classifier: Environment :: Web Environment
@@ -14,17 +15,18 @@ Classifier: Intended Audience :: Developers
 Classifier: Intended Audience :: System Administrators
 Classifier: Framework :: Apache Airflow
 Classifier: Framework :: Apache Airflow :: Provider
- Classifier: License :: OSI Approved :: Apache Software License
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Programming Language :: Python :: 3.13
 Classifier: Topic :: System :: Monitoring
+ License-File: LICENSE
+ License-File: NOTICE
 Requires-Dist: apache-airflow>=2.10.0rc1
 Requires-Dist: apache-airflow-providers-common-compat>=1.8.0rc1
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
- Project-URL: Changelog, https://airflow.staged.apache.org/docs/apache-airflow-providers-standard/1.9.1/changelog.html
- Project-URL: Documentation, https://airflow.staged.apache.org/docs/apache-airflow-providers-standard/1.9.1
+ Project-URL: Changelog, https://airflow.staged.apache.org/docs/apache-airflow-providers-standard/1.9.2/changelog.html
+ Project-URL: Documentation, https://airflow.staged.apache.org/docs/apache-airflow-providers-standard/1.9.2
 Project-URL: Mastodon, https://fosstodon.org/@airflow
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
@@ -55,7 +57,7 @@ Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/
 
 Package ``apache-airflow-providers-standard``
 
- Release: ``1.9.1``
+ Release: ``1.9.2``
 
 
 Airflow Standard Provider
@@ -68,7 +70,7 @@ This is a provider package for ``standard`` provider. All classes for this provi
 are in ``airflow.providers.standard`` python package.
 
 You can find package information and changelog for the provider
- in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-standard/1.9.1/>`_.
+ in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-standard/1.9.2/>`_.
 
 Installation
 ------------
@@ -109,5 +111,5 @@ Dependent package
 ================================================================================================================== =================
 
 The changelog for the provider package can be found in the
- `changelog <https://airflow.apache.org/docs/apache-airflow-providers-standard/1.9.1/changelog.html>`_.
+ `changelog <https://airflow.apache.org/docs/apache-airflow-providers-standard/1.9.2/changelog.html>`_.
 
apache_airflow_providers_standard-1.9.2rc1.dist-info/RECORD
@@ -1,8 +1,7 @@
- airflow/providers/standard/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
- airflow/providers/standard/__init__.py,sha256=U9kcSbyD5ZHnLiCe5_tLPs6cbPhaIKxFIfJ3mbvq8Vw,1497
+ airflow/providers/standard/__init__.py,sha256=jfNXXadd0prAQ-FCHal8rq0weWZ0Rb2en5O2wHveHE0,1497
 airflow/providers/standard/exceptions.py,sha256=m2Ryv36yrzAk8xMIA4lhR11n1gA1CKPFwRok8ksl_tk,2416
 airflow/providers/standard/get_provider_info.py,sha256=NVstkG2ZeAiTZnvmbrMpxcYgJzcdITKdQvgDOZYX1Rk,7227
- airflow/providers/standard/version_compat.py,sha256=-7q64t0wML0dv9SIfoRV5lGphE_8bWmBHZ07o6u6_9Y,2091
+ airflow/providers/standard/version_compat.py,sha256=wzS7qPjUGZp-zZeL2YnvAlxD-Sdtt7ff6KnlFk3xlUA,2166
 airflow/providers/standard/decorators/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/standard/decorators/bash.py,sha256=7njtxxSfAwBdcOz8ZPTH1MLsXTgyL-oxEE7kXMZvv3Q,4132
 airflow/providers/standard/decorators/branch_external_python.py,sha256=sGtSrCK137HLm77fcxbYgpDeMMuYdMJp6oVbPrQHvPU,2403
@@ -38,40 +37,42 @@ airflow/providers/standard/example_dags/sql/sample.sql,sha256=OVk1qozBY58lp_tFtn
 airflow/providers/standard/hooks/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/standard/hooks/filesystem.py,sha256=-WxoAHv6hOmyZr_KjHKGOzdi5FbdiRDdGuJUjts9jTw,2887
 airflow/providers/standard/hooks/package_index.py,sha256=BgPZB9z0UKV1jO-QERdxZTwY1KLMwYl9YFAU_q_ZX0w,3786
- airflow/providers/standard/hooks/subprocess.py,sha256=fmo71AfSxmDJpkBO7fFtLDdJLAkhlhdIPCzHj-GDBjI,4937
+ airflow/providers/standard/hooks/subprocess.py,sha256=bO6xV9JBfQ_iZAdV1JiycHSqd_HWYgKUOSGCd32H2Js,4667
 airflow/providers/standard/models/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/standard/operators/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/standard/operators/bash.py,sha256=BJaW76cjLdio8ldTytA03lKlDw16xJYl_zF4mWF39Ts,11053
 airflow/providers/standard/operators/branch.py,sha256=NtQVkB63nvZCnFoYlfhM-fG1c9zakEDIid20KfuoLW4,4247
 airflow/providers/standard/operators/datetime.py,sha256=U9hSX5wP0e0NfgM2gYitqOIoqvJxrGxTU2TUV82pHPg,4879
 airflow/providers/standard/operators/empty.py,sha256=flxN2BhuHegEOiiAcJR9QOuMO8PcxQu353Q9p-Yk82s,1342
- airflow/providers/standard/operators/hitl.py,sha256=iV7zcSAcgyZV1HCvl6vHF9YV535sJnMc7JsKP-3uDxE,18022
- airflow/providers/standard/operators/latest_only.py,sha256=FDQP8PMw-bDXJUwfpGB0ESjxitgRMd7kqPO5hAsbf6c,4961
- airflow/providers/standard/operators/python.py,sha256=H3AaqcEmIM_KZKJV8RiUxksjBfELoD1l7iCEqA_Is-E,53670
+ airflow/providers/standard/operators/hitl.py,sha256=rsCkEKBeK-f0nCd17yvjMzPXqUdmTfKwhnwTgd9Xqo0,18209
+ airflow/providers/standard/operators/latest_only.py,sha256=95c4z3tjjTlJQ-I90AbfQRnYeUPxxSjJI-TaTWkrCWs,5017
+ airflow/providers/standard/operators/python.py,sha256=vxwTKJJ22ZCrMVWtzmWgKgfh6tO7oKeMVXOdItowPbc,53650
 airflow/providers/standard/operators/smooth.py,sha256=WamRqmeSm6BcGCCBAqBEVYIRk4ZXbeI_Q7OjPgLfnUI,1400
- airflow/providers/standard/operators/trigger_dagrun.py,sha256=2cNz7QgJy0biV6PCjMrr7FInCN6z_ZWkdrXF_lq54ns,16537
+ airflow/providers/standard/operators/trigger_dagrun.py,sha256=b4mkLIn0knBqeGZMJYr-SkBpqPLjhnrjAArtnMExkQo,16816
 airflow/providers/standard/operators/weekday.py,sha256=s8C6T-x9Hvkj4YQWCguTRyhiAqdJYCBr12rPm2qYC3M,4957
 airflow/providers/standard/sensors/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
- airflow/providers/standard/sensors/bash.py,sha256=TNcFnE881afOhQQI_PQj6Xnd4WmPO5uKUFsFJuiKN1o,4830
+ airflow/providers/standard/sensors/bash.py,sha256=W_RUdczpEfta7WZVzNHuPbsnZq5YvchmlqEGp3tiLYI,4822
 airflow/providers/standard/sensors/date_time.py,sha256=_Pc4DBL807sJInvaTFxBvAirb3FtBbdpoBYGZj-u4dQ,6355
- airflow/providers/standard/sensors/external_task.py,sha256=qWxjP8raAofH6rIj5CorVj1d6YTAv3q3ve2La5sK9bk,28797
+ airflow/providers/standard/sensors/external_task.py,sha256=oGB71ZkyzuDE4p2GaGyaRBSEiMy4twATD5OTnG9HRFk,30357
 airflow/providers/standard/sensors/filesystem.py,sha256=tuKzvQhv-FO9yQOZ1lhDLAOTHtKlaMXPBmq1asSzlfU,5896
 airflow/providers/standard/sensors/python.py,sha256=eBfy0QRgsQHw4H4pZ4u7DNzu7ZdMH7TtPQWOdP4wWqA,3221
 airflow/providers/standard/sensors/time.py,sha256=ymNNjJmtcUKumlbSfPq6krCF_4lsuFtdjqTNr-gPlOM,4986
 airflow/providers/standard/sensors/time_delta.py,sha256=aEz9VMxnILWafac6sdZRXDYAvV_XukVYFRFYB2buVAY,7453
 airflow/providers/standard/sensors/weekday.py,sha256=Jb_QPJNN7_Os1X0Y-MA0-J_tv-rtWjZcU0C_5edo8X0,4291
 airflow/providers/standard/triggers/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
- airflow/providers/standard/triggers/external_task.py,sha256=R2Wsd21pw9_gGTs9XuHafylt65hMVPisz2g6vnpLJ4o,11521
- airflow/providers/standard/triggers/file.py,sha256=2i8-RwSjEgdOwQNcHCqLmSdpE3Ehqg4GQJ8nE3-fHxo,4886
- airflow/providers/standard/triggers/hitl.py,sha256=MXAwMPlMcil1EBRnck9hjakicK93QfLTBEyKKmGucV0,7403
+ airflow/providers/standard/triggers/external_task.py,sha256=LKjoLC--pIBWBDqt9uOVHeGqlmr8svSprh8beFGUSF8,11652
+ airflow/providers/standard/triggers/file.py,sha256=mkZuOBNMHON9DQSBRO1NIqcNNjxGM5dbYOQ1Cfsm-BQ,4877
+ airflow/providers/standard/triggers/hitl.py,sha256=cJfFqiuEq5nZRisfktJ2DgviJqxV9tiBIbS8CYmPaOQ,8501
 airflow/providers/standard/triggers/temporal.py,sha256=-Zxu96xqt40bhsyzFSK2gv-Ddb2GNr2UogeIoyBX684,4468
 airflow/providers/standard/utils/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
- airflow/providers/standard/utils/python_virtualenv.py,sha256=logUzODR5qnUZYyew-ZEbL7rudrkTEtmnU9qTQhg0-0,8532
+ airflow/providers/standard/utils/python_virtualenv.py,sha256=CyAn1rix-BCvtf6CI6fAMfLZWZx2GRhjfugpyrbRnZc,9522
 airflow/providers/standard/utils/python_virtualenv_script.jinja2,sha256=3Z334hVq6hQ9EHkOoGnAHc2_XNkZQkOJGxZArDKLc-c,2770
- airflow/providers/standard/utils/sensor_helper.py,sha256=FwI36sJK-s3Wz0slypF1_tAikQpiXovtTiN__Md00Aw,5049
+ airflow/providers/standard/utils/sensor_helper.py,sha256=ZcJeWAGymwUma7R6U7pQXhmQLC2UEeiyjQOrH4uFxt0,5407
 airflow/providers/standard/utils/skipmixin.py,sha256=YDgyVUv6XBmsMJzv5AhatZvymgcThBmBD0Yr2_lh40E,8012
 airflow/providers/standard/utils/weekday.py,sha256=ySDrIkWv-lqqxURo9E98IGInDqERec2O4y9o2hQTGiQ,2685
- apache_airflow_providers_standard-1.9.1rc1.dist-info/entry_points.txt,sha256=mW2YRh3mVdZdaP5-iGSNgmcCh3YYdALIn28BCLBZZ40,104
- apache_airflow_providers_standard-1.9.1rc1.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
- apache_airflow_providers_standard-1.9.1rc1.dist-info/METADATA,sha256=myRsPZSw0qhDL4Jpf0tWaeSpFe8wztNKDoYx5Vk18IU,5120
- apache_airflow_providers_standard-1.9.1rc1.dist-info/RECORD,,
+ apache_airflow_providers_standard-1.9.2rc1.dist-info/entry_points.txt,sha256=mW2YRh3mVdZdaP5-iGSNgmcCh3YYdALIn28BCLBZZ40,104
+ apache_airflow_providers_standard-1.9.2rc1.dist-info/licenses/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
+ apache_airflow_providers_standard-1.9.2rc1.dist-info/licenses/NOTICE,sha256=E3-_E02gwwSEFzeeWPKmnIjOoos3hW28CLISV6sYrbQ,168
+ apache_airflow_providers_standard-1.9.2rc1.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
+ apache_airflow_providers_standard-1.9.2rc1.dist-info/METADATA,sha256=C7vbo6Vt_Zmx2cfqp0IM3DqMLA5fE9cA8cK6l_kY10I,5131
+ apache_airflow_providers_standard-1.9.2rc1.dist-info/RECORD,,
apache_airflow_providers_standard-1.9.2rc1.dist-info/licenses/NOTICE
@@ -0,0 +1,5 @@
+ Apache Airflow
+ Copyright 2016-2025 The Apache Software Foundation
+
+ This product includes software developed at
+ The Apache Software Foundation (http://www.apache.org/).