apache_airflow_providers_standard-0.3.0rc2-py3-none-any.whl → apache_airflow_providers_standard-0.4.0rc1-py3-none-any.whl

This diff compares the contents of two publicly released versions of the package, as they appear in their public registries. It is provided for informational purposes only.
@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version
 
 __all__ = ["__version__"]
 
-__version__ = "0.3.0"
+__version__ = "0.4.0"
 
 if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
     "2.9.0"
@@ -21,7 +21,17 @@ import warnings
 from collections.abc import Collection, Mapping, Sequence
 from typing import TYPE_CHECKING, Any, Callable, ClassVar
 
-from airflow.decorators.base import DecoratedOperator, TaskDecorator, task_decorator_factory
+from airflow.providers.standard.version_compat import AIRFLOW_V_3_0_PLUS
+
+if AIRFLOW_V_3_0_PLUS:
+    from airflow.sdk.bases.decorator import DecoratedOperator, TaskDecorator, task_decorator_factory
+else:
+    from airflow.decorators.base import (  # type: ignore[no-redef]
+        DecoratedOperator,
+        TaskDecorator,
+        task_decorator_factory,
+    )
+
 from airflow.providers.standard.operators.bash import BashOperator
 from airflow.sdk.definitions._internal.types import SET_DURING_EXECUTION
 from airflow.utils.context import context_merge
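
The same compatibility-import pattern is applied in every decorator module that follows: prefer the Airflow 3 Task SDK location, fall back to the legacy Airflow 2 module. A standalone sketch of the pattern (the flag computation shown here is an assumption about what ``version_compat`` does, mirroring the version check in ``__init__.py`` above):

    import packaging.version

    from airflow import __version__ as airflow_version

    # Assumed to mirror airflow.providers.standard.version_compat
    AIRFLOW_V_3_0_PLUS = packaging.version.parse(
        packaging.version.parse(airflow_version).base_version
    ) >= packaging.version.parse("3.0.0")

    if AIRFLOW_V_3_0_PLUS:
        # Airflow 3: decorator bases live in the Task SDK
        from airflow.sdk.bases.decorator import task_decorator_factory
    else:
        # Airflow 2: legacy location; the no-redef ignore keeps mypy quiet
        from airflow.decorators.base import task_decorator_factory  # type: ignore[no-redef]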
@@ -18,12 +18,18 @@ from __future__ import annotations
 
 from typing import TYPE_CHECKING, Callable
 
-from airflow.decorators.base import task_decorator_factory
+from airflow.providers.standard.version_compat import AIRFLOW_V_3_0_PLUS
+
+if AIRFLOW_V_3_0_PLUS:
+    from airflow.sdk.bases.decorator import task_decorator_factory
+else:
+    from airflow.decorators.base import task_decorator_factory  # type: ignore[no-redef]
+
 from airflow.providers.standard.decorators.python import _PythonDecoratedOperator
 from airflow.providers.standard.operators.python import BranchExternalPythonOperator
 
 if TYPE_CHECKING:
-    from airflow.decorators.base import TaskDecorator
+    from airflow.sdk.bases.decorator import TaskDecorator
 
 
 class _BranchExternalPythonDecoratedOperator(_PythonDecoratedOperator, BranchExternalPythonOperator):
@@ -18,12 +18,17 @@ from __future__ import annotations
 
 from typing import TYPE_CHECKING, Callable
 
-from airflow.decorators.base import task_decorator_factory
+from airflow.providers.standard.version_compat import AIRFLOW_V_3_0_PLUS
+
+if AIRFLOW_V_3_0_PLUS:
+    from airflow.sdk.bases.decorator import task_decorator_factory
+else:
+    from airflow.decorators.base import task_decorator_factory  # type: ignore[no-redef]
 from airflow.providers.standard.decorators.python import _PythonDecoratedOperator
 from airflow.providers.standard.operators.python import BranchPythonOperator
 
 if TYPE_CHECKING:
-    from airflow.decorators.base import TaskDecorator
+    from airflow.sdk.bases.decorator import TaskDecorator
 
 
 class _BranchPythonDecoratedOperator(_PythonDecoratedOperator, BranchPythonOperator):
@@ -18,12 +18,17 @@ from __future__ import annotations
 
 from typing import TYPE_CHECKING, Callable
 
-from airflow.decorators.base import task_decorator_factory
+from airflow.providers.standard.version_compat import AIRFLOW_V_3_0_PLUS
+
+if AIRFLOW_V_3_0_PLUS:
+    from airflow.sdk.bases.decorator import task_decorator_factory
+else:
+    from airflow.decorators.base import task_decorator_factory  # type: ignore[no-redef]
 from airflow.providers.standard.decorators.python import _PythonDecoratedOperator
 from airflow.providers.standard.operators.python import BranchPythonVirtualenvOperator
 
 if TYPE_CHECKING:
-    from airflow.decorators.base import TaskDecorator
+    from airflow.sdk.bases.decorator import TaskDecorator
 
 
 class _BranchPythonVirtualenvDecoratedOperator(_PythonDecoratedOperator, BranchPythonVirtualenvOperator):
@@ -18,12 +18,17 @@ from __future__ import annotations
 
 from typing import TYPE_CHECKING, Callable
 
-from airflow.decorators.base import task_decorator_factory
+from airflow.providers.standard.version_compat import AIRFLOW_V_3_0_PLUS
+
+if AIRFLOW_V_3_0_PLUS:
+    from airflow.sdk.bases.decorator import task_decorator_factory
+else:
+    from airflow.decorators.base import task_decorator_factory  # type: ignore[no-redef]
 from airflow.providers.standard.decorators.python import _PythonDecoratedOperator
 from airflow.providers.standard.operators.python import ExternalPythonOperator
 
 if TYPE_CHECKING:
-    from airflow.decorators.base import TaskDecorator
+    from airflow.sdk.bases.decorator import TaskDecorator
 
 
 class _PythonExternalDecoratedOperator(_PythonDecoratedOperator, ExternalPythonOperator):
@@ -19,11 +19,16 @@ from __future__ import annotations
 from collections.abc import Sequence
 from typing import TYPE_CHECKING, Callable
 
-from airflow.decorators.base import DecoratedOperator, task_decorator_factory
 from airflow.providers.standard.operators.python import PythonOperator
+from airflow.providers.standard.version_compat import AIRFLOW_V_3_0_PLUS
+
+if AIRFLOW_V_3_0_PLUS:
+    from airflow.sdk.bases.decorator import DecoratedOperator, task_decorator_factory
+else:
+    from airflow.decorators.base import DecoratedOperator, task_decorator_factory  # type: ignore[no-redef]
 
 if TYPE_CHECKING:
-    from airflow.decorators.base import TaskDecorator
+    from airflow.sdk.bases.decorator import TaskDecorator
 
 
 class _PythonDecoratedOperator(DecoratedOperator, PythonOperator):
@@ -18,12 +18,19 @@ from __future__ import annotations
 
 from typing import TYPE_CHECKING, Callable
 
-from airflow.decorators.base import task_decorator_factory
+from airflow.providers.standard.version_compat import AIRFLOW_V_3_0_PLUS
+
+if AIRFLOW_V_3_0_PLUS:
+    from airflow.sdk.bases.decorator import task_decorator_factory
+else:
+    from airflow.decorators.base import task_decorator_factory  # type: ignore[no-redef]
+
+
 from airflow.providers.standard.decorators.python import _PythonDecoratedOperator
 from airflow.providers.standard.operators.python import PythonVirtualenvOperator
 
 if TYPE_CHECKING:
-    from airflow.decorators.base import TaskDecorator
+    from airflow.sdk.bases.decorator import TaskDecorator
 
 
 class _PythonVirtualenvDecoratedOperator(_PythonDecoratedOperator, PythonVirtualenvOperator):
@@ -20,11 +20,18 @@ from __future__ import annotations
 from collections.abc import Sequence
 from typing import TYPE_CHECKING, Callable, ClassVar
 
-from airflow.decorators.base import get_unique_task_id, task_decorator_factory
+from airflow.providers.standard.version_compat import AIRFLOW_V_3_0_PLUS
+
+if AIRFLOW_V_3_0_PLUS:
+    from airflow.sdk.bases.decorator import get_unique_task_id, task_decorator_factory
+else:
+    from airflow.decorators.base import get_unique_task_id, task_decorator_factory  # type: ignore[no-redef]
+
+
 from airflow.providers.standard.sensors.python import PythonSensor
 
 if TYPE_CHECKING:
-    from airflow.decorators.base import TaskDecorator
+    from airflow.sdk.bases.decorator import TaskDecorator
 
 
 class DecoratedSensorOperator(PythonSensor):
@@ -18,12 +18,18 @@ from __future__ import annotations
 
 from typing import TYPE_CHECKING, Callable
 
-from airflow.decorators.base import task_decorator_factory
+from airflow.providers.standard.version_compat import AIRFLOW_V_3_0_PLUS
+
+if AIRFLOW_V_3_0_PLUS:
+    from airflow.sdk.bases.decorator import task_decorator_factory
+else:
+    from airflow.decorators.base import task_decorator_factory  # type: ignore[no-redef]
+
 from airflow.providers.standard.decorators.python import _PythonDecoratedOperator
 from airflow.providers.standard.operators.python import ShortCircuitOperator
 
 if TYPE_CHECKING:
-    from airflow.decorators.base import TaskDecorator
+    from airflow.sdk.bases.decorator import TaskDecorator
 
 
 class _ShortCircuitDecoratedOperator(_PythonDecoratedOperator, ShortCircuitOperator):
@@ -26,9 +26,6 @@ def get_provider_info():
         "package-name": "apache-airflow-providers-standard",
         "name": "Standard",
         "description": "Airflow Standard Provider\n",
-        "state": "ready",
-        "source-date-epoch": 1743477899,
-        "versions": ["0.3.0", "0.2.0", "0.1.1", "0.1.0", "0.0.3", "0.0.2", "0.0.1"],
         "integrations": [
             {
                 "integration-name": "Standard",
@@ -135,6 +132,4 @@ def get_provider_info():
                 "name": "short_circuit",
             },
         ],
-        "dependencies": ["apache-airflow>=2.9.0"],
-        "devel-dependencies": [],
     }
@@ -25,10 +25,12 @@ from typing import TYPE_CHECKING
 import pendulum
 
 from airflow.providers.standard.operators.branch import BaseBranchOperator
+from airflow.providers.standard.version_compat import AIRFLOW_V_3_0_PLUS
 from airflow.utils.types import DagRunType
 
 if TYPE_CHECKING:
     from airflow.models import DAG, DagRun
+    from airflow.timetables.base import DagRunInfo
 
 try:
     from airflow.sdk.definitions.context import Context
@@ -46,6 +48,10 @@ class LatestOnlyOperator(BaseBranchOperator):
 
     Note that downstream tasks are never skipped if the given DAG_Run is
     marked as externally triggered.
+
+    Note that when used with timetables that produce zero-length or point-in-time data intervals
+    (e.g., ``DeltaTriggerTimetable``), this operator assumes each run is the latest
+    and does not skip downstream tasks.
     """
 
     ui_color = "#e9ffdb"  # nyanza
@@ -58,8 +64,7 @@ class LatestOnlyOperator(BaseBranchOperator):
             self.log.info("Manually triggered DAG_Run: allowing execution to proceed.")
             return list(context["task"].get_direct_relative_ids(upstream=False))
 
-        dag: DAG = context["dag"]  # type: ignore[assignment]
-        next_info = dag.next_dagrun_info(dag.get_run_data_interval(dag_run), restricted=False)
+        next_info = self._get_next_run_info(context, dag_run)
         now = pendulum.now("UTC")
 
         if next_info is None:
@@ -74,6 +79,15 @@ class LatestOnlyOperator(BaseBranchOperator):
             now,
         )
 
+        if left_window == right_window:
+            self.log.info(
+                "Zero-length interval [%s, %s) from timetable (%s); treating current run as latest.",
+                left_window,
+                right_window,
+                self.dag.timetable.__class__,
+            )
+            return list(context["task"].get_direct_relative_ids(upstream=False))
+
         if not left_window < now <= right_window:
             self.log.info("Not latest execution, skipping downstream.")
             # we return an empty list, thus the parent BaseBranchOperator
@@ -82,3 +96,21 @@ class LatestOnlyOperator(BaseBranchOperator):
         else:
             self.log.info("Latest, allowing execution to proceed.")
             return list(context["task"].get_direct_relative_ids(upstream=False))
+
+    def _get_next_run_info(self, context: Context, dag_run: DagRun) -> DagRunInfo | None:
+        dag: DAG = context["dag"]  # type: ignore[assignment]
+
+        if AIRFLOW_V_3_0_PLUS:
+            from airflow.timetables.base import DataInterval, TimeRestriction
+
+            time_restriction = TimeRestriction(earliest=None, latest=None, catchup=True)
+            current_interval = DataInterval(start=dag_run.data_interval_start, end=dag_run.data_interval_end)
+
+            next_info = dag.timetable.next_dagrun_info(
+                last_automated_data_interval=current_interval,
+                restriction=time_restriction,
+            )
+
+        else:
+            next_info = dag.next_dagrun_info(dag.get_run_data_interval(dag_run), restricted=False)
+        return next_info
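
The new zero-length-interval branch exists because a point-in-time data interval makes the strict window test unsatisfiable. A quick illustration (not part of the package):

    import pendulum

    # With a zero-length data interval the two window edges coincide...
    left_window = right_window = pendulum.datetime(2025, 1, 1, tz="UTC")
    now = pendulum.now("UTC")

    # ...so `left_window < now <= right_window` would require `now` to be both
    # strictly after and at-or-before the same instant: impossible. Without the
    # new guard, every run would be treated as "not latest" and skipped.
    assert not (left_window < now <= right_window)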
@@ -41,9 +41,9 @@ from airflow.models.dagrun import DagRun
 from airflow.providers.standard.triggers.external_task import DagStateTrigger
 from airflow.providers.standard.version_compat import AIRFLOW_V_3_0_PLUS
 from airflow.utils import timezone
-from airflow.utils.session import provide_session
+from airflow.utils.session import NEW_SESSION, provide_session
 from airflow.utils.state import DagRunState
-from airflow.utils.types import DagRunType
+from airflow.utils.types import NOTSET, ArgNotSet, DagRunType
 
 XCOM_LOGICAL_DATE_ISO = "trigger_logical_date_iso"
 XCOM_RUN_ID = "trigger_run_id"
@@ -153,7 +153,7 @@ class TriggerDagRunOperator(BaseOperator):
         trigger_dag_id: str,
         trigger_run_id: str | None = None,
         conf: dict | None = None,
-        logical_date: str | datetime.datetime | None = None,
+        logical_date: str | datetime.datetime | None | ArgNotSet = NOTSET,
         reset_dag_run: bool = False,
         wait_for_completion: bool = False,
         poke_interval: int = 60,
@@ -180,19 +180,23 @@ class TriggerDagRunOperator(BaseOperator):
         self.failed_states = [DagRunState.FAILED]
         self.skip_when_already_exists = skip_when_already_exists
         self._defer = deferrable
-
-        if logical_date is not None and not isinstance(logical_date, (str, datetime.datetime)):
-            type_name = type(logical_date).__name__
+        self.logical_date = logical_date
+        if logical_date is NOTSET:
+            self.logical_date = NOTSET
+        elif logical_date is None or isinstance(logical_date, (str, datetime.datetime)):
+            self.logical_date = logical_date
+        else:
             raise TypeError(
-                f"Expected str or datetime.datetime type for parameter 'logical_date'. Got {type_name}"
+                f"Expected str, datetime.datetime, or None for parameter 'logical_date'. Got {type(logical_date).__name__}"
            )
 
-        self.logical_date = logical_date
-
     def execute(self, context: Context):
-        if self.logical_date is None or isinstance(self.logical_date, datetime.datetime):
-            parsed_logical_date = self.logical_date
-        else:
+        if self.logical_date is NOTSET:
+            # If no logical_date is provided we will set utcnow()
+            parsed_logical_date = timezone.utcnow()
+        elif self.logical_date is None or isinstance(self.logical_date, datetime.datetime):
+            parsed_logical_date = self.logical_date  # type: ignore
+        elif isinstance(self.logical_date, str):
             parsed_logical_date = timezone.parse(self.logical_date)
 
         try:
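
The switch from a ``None`` default to a ``NOTSET`` sentinel lets the operator distinguish an omitted ``logical_date`` (fall back to the current time) from an explicitly passed ``None``. A minimal standalone sketch of the idea, using a stand-in sentinel rather than Airflow's ``ArgNotSet``:

    import datetime

    class _ArgNotSet:
        """Stand-in for airflow.utils.types.ArgNotSet (illustration only)."""

    NOTSET = _ArgNotSet()

    def resolve_logical_date(value=NOTSET):
        # Omitted argument: default to "now", matching the new execute() path.
        if value is NOTSET:
            return datetime.datetime.now(datetime.timezone.utc)
        # An explicit None (or a datetime/str) passes through unchanged.
        return value

    print(resolve_logical_date())      # current UTC time
    print(resolve_logical_date(None))  # None, preserved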
@@ -231,10 +235,9 @@ class TriggerDagRunOperator(BaseOperator):
                 allowed_states=self.allowed_states,
                 failed_states=self.failed_states,
                 poke_interval=self.poke_interval,
+                deferrable=self._defer,
             )
 
-        # TODO: Support deferral
-
     def _trigger_dag_af_2(self, context, run_id, parsed_logical_date):
         try:
             dag_run = trigger_dag(
@@ -304,8 +307,40 @@ class TriggerDagRunOperator(BaseOperator):
                 self.log.info("%s finished with allowed state %s", self.trigger_dag_id, state)
                 return
 
+    def execute_complete(self, context: Context, event: tuple[str, dict[str, Any]]):
+        if AIRFLOW_V_3_0_PLUS:
+            self._trigger_dag_run_af_3_execute_complete(event=event)
+        else:
+            self._trigger_dag_run_af_2_execute_complete(event=event)
+
+    def _trigger_dag_run_af_3_execute_complete(self, event: tuple[str, dict[str, Any]]):
+        run_ids = event[1]["run_ids"]
+        event_data = event[1]
+        failed_run_id_conditions = []
+
+        for run_id in run_ids:
+            state = event_data.get(run_id)
+            if state in self.failed_states:
+                failed_run_id_conditions.append(run_id)
+                continue
+            if state in self.allowed_states:
+                self.log.info(
+                    "%s finished with allowed state %s for run_id %s",
+                    self.trigger_dag_id,
+                    state,
+                    run_id,
+                )
+
+        if failed_run_id_conditions:
+            raise AirflowException(
+                f"{self.trigger_dag_id} failed with failed states {self.failed_states} for run_ids"
+                f" {failed_run_id_conditions}"
+            )
+
     @provide_session
-    def execute_complete(self, context: Context, session: Session, event: tuple[str, dict[str, Any]]):
+    def _trigger_dag_run_af_2_execute_complete(
+        self, event: tuple[str, dict[str, Any]], session: Session = NEW_SESSION
+    ):
         # This logical_date is parsed from the return trigger event
         provided_logical_date = event[1]["execution_dates"][0]
         try:
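
Deferral is now supported end to end: the trigger fires an event whose payload the Airflow 3 completion handler reads per run_id. An assumed event shape, inferred from the ``DagStateTrigger`` changes further below, for illustration only:

    # Hypothetical event as (classpath, data): data carries run_ids plus a
    # terminal state keyed by each run_id (see validate_count_dags_af_3 below).
    event = (
        "airflow.providers.standard.triggers.external_task.DagStateTrigger",
        {
            "dag_id": "target_dag",
            "states": ["success", "failed"],
            "poll_interval": 60,
            "run_ids": ["manual__2025-01-01T00:00:00+00:00"],
            "execution_dates": None,
            "manual__2025-01-01T00:00:00+00:00": "success",
        },
    )

    # The AF3 completion handler walks run_ids and inspects each state:
    for run_id in event[1]["run_ids"]:
        assert event[1].get(run_id) == "success"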
@@ -346,8 +346,6 @@ class ExternalTaskSensor(BaseSensorOperator):
         """Calculate the normalized count based on the type of check."""
         if self.external_task_ids:
             return count / len(self.external_task_ids)
-        elif self.external_task_group_id:
-            return count / len(dttm_filter)
         else:
             return count
 
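For reference, the normalization kept above divides a raw task-instance count by the number of poked task ids, so "all tasks matched" maps to 1.0. A worked example (illustration, assuming a single poked date):

    # Two external task ids poked for one date; both reached an allowed state.
    external_task_ids = ["extract", "load"]
    count = 2  # matching task-instance rows
    normalized = count / len(external_task_ids)
    assert normalized == 1.0  # every poked task matched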
@@ -421,16 +419,22 @@ class ExternalTaskSensor(BaseSensorOperator):
         if not self.deferrable:
             super().execute(context)
         else:
+            dttm_filter = self._get_dttm_filter(context)
+            logical_or_execution_dates = (
+                {"logical_dates": dttm_filter} if AIRFLOW_V_3_0_PLUS else {"execution_date": dttm_filter}
+            )
             self.defer(
                 timeout=self.execution_timeout,
                 trigger=WorkflowTrigger(
                     external_dag_id=self.external_dag_id,
                     external_task_group_id=self.external_task_group_id,
                     external_task_ids=self.external_task_ids,
-                    logical_dates=self._get_dttm_filter(context),
                     allowed_states=self.allowed_states,
+                    failed_states=self.failed_states,
+                    skipped_states=self.skipped_states,
                     poke_interval=self.poll_interval,
                     soft_fail=self.soft_fail,
+                    **logical_or_execution_dates,
                 ),
                 method_name="execute_complete",
             )
@@ -50,6 +50,7 @@ class WorkflowTrigger(BaseTrigger):
     :param allowed_states: States considered as successful for external tasks.
     :param poke_interval: The interval (in seconds) for poking the external tasks.
     :param soft_fail: If True, the trigger will not fail the entire dag on external task failure.
+    :param logical_dates: A list of logical dates for the external dag.
     """
 
     def __init__(
@@ -57,6 +58,7 @@ class WorkflowTrigger(BaseTrigger):
         external_dag_id: str,
         run_ids: list[str] | None = None,
         execution_dates: list[datetime] | None = None,
+        logical_dates: list[datetime] | None = None,
         external_task_ids: typing.Collection[str] | None = None,
         external_task_group_id: str | None = None,
         failed_states: typing.Iterable[str] | None = None,
@@ -76,6 +78,7 @@ class WorkflowTrigger(BaseTrigger):
         self.poke_interval = poke_interval
         self.soft_fail = soft_fail
         self.execution_dates = execution_dates
+        self.logical_dates = logical_dates
         super().__init__(**kwargs)
 
     def serialize(self) -> tuple[str, dict[str, Any]]:
@@ -92,6 +95,7 @@ class WorkflowTrigger(BaseTrigger):
         }
         if AIRFLOW_V_3_0_PLUS:
             data["run_ids"] = self.run_ids
+            data["logical_dates"] = self.logical_dates
         else:
             data["execution_dates"] = self.execution_dates
 
@@ -99,9 +103,16 @@ class WorkflowTrigger(BaseTrigger):
 
     async def run(self) -> typing.AsyncIterator[TriggerEvent]:
         """Check periodically tasks, task group or dag status."""
+        if AIRFLOW_V_3_0_PLUS:
+            get_count_func = self._get_count_af_3
+            run_id_or_dates = (self.run_ids or self.logical_dates) or []
+        else:
+            get_count_func = self._get_count
+            run_id_or_dates = self.execution_dates or []
+
         while True:
             if self.failed_states:
-                failed_count = await self._get_count(self.failed_states)
+                failed_count = await get_count_func(self.failed_states)
                 if failed_count > 0:
                     yield TriggerEvent({"status": "failed"})
                     return
@@ -109,18 +120,43 @@ class WorkflowTrigger(BaseTrigger):
                     yield TriggerEvent({"status": "success"})
                     return
             if self.skipped_states:
-                skipped_count = await self._get_count(self.skipped_states)
+                skipped_count = await get_count_func(self.skipped_states)
                 if skipped_count > 0:
                     yield TriggerEvent({"status": "skipped"})
                     return
-            allowed_count = await self._get_count(self.allowed_states)
-            _dates = self.run_ids if AIRFLOW_V_3_0_PLUS else self.execution_dates
-            if allowed_count == len(_dates):  # type: ignore[arg-type]
+            allowed_count = await get_count_func(self.allowed_states)
+
+            if allowed_count == len(run_id_or_dates):  # type: ignore[arg-type]
                 yield TriggerEvent({"status": "success"})
                 return
             self.log.info("Sleeping for %s seconds", self.poke_interval)
             await asyncio.sleep(self.poke_interval)
 
+    async def _get_count_af_3(self, states):
+        from airflow.sdk.execution_time.task_runner import RuntimeTaskInstance
+
+        if self.external_task_ids or self.external_task_group_id:
+            count = await sync_to_async(RuntimeTaskInstance.get_ti_count)(
+                dag_id=self.external_dag_id,
+                task_ids=self.external_task_ids,
+                task_group_id=self.external_task_group_id,
+                logical_dates=self.logical_dates,
+                run_ids=self.run_ids,
+                states=states,
+            )
+        else:
+            count = await sync_to_async(RuntimeTaskInstance.get_dr_count)(
+                dag_id=self.external_dag_id,
+                logical_dates=self.logical_dates,
+                run_ids=self.run_ids,
+                states=states,
+            )
+
+        if self.external_task_ids:
+            return count / len(self.external_task_ids)
+        else:
+            return count
+
     @sync_to_async
     def _get_count(self, states: typing.Iterable[str] | None) -> int:
         """
@@ -170,29 +206,58 @@ class DagStateTrigger(BaseTrigger):
             "dag_id": self.dag_id,
             "states": self.states,
             "poll_interval": self.poll_interval,
+            "run_ids": self.run_ids,
+            "execution_dates": self.execution_dates,
         }
 
-        if AIRFLOW_V_3_0_PLUS:
-            data["run_ids"] = self.run_ids
-        else:
-            data["execution_dates"] = self.execution_dates
-
         return "airflow.providers.standard.triggers.external_task.DagStateTrigger", data
 
     async def run(self) -> typing.AsyncIterator[TriggerEvent]:
         """Check periodically if the dag run exists, and has hit one of the states yet, or not."""
+        runs_ids_or_dates = 0
+        if self.run_ids:
+            runs_ids_or_dates = len(self.run_ids)
+        elif self.execution_dates:
+            runs_ids_or_dates = len(self.execution_dates)
+
+        if AIRFLOW_V_3_0_PLUS:
+            event = await self.validate_count_dags_af_3(runs_ids_or_dates_len=runs_ids_or_dates)
+            yield TriggerEvent(event)
+            return
+        else:
+            while True:
+                num_dags = await self.count_dags()  # type: ignore[call-arg]
+                if num_dags == runs_ids_or_dates:
+                    yield TriggerEvent(self.serialize())
+                    return
+                await asyncio.sleep(self.poll_interval)
+
+    async def validate_count_dags_af_3(self, runs_ids_or_dates_len: int = 0) -> tuple[str, dict[str, Any]]:
+        from airflow.sdk.execution_time.task_runner import RuntimeTaskInstance
+
+        cls_path, data = self.serialize()
+
         while True:
-            # mypy confuses typing here
-            num_dags = await self.count_dags()  # type: ignore[call-arg]
-            _dates = self.run_ids if AIRFLOW_V_3_0_PLUS else self.execution_dates
-            if num_dags == len(_dates):  # type: ignore[arg-type]
-                yield TriggerEvent(self.serialize())
-                return
+            num_dags = await sync_to_async(RuntimeTaskInstance.get_dr_count)(
+                dag_id=self.dag_id,
+                run_ids=self.run_ids,
+                states=self.states,  # type: ignore[arg-type]
+                logical_dates=self.execution_dates,
+            )
+            if num_dags == runs_ids_or_dates_len:
+                if isinstance(self.run_ids, list):
+                    for run_id in self.run_ids:
+                        state = await sync_to_async(RuntimeTaskInstance.get_dagrun_state)(
+                            dag_id=self.dag_id,
+                            run_id=run_id,
+                        )
+                        data[run_id] = state
+                return cls_path, data
             await asyncio.sleep(self.poll_interval)
 
     @sync_to_async
     @provide_session
-    def count_dags(self, *, session: Session = NEW_SESSION) -> int | None:
+    def count_dags(self, *, session: Session = NEW_SESSION) -> int:
         """Count how many dag runs in the database match our criteria."""
         _dag_run_date_condition = (
             DagRun.run_id.in_(self.run_ids)
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: apache-airflow-providers-standard
-Version: 0.3.0rc2
+Version: 0.4.0rc1
 Summary: Provider package apache-airflow-providers-standard for Apache Airflow
 Keywords: airflow-provider,standard,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -22,8 +22,8 @@ Classifier: Programming Language :: Python :: 3.12
 Classifier: Topic :: System :: Monitoring
 Requires-Dist: apache-airflow>=2.9.0rc0
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-standard/0.3.0/changelog.html
-Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-standard/0.3.0
+Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-standard/0.4.0/changelog.html
+Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-standard/0.4.0
 Project-URL: Mastodon, https://fosstodon.org/@airflow
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
@@ -54,7 +54,7 @@ Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/
 
 Package ``apache-airflow-providers-standard``
 
-Release: ``0.3.0``
+Release: ``0.4.0``
 
 
 Airflow Standard Provider
@@ -67,7 +67,7 @@ This is a provider package for ``standard`` provider. All classes for this provi
 are in ``airflow.providers.standard`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-standard/0.3.0/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-standard/0.4.0/>`_.
 
 Installation
 ------------
@@ -88,5 +88,5 @@ PIP package Version required
 ================== ==================
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-standard/0.3.0/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-standard/0.4.0/changelog.html>`_.
 
@@ -1,17 +1,17 @@
 airflow/providers/standard/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
-airflow/providers/standard/__init__.py,sha256=84Hgrj5AurK3EjrkTXn2CChxmmQY0FCSNo6nzNwAxT0,1495
-airflow/providers/standard/get_provider_info.py,sha256=L-tNd8ZwiC77cY0mDXdUyp29VQu0h5ChdXy-fdwBxnQ,6435
+airflow/providers/standard/__init__.py,sha256=qhDil5br-GkCarIJ4gSk1fMsAIsX1W5Yi7YNNmB_ktE,1495
+airflow/providers/standard/get_provider_info.py,sha256=96C-S4JowGsq9zyHVtmzZWm2VdugMee-0XZIvvgBqZI,6198
 airflow/providers/standard/version_compat.py,sha256=aHg90_DtgoSnQvILFICexMyNlHlALBdaeWqkX3dFDug,1605
 airflow/providers/standard/decorators/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/standard/decorators/bash.py,sha256=dknHzPFfVwiCrcC0FzMlGKoOMi66EaA9uKR3JSsitno,4128
-airflow/providers/standard/decorators/branch_external_python.py,sha256=-z6JmLQrTzqQg2yqsdA91VPip52JeWp0Wnd9JDX_DeI,2352
-airflow/providers/standard/decorators/branch_python.py,sha256=ornGzksOiTSbvAiCgthkZw4iJaMsNCnKBasWdOS8wfA,2272
-airflow/providers/standard/decorators/branch_virtualenv.py,sha256=jcpxyoX86zXvzEJ8eIIf177EZZrt5TxoZbEum8blxI0,2354
-airflow/providers/standard/decorators/external_python.py,sha256=6_K9kjLQJQFwcxqfW51BP9BwMZDrI9ihomsR4xftavk,2673
-airflow/providers/standard/decorators/python.py,sha256=f-pl62ilgX45zvW6seCXKI0FoV3nypbWmjOIQauo6Y0,3219
-airflow/providers/standard/decorators/python_virtualenv.py,sha256=CKzMtaQr9nK-e9APm7jtXmIdc-Qc-xIV13PymdbEJAM,2359
-airflow/providers/standard/decorators/sensor.py,sha256=N2sKQl6xPop0gKnYWhtqnjl5yzSZa_56MQ7I5HrnG4Y,3004
-airflow/providers/standard/decorators/short_circuit.py,sha256=xo4h8eoZ9UXJ_8IhEhvlWat_Q_w1Y6bJmEXcAvsKZlY,2301
+airflow/providers/standard/decorators/bash.py,sha256=J13t48yrRv7XpDV8_QWtI0IXbqNiqxW9Ct0ngmrQAdE,4396
+airflow/providers/standard/decorators/branch_external_python.py,sha256=M6JwUxKQj8KOHZL8cHtKmn37leEz4DKoGDrexNH2aAA,2557
+airflow/providers/standard/decorators/branch_python.py,sha256=MbtK3Zv5fKynrhv0PFeOzPQQ1IiW9PcurGq1bqotLqo,2476
+airflow/providers/standard/decorators/branch_virtualenv.py,sha256=m_3VE88QcuMj_yFg5h6MANVsfz1cKg9TyBbJ_t6zBA8,2558
+airflow/providers/standard/decorators/external_python.py,sha256=FxrsihfFy9oVjbyLugtFLDq7VkOdVXJScH0kuua1ApE,2877
+airflow/providers/standard/decorators/python.py,sha256=9Fdk8CRQJ7HQzhKT1Qh-CzfbX0_aw12ccjbh6fdBRdc,3442
+airflow/providers/standard/decorators/python_virtualenv.py,sha256=Xhul1iA0mJlN5N1EZl1LWIs90pUhS6bawQtVSpQhqEg,2565
+airflow/providers/standard/decorators/sensor.py,sha256=04PPtcDhSr_Wa4LJct2eiBczb8JEAzjiSos2CqBu3-4,3230
+airflow/providers/standard/decorators/short_circuit.py,sha256=3_6UHDhloPMT3fGeHFDBjf3rScXQm4wtfx59n-n__Ys,2506
 airflow/providers/standard/hooks/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/standard/hooks/filesystem.py,sha256=fDZwW_EYD8z1QXnReqI7gIwSbDPZNTKtqQvgktiP02o,2870
 airflow/providers/standard/hooks/package_index.py,sha256=U7_s_02-wwz9kTkzKr3JAhVQj2spuntWd_GmjfpV-y4,3769
@@ -21,22 +21,22 @@ airflow/providers/standard/operators/bash.py,sha256=dPpaAbLRPyVo1207npt0Fr9Shdzv
 airflow/providers/standard/operators/branch.py,sha256=C_AUd7TSo_U52GiWsrR7rJIsRU5KKfrybBFw84brm_c,4070
 airflow/providers/standard/operators/datetime.py,sha256=bYDdbfAyAlEXRRHjOgB06UhgDum6SPdd5I3u-ylPSaw,5005
 airflow/providers/standard/operators/empty.py,sha256=C7_uLWJK6kExzlNc7xdMo8VAQ_ONWITvEQ2FImrMepM,1324
-airflow/providers/standard/operators/latest_only.py,sha256=OdUbeJA0_HuqxPFo8zFefkysUWtGpvdvoVEfIy0yJPo,3377
+airflow/providers/standard/operators/latest_only.py,sha256=BVsPtkrQtaZQvBWlTAxlrJPfNThNOZWw7Vf_6we7g4o,4780
 airflow/providers/standard/operators/python.py,sha256=l0aj8d9Cwg_B8snBZA815QKy8MKhRvISfbmHEteTGTk,50106
 airflow/providers/standard/operators/smooth.py,sha256=d3OV38EzV_wlfMYN3JGWGwyzsFonx8VbqgGfXSw0_bM,1382
-airflow/providers/standard/operators/trigger_dagrun.py,sha256=xXGVZOaIB8Ru2tALmmS-IWjzKhA3dFhiOpa3GTuKxeQ,14231
+airflow/providers/standard/operators/trigger_dagrun.py,sha256=WJuT1jUkLSh4JxdUtIYVefEPQZWKU7JYGjRqye5KkFs,15775
 airflow/providers/standard/operators/weekday.py,sha256=Qg7LhXYtybVSGZn8uQqF-r7RB7zOXfe3R6vSGVa_rJk,5083
 airflow/providers/standard/sensors/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/standard/sensors/bash.py,sha256=afyz1m-1qzAp1fE5ta71rXhpTrKcCH7bNfwUU2Hv7GQ,5025
 airflow/providers/standard/sensors/date_time.py,sha256=hRUuLaNgqDh4jqaIaD8zdyq2BUXkpWM2NzJN5YkwTJI,6077
-airflow/providers/standard/sensors/external_task.py,sha256=LI8kYU8SNSfaq93MKgwyqEQF3-tFn9-2CvtEhjovb7M,27033
+airflow/providers/standard/sensors/external_task.py,sha256=F0hvtcHFx8HQLLNvdepDx2Y9kjn8zbCyLTx1Uw7BbRA,27261
 airflow/providers/standard/sensors/filesystem.py,sha256=rfupSeHtFGdAcL6cw3H6u6ttBxogSThYiPqsUKgABMU,6029
 airflow/providers/standard/sensors/python.py,sha256=kvgpHN8hiyxJPlw9HsVpna0X6NRt0iTDvFFjqt3KFtQ,3405
 airflow/providers/standard/sensors/time.py,sha256=Pc9BZqqTQy3Qqz7uME9yF4qmWsXYCzAoAlsmwgpAraY,5007
 airflow/providers/standard/sensors/time_delta.py,sha256=1OlDMIwNYXhBeeE8TmfsAMIFIOur4BMlDWe0L_JScZc,6633
 airflow/providers/standard/sensors/weekday.py,sha256=HzV21T3XhrQgfsR6svl6uWlJNPSnTbAHbQKd0jifIUU,4467
 airflow/providers/standard/triggers/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/standard/triggers/external_task.py,sha256=-80zAq7pPbKElcS2sNgi3rE2rXPvEZe3Sj4nsJdxLGU,8478
+airflow/providers/standard/triggers/external_task.py,sha256=L55jWUWjChw7VWAF_SKI-4DsgMwHKidXpcHIs8FTo1w,11094
 airflow/providers/standard/triggers/file.py,sha256=2i8-RwSjEgdOwQNcHCqLmSdpE3Ehqg4GQJ8nE3-fHxo,4886
 airflow/providers/standard/triggers/temporal.py,sha256=Aub7Cp3HsPdeardF2jp-Z5nIRwzqtK9-aOlWtfKQfcg,4809
 airflow/providers/standard/utils/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
@@ -45,7 +45,7 @@ airflow/providers/standard/utils/python_virtualenv_script.jinja2,sha256=3Z334hVq
 airflow/providers/standard/utils/sensor_helper.py,sha256=vrCdz4lY3Iy8Mom5KuyNidg-IAyngMRqWhStEXVsyT0,4692
 airflow/providers/standard/utils/skipmixin.py,sha256=XkhDozcXUHZ7C6AxzEW8ZYrqbra1oJGGR3ZieNQ-N0M,7791
 airflow/providers/standard/utils/weekday.py,sha256=ySDrIkWv-lqqxURo9E98IGInDqERec2O4y9o2hQTGiQ,2685
-apache_airflow_providers_standard-0.3.0rc2.dist-info/entry_points.txt,sha256=mW2YRh3mVdZdaP5-iGSNgmcCh3YYdALIn28BCLBZZ40,104
-apache_airflow_providers_standard-0.3.0rc2.dist-info/WHEEL,sha256=_2ozNFCLWc93bK4WKHCO-eDUENDlo-dgc9cU3qokYO4,82
-apache_airflow_providers_standard-0.3.0rc2.dist-info/METADATA,sha256=Nkhhr0r7FcXIMtjVOpZIYFUHH2NBrM8h8siVojyP398,3792
-apache_airflow_providers_standard-0.3.0rc2.dist-info/RECORD,,
+apache_airflow_providers_standard-0.4.0rc1.dist-info/entry_points.txt,sha256=mW2YRh3mVdZdaP5-iGSNgmcCh3YYdALIn28BCLBZZ40,104
+apache_airflow_providers_standard-0.4.0rc1.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
+apache_airflow_providers_standard-0.4.0rc1.dist-info/METADATA,sha256=q4xi5OhrjYL0zu6XrtkZXTJ0X-gtfbef3UU_tW93V4Q,3792
+apache_airflow_providers_standard-0.4.0rc1.dist-info/RECORD,,
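
Each RECORD entry has the form ``path,sha256=<digest>,size``, where the digest is the urlsafe-base64 SHA-256 of the file with ``=`` padding stripped (PEP 376 / PEP 427). A small snippet showing how such a hash is computed:

    import base64
    import hashlib

    def record_hash(data: bytes) -> str:
        digest = hashlib.sha256(data).digest()
        # urlsafe base64, '=' padding stripped, as wheels record it
        return "sha256=" + base64.urlsafe_b64encode(digest).rstrip(b"=").decode()

    print(record_hash(b"example file contents"))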
@@ -1,4 +1,4 @@
 Wheel-Version: 1.0
-Generator: flit 3.11.0
+Generator: flit 3.12.0
 Root-Is-Purelib: true
 Tag: py3-none-any