apache-airflow-providers-standard 1.0.0rc1__py3-none-any.whl → 1.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- airflow/providers/standard/__init__.py +1 -1
- airflow/providers/standard/get_provider_info.py +11 -0
- airflow/providers/standard/operators/latest_only.py +42 -30
- airflow/providers/standard/operators/trigger_dagrun.py +21 -0
- {apache_airflow_providers_standard-1.0.0rc1.dist-info → apache_airflow_providers_standard-1.1.0.dist-info}/METADATA +7 -7
- {apache_airflow_providers_standard-1.0.0rc1.dist-info → apache_airflow_providers_standard-1.1.0.dist-info}/RECORD +8 -8
- {apache_airflow_providers_standard-1.0.0rc1.dist-info → apache_airflow_providers_standard-1.1.0.dist-info}/WHEEL +0 -0
- {apache_airflow_providers_standard-1.0.0rc1.dist-info → apache_airflow_providers_standard-1.1.0.dist-info}/entry_points.txt +0 -0

airflow/providers/standard/__init__.py

@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version
 
 __all__ = ["__version__"]
 
-__version__ = "1.
+__version__ = "1.1.0"
 
 if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
     "2.9.0"

airflow/providers/standard/get_provider_info.py

@@ -35,6 +35,13 @@ def get_provider_info():
                     "/docs/apache-airflow-providers-standard/operators/bash.rst",
                     "/docs/apache-airflow-providers-standard/operators/python.rst",
                     "/docs/apache-airflow-providers-standard/operators/datetime.rst",
+                    "/docs/apache-airflow-providers-standard/operators/trigger_dag_run.rst",
+                    "/docs/apache-airflow-providers-standard/operators/latest_only.rst",
+                    "/docs/apache-airflow-providers-standard/sensors/bash.rst",
+                    "/docs/apache-airflow-providers-standard/sensors/python.rst",
+                    "/docs/apache-airflow-providers-standard/sensors/datetime.rst",
+                    "/docs/apache-airflow-providers-standard/sensors/file.rst",
+                    "/docs/apache-airflow-providers-standard/sensors/external_task_sensor.rst",
                 ],
             }
         ],
@@ -89,6 +96,10 @@ def get_provider_info():
                 ],
             }
         ],
+        "extra-links": [
+            "airflow.providers.standard.operators.trigger_dagrun.TriggerDagRunLink",
+            "airflow.providers.standard.sensors.external_task.ExternalDagLink",
+        ],
         "config": {
             "standard": {
                 "description": "Options for the standard provider operators.",

airflow/providers/standard/operators/latest_only.py

@@ -20,6 +20,7 @@
 from __future__ import annotations
 
 from collections.abc import Iterable
+from datetime import timedelta
 from typing import TYPE_CHECKING
 
 import pendulum
@@ -29,8 +30,9 @@ from airflow.providers.standard.version_compat import AIRFLOW_V_3_0_PLUS
 from airflow.utils.types import DagRunType
 
 if TYPE_CHECKING:
-    from
-
+    from pendulum.datetime import DateTime
+
+    from airflow.models import DagRun
 
 try:
     from airflow.sdk.definitions.context import Context
@@ -62,16 +64,16 @@ class LatestOnlyOperator(BaseBranchOperator):
         dag_run: DagRun = context["dag_run"]  # type: ignore[assignment]
         if dag_run.run_type == DagRunType.MANUAL:
             self.log.info("Manually triggered DAG_Run: allowing execution to proceed.")
-            return list(context["task"].get_direct_relative_ids(upstream=False))
+            return list(self.get_direct_relative_ids(upstream=False))
 
-
-        now = pendulum.now("UTC")
+        dates = self._get_compare_dates(dag_run)
 
-        if
+        if dates is None:
             self.log.info("Last scheduled execution: allowing execution to proceed.")
-            return list(context["task"].get_direct_relative_ids(upstream=False))
+            return list(self.get_direct_relative_ids(upstream=False))
 
-
+        now = pendulum.now("UTC")
+        left_window, right_window = dates
         self.log.info(
             "Checking latest only with left_window: %s right_window: %s now: %s",
             left_window,
@@ -79,37 +81,47 @@ class LatestOnlyOperator(BaseBranchOperator):
             now,
         )
 
-        if left_window == right_window:
-            self.log.info(
-                "Zero-length interval [%s, %s) from timetable (%s); treating current run as latest.",
-                left_window,
-                right_window,
-                self.dag.timetable.__class__,
-            )
-            return list(context["task"].get_direct_relative_ids(upstream=False))
-
         if not left_window < now <= right_window:
             self.log.info("Not latest execution, skipping downstream.")
             # we return an empty list, thus the parent BaseBranchOperator
             # won't exclude any downstream tasks from skipping.
             return []
-        self.log.info("Latest, allowing execution to proceed.")
-        return list(context["task"].get_direct_relative_ids(upstream=False))
 
-
-
+        self.log.info("Latest, allowing execution to proceed.")
+        return list(self.get_direct_relative_ids(upstream=False))
 
+    def _get_compare_dates(self, dag_run: DagRun) -> tuple[DateTime, DateTime] | None:
+        dagrun_date: DateTime
         if AIRFLOW_V_3_0_PLUS:
-
+            dagrun_date = dag_run.logical_date or dag_run.run_after
+        else:
+            dagrun_date = dag_run.logical_date
 
-
-        current_interval = DataInterval(start=dag_run.data_interval_start, end=dag_run.data_interval_end)
+        from airflow.timetables.base import DataInterval, TimeRestriction
 
-
-
-
-
+        current_interval = DataInterval(
+            start=dag_run.data_interval_start or dagrun_date,
+            end=dag_run.data_interval_end or dagrun_date,
+        )
 
+        time_restriction = TimeRestriction(
+            earliest=None, latest=current_interval.end - timedelta(microseconds=1), catchup=True
+        )
+        if prev_info := self.dag.timetable.next_dagrun_info(
+            last_automated_data_interval=current_interval,
+            restriction=time_restriction,
+        ):
+            left = prev_info.data_interval.end
         else:
-
-
+            left = current_interval.start
+
+        time_restriction = TimeRestriction(earliest=current_interval.end, latest=None, catchup=True)
+        next_info = self.dag.timetable.next_dagrun_info(
+            last_automated_data_interval=current_interval,
+            restriction=time_restriction,
+        )
+
+        if not next_info:
+            return None
+
+        return (left, next_info.data_interval.end)
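In short, the rewritten operator delegates the window math to the new `_get_compare_dates` helper: it asks the DAG's timetable for the end of the previous automated interval (the left edge) and the end of the next one (the right edge), and a run counts as latest only when `left_window < now <= right_window`. A minimal usage sketch of the intended effect during catch-up, where only the newest scheduled run lets downstream tasks proceed (the dag_id, task ids, schedule, and start date below are hypothetical, not from this release):

    import pendulum

    from airflow import DAG
    from airflow.providers.standard.operators.empty import EmptyOperator
    from airflow.providers.standard.operators.latest_only import LatestOnlyOperator

    with DAG(
        dag_id="latest_only_example",  # hypothetical dag_id
        schedule="@daily",             # hypothetical schedule
        start_date=pendulum.datetime(2025, 1, 1, tz="UTC"),
        catchup=True,                  # backfilled runs exist, but only the latest proceeds
    ):
        # Runs whose data interval does not contain "now" get [] from
        # choose_branch, so "report" is skipped; manual runs always proceed.
        LatestOnlyOperator(task_id="latest_only") >> EmptyOperator(task_id="report")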

airflow/providers/standard/operators/trigger_dagrun.py

@@ -67,6 +67,17 @@ else:
     from airflow.models.baseoperatorlink import BaseOperatorLink  # type: ignore[no-redef]
 
 
+class DagIsPaused(AirflowException):
+    """Raise when a dag is paused and something tries to run it."""
+
+    def __init__(self, dag_id: str) -> None:
+        super().__init__(dag_id)
+        self.dag_id = dag_id
+
+    def __str__(self) -> str:
+        return f"Dag {self.dag_id} is paused"
+
+
 class TriggerDagRunLink(BaseOperatorLink):
     """
     Operator link for TriggerDagRunOperator.
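A small sketch of what the new exception carries, grounded in the class above (the dag_id value is hypothetical); note that `execute()` raises this type only on Airflow 3, falling back to a plain `AirflowException` on 2.x, as the last hunk for this file shows:

    from airflow.providers.standard.operators.trigger_dagrun import DagIsPaused

    err = DagIsPaused(dag_id="target_dag")  # hypothetical dag_id
    assert err.dag_id == "target_dag"       # offending dag id is kept on the exception
    assert str(err) == "Dag target_dag is paused"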
@@ -129,6 +140,7 @@ class TriggerDagRunOperator(BaseOperator):
         Default is ``[DagRunState.FAILED]``.
     :param skip_when_already_exists: Set to true to mark the task as SKIPPED if a DAG run of the triggered
         DAG for the same logical date already exists.
+    :param fail_when_dag_is_paused: If the dag to trigger is paused, DagIsPaused will be raised.
     :param deferrable: If waiting for completion, whether or not to defer the task until done,
         default is ``False``.
     """
@@ -158,6 +170,7 @@ class TriggerDagRunOperator(BaseOperator):
         allowed_states: list[str | DagRunState] | None = None,
         failed_states: list[str | DagRunState] | None = None,
         skip_when_already_exists: bool = False,
+        fail_when_dag_is_paused: bool = False,
         deferrable: bool = conf.getboolean("operators", "default_deferrable", fallback=False),
         **kwargs,
     ) -> None:
@@ -177,6 +190,7 @@ class TriggerDagRunOperator(BaseOperator):
         else:
             self.failed_states = [DagRunState.FAILED]
         self.skip_when_already_exists = skip_when_already_exists
+        self.fail_when_dag_is_paused = fail_when_dag_is_paused
         self._defer = deferrable
         self.logical_date = logical_date
         if logical_date is NOTSET:
@@ -214,6 +228,13 @@ class TriggerDagRunOperator(BaseOperator):
         else:
             run_id = DagRun.generate_run_id(DagRunType.MANUAL, parsed_logical_date or timezone.utcnow())  # type: ignore[misc,call-arg]
 
+        if self.fail_when_dag_is_paused:
+            dag_model = DagModel.get_current(self.trigger_dag_id)
+            if dag_model.is_paused:
+                if AIRFLOW_V_3_0_PLUS:
+                    raise DagIsPaused(dag_id=self.trigger_dag_id)
+                raise AirflowException(f"Dag {self.trigger_dag_id} is paused")
+
         if AIRFLOW_V_3_0_PLUS:
             self._trigger_dag_af_3(context=context, run_id=run_id, parsed_logical_date=parsed_logical_date)
         else:
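Putting the trigger_dagrun.py changes together: with `fail_when_dag_is_paused=True`, `execute()` now checks `DagModel.get_current(self.trigger_dag_id).is_paused` up front and fails fast instead of queueing a run against a paused DAG. A hedged usage sketch (task and DAG ids are hypothetical):

    from airflow.providers.standard.operators.trigger_dagrun import TriggerDagRunOperator

    trigger = TriggerDagRunOperator(
        task_id="trigger_target",      # hypothetical task_id
        trigger_dag_id="target_dag",   # hypothetical dag_id of the DAG to trigger
        fail_when_dag_is_paused=True,  # new in 1.1.0; defaults to False (old behavior)
    )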

{apache_airflow_providers_standard-1.0.0rc1.dist-info → apache_airflow_providers_standard-1.1.0.dist-info}/METADATA

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: apache-airflow-providers-standard
-Version: 1.0.0rc1
+Version: 1.1.0
 Summary: Provider package apache-airflow-providers-standard for Apache Airflow
 Keywords: airflow-provider,standard,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -20,10 +20,10 @@ Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Topic :: System :: Monitoring
-Requires-Dist: apache-airflow>=2.9.
+Requires-Dist: apache-airflow>=2.9.0
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-standard/1.
-Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-standard/1.
+Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-standard/1.1.0/changelog.html
+Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-standard/1.1.0
 Project-URL: Mastodon, https://fosstodon.org/@airflow
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
@@ -54,7 +54,7 @@ Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/
 
 Package ``apache-airflow-providers-standard``
 
-Release: ``1.
+Release: ``1.1.0``
 
 
 Airflow Standard Provider
@@ -67,7 +67,7 @@ This is a provider package for ``standard`` provider. All classes for this provider
 are in ``airflow.providers.standard`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-standard/1.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-standard/1.1.0/>`_.
 
 Installation
 ------------
@@ -88,5 +88,5 @@ PIP package Version required
 ================== ==================
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-standard/1.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-standard/1.1.0/changelog.html>`_.
 

{apache_airflow_providers_standard-1.0.0rc1.dist-info → apache_airflow_providers_standard-1.1.0.dist-info}/RECORD

@@ -1,6 +1,6 @@
 airflow/providers/standard/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
-airflow/providers/standard/__init__.py,sha256=
-airflow/providers/standard/get_provider_info.py,sha256=
+airflow/providers/standard/__init__.py,sha256=J9d1-mgExKhv6Qbbqa9OWyGgbOVDssHxlU9VvluSS-U,1495
+airflow/providers/standard/get_provider_info.py,sha256=9qlyfIHRu_d_jZyyE0SR8s2dN9HEjzV5P2EjyVCcbw4,7003
 airflow/providers/standard/version_compat.py,sha256=aHg90_DtgoSnQvILFICexMyNlHlALBdaeWqkX3dFDug,1605
 airflow/providers/standard/decorators/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/standard/decorators/bash.py,sha256=J13t48yrRv7XpDV8_QWtI0IXbqNiqxW9Ct0ngmrQAdE,4396
@@ -21,10 +21,10 @@ airflow/providers/standard/operators/bash.py,sha256=P2YLyFhE6XF09-zSKGXZiPHXGs5G
 airflow/providers/standard/operators/branch.py,sha256=C_AUd7TSo_U52GiWsrR7rJIsRU5KKfrybBFw84brm_c,4070
 airflow/providers/standard/operators/datetime.py,sha256=bYDdbfAyAlEXRRHjOgB06UhgDum6SPdd5I3u-ylPSaw,5005
 airflow/providers/standard/operators/empty.py,sha256=C7_uLWJK6kExzlNc7xdMo8VAQ_ONWITvEQ2FImrMepM,1324
-airflow/providers/standard/operators/latest_only.py,sha256=
+airflow/providers/standard/operators/latest_only.py,sha256=1yJtpi6cK4TIjARQgcrf460as4V6uVBdoDtjJEUnbvs,4884
 airflow/providers/standard/operators/python.py,sha256=UrRPJP4ZP2-n5kx8U3ExiUVAYGu3LJUE7JoxBkWeLBU,50046
 airflow/providers/standard/operators/smooth.py,sha256=d3OV38EzV_wlfMYN3JGWGwyzsFonx8VbqgGfXSw0_bM,1382
-airflow/providers/standard/operators/trigger_dagrun.py,sha256=
+airflow/providers/standard/operators/trigger_dagrun.py,sha256=isd9HVzYhAZS_Y3V1iQ3Gm2QlOV6KdGWHdq0PnhWxrU,16746
 airflow/providers/standard/operators/weekday.py,sha256=Qg7LhXYtybVSGZn8uQqF-r7RB7zOXfe3R6vSGVa_rJk,5083
 airflow/providers/standard/sensors/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/standard/sensors/bash.py,sha256=sNcII9aLzJhfdimOwsTggeZYk1TM_ulWDS5iKpB_9XE,4963
@@ -45,7 +45,7 @@ airflow/providers/standard/utils/python_virtualenv_script.jinja2,sha256=3Z334hVq
 airflow/providers/standard/utils/sensor_helper.py,sha256=PNIETsl_a4BkmOypFfHdpP0VuTkC6eWKUDuwnNVaWsA,5000
 airflow/providers/standard/utils/skipmixin.py,sha256=XkhDozcXUHZ7C6AxzEW8ZYrqbra1oJGGR3ZieNQ-N0M,7791
 airflow/providers/standard/utils/weekday.py,sha256=ySDrIkWv-lqqxURo9E98IGInDqERec2O4y9o2hQTGiQ,2685
-apache_airflow_providers_standard-1.0.
-apache_airflow_providers_standard-1.0.
-apache_airflow_providers_standard-1.0.
-apache_airflow_providers_standard-1.0.
+apache_airflow_providers_standard-1.1.0.dist-info/entry_points.txt,sha256=mW2YRh3mVdZdaP5-iGSNgmcCh3YYdALIn28BCLBZZ40,104
+apache_airflow_providers_standard-1.1.0.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
+apache_airflow_providers_standard-1.1.0.dist-info/METADATA,sha256=DlPpPeT7VKLNgroj8o6RqhMfWzD0FvjbxlYLcud_rkM,3786
+apache_airflow_providers_standard-1.1.0.dist-info/RECORD,,

{apache_airflow_providers_standard-1.0.0rc1.dist-info → apache_airflow_providers_standard-1.1.0.dist-info}/WHEEL, entry_points.txt: files without changes