apache-airflow-providers-standard 0.0.3rc2__py3-none-any.whl → 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of apache-airflow-providers-standard might be problematic.
- airflow/providers/standard/LICENSE +0 -52
- airflow/providers/standard/__init__.py +1 -1
- airflow/providers/standard/get_provider_info.py +5 -5
- airflow/providers/standard/operators/bash.py +7 -9
- airflow/providers/standard/operators/datetime.py +5 -1
- airflow/providers/standard/operators/empty.py +39 -0
- airflow/providers/standard/operators/generic_transfer.py +5 -1
- airflow/providers/standard/operators/latest_only.py +7 -2
- airflow/providers/standard/operators/python.py +38 -61
- airflow/providers/standard/operators/trigger_dagrun.py +27 -31
- airflow/providers/standard/operators/weekday.py +7 -3
- airflow/providers/standard/sensors/bash.py +5 -1
- airflow/providers/standard/sensors/date_time.py +5 -1
- airflow/providers/standard/sensors/external_task.py +13 -16
- airflow/providers/standard/sensors/filesystem.py +5 -1
- airflow/providers/standard/sensors/python.py +5 -1
- airflow/providers/standard/sensors/time.py +5 -1
- airflow/providers/standard/sensors/time_delta.py +19 -6
- airflow/providers/standard/sensors/weekday.py +5 -1
- airflow/providers/standard/triggers/external_task.py +41 -46
- airflow/providers/standard/triggers/file.py +57 -3
- airflow/providers/standard/utils/python_virtualenv_script.jinja2 +0 -24
- airflow/providers/standard/utils/sensor_helper.py +9 -13
- {apache_airflow_providers_standard-0.0.3rc2.dist-info → apache_airflow_providers_standard-0.1.0.dist-info}/METADATA +10 -27
- apache_airflow_providers_standard-0.1.0.dist-info/RECORD +38 -0
- apache_airflow_providers_standard-0.0.3rc2.dist-info/RECORD +0 -37
- {apache_airflow_providers_standard-0.0.3rc2.dist-info → apache_airflow_providers_standard-0.1.0.dist-info}/WHEEL +0 -0
- {apache_airflow_providers_standard-0.0.3rc2.dist-info → apache_airflow_providers_standard-0.1.0.dist-info}/entry_points.txt +0 -0
airflow/providers/standard/sensors/date_time.py

```diff
@@ -44,7 +44,11 @@ except ImportError:
 from airflow.utils import timezone
 
 if TYPE_CHECKING:
-    from airflow.utils.context import Context
+    try:
+        from airflow.sdk.definitions.context import Context
+    except ImportError:
+        # TODO: Remove once provider drops support for Airflow 2
+        from airflow.utils.context import Context
 
 
 class DateTimeSensor(BaseSensorOperator):
```
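The same `TYPE_CHECKING`-guarded fallback recurs in every sensor below. A minimal runnable sketch of the pattern outside Airflow (the two `Context` import paths match the diff; the `poke` function is illustrative only):

```python
from __future__ import annotations

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # Resolved only by type checkers; neither import runs at execution time.
    try:
        from airflow.sdk.definitions.context import Context  # Airflow 3 / Task SDK
    except ImportError:
        from airflow.utils.context import Context  # Airflow 2 fallback


def poke(context: Context) -> bool:
    # With postponed annotation evaluation, this signature works on both
    # Airflow 2 and Airflow 3 without importing either Context at runtime.
    return True


print(poke({}))  # True
```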
airflow/providers/standard/sensors/external_task.py

```diff
@@ -28,8 +28,7 @@ from airflow.exceptions import AirflowException, AirflowSkipException
 from airflow.models.baseoperatorlink import BaseOperatorLink
 from airflow.models.dag import DagModel
 from airflow.models.dagbag import DagBag
-from airflow.models.taskinstance import TaskInstance
-from airflow.operators.empty import EmptyOperator
+from airflow.providers.standard.operators.empty import EmptyOperator
 from airflow.providers.standard.triggers.external_task import WorkflowTrigger
 from airflow.providers.standard.utils.sensor_helper import _get_count, _get_external_task_group_task_ids
 from airflow.sensors.base import BaseSensorOperator
```
```diff
@@ -43,7 +42,12 @@ if TYPE_CHECKING:
 
     from airflow.models.baseoperator import BaseOperator
     from airflow.models.taskinstancekey import TaskInstanceKey
-    from airflow.utils.context import Context
+
+    try:
+        from airflow.sdk.definitions.context import Context
+    except ImportError:
+        # TODO: Remove once provider drops support for Airflow 2
+        from airflow.utils.context import Context
 
 
 class ExternalDagLink(BaseOperatorLink):
```
```diff
@@ -58,22 +62,15 @@ class ExternalDagLink(BaseOperatorLink):
     def get_link(self, operator: BaseOperator, *, ti_key: TaskInstanceKey) -> str:
         from airflow.models.renderedtifields import RenderedTaskInstanceFields
 
-        ti = TaskInstance.get_task_instance(
-            dag_id=ti_key.dag_id, run_id=ti_key.run_id, task_id=ti_key.task_id, map_index=ti_key.map_index
-        )
-
         if TYPE_CHECKING:
-            assert …
+            assert isinstance(operator, (ExternalTaskMarker, ExternalTaskSensor))
 
-        template_fields …
-        …
-        query = {
-            "dag_id": external_dag_id,
-            "logical_date": ti.logical_date.isoformat(),  # type: ignore[union-attr]
-        }
+        if template_fields := RenderedTaskInstanceFields.get_templated_fields(ti_key):
+            external_dag_id: str = template_fields.get("external_dag_id", operator.external_dag_id)
+        else:
+            external_dag_id = operator.external_dag_id
 
+        query = {"dag_id": external_dag_id, "run_id": ti_key.run_id}
         return build_airflow_url_with_query(query)
```
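The rewritten `get_link` resolves the templated `external_dag_id` with a walrus-expression fallback. A standalone sketch of that lookup logic (`resolve_external_dag_id` is a hypothetical helper; only the control flow mirrors the diff):

```python
from __future__ import annotations


def resolve_external_dag_id(rendered_fields: dict | None, default: str) -> str:
    # Same control flow as the new get_link: prefer the stored rendered value,
    # otherwise fall back to the operator's static attribute.
    if template_fields := rendered_fields:
        return template_fields.get("external_dag_id", default)
    return default


print(resolve_external_dag_id({"external_dag_id": "rendered_dag"}, "static_dag"))  # rendered_dag
print(resolve_external_dag_id(None, "static_dag"))  # static_dag
```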
airflow/providers/standard/sensors/filesystem.py

```diff
@@ -47,7 +47,11 @@ except ImportError:
 
 
 if TYPE_CHECKING:
-    from airflow.utils.context import Context
+    try:
+        from airflow.sdk.definitions.context import Context
+    except ImportError:
+        # TODO: Remove once provider drops support for Airflow 2
+        from airflow.utils.context import Context
 
 
 class FileSensor(BaseSensorOperator):
```
airflow/providers/standard/sensors/python.py

```diff
@@ -25,7 +25,11 @@ from airflow.utils.context import context_merge
 from airflow.utils.operator_helpers import determine_kwargs
 
 if TYPE_CHECKING:
-    from airflow.utils.context import Context
+    try:
+        from airflow.sdk.definitions.context import Context
+    except ImportError:
+        # TODO: Remove once provider drops support for Airflow 2
+        from airflow.utils.context import Context
 
 
 class PythonSensor(BaseSensorOperator):
```
airflow/providers/standard/sensors/time.py

```diff
@@ -43,7 +43,11 @@ except ImportError:
 from airflow.utils import timezone
 
 if TYPE_CHECKING:
-    from airflow.utils.context import Context
+    try:
+        from airflow.sdk.definitions.context import Context
+    except ImportError:
+        # TODO: Remove once provider drops support for Airflow 2
+        from airflow.utils.context import Context
 
 
 class TimeSensor(BaseSensorOperator):
```
airflow/providers/standard/sensors/time_delta.py

```diff
@@ -17,7 +17,7 @@
 # under the License.
 from __future__ import annotations
 
-from datetime import timedelta
+from datetime import datetime, timedelta
 from time import sleep
 from typing import TYPE_CHECKING, Any, NoReturn
 
```
```diff
@@ -31,7 +31,11 @@ from airflow.sensors.base import BaseSensorOperator
 from airflow.utils import timezone
 
 if TYPE_CHECKING:
-    from airflow.utils.context import Context
+    try:
+        from airflow.sdk.definitions.context import Context
+    except ImportError:
+        # TODO: Remove once provider drops support for Airflow 2
+        from airflow.utils.context import Context
 
 
 def _get_airflow_version():
```
```diff
@@ -58,8 +62,12 @@ class TimeDeltaSensor(BaseSensorOperator):
         self.delta = delta
 
     def poke(self, context: Context):
-        …
-        …
+        data_interval_end = context["data_interval_end"]
+
+        if not isinstance(data_interval_end, datetime):
+            raise ValueError("`data_interval_end` returned non-datetime object")
+
+        target_dttm: datetime = data_interval_end + self.delta
         self.log.info("Checking if the time (%s) has come", target_dttm)
         return timezone.utcnow() > target_dttm
 
```
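`TimeDeltaSensor.poke` above (and `TimeDeltaSensorAsync.execute` below) now validate `data_interval_end` before adding the delta. A self-contained version of that check and comparison (`time_has_come` is a hypothetical helper, not provider API):

```python
from datetime import datetime, timedelta, timezone


def time_has_come(data_interval_end: object, delta: timedelta) -> bool:
    # Mirrors the diff: reject non-datetime context values early, then compare
    # the shifted target against "now" in UTC.
    if not isinstance(data_interval_end, datetime):
        raise ValueError("`data_interval_end` returned non-datetime object")
    target_dttm = data_interval_end + delta
    return datetime.now(timezone.utc) > target_dttm


print(time_has_come(datetime(2020, 1, 1, tzinfo=timezone.utc), timedelta(hours=1)))  # True
```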
```diff
@@ -84,8 +92,13 @@ class TimeDeltaSensorAsync(TimeDeltaSensor):
         self.end_from_trigger = end_from_trigger
 
     def execute(self, context: Context) -> bool | NoReturn:
-        …
-        …
+        data_interval_end = context["data_interval_end"]
+
+        if not isinstance(data_interval_end, datetime):
+            raise ValueError("`data_interval_end` returned non-datetime object")
+
+        target_dttm: datetime = data_interval_end + self.delta
+
         if timezone.utcnow() > target_dttm:
             # If the target datetime is in the past, return immediately
             return True
```
airflow/providers/standard/sensors/weekday.py

```diff
@@ -25,7 +25,11 @@ from airflow.utils import timezone
 from airflow.utils.weekday import WeekDay
 
 if TYPE_CHECKING:
-    from airflow.utils.context import Context
+    try:
+        from airflow.sdk.definitions.context import Context
+    except ImportError:
+        # TODO: Remove once provider drops support for Airflow 2
+        from airflow.utils.context import Context
 
 
 class DayOfWeekSensor(BaseSensorOperator):
```
airflow/providers/standard/triggers/external_task.py

```diff
@@ -41,21 +41,21 @@ class WorkflowTrigger(BaseTrigger):
     """
     A trigger to monitor tasks, task group and dag execution in Apache Airflow.
 
-    :param external_dag_id: The ID of the external …
-    :param …
+    :param external_dag_id: The ID of the external dag.
+    :param run_ids: A list of run ids for the external dag.
     :param external_task_ids: A collection of external task IDs to wait for.
     :param external_task_group_id: The ID of the external task group to wait for.
     :param failed_states: States considered as failed for external tasks.
     :param skipped_states: States considered as skipped for external tasks.
     :param allowed_states: States considered as successful for external tasks.
     :param poke_interval: The interval (in seconds) for poking the external tasks.
-    :param soft_fail: If True, the trigger will not fail the entire …
+    :param soft_fail: If True, the trigger will not fail the entire dag on external task failure.
     """
 
     def __init__(
         self,
         external_dag_id: str,
-        …
+        run_ids: list[str] | None = None,
         execution_dates: list[datetime] | None = None,
         external_task_ids: typing.Collection[str] | None = None,
         external_task_group_id: str | None = None,
```
```diff
@@ -72,7 +72,7 @@ class WorkflowTrigger(BaseTrigger):
         self.failed_states = failed_states
         self.skipped_states = skipped_states
         self.allowed_states = allowed_states
-        self.…
+        self.run_ids = run_ids
         self.poke_interval = poke_interval
         self.soft_fail = soft_fail
         self.execution_dates = execution_dates
```
```diff
@@ -80,25 +80,22 @@ class WorkflowTrigger(BaseTrigger):
 
     def serialize(self) -> tuple[str, dict[str, Any]]:
         """Serialize the trigger param and module path."""
-        return (
-            "airflow.providers.standard.triggers.external_task.WorkflowTrigger",
-            {
-                …
-                "soft_fail": self.soft_fail,
-            },
-        )
+        data: dict[str, typing.Any] = {
+            "external_dag_id": self.external_dag_id,
+            "external_task_ids": self.external_task_ids,
+            "external_task_group_id": self.external_task_group_id,
+            "failed_states": self.failed_states,
+            "skipped_states": self.skipped_states,
+            "allowed_states": self.allowed_states,
+            "poke_interval": self.poke_interval,
+            "soft_fail": self.soft_fail,
+        }
+        if AIRFLOW_V_3_0_PLUS:
+            data["run_ids"] = self.run_ids
+        else:
+            data["execution_dates"] = self.execution_dates
+
+        return "airflow.providers.standard.triggers.external_task.WorkflowTrigger", data
 
     async def run(self) -> typing.AsyncIterator[TriggerEvent]:
         """Check periodically tasks, task group or dag status."""
```
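`serialize` now branches on the Airflow major version so only one of `run_ids`/`execution_dates` is persisted. A runnable sketch of that version-gated payload (`AIRFLOW_V_3_0_PLUS` is hard-coded here; in the provider it comes from `version_compat`, and the sample values are illustrative):

```python
from typing import Any

AIRFLOW_V_3_0_PLUS = True  # assumption for this sketch


def serialize_workflow_trigger(run_ids, execution_dates) -> tuple[str, dict[str, Any]]:
    # Mirrors the diff: one kwargs dict, with the run/date key chosen by version.
    data: dict[str, Any] = {"external_dag_id": "other_dag", "poke_interval": 60.0}
    if AIRFLOW_V_3_0_PLUS:
        data["run_ids"] = run_ids
    else:
        data["execution_dates"] = execution_dates
    return "airflow.providers.standard.triggers.external_task.WorkflowTrigger", data


print(serialize_workflow_trigger(["manual__2025-01-01T00:00:00"], None))
```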
```diff
@@ -117,7 +114,7 @@ class WorkflowTrigger(BaseTrigger):
                 yield TriggerEvent({"status": "skipped"})
                 return
             allowed_count = await self._get_count(self.allowed_states)
-            _dates = self.…
+            _dates = self.run_ids if AIRFLOW_V_3_0_PLUS else self.execution_dates
             if allowed_count == len(_dates):  # type: ignore[arg-type]
                 yield TriggerEvent({"status": "success"})
                 return
```
```diff
@@ -133,7 +130,7 @@ class WorkflowTrigger(BaseTrigger):
         :return The count of records.
         """
         return _get_count(
-            dttm_filter=self.…
+            dttm_filter=self.run_ids if AIRFLOW_V_3_0_PLUS else self.execution_dates,
             external_task_ids=self.external_task_ids,
             external_task_group_id=self.external_task_group_id,
             external_dag_id=self.external_dag_id,
```
```diff
@@ -143,11 +140,11 @@ class WorkflowTrigger(BaseTrigger):
 
 class DagStateTrigger(BaseTrigger):
     """
-    Waits asynchronously for a …
+    Waits asynchronously for a dag to complete for a specific run_id.
 
     :param dag_id: The dag_id that contains the task you want to wait for
     :param states: allowed states, default is ``['success']``
-    :param …
+    :param run_ids: The run_id of dag run.
     :param poll_interval: The time interval in seconds to check the state.
         The default value is 5.0 sec.
     """
```
```diff
@@ -156,40 +153,38 @@ class DagStateTrigger(BaseTrigger):
         self,
         dag_id: str,
         states: list[DagRunState],
-        …
+        run_ids: list[str] | None = None,
         execution_dates: list[datetime] | None = None,
         poll_interval: float = 5.0,
     ):
         super().__init__()
         self.dag_id = dag_id
         self.states = states
-        self.…
+        self.run_ids = run_ids
         self.execution_dates = execution_dates
         self.poll_interval = poll_interval
 
     def serialize(self) -> tuple[str, dict[str, typing.Any]]:
         """Serialize DagStateTrigger arguments and classpath."""
-        return (
-            "airflow.providers.standard.triggers.external_task.DagStateTrigger",
-            {
-                …
-            },
-        )
+        data = {
+            "dag_id": self.dag_id,
+            "states": self.states,
+            "poll_interval": self.poll_interval,
+        }
+
+        if AIRFLOW_V_3_0_PLUS:
+            data["run_ids"] = self.run_ids
+        else:
+            data["execution_dates"] = self.execution_dates
+
+        return "airflow.providers.standard.triggers.external_task.DagStateTrigger", data
 
     async def run(self) -> typing.AsyncIterator[TriggerEvent]:
         """Check periodically if the dag run exists, and has hit one of the states yet, or not."""
         while True:
             # mypy confuses typing here
             num_dags = await self.count_dags()  # type: ignore[call-arg]
-            _dates = self.…
+            _dates = self.run_ids if AIRFLOW_V_3_0_PLUS else self.execution_dates
             if num_dags == len(_dates):  # type: ignore[arg-type]
                 yield TriggerEvent(self.serialize())
                 return
```
```diff
@@ -200,7 +195,7 @@ class DagStateTrigger(BaseTrigger):
     def count_dags(self, *, session: Session = NEW_SESSION) -> int | None:
         """Count how many dag runs in the database match our criteria."""
         _dag_run_date_condition = (
-            DagRun.…
+            DagRun.run_id.in_(self.run_ids)
             if AIRFLOW_V_3_0_PLUS
             else DagRun.execution_date.in_(self.execution_dates)
         )
```
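On Airflow 3, `count_dags` now filters on `DagRun.run_id IN (...)`. A standalone SQLAlchemy sketch of that predicate against a stub table (`DagRunStub` and the in-memory SQLite engine are illustrative stand-ins, not Airflow models):

```python
from sqlalchemy import Column, Integer, String, create_engine, func, select
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()


class DagRunStub(Base):
    # Illustrative stand-in for airflow.models.DagRun.
    __tablename__ = "dag_run_stub"
    id = Column(Integer, primary_key=True)
    run_id = Column(String)
    state = Column(String)


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)
with Session(engine) as session:
    session.add_all(
        [DagRunStub(run_id="manual__1", state="success"), DagRunStub(run_id="manual__2", state="running")]
    )
    session.commit()
    # Count runs whose run_id is in the watched set and whose state is allowed.
    stmt = select(func.count()).where(
        DagRunStub.run_id.in_(["manual__1", "manual__2"]), DagRunStub.state.in_(["success"])
    )
    print(session.scalar(stmt))  # 1
```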
airflow/providers/standard/triggers/file.py

```diff
@@ -19,11 +19,20 @@ from __future__ import annotations
 import asyncio
 import datetime
 import os
-import typing
+from collections.abc import AsyncIterator
 from glob import glob
 from typing import Any
 
-from airflow.triggers.base import BaseTrigger, TriggerEvent
+from airflow.providers.standard.version_compat import AIRFLOW_V_3_0_PLUS
+
+if AIRFLOW_V_3_0_PLUS:
+    from airflow.triggers.base import BaseEventTrigger, BaseTrigger, TriggerEvent
+else:
+    from airflow.triggers.base import (  # type: ignore
+        BaseTrigger,
+        BaseTrigger as BaseEventTrigger,
+        TriggerEvent,
+    )
 
 
 class FileTrigger(BaseTrigger):
```
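The `else` branch aliases `BaseTrigger` as `BaseEventTrigger` so the new subclass resolves on Airflow 2 as well. A pure-Python analogue of that aliasing trick (all class names here are stand-ins; the flag replaces the real version check):

```python
HAS_EVENT_BASE = False  # pretend we are on the older API


class BaseTrigger:  # stand-in for airflow.triggers.base.BaseTrigger
    pass


if HAS_EVENT_BASE:
    class BaseEventTrigger(BaseTrigger):  # newer releases ship this subclass
        pass
else:
    BaseEventTrigger = BaseTrigger  # alias, mirroring "BaseTrigger as BaseEventTrigger"


class MyEventTrigger(BaseEventTrigger):
    pass


print(issubclass(MyEventTrigger, BaseTrigger))  # True on either branch
```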
```diff
@@ -60,7 +69,7 @@ class FileTrigger(BaseTrigger):
             },
         )
 
-    async def run(self) -> typing.AsyncIterator[TriggerEvent]:
+    async def run(self) -> AsyncIterator[TriggerEvent]:
         """Loop until the relevant files are found."""
         while True:
             for path in glob(self.filepath, recursive=self.recursive):
```
```diff
@@ -75,3 +84,48 @@ class FileTrigger(BaseTrigger):
                     yield TriggerEvent(True)
                     return
             await asyncio.sleep(self.poke_interval)
+
+
+class FileDeleteTrigger(BaseEventTrigger):
+    """
+    A trigger that fires exactly once after it finds the requested file and then delete the file.
+
+    The difference between ``FileTrigger`` and ``FileDeleteTrigger`` is ``FileDeleteTrigger`` can only find a
+    specific file.
+
+    :param filepath: File (relative to the base path set within the connection).
+    :param poke_interval: Time that the job should wait in between each try
+    """
+
+    def __init__(
+        self,
+        filepath: str,
+        poke_interval: float = 5.0,
+        **kwargs,
+    ):
+        super().__init__()
+        self.filepath = filepath
+        self.poke_interval = poke_interval
+
+    def serialize(self) -> tuple[str, dict[str, Any]]:
+        """Serialize FileDeleteTrigger arguments and classpath."""
+        return (
+            "airflow.providers.standard.triggers.file.FileDeleteTrigger",
+            {
+                "filepath": self.filepath,
+                "poke_interval": self.poke_interval,
+            },
+        )
+
+    async def run(self) -> AsyncIterator[TriggerEvent]:
+        """Loop until the relevant file is found."""
+        while True:
+            if os.path.isfile(self.filepath):
+                mod_time_f = os.path.getmtime(self.filepath)
+                mod_time = datetime.datetime.fromtimestamp(mod_time_f).strftime("%Y%m%d%H%M%S")
+                self.log.info("Found file %s last modified: %s", self.filepath, mod_time)
+                os.remove(self.filepath)
+                self.log.info("File %s has been deleted", self.filepath)
+                yield TriggerEvent(True)
+                return
+            await asyncio.sleep(self.poke_interval)
```
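The new trigger's `run` loop is an async generator that yields a single event once the file is found and removed. The same loop stripped of Airflow types, so it runs standalone (`wait_and_delete` and the temp-file path are hypothetical names for this sketch):

```python
import asyncio
import os
import tempfile


async def wait_and_delete(filepath: str, poke_interval: float = 0.1):
    # Poll for the file; delete it and emit one event when found, then stop.
    while True:
        if os.path.isfile(filepath):
            os.remove(filepath)
            yield True  # stand-in for TriggerEvent(True)
            return
        await asyncio.sleep(poke_interval)


async def main() -> None:
    path = os.path.join(tempfile.gettempdir(), "filedelete_trigger_demo")
    open(path, "w").close()  # create the file the trigger waits for
    async for event in wait_and_delete(path):
        print("event:", event, "| file still present:", os.path.exists(path))


asyncio.run(main())
```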
airflow/providers/standard/utils/python_virtualenv_script.jinja2

```diff
@@ -64,30 +64,6 @@ with open(sys.argv[3], "r") as file:
     virtualenv_string_args = list(map(lambda x: x.strip(), list(file)))
 {% endif %}
 
-{% if use_airflow_context | default(false) -%}
-if len(sys.argv) > 5:
-    import json
-    from types import ModuleType
-
-    from airflow.providers.standard.operators import python as airflow_python
-
-
-    class _MockPython(ModuleType):
-        @staticmethod
-        def get_current_context():
-            with open(sys.argv[5]) as file:
-                context = json.load(file)
-            raise Exception("Not yet implemented")
-            # TODO: return deserialized context
-
-        def __getattr__(self, name: str):
-            return getattr(airflow_python, name)
-
-
-    MockPython = _MockPython("MockPython")
-    sys.modules["airflow.providers.standard.operators.python"] = MockPython
-{% endif %}
-
 try:
     res = {{ python_callable }}(*arg_dict["args"], **arg_dict["kwargs"])
 except Exception as e:
```
airflow/providers/standard/utils/sensor_helper.py

```diff
@@ -18,14 +18,14 @@ from __future__ import annotations
 
 from typing import TYPE_CHECKING, cast
 
-from sqlalchemy import func, select
+from sqlalchemy import func, select, tuple_
 
 from airflow.models import DagBag, DagRun, TaskInstance
 from airflow.utils.session import NEW_SESSION, provide_session
-from airflow.utils.sqlalchemy import tuple_in_condition
 
 if TYPE_CHECKING:
-    from sqlalchemy.orm import Session
+    from sqlalchemy.orm import Session
+    from sqlalchemy.sql import Executable
 
 
 @provide_session
```
```diff
@@ -55,9 +55,7 @@ def _get_count(
     if external_task_ids:
         count = (
             session.scalar(
-                _count_query(TI, states, dttm_filter, external_dag_id, session).where(
-                    TI.task_id.in_(external_task_ids)
-                )
+                _count_stmt(TI, states, dttm_filter, external_dag_id).where(TI.task_id.in_(external_task_ids))
             )
         ) / len(external_task_ids)
     elif external_task_group_id:
```
```diff
@@ -69,17 +67,17 @@ def _get_count(
         else:
             count = (
                 session.scalar(
-                    _count_query(TI, states, dttm_filter, external_dag_id, session).where(
-                        tuple_in_condition((TI.task_id, TI.map_index), external_task_group_task_ids)
+                    _count_stmt(TI, states, dttm_filter, external_dag_id).where(
+                        tuple_(TI.task_id, TI.map_index).in_(external_task_group_task_ids)
                     )
                 )
             ) / len(external_task_group_task_ids)
     else:
-        count = session.scalar(…
+        count = session.scalar(_count_stmt(DR, states, dttm_filter, external_dag_id))
     return cast(int, count)
 
 
-def _count_query(model, states, dttm_filter, external_dag_id, session: Session) -> …
+def _count_stmt(model, states, dttm_filter, external_dag_id) -> Executable:
     """
     Get the count of records against dttm filter and states.
 
```
```diff
@@ -87,12 +85,10 @@ def _count_query(model, states, dttm_filter, external_dag_id, session: Session)
     :param states: task or dag states
     :param dttm_filter: date time filter for logical date
     :param external_dag_id: The ID of the external DAG.
-    :param session: airflow session object
     """
-    query = select(func.count()).where(
+    return select(func.count()).where(
         model.dag_id == external_dag_id, model.state.in_(states), model.logical_date.in_(dttm_filter)
     )
-    return query
 
 
 def _get_external_task_group_task_ids(dttm_filter, external_task_group_id, external_dag_id, session):
```
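`_count_stmt` now returns a bare select statement, and the task-group branch uses SQLAlchemy's `tuple_(...).in_(...)` instead of the removed `tuple_in_condition` helper. A runnable sketch of that composite-IN predicate (`TIStub` is an illustrative stand-in for TaskInstance; row-value IN needs a reasonably recent SQLite, 3.15+):

```python
from sqlalchemy import Column, Integer, String, create_engine, func, select, tuple_
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()


class TIStub(Base):
    # Illustrative stand-in for airflow.models.TaskInstance.
    __tablename__ = "ti_stub"
    id = Column(Integer, primary_key=True)
    task_id = Column(String)
    map_index = Column(Integer)


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)
with Session(engine) as session:
    session.add_all([TIStub(task_id="t1", map_index=0), TIStub(task_id="t2", map_index=1)])
    session.commit()
    # Composite membership test over (task_id, map_index) pairs, as in the diff.
    stmt = select(func.count()).where(tuple_(TIStub.task_id, TIStub.map_index).in_([("t1", 0)]))
    print(session.scalar(stmt))  # 1
```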
{apache_airflow_providers_standard-0.0.3rc2.dist-info → apache_airflow_providers_standard-0.1.0.dist-info}/METADATA

```diff
@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: apache-airflow-providers-standard
-Version: 0.0.3rc2
+Version: 0.1.0
 Summary: Provider package apache-airflow-providers-standard for Apache Airflow
 Keywords: airflow-provider,standard,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
```
```diff
@@ -20,34 +20,17 @@ Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Topic :: System :: Monitoring
-Requires-Dist: apache-airflow…
-Requires-Dist: apache-airflow>=…
+Requires-Dist: apache-airflow>=2.9.0
+Requires-Dist: apache-airflow-providers-common-sql>=1.20.0
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-standard/0.0.3rc2/changelog.html
-Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-standard/0.0.3rc2
+Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-standard/0.1.0/changelog.html
+Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-standard/0.1.0
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
 Project-URL: Twitter, https://x.com/ApacheAirflow
 Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/
 
 
-.. Licensed to the Apache Software Foundation (ASF) under one
-   or more contributor license agreements. See the NOTICE file
-   distributed with this work for additional information
-   regarding copyright ownership. The ASF licenses this file
-   to you under the Apache License, Version 2.0 (the
-   "License"); you may not use this file except in compliance
-   with the License. You may obtain a copy of the License at
-
-..   http://www.apache.org/licenses/LICENSE-2.0
-
-.. Unless required by applicable law or agreed to in writing,
-   software distributed under the License is distributed on an
-   "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-   KIND, either express or implied. See the License for the
-   specific language governing permissions and limitations
-   under the License.
-
 .. Licensed to the Apache Software Foundation (ASF) under one
    or more contributor license agreements. See the NOTICE file
    distributed with this work for additional information
```
```diff
@@ -65,8 +48,7 @@ Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/
    specific language governing permissions and limitations
    under the License.
 
-.. NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE
-   OVERWRITTEN WHEN PREPARING PACKAGES.
+.. NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE OVERWRITTEN!
 
 .. IF YOU WANT TO MODIFY TEMPLATE FOR THIS FILE, YOU SHOULD MODIFY THE TEMPLATE
    `PROVIDER_README_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY
```
```diff
@@ -74,7 +56,7 @@ Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/
 
 Package ``apache-airflow-providers-standard``
 
-Release: ``0.0.3rc2``
+Release: ``0.1.0``
 
 
 Airflow Standard Provider
```
```diff
@@ -87,7 +69,7 @@ This is a provider package for ``standard`` provider. All classes for this provi
 are in ``airflow.providers.standard`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-standard/0.0.3rc2/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-standard/0.1.0/>`_.
 
 Installation
 ------------
```
```diff
@@ -109,4 +91,5 @@ PIP package Version required
 ======================================= ==================
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-standard/0.0.3rc2/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-standard/0.1.0/changelog.html>`_.
+
```
apache_airflow_providers_standard-0.1.0.dist-info/RECORD (new file)

```diff
@@ -0,0 +1,38 @@
+airflow/providers/standard/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
+airflow/providers/standard/__init__.py,sha256=_0SheSBOknCZLmPVxMFYyFxMp7FYpHj-cI8mbJodsBE,1495
+airflow/providers/standard/get_provider_info.py,sha256=zFnTra9uOUt8ZdhaCqkoNTVqqAy51VP16SI56dk-YfM,4939
+airflow/providers/standard/version_compat.py,sha256=aHg90_DtgoSnQvILFICexMyNlHlALBdaeWqkX3dFDug,1605
+airflow/providers/standard/hooks/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
+airflow/providers/standard/hooks/filesystem.py,sha256=fDZwW_EYD8z1QXnReqI7gIwSbDPZNTKtqQvgktiP02o,2870
+airflow/providers/standard/hooks/package_index.py,sha256=U7_s_02-wwz9kTkzKr3JAhVQj2spuntWd_GmjfpV-y4,3769
+airflow/providers/standard/hooks/subprocess.py,sha256=GAmdF69jwUcpc7DH5I42GnJRs6NMQvHwFhimWpIdTU4,4920
+airflow/providers/standard/operators/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
+airflow/providers/standard/operators/bash.py,sha256=AMSTPCgtArDE78XQ01b5jf1d3HEbZuP8_xz9dyXQgKc,13664
+airflow/providers/standard/operators/datetime.py,sha256=vsn2eaeVvUZBLXTzrEIC5Bd9svk81gM2VlxXCcmZhHY,4749
+airflow/providers/standard/operators/empty.py,sha256=C7_uLWJK6kExzlNc7xdMo8VAQ_ONWITvEQ2FImrMepM,1324
+airflow/providers/standard/operators/generic_transfer.py,sha256=BFCDTc_GTd6LNfU0Mr8Fx_MzGL9qcKNwzC4KNzD1gUw,5247
+airflow/providers/standard/operators/latest_only.py,sha256=NlpkrHk3QblaXYEFowLy9pRS-l0zpFtI12bDmF-t9Lo,3291
+airflow/providers/standard/operators/python.py,sha256=ZLeesBNGbZLFGeeEe9irZUfJUvceoQxEwYvnejzoPs4,49247
+airflow/providers/standard/operators/trigger_dagrun.py,sha256=lPbV-FR_6RHB6XDv58Fc8N92o3MEYfNJPFxt9h1SPFw,12301
+airflow/providers/standard/operators/weekday.py,sha256=XL1fMejCoCrifl52t9QmlrnavL3Nm3_VYbhUMWhI10I,4841
+airflow/providers/standard/sensors/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
+airflow/providers/standard/sensors/bash.py,sha256=afyz1m-1qzAp1fE5ta71rXhpTrKcCH7bNfwUU2Hv7GQ,5025
+airflow/providers/standard/sensors/date_time.py,sha256=hRUuLaNgqDh4jqaIaD8zdyq2BUXkpWM2NzJN5YkwTJI,6077
+airflow/providers/standard/sensors/external_task.py,sha256=T5cCj1txJUjnql6cHZayDqSjfWCE-zOxJS9-nxkSuio,23840
+airflow/providers/standard/sensors/filesystem.py,sha256=rfupSeHtFGdAcL6cw3H6u6ttBxogSThYiPqsUKgABMU,6029
+airflow/providers/standard/sensors/python.py,sha256=kvgpHN8hiyxJPlw9HsVpna0X6NRt0iTDvFFjqt3KFtQ,3405
+airflow/providers/standard/sensors/time.py,sha256=Pc9BZqqTQy3Qqz7uME9yF4qmWsXYCzAoAlsmwgpAraY,5007
+airflow/providers/standard/sensors/time_delta.py,sha256=H1jSNT72e-83usqMPMIRSgnR41IAFwkrafmE006jAOc,6012
+airflow/providers/standard/sensors/weekday.py,sha256=GdYa-DdKdQ_cOpuAFppHSaDKrzGGvVha4BfkoiJLTpM,3884
+airflow/providers/standard/triggers/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
+airflow/providers/standard/triggers/external_task.py,sha256=iZn-WsjTlJRd780xVds6rrTOrfLkf-Bp3Q1PbGfbYuU,8476
+airflow/providers/standard/triggers/file.py,sha256=2i8-RwSjEgdOwQNcHCqLmSdpE3Ehqg4GQJ8nE3-fHxo,4886
+airflow/providers/standard/triggers/temporal.py,sha256=Aub7Cp3HsPdeardF2jp-Z5nIRwzqtK9-aOlWtfKQfcg,4809
+airflow/providers/standard/utils/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
+airflow/providers/standard/utils/python_virtualenv.py,sha256=FR3241l5Obuo2BBwwBs-s87pRpCLyJnh3sUtHxrgRuM,7759
+airflow/providers/standard/utils/python_virtualenv_script.jinja2,sha256=bn_QOYOj8Q2k-RE77LKgCy3iDTuv9vllyBAD4yeCb-A,2502
+airflow/providers/standard/utils/sensor_helper.py,sha256=BeaWt9X4PUE49V3QAG8WPHj3fWwUGeZngS5_Y8g_auA,4401
+apache_airflow_providers_standard-0.1.0.dist-info/entry_points.txt,sha256=mW2YRh3mVdZdaP5-iGSNgmcCh3YYdALIn28BCLBZZ40,104
+apache_airflow_providers_standard-0.1.0.dist-info/WHEEL,sha256=CpUCUxeHQbRN5UGRQHYRJorO5Af-Qy_fHMctcQ8DSGI,82
+apache_airflow_providers_standard-0.1.0.dist-info/METADATA,sha256=1RxTZNiAnyyDqip0X7T6mjVLYserLXdKUyzjtpdjW1E,4014
+apache_airflow_providers_standard-0.1.0.dist-info/RECORD,,
```