apache-airflow-providers-standard 1.9.2rc1__py3-none-any.whl → 1.10.3__py3-none-any.whl
This diff compares two publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the changes between those versions as they appear in their respective public registries.
- airflow/providers/standard/__init__.py +3 -3
- airflow/providers/standard/decorators/bash.py +1 -2
- airflow/providers/standard/example_dags/example_bash_decorator.py +1 -1
- airflow/providers/standard/exceptions.py +1 -1
- airflow/providers/standard/operators/bash.py +7 -3
- airflow/providers/standard/operators/datetime.py +1 -2
- airflow/providers/standard/operators/hitl.py +9 -2
- airflow/providers/standard/operators/latest_only.py +17 -8
- airflow/providers/standard/operators/python.py +33 -3
- airflow/providers/standard/operators/trigger_dagrun.py +75 -25
- airflow/providers/standard/sensors/bash.py +1 -2
- airflow/providers/standard/sensors/date_time.py +1 -16
- airflow/providers/standard/sensors/external_task.py +17 -4
- airflow/providers/standard/sensors/filesystem.py +2 -19
- airflow/providers/standard/sensors/time.py +2 -18
- airflow/providers/standard/sensors/time_delta.py +7 -6
- airflow/providers/standard/triggers/external_task.py +11 -8
- airflow/providers/standard/triggers/hitl.py +2 -2
- airflow/providers/standard/utils/openlineage.py +185 -0
- airflow/providers/standard/utils/python_virtualenv.py +4 -3
- airflow/providers/standard/utils/python_virtualenv_script.jinja2 +18 -3
- airflow/providers/standard/utils/skipmixin.py +2 -2
- {apache_airflow_providers_standard-1.9.2rc1.dist-info → apache_airflow_providers_standard-1.10.3.dist-info}/METADATA +22 -10
- {apache_airflow_providers_standard-1.9.2rc1.dist-info → apache_airflow_providers_standard-1.10.3.dist-info}/RECORD +28 -27
- {apache_airflow_providers_standard-1.9.2rc1.dist-info → apache_airflow_providers_standard-1.10.3.dist-info}/licenses/NOTICE +1 -1
- {apache_airflow_providers_standard-1.9.2rc1.dist-info → apache_airflow_providers_standard-1.10.3.dist-info}/WHEEL +0 -0
- {apache_airflow_providers_standard-1.9.2rc1.dist-info → apache_airflow_providers_standard-1.10.3.dist-info}/entry_points.txt +0 -0
- {apache_airflow_providers_standard-1.9.2rc1.dist-info → apache_airflow_providers_standard-1.10.3.dist-info}/licenses/LICENSE +0 -0
@@ -29,11 +29,11 @@ from airflow import __version__ as airflow_version
 
 __all__ = ["__version__"]
 
-__version__ = "1.
+__version__ = "1.10.3"
 
 if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
-    "2.
+    "2.11.0"
 ):
     raise RuntimeError(
-        f"The package `apache-airflow-providers-standard:{__version__}` needs Apache Airflow 2.
+        f"The package `apache-airflow-providers-standard:{__version__}` needs Apache Airflow 2.11.0+"
     )
@@ -89,8 +89,7 @@ class _BashDecoratedOperator(DecoratedOperator, BashOperator):
             raise TypeError("The returned value from the TaskFlow callable must be a non-empty string.")
 
         self._is_inline_cmd = self._is_inline_command(bash_command=self.bash_command)
-
-
+        self.render_template_fields(context)
         return super().execute(context)
 
 
@@ -19,11 +19,11 @@ from __future__ import annotations
 
 import pendulum
 
-from airflow.exceptions import AirflowSkipException
 from airflow.providers.common.compat.sdk import TriggerRule
 from airflow.providers.standard.operators.empty import EmptyOperator
 from airflow.providers.standard.utils.weekday import WeekDay
 from airflow.sdk import chain, dag, task
+from airflow.sdk.exceptions import AirflowSkipException
 
 
 @dag(schedule=None, start_date=pendulum.datetime(2023, 1, 1, tz="UTC"), catchup=False)
@@ -24,14 +24,18 @@ from collections.abc import Callable, Container, Sequence
 from functools import cached_property
 from typing import TYPE_CHECKING, Any, cast
 
-from airflow.
-
+from airflow.providers.common.compat.sdk import (
+    AirflowException,
+    AirflowSkipException,
+    context_to_airflow_vars,
+)
 from airflow.providers.standard.hooks.subprocess import SubprocessHook, SubprocessResult, working_directory
 from airflow.providers.standard.version_compat import BaseOperator
 
 if TYPE_CHECKING:
     from airflow.providers.common.compat.sdk import Context
-
+
+    from tests_common.test_utils.version_compat import ArgNotSet
 
 
 class BashOperator(BaseOperator):
@@ -20,8 +20,7 @@ import datetime
 from collections.abc import Iterable
 from typing import TYPE_CHECKING
 
-from airflow.
-from airflow.providers.common.compat.sdk import timezone
+from airflow.providers.common.compat.sdk import AirflowException, timezone
 from airflow.providers.standard.operators.branch import BaseBranchOperator
 
 if TYPE_CHECKING:
@@ -18,7 +18,7 @@ from __future__ import annotations
 
 import logging
 
-from airflow.
+from airflow.providers.common.compat.sdk import AirflowOptionalProviderFeatureException
 from airflow.providers.standard.version_compat import AIRFLOW_V_3_1_3_PLUS, AIRFLOW_V_3_1_PLUS
 
 if not AIRFLOW_V_3_1_PLUS:
@@ -28,7 +28,7 @@ from collections.abc import Collection, Mapping, Sequence
 from typing import TYPE_CHECKING, Any
 from urllib.parse import ParseResult, urlencode, urlparse, urlunparse
 
-from airflow.
+from airflow.providers.common.compat.sdk import conf
 from airflow.providers.standard.exceptions import HITLRejectException, HITLTimeoutError, HITLTriggerEventError
 from airflow.providers.standard.operators.branch import BranchMixIn
 from airflow.providers.standard.triggers.hitl import HITLTrigger, HITLTriggerEventSuccessPayload
@@ -84,6 +84,13 @@ class HITLOperator(BaseOperator):
         self.multiple = multiple
 
         self.params: ParamsDict = params if isinstance(params, ParamsDict) else ParamsDict(params or {})
+        if hasattr(ParamsDict, "filter_params_by_source"):
+            # Params that exist only in Dag level does not make sense to appear in HITLOperator
+            self.params = ParamsDict.filter_params_by_source(self.params, source="task")
+        elif self.params:
+            self.log.debug(
+                "ParamsDict.filter_params_by_source not available; HITLOperator will also include Dag level params."
+            )
 
         self.notifiers: Sequence[BaseNotifier] = (
             [notifiers] if isinstance(notifiers, BaseNotifier) else notifiers or []
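
The `hasattr` check above is a feature probe: `filter_params_by_source` only exists on newer SDK versions of `ParamsDict`. A minimal sketch of the same pattern, using a hypothetical stand-in class rather than the real ParamsDict:

class FakeParamsDict(dict):
    """Hypothetical stand-in for the SDK ParamsDict, for illustration only."""

    @classmethod
    def filter_params_by_source(cls, params, source):
        # Keep only params tagged with the requested source.
        return cls({k: v for k, v in params.items() if v.get("source") == source})

params = FakeParamsDict({"a": {"source": "task"}, "b": {"source": "dag"}})
if hasattr(FakeParamsDict, "filter_params_by_source"):
    params = FakeParamsDict.filter_params_by_source(params, source="task")
assert list(params) == ["a"]
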
@@ -26,7 +26,7 @@ from typing import TYPE_CHECKING
 import pendulum
 
 from airflow.providers.standard.operators.branch import BaseBranchOperator
-from airflow.providers.standard.version_compat import AIRFLOW_V_3_0_PLUS
+from airflow.providers.standard.version_compat import AIRFLOW_V_3_0_PLUS, AIRFLOW_V_3_2_PLUS
 from airflow.utils.types import DagRunType
 
 if TYPE_CHECKING:
@@ -35,6 +35,17 @@ if TYPE_CHECKING:
     from airflow.models import DagRun
     from airflow.providers.common.compat.sdk import Context
 
+if AIRFLOW_V_3_2_PLUS:
+
+    def _get_dag_timetable(dag):
+        from airflow.serialization.encoders import coerce_to_core_timetable
+
+        return coerce_to_core_timetable(dag.timetable)
+else:
+
+    def _get_dag_timetable(dag):
+        return dag.timetable
+
 
 class LatestOnlyOperator(BaseBranchOperator):
     """
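
The shim above picks one `_get_dag_timetable` implementation at import time, so call sites stay branch-free. The same pattern in miniature, with a plain boolean standing in for AIRFLOW_V_3_2_PLUS:

HAS_NEW_API = False  # stands in for AIRFLOW_V_3_2_PLUS

if HAS_NEW_API:
    def get_timetable(dag):
        # On newer versions the DAG carries a serialized timetable; coerce it back.
        from airflow.serialization.encoders import coerce_to_core_timetable
        return coerce_to_core_timetable(dag.timetable)
else:
    def get_timetable(dag):
        return dag.timetable
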
@@ -104,15 +115,13 @@ class LatestOnlyOperator(BaseBranchOperator):
         else:
             end = dagrun_date
 
-
-
-            end=end,
-        )
-
+        timetable = _get_dag_timetable(self.dag)
+        current_interval = DataInterval(start=start, end=end)
         time_restriction = TimeRestriction(
             earliest=None, latest=current_interval.end - timedelta(microseconds=1), catchup=True
         )
-
+
+        if prev_info := timetable.next_dagrun_info(
             last_automated_data_interval=current_interval,
             restriction=time_restriction,
         ):
@@ -121,7 +130,7 @@ class LatestOnlyOperator(BaseBranchOperator):
             left = current_interval.start
 
         time_restriction = TimeRestriction(earliest=current_interval.end, latest=None, catchup=True)
-        next_info =
+        next_info = timetable.next_dagrun_info(
             last_automated_data_interval=current_interval,
             restriction=time_restriction,
         )
@@ -43,13 +43,11 @@ from packaging.version import InvalidVersion
 
 from airflow.exceptions import (
     AirflowConfigException,
-    AirflowException,
     AirflowProviderDeprecationWarning,
-    AirflowSkipException,
     DeserializingResultError,
 )
 from airflow.models.variable import Variable
-from airflow.providers.common.compat.sdk import context_merge
+from airflow.providers.common.compat.sdk import AirflowException, AirflowSkipException, context_merge
 from airflow.providers.standard.hooks.package_index import PackageIndexHook
 from airflow.providers.standard.utils.python_virtualenv import (
     _execute_in_subprocess,
@@ -488,8 +486,28 @@ class _BasePythonVirtualenvOperator(PythonOperator, metaclass=ABCMeta):
         serializable_keys = set(self._iter_serializable_context_keys())
         new = {k: v for k, v in context.items() if k in serializable_keys}
         serializable_context = cast("Context", new)
+        # Store bundle_path for subprocess execution
+        self._bundle_path = self._get_bundle_path_from_context(context)
         return super().execute(context=serializable_context)
 
+    def _get_bundle_path_from_context(self, context: Context) -> str | None:
+        """
+        Extract bundle_path from the task instance's bundle_instance.
+
+        :param context: The task execution context
+        :return: Path to the bundle root directory, or None if not in a bundle
+        """
+        if not AIRFLOW_V_3_0_PLUS:
+            return None
+
+        # In Airflow 3.x, the RuntimeTaskInstance has a bundle_instance attribute
+        # that contains the bundle information including its path
+        ti = context["ti"]
+        if bundle_instance := getattr(ti, "bundle_instance", None):
+            return bundle_instance.path
+
+        return None
+
     def get_python_source(self):
         """Return the source of self.python_callable."""
         return textwrap.dedent(inspect.getsource(self.python_callable))
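
A hedged sketch of the bundle-path lookup added above, exercised with a dummy task instance (the SimpleNamespace objects mimic only the `bundle_instance.path` attributes the operator reads):

from types import SimpleNamespace

def get_bundle_path(context):
    ti = context["ti"]
    bundle_instance = getattr(ti, "bundle_instance", None)
    return bundle_instance.path if bundle_instance else None

ctx = {"ti": SimpleNamespace(bundle_instance=SimpleNamespace(path="/opt/bundles/my_dags"))}
assert get_bundle_path(ctx) == "/opt/bundles/my_dags"
assert get_bundle_path({"ti": SimpleNamespace()}) is None
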
@@ -562,9 +580,21 @@ class _BasePythonVirtualenvOperator(PythonOperator, metaclass=ABCMeta):
         )
 
         env_vars = dict(os.environ) if self.inherit_env else {}
+        if fd := os.getenv("__AIRFLOW_SUPERVISOR_FD"):
+            env_vars["__AIRFLOW_SUPERVISOR_FD"] = fd
         if self.env_vars:
             env_vars.update(self.env_vars)
 
+        # Add bundle_path to PYTHONPATH for subprocess to import Dag bundle modules
+        if self._bundle_path:
+            bundle_path = self._bundle_path
+            existing_pythonpath = env_vars.get("PYTHONPATH", "")
+            if existing_pythonpath:
+                # Append bundle_path after existing PYTHONPATH
+                env_vars["PYTHONPATH"] = f"{existing_pythonpath}{os.pathsep}{bundle_path}"
+            else:
+                env_vars["PYTHONPATH"] = bundle_path
+
         try:
             cmd: list[str] = [
                 os.fspath(python_path),
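
The PYTHONPATH handling above appends the bundle root after any existing entries, so it never shadows user-provided paths. A minimal sketch of that merge (paths illustrative):

import os

def merge_pythonpath(env_vars: dict, bundle_path: str) -> dict:
    existing = env_vars.get("PYTHONPATH", "")
    env_vars["PYTHONPATH"] = f"{existing}{os.pathsep}{bundle_path}" if existing else bundle_path
    return env_vars

assert merge_pythonpath({}, "/opt/bundle") == {"PYTHONPATH": "/opt/bundle"}
assert merge_pythonpath({"PYTHONPATH": "/lib"}, "/opt/bundle") == {"PYTHONPATH": f"/lib{os.pathsep}/opt/bundle"}
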
@@ -28,21 +28,28 @@ from sqlalchemy import select
 from sqlalchemy.orm.exc import NoResultFound
 
 from airflow.api.common.trigger_dag import trigger_dag
-from airflow.
-from airflow.exceptions import (
-    AirflowException,
-    AirflowSkipException,
-    DagNotFound,
-    DagRunAlreadyExists,
-)
+from airflow.exceptions import DagNotFound, DagRunAlreadyExists
 from airflow.models.dag import DagModel
 from airflow.models.dagrun import DagRun
 from airflow.models.serialized_dag import SerializedDagModel
-from airflow.providers.common.compat.sdk import
+from airflow.providers.common.compat.sdk import (
+    AirflowException,
+    AirflowSkipException,
+    BaseOperatorLink,
+    XCom,
+    conf,
+    timezone,
+)
 from airflow.providers.standard.triggers.external_task import DagStateTrigger
+from airflow.providers.standard.utils.openlineage import safe_inject_openlineage_properties_into_dagrun_conf
 from airflow.providers.standard.version_compat import AIRFLOW_V_3_0_PLUS, BaseOperator
 from airflow.utils.state import DagRunState
-from airflow.utils.types import
+from airflow.utils.types import DagRunType
+
+try:
+    from airflow.sdk.definitions._internal.types import NOTSET, ArgNotSet
+except ImportError:
+    from airflow.utils.types import NOTSET, ArgNotSet  # type: ignore[attr-defined,no-redef]
 
 XCOM_LOGICAL_DATE_ISO = "trigger_logical_date_iso"
 XCOM_RUN_ID = "trigger_run_id"
@@ -129,8 +136,13 @@ class TriggerDagRunOperator(BaseOperator):
     :param skip_when_already_exists: Set to true to mark the task as SKIPPED if a DAG run of the triggered
         DAG for the same logical date already exists.
     :param fail_when_dag_is_paused: If the dag to trigger is paused, DagIsPaused will be raised.
-    :param deferrable: If waiting for completion, whether
-
+    :param deferrable: If waiting for completion, whether to defer the task until done, default is ``False``.
+    :param openlineage_inject_parent_info: whether to include OpenLineage metadata about the parent task
+        in the triggered DAG run's conf, enabling improved lineage tracking. The metadata is only injected
+        if OpenLineage is enabled and running. This option does not modify any other part of the conf,
+        and existing OpenLineage-related settings in the conf will not be overwritten. The injection process
+        is safeguarded against exceptions - if any error occurs during metadata injection, it is gracefully
+        handled and the conf remains unchanged - so it's safe to use. Default is ``True``
     """
 
     template_fields: Sequence[str] = (
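
How the new flag might look at a call site (a hedged sketch; dag and task ids are illustrative, the parameter itself is the one added in this release):

from airflow.providers.standard.operators.trigger_dagrun import TriggerDagRunOperator

trigger = TriggerDagRunOperator(
    task_id="trigger_downstream",
    trigger_dag_id="downstream_dag",
    conf={"source": "upstream"},
    openlineage_inject_parent_info=True,  # default; parent lineage metadata is merged into conf
)
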
@@ -160,6 +172,7 @@ class TriggerDagRunOperator(BaseOperator):
         skip_when_already_exists: bool = False,
         fail_when_dag_is_paused: bool = False,
         deferrable: bool = conf.getboolean("operators", "default_deferrable", fallback=False),
+        openlineage_inject_parent_info: bool = True,
         **kwargs,
     ) -> None:
         super().__init__(**kwargs)
@@ -179,7 +192,8 @@ class TriggerDagRunOperator(BaseOperator):
         self.failed_states = [DagRunState.FAILED]
         self.skip_when_already_exists = skip_when_already_exists
         self.fail_when_dag_is_paused = fail_when_dag_is_paused
-        self.
+        self.openlineage_inject_parent_info = openlineage_inject_parent_info
+        self.deferrable = deferrable
         self.logical_date = logical_date
         if logical_date is NOTSET:
             self.logical_date = NOTSET
@@ -209,6 +223,12 @@ class TriggerDagRunOperator(BaseOperator):
         except (TypeError, JSONDecodeError):
             raise ValueError("conf parameter should be JSON Serializable %s", self.conf)
 
+        if self.openlineage_inject_parent_info:
+            self.log.debug("Checking if OpenLineage information can be safely injected into dagrun conf.")
+            self.conf = safe_inject_openlineage_properties_into_dagrun_conf(
+                dr_conf=self.conf, ti=context.get("ti")
+            )
+
         if self.trigger_run_id:
             run_id = str(self.trigger_run_id)
         else:
@@ -221,6 +241,9 @@ class TriggerDagRunOperator(BaseOperator):
         else:
             run_id = DagRun.generate_run_id(DagRunType.MANUAL, parsed_logical_date or timezone.utcnow())  # type: ignore[misc,call-arg]
 
+        # Save run_id as task attribute - to be used by listeners
+        self.trigger_run_id = run_id
+
         if self.fail_when_dag_is_paused:
             dag_model = DagModel.get_current(self.trigger_dag_id)
             if not dag_model:
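
Saving the run id on the operator makes it observable from listener plugins. A hedged sketch of a listener reading it (pluggy allows a hook implementation to declare a subset of the hook's parameters; exact hook signatures vary across Airflow versions):

from airflow.listeners import hookimpl

@hookimpl
def on_task_instance_success(task_instance):
    # The operator instance that just ran; attribute is None unless it triggered a run.
    run_id = getattr(task_instance.task, "trigger_run_id", None)
    if run_id:
        print(f"TriggerDagRunOperator started run {run_id}")
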
@@ -232,12 +255,16 @@ class TriggerDagRunOperator(BaseOperator):
             raise AirflowException(f"Dag {self.trigger_dag_id} is paused")
 
         if AIRFLOW_V_3_0_PLUS:
-            self._trigger_dag_af_3(
+            self._trigger_dag_af_3(
+                context=context, run_id=self.trigger_run_id, parsed_logical_date=parsed_logical_date
+            )
         else:
-            self._trigger_dag_af_2(
+            self._trigger_dag_af_2(
+                context=context, run_id=self.trigger_run_id, parsed_logical_date=parsed_logical_date
+            )
 
     def _trigger_dag_af_3(self, context, run_id, parsed_logical_date):
-        from airflow.
+        from airflow.providers.common.compat.sdk import DagRunTriggerException
 
         raise DagRunTriggerException(
             trigger_dag_id=self.trigger_dag_id,
@@ -250,7 +277,7 @@ class TriggerDagRunOperator(BaseOperator):
             allowed_states=self.allowed_states,
             failed_states=self.failed_states,
             poke_interval=self.poke_interval,
-            deferrable=self.
+            deferrable=self.deferrable,
         )
 
     def _trigger_dag_af_2(self, context, run_id, parsed_logical_date):
@@ -291,7 +318,7 @@ class TriggerDagRunOperator(BaseOperator):
 
         if self.wait_for_completion:
             # Kick off the deferral process
-            if self.
+            if self.deferrable:
                 self.defer(
                     trigger=DagStateTrigger(
                         dag_id=self.trigger_dag_id,
@@ -322,17 +349,40 @@ class TriggerDagRunOperator(BaseOperator):
             return
 
     def execute_complete(self, context: Context, event: tuple[str, dict[str, Any]]):
+        """
+        Handle task completion after returning from a deferral.
+
+        Args:
+            context: The Airflow context dictionary.
+            event: A tuple containing the class path of the trigger and the trigger event data.
+        """
+        # Example event tuple content:
+        # (
+        #     "airflow.providers.standard.triggers.external_task.DagStateTrigger",
+        #     {
+        #         'dag_id': 'some_dag',
+        #         'states': ['success', 'failed'],
+        #         'poll_interval': 15,
+        #         'run_ids': ['manual__2025-11-19T17:49:20.907083+00:00'],
+        #         'execution_dates': [
+        #             DateTime(2025, 11, 19, 17, 49, 20, 907083, tzinfo=Timezone('UTC'))
+        #         ]
+        #     }
+        # )
+        _, event_data = event
+        run_ids = event_data["run_ids"]
+        # Re-set as attribute after coming back from deferral - to be used by listeners.
+        # Just a safety check on length, we should always have single run_id here.
+        self.trigger_run_id = run_ids[0] if len(run_ids) == 1 else None
         if AIRFLOW_V_3_0_PLUS:
-            self._trigger_dag_run_af_3_execute_complete(
+            self._trigger_dag_run_af_3_execute_complete(event_data=event_data)
         else:
-            self._trigger_dag_run_af_2_execute_complete(
+            self._trigger_dag_run_af_2_execute_complete(event_data=event_data)
 
-    def _trigger_dag_run_af_3_execute_complete(self,
-        run_ids = event[1]["run_ids"]
-        event_data = event[1]
+    def _trigger_dag_run_af_3_execute_complete(self, event_data: dict[str, Any]):
         failed_run_id_conditions = []
 
-        for run_id in run_ids:
+        for run_id in event_data["run_ids"]:
             state = event_data.get(run_id)
             if state in self.failed_states:
                 failed_run_id_conditions.append(run_id)
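
A small sketch of the unpacking logic above, with a fabricated event tuple matching the documented shape:

event = (
    "airflow.providers.standard.triggers.external_task.DagStateTrigger",
    {
        "run_ids": ["manual__2025-11-19T17:49:20+00:00"],
        "manual__2025-11-19T17:49:20+00:00": "success",
    },
)
_, event_data = event
run_ids = event_data["run_ids"]
trigger_run_id = run_ids[0] if len(run_ids) == 1 else None
assert trigger_run_id == "manual__2025-11-19T17:49:20+00:00"
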
@@ -356,10 +406,10 @@ class TriggerDagRunOperator(BaseOperator):
 
     @provide_session
     def _trigger_dag_run_af_2_execute_complete(
-        self,
+        self, event_data: dict[str, Any], session: Session = NEW_SESSION
     ):
         # This logical_date is parsed from the return trigger event
-        provided_logical_date =
+        provided_logical_date = event_data["execution_dates"][0]
         try:
             # Note: here execution fails on database isolation mode. Needs structural changes for AIP-72
             dag_run = session.execute(
@@ -22,8 +22,7 @@ from subprocess import PIPE, STDOUT, Popen
 from tempfile import NamedTemporaryFile, TemporaryDirectory, gettempdir
 from typing import TYPE_CHECKING
 
-from airflow.
-from airflow.providers.common.compat.sdk import BaseSensorOperator
+from airflow.providers.common.compat.sdk import AirflowFailException, BaseSensorOperator
 
 if TYPE_CHECKING:
     from airflow.providers.common.compat.sdk import Context
@@ -19,27 +19,12 @@ from __future__ import annotations
 
 import datetime
 from collections.abc import Sequence
-from dataclasses import dataclass
 from typing import TYPE_CHECKING, Any, NoReturn
 
 from airflow.providers.common.compat.sdk import BaseSensorOperator, timezone
 from airflow.providers.standard.triggers.temporal import DateTimeTrigger
 from airflow.providers.standard.version_compat import AIRFLOW_V_3_0_PLUS
-
-try:
-    from airflow.triggers.base import StartTriggerArgs  # type: ignore[no-redef]
-except ImportError:  # TODO: Remove this when min airflow version is 2.10.0 for standard provider
-
-    @dataclass
-    class StartTriggerArgs:  # type: ignore[no-redef]
-        """Arguments required for start task execution from triggerer."""
-
-        trigger_cls: str
-        next_method: str
-        trigger_kwargs: dict[str, Any] | None = None
-        next_kwargs: dict[str, Any] | None = None
-        timeout: datetime.timedelta | None = None
-
+from airflow.triggers.base import StartTriggerArgs
 
 if TYPE_CHECKING:
     from airflow.sdk import Context
@@ -23,10 +23,13 @@ import warnings
 from collections.abc import Callable, Collection, Iterable, Sequence
 from typing import TYPE_CHECKING, ClassVar
 
-from airflow.configuration import conf
-from airflow.exceptions import AirflowSkipException
 from airflow.models.dag import DagModel
-from airflow.providers.common.compat.sdk import
+from airflow.providers.common.compat.sdk import (
+    AirflowSkipException,
+    BaseOperatorLink,
+    BaseSensorOperator,
+    conf,
+)
 from airflow.providers.standard.exceptions import (
     DuplicateStateError,
     ExternalDagDeletedError,
@@ -251,6 +254,7 @@ class ExternalTaskSensor(BaseSensorOperator):
         self._has_checked_existence = False
         self.deferrable = deferrable
         self.poll_interval = poll_interval
+        self.external_dates_filter: str | None = None
 
     def _get_dttm_filter(self, context: Context) -> Sequence[datetime.datetime]:
         logical_date = self._get_logical_date(context)
@@ -262,13 +266,19 @@ class ExternalTaskSensor(BaseSensorOperator):
             return result if isinstance(result, list) else [result]
         return [logical_date]
 
+    @staticmethod
+    def _serialize_dttm_filter(dttm_filter: Sequence[datetime.datetime]) -> str:
+        return ",".join(dt.isoformat() for dt in dttm_filter)
+
     def poke(self, context: Context) -> bool:
         # delay check to poke rather than __init__ in case it was supplied as XComArgs
         if self.external_task_ids and len(self.external_task_ids) > len(set(self.external_task_ids)):
             raise ValueError("Duplicate task_ids passed in external_task_ids parameter")
 
         dttm_filter = self._get_dttm_filter(context)
-        serialized_dttm_filter =
+        serialized_dttm_filter = self._serialize_dttm_filter(dttm_filter)
+        # Save as attribute - to be used by listeners
+        self.external_dates_filter = serialized_dttm_filter
 
         if self.external_task_ids:
             self.log.info(
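
The serialized filter is simply comma-joined ISO-8601 timestamps, e.g.:

import datetime

dttm_filter = [datetime.datetime(2025, 1, 1), datetime.datetime(2025, 1, 2)]
serialized = ",".join(dt.isoformat() for dt in dttm_filter)
assert serialized == "2025-01-01T00:00:00,2025-01-02T00:00:00"
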
@@ -457,6 +467,9 @@ class ExternalTaskSensor(BaseSensorOperator):
         if event is None:
             raise ExternalTaskNotFoundError("No event received from trigger")
 
+        # Re-set as attribute after coming back from deferral - to be used by listeners
+        self.external_dates_filter = self._serialize_dttm_filter(self._get_dttm_filter(context))
+
         if event["status"] == "success":
             self.log.info("External tasks %s has executed successfully.", self.external_task_ids)
         elif event["status"] == "skipped":
@@ -20,31 +20,14 @@ from __future__ import annotations
 
 import datetime
 import os
 from collections.abc import Sequence
-from dataclasses import dataclass
 from functools import cached_property
 from glob import glob
 from typing import TYPE_CHECKING, Any
 
-from airflow.
-from airflow.exceptions import AirflowException
-from airflow.providers.common.compat.sdk import BaseSensorOperator
+from airflow.providers.common.compat.sdk import AirflowException, BaseSensorOperator, conf
 from airflow.providers.standard.hooks.filesystem import FSHook
 from airflow.providers.standard.triggers.file import FileTrigger
-
-try:
-    from airflow.triggers.base import StartTriggerArgs  # type: ignore[no-redef]
-except ImportError:  # TODO: Remove this when min airflow version is 2.10.0 for standard provider
-
-    @dataclass
-    class StartTriggerArgs:  # type: ignore[no-redef]
-        """Arguments required for start task execution from triggerer."""
-
-        trigger_cls: str
-        next_method: str
-        trigger_kwargs: dict[str, Any] | None = None
-        next_kwargs: dict[str, Any] | None = None
-        timeout: datetime.timedelta | None = None
-
+from airflow.triggers.base import StartTriggerArgs
 
 if TYPE_CHECKING:
     from airflow.sdk import Context
@@ -19,28 +19,12 @@ from __future__ import annotations
 
 import datetime
 import warnings
-from dataclasses import dataclass
 from typing import TYPE_CHECKING, Any
 
-from airflow.configuration import conf
 from airflow.exceptions import AirflowProviderDeprecationWarning
-from airflow.providers.common.compat.sdk import BaseSensorOperator, timezone
+from airflow.providers.common.compat.sdk import BaseSensorOperator, conf, timezone
 from airflow.providers.standard.triggers.temporal import DateTimeTrigger
-
-try:
-    from airflow.triggers.base import StartTriggerArgs  # type: ignore[no-redef]
-except ImportError:  # TODO: Remove this when min airflow version is 2.10.0 for standard provider
-
-    @dataclass
-    class StartTriggerArgs:  # type: ignore[no-redef]
-        """Arguments required for start task execution from triggerer."""
-
-        trigger_cls: str
-        next_method: str
-        trigger_kwargs: dict[str, Any] | None = None
-        next_kwargs: dict[str, Any] | None = None
-        timeout: datetime.timedelta | None = None
-
+from airflow.triggers.base import StartTriggerArgs
 
 if TYPE_CHECKING:
     from airflow.sdk import Context
@@ -25,9 +25,8 @@ from typing import TYPE_CHECKING, Any
 from deprecated.classic import deprecated
 from packaging.version import Version
 
-from airflow.
-from airflow.
-from airflow.providers.common.compat.sdk import BaseSensorOperator, timezone
+from airflow.exceptions import AirflowProviderDeprecationWarning
+from airflow.providers.common.compat.sdk import AirflowSkipException, BaseSensorOperator, conf, timezone
 from airflow.providers.standard.triggers.temporal import DateTimeTrigger, TimeDeltaTrigger
 from airflow.providers.standard.version_compat import AIRFLOW_V_3_0_PLUS
 
@@ -194,9 +193,11 @@ class WaitSensor(BaseSensorOperator):
     def execute(self, context: Context) -> None:
         if self.deferrable:
             self.defer(
-                trigger=
-
-
+                trigger=(
+                    TimeDeltaTrigger(self.time_to_wait, end_from_trigger=True)
+                    if AIRFLOW_V_3_0_PLUS
+                    else TimeDeltaTrigger(self.time_to_wait)
+                ),
                 method_name="execute_complete",
             )
         else:
@@ -226,23 +226,26 @@ class DagStateTrigger(BaseTrigger):
         elif self.execution_dates:
             runs_ids_or_dates = len(self.execution_dates)
 
+        cls_path, data = self.serialize()
+
         if AIRFLOW_V_3_0_PLUS:
-            data
-
+            data.update(  # update with {run_id: run_state} dict
+                await self.validate_count_dags_af_3(runs_ids_or_dates_len=runs_ids_or_dates)
+            )
+            yield TriggerEvent((cls_path, data))
             return
         else:
             while True:
                 num_dags = await self.count_dags()
                 if num_dags == runs_ids_or_dates:
-                    yield TriggerEvent(
+                    yield TriggerEvent((cls_path, data))
                     return
                 await asyncio.sleep(self.poll_interval)
 
-    async def validate_count_dags_af_3(self, runs_ids_or_dates_len: int = 0) -> dict[str,
+    async def validate_count_dags_af_3(self, runs_ids_or_dates_len: int = 0) -> dict[str, str]:
         from airflow.sdk.execution_time.task_runner import RuntimeTaskInstance
 
-
-
+        run_states: dict[str, str] = {}  # {run_id: run_state}
         while True:
             num_dags = await sync_to_async(RuntimeTaskInstance.get_dr_count)(
                 dag_id=self.dag_id,
@@ -257,8 +260,8 @@ class DagStateTrigger(BaseTrigger):
                     dag_id=self.dag_id,
                     run_id=run_id,
                 )
-
-                return
+                run_states[run_id] = state
+            return run_states
             await asyncio.sleep(self.poll_interval)
 
 if not AIRFLOW_V_3_0_PLUS:
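
With these changes the trigger's event payload is its serialized kwargs extended with a {run_id: run_state} mapping. A sketch of the resulting shape (values illustrative):

cls_path = "airflow.providers.standard.triggers.external_task.DagStateTrigger"
data = {"dag_id": "some_dag", "run_ids": ["manual__2025-01-01"], "poll_interval": 15}
run_states = {"manual__2025-01-01": "success"}  # as returned by validate_count_dags_af_3
data.update(run_states)
event_payload = (cls_path, data)  # what TriggerEvent now carries
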
@@ -16,7 +16,7 @@
 # under the License.
 from __future__ import annotations
 
-from airflow.
+from airflow.providers.common.compat.sdk import AirflowOptionalProviderFeatureException
 from airflow.providers.standard.version_compat import AIRFLOW_V_3_1_PLUS
 
 if not AIRFLOW_V_3_1_PLUS:
@@ -30,7 +30,7 @@ from uuid import UUID
 
 from asgiref.sync import sync_to_async
 
-from airflow.
+from airflow.providers.common.compat.sdk import ParamValidationError
 from airflow.sdk import Param
 from airflow.sdk.definitions.param import ParamsDict
 from airflow.sdk.execution_time.hitl import (
@@ -0,0 +1,185 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+import logging
+from typing import TYPE_CHECKING
+
+from airflow.providers.common.compat.openlineage.check import require_openlineage_version
+from airflow.providers.common.compat.sdk import AirflowOptionalProviderFeatureException
+
+if TYPE_CHECKING:
+    from airflow.models import TaskInstance
+    from airflow.sdk.types import RuntimeTaskInstanceProtocol as RuntimeTI
+
+log = logging.getLogger(__name__)
+
+OPENLINEAGE_PROVIDER_MIN_VERSION = "2.8.0"
+
+
+def _is_openlineage_provider_accessible() -> bool:
+    """
+    Check if the OpenLineage provider is accessible.
+
+    This function attempts to import the necessary OpenLineage modules and checks if the provider
+    is enabled and the listener is available.
+
+    Returns:
+        bool: True if the OpenLineage provider is accessible, False otherwise.
+    """
+    try:
+        from airflow.providers.openlineage.conf import is_disabled
+        from airflow.providers.openlineage.plugins.listener import get_openlineage_listener
+    except (ImportError, AttributeError):
+        log.debug("OpenLineage provider could not be imported.")
+        return False
+
+    if is_disabled():
+        log.debug("OpenLineage provider is disabled.")
+        return False
+
+    if not get_openlineage_listener():
+        log.debug("OpenLineage listener could not be found.")
+        return False
+
+    return True
+
+
+@require_openlineage_version(provider_min_version=OPENLINEAGE_PROVIDER_MIN_VERSION)
+def _get_openlineage_parent_info(ti: TaskInstance | RuntimeTI) -> dict[str, str]:
+    """Get OpenLineage metadata about the parent task."""
+    from airflow.providers.openlineage.plugins.macros import (
+        lineage_job_name,
+        lineage_job_namespace,
+        lineage_root_job_name,
+        lineage_root_job_namespace,
+        lineage_root_run_id,
+        lineage_run_id,
+    )
+
+    return {
+        "parentRunId": lineage_run_id(ti),
+        "parentJobName": lineage_job_name(ti),
+        "parentJobNamespace": lineage_job_namespace(),
+        "rootParentRunId": lineage_root_run_id(ti),
+        "rootParentJobName": lineage_root_job_name(ti),
+        "rootParentJobNamespace": lineage_root_job_namespace(ti),
+    }
+
+
+def _inject_openlineage_parent_info_to_dagrun_conf(
+    dr_conf: dict | None, ol_parent_info: dict[str, str]
+) -> dict:
+    """
+    Safely inject OpenLineage parent and root run metadata into a DAG run configuration.
+
+    This function adds parent and root job/run identifiers derived from the given TaskInstance into the
+    `openlineage` section of the DAG run configuration. If an `openlineage` key already exists, it is
+    preserved and extended, but no existing parent or root identifiers are overwritten.
+
+    The function performs several safety checks:
+    - If conf is not a dictionary or contains a non-dict `openlineage` section, conf is returned unmodified.
+    - If `openlineage` section contains any parent/root lineage identifiers, conf is returned unmodified.
+
+    Args:
+        dr_conf: The original DAG run configuration dictionary or None.
+        ol_parent_info: OpenLineage metadata about the parent task
+
+    Returns:
+        A modified DAG run conf with injected OpenLineage parent and root metadata,
+        or the original conf if injection is not possible.
+    """
+    current_ol_dr_conf = {}
+    if isinstance(dr_conf, dict) and dr_conf.get("openlineage"):
+        current_ol_dr_conf = dr_conf["openlineage"]
+        if not isinstance(current_ol_dr_conf, dict):
+            log.warning(
+                "Existing 'openlineage' section of DagRun conf is not a dictionary; "
+                "skipping injection of parent metadata."
+            )
+            return dr_conf
+    forbidden_keys = (
+        "parentRunId",
+        "parentJobName",
+        "parentJobNamespace",
+        "rootParentRunId",
+        "rootJobName",
+        "rootJobNamespace",
+    )
+
+    if existing := [k for k in forbidden_keys if k in current_ol_dr_conf]:
+        log.warning(
+            "'openlineage' section of DagRun conf already contains parent or root "
+            "identifiers: `%s`; skipping injection to avoid overwriting existing values.",
+            ", ".join(existing),
+        )
+        return dr_conf
+
+    return {**(dr_conf or {}), **{"openlineage": {**ol_parent_info, **current_ol_dr_conf}}}
+
+
+def safe_inject_openlineage_properties_into_dagrun_conf(
+    dr_conf: dict | None, ti: TaskInstance | RuntimeTI | None
+) -> dict | None:
+    """
+    Safely inject OpenLineage parent task metadata into a DAG run conf.
+
+    This function checks whether the OpenLineage provider is accessible and supports parent information
+    injection. If so, it enriches the DAG run conf with OpenLineage metadata about the parent task
+    to improve lineage tracking. The function does not modify other conf fields, will not overwrite
+    any existing content, and safely returns the original configuration if OpenLineage is unavailable,
+    unsupported, or an error occurs during injection.
+
+    :param dr_conf: The original DAG run configuration dictionary.
+    :param ti: The TaskInstance whose metadata may be injected.
+
+    :return: A potentially enriched DAG run conf with OpenLineage parent information,
+        or the original conf if injection was skipped or failed.
+    """
+    try:
+        if ti is None:
+            log.debug("Task instance not provided - dagrun conf not modified.")
+            return dr_conf
+
+        if not _is_openlineage_provider_accessible():
+            log.debug("OpenLineage provider not accessible - dagrun conf not modified.")
+            return dr_conf
+
+        ol_parent_info = _get_openlineage_parent_info(ti=ti)
+
+        log.info("Injecting openlineage parent task information into dagrun conf.")
+        new_conf = _inject_openlineage_parent_info_to_dagrun_conf(
+            dr_conf=dr_conf.copy() if isinstance(dr_conf, dict) else dr_conf,
+            ol_parent_info=ol_parent_info,
+        )
+        return new_conf
+    except AirflowOptionalProviderFeatureException:
+        log.info(
+            "Current OpenLineage provider version doesn't support parent information in "
+            "the DagRun conf. Upgrade `apache-airflow-providers-openlineage>=%s` to use this feature. "
+            "DagRun conf has not been modified by OpenLineage.",
+            OPENLINEAGE_PROVIDER_MIN_VERSION,
+        )
+        return dr_conf
+    except Exception as e:
+        log.warning(
+            "An error occurred while trying to inject OpenLineage information into dagrun conf. "
+            "DagRun conf has not been modified by OpenLineage. Error: %s",
+            str(e),
+        )
+        log.debug("Error details: ", exc_info=e)
+        return dr_conf
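
A note on the final merge expression in _inject_openlineage_parent_info_to_dagrun_conf: parent info is the base dict and the user's existing `openlineage` section is spread on top, so user-supplied keys win on conflict. A worked sketch (values illustrative):

dr_conf = {"foo": "bar", "openlineage": {"custom": "x"}}
ol_parent_info = {"parentRunId": "run-1", "parentJobName": "upstream_task"}
merged = {**(dr_conf or {}), **{"openlineage": {**ol_parent_info, **dr_conf.get("openlineage", {})}}}
assert merged == {
    "foo": "bar",
    "openlineage": {"parentRunId": "run-1", "parentJobName": "upstream_task", "custom": "x"},
}
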
@@ -30,7 +30,7 @@ from pathlib import Path
 import jinja2
 from jinja2 import select_autoescape
 
-from airflow.
+from airflow.providers.common.compat.sdk import conf
 
 
 def _is_uv_installed() -> bool:
@@ -150,7 +150,7 @@ def _execute_in_subprocess(cmd: list[str], cwd: str | None = None, env: dict[str
             stdout=subprocess.PIPE,
             stderr=subprocess.STDOUT,
             bufsize=0,
-            close_fds=
+            close_fds=False,
             cwd=cwd,
             env=env,
         ) as proc:
@@ -200,9 +200,10 @@ def prepare_virtualenv(
 
     if _use_uv():
         venv_cmd = _generate_uv_cmd(venv_directory, python_bin, system_site_packages)
+        _execute_in_subprocess(venv_cmd, env={**os.environ, **_index_urls_to_uv_env_vars(index_urls)})
     else:
         venv_cmd = _generate_venv_cmd(venv_directory, python_bin, system_site_packages)
-
+        _execute_in_subprocess(venv_cmd)
 
     pip_cmd = None
     if requirements is not None and len(requirements) != 0:
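
A hedged sketch of calling the helper changed above; argument names follow the signature as it appears in this module, and paths are illustrative:

from airflow.providers.standard.utils.python_virtualenv import prepare_virtualenv

python_bin = prepare_virtualenv(
    venv_directory="/tmp/example_venv",
    python_bin="python3",
    system_site_packages=False,
    requirements=["dill"],
)
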
@@ -40,6 +40,23 @@ if sys.version_info >= (3,6):
     pass
 {% endif %}
 
+try:
+    from airflow.sdk.execution_time import task_runner
+except ModuleNotFoundError:
+    pass
+else:
+    {#-
+    We are in an Airflow 3.x environment, try and set up supervisor comms so
+    virtualenv can access Vars/Conn/XCom/etc that normal tasks can
+
+    We don't use the walrus operator (`:=`) below as it is possible people can
+    be using this on pre-3.8 versions of python, and while Airflow doesn't
+    support them, it's easy to not break it not using that operator here.
+    #}
+    reinit_supervisor_comms = getattr(task_runner, "reinit_supervisor_comms", None)
+    if reinit_supervisor_comms:
+        reinit_supervisor_comms()
+
 # Script
 {{ python_callable_source }}
 
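
The rendered script only calls the supervisor-comms hook when the running Airflow ships it. The same guard in plain Python, outside the template:

try:
    from airflow.sdk.execution_time import task_runner
except ModuleNotFoundError:
    task_runner = None  # Airflow 2.x environment: no SDK task runner

if task_runner is not None:
    reinit_supervisor_comms = getattr(task_runner, "reinit_supervisor_comms", None)
    if reinit_supervisor_comms:
        reinit_supervisor_comms()
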
@@ -49,12 +66,10 @@ if sys.version_info >= (3,6):
     import types
 
     {{ modified_dag_module_name }} = types.ModuleType("{{ modified_dag_module_name }}")
-
    {{ modified_dag_module_name }}.{{ python_callable }} = {{ python_callable }}
-
    sys.modules["{{modified_dag_module_name}}"] = {{modified_dag_module_name}}
 
-{
+{%- endif -%}
 
 {% if op_args or op_kwargs %}
 with open(sys.argv[1], "rb") as file:
@@ -21,7 +21,7 @@ from collections.abc import Iterable, Sequence
 from types import GeneratorType
 from typing import TYPE_CHECKING
 
-from airflow.
+from airflow.providers.common.compat.sdk import AirflowException
 from airflow.utils.log.logging_mixin import LoggingMixin
 
 if TYPE_CHECKING:
@@ -63,7 +63,7 @@ class SkipMixin(LoggingMixin):
         """
         # Import is internal for backward compatibility when importing PythonOperator
         # from airflow.providers.common.compat.standard.operators
-        from airflow.
+        from airflow.providers.common.compat.sdk import DownstreamTasksSkipped
 
         # The following could be applied only for non-mapped tasks,
         # as future mapped tasks have not been expanded yet. Such tasks
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: apache-airflow-providers-standard
-Version: 1.
+Version: 1.10.3
 Summary: Provider package apache-airflow-providers-standard for Apache Airflow
 Keywords: airflow-provider,standard,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -22,15 +22,17 @@ Classifier: Programming Language :: Python :: 3.13
 Classifier: Topic :: System :: Monitoring
 License-File: LICENSE
 License-File: NOTICE
-Requires-Dist: apache-airflow>=2.
-Requires-Dist: apache-airflow-providers-common-compat>=1.
+Requires-Dist: apache-airflow>=2.11.0
+Requires-Dist: apache-airflow-providers-common-compat>=1.12.0
+Requires-Dist: apache-airflow-providers-openlineage ; extra == "openlineage"
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.
-Project-URL: Documentation, https://airflow.
+Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-standard/1.10.3/changelog.html
+Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-standard/1.10.3
 Project-URL: Mastodon, https://fosstodon.org/@airflow
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
 Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/
+Provides-Extra: openlineage
 
 
 .. Licensed to the Apache Software Foundation (ASF) under one
@@ -57,7 +59,7 @@ Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/
 
 Package ``apache-airflow-providers-standard``
 
-Release: ``1.
+Release: ``1.10.3``
 
 
 Airflow Standard Provider
@@ -70,7 +72,7 @@ This is a provider package for ``standard`` provider. All classes for this provi
 are in ``airflow.providers.standard`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-standard/1.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-standard/1.10.3/>`_.
 
 Installation
 ------------
@@ -87,8 +89,8 @@ Requirements
 ========================================== ==================
 PIP package                                Version required
 ========================================== ==================
-``apache-airflow``                         ``>=2.
-``apache-airflow-providers-common-compat`` ``>=1.
+``apache-airflow``                         ``>=2.11.0``
+``apache-airflow-providers-common-compat`` ``>=1.10.1``
 ========================================== ==================
 
 Cross provider package dependencies
@@ -108,8 +110,18 @@ You can install such cross-provider dependencies when installing from PyPI. For
 Dependent package                                                                                                  Extra
 ================================================================================================================== =================
 `apache-airflow-providers-common-compat <https://airflow.apache.org/docs/apache-airflow-providers-common-compat>`_ ``common.compat``
+`apache-airflow-providers-openlineage <https://airflow.apache.org/docs/apache-airflow-providers-openlineage>`_     ``openlineage``
 ================================================================================================================== =================
 
+Optional dependencies
+----------------------
+
+=============== ========================================
+Extra           Dependencies
+=============== ========================================
+``openlineage`` ``apache-airflow-providers-openlineage``
+=============== ========================================
+
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-standard/1.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-standard/1.10.3/changelog.html>`_.
 
@@ -1,9 +1,9 @@
-airflow/providers/standard/__init__.py,sha256=
-airflow/providers/standard/exceptions.py,sha256=
+airflow/providers/standard/__init__.py,sha256=9U1f1ln4A2DrS16rZ4_tF36RA70CDZaIGImbJjKbpsQ,1498
+airflow/providers/standard/exceptions.py,sha256=d5BzPvWfKGMZbgvlkFQCFyVijh2Y2_CB2_2tOmU2djE,2433
 airflow/providers/standard/get_provider_info.py,sha256=NVstkG2ZeAiTZnvmbrMpxcYgJzcdITKdQvgDOZYX1Rk,7227
 airflow/providers/standard/version_compat.py,sha256=wzS7qPjUGZp-zZeL2YnvAlxD-Sdtt7ff6KnlFk3xlUA,2166
 airflow/providers/standard/decorators/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/standard/decorators/bash.py,sha256=
+airflow/providers/standard/decorators/bash.py,sha256=JFEC7uKb8LJfk2EeRnmo1P5s6Yp5onfqDm7l5jQne1Q,4105
 airflow/providers/standard/decorators/branch_external_python.py,sha256=sGtSrCK137HLm77fcxbYgpDeMMuYdMJp6oVbPrQHvPU,2403
 airflow/providers/standard/decorators/branch_python.py,sha256=-p3Mwm0CURcPrh4nHHIUK8KCS_R2fXLaGYPWK62CTSY,2323
 airflow/providers/standard/decorators/branch_virtualenv.py,sha256=D1vbkyMB6dOItIVvd3evMT4WC0XX39vp0USs-ZJ9N6A,2405
@@ -14,7 +14,7 @@ airflow/providers/standard/decorators/sensor.py,sha256=vF-AeZN0Fl4cOV1l7r-J6Nac_
 airflow/providers/standard/decorators/short_circuit.py,sha256=lhcnnhgVMq7yOJvQrWGMYxATYJPOe9UOmdf-LLg-N8c,2352
 airflow/providers/standard/decorators/stub.py,sha256=faKSBFIaLOZSbf4fD7WwLGKclW-bMAUlGf8b_HLWX0Q,3020
 airflow/providers/standard/example_dags/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/standard/example_dags/example_bash_decorator.py,sha256=
+airflow/providers/standard/example_dags/example_bash_decorator.py,sha256=pIwJcGzwWO9EMznljSQfopn9ug3UkOb86jRtbejuoAg,3733
 airflow/providers/standard/example_dags/example_bash_operator.py,sha256=tAS4cBsKW9B1nUukmYTpUw5Vf63476v_-tYjffyAtd4,2352
 airflow/providers/standard/example_dags/example_branch_datetime_operator.py,sha256=6sGzn1xlMaF3I-HMI7bvx78oyxZUw5WAF_Gja_ZUch0,3765
 airflow/providers/standard/example_dags/example_branch_day_of_week_operator.py,sha256=75ncMaGfkjxN0ULszqeXrSL5rHauUTNOhGiGAGPm3pw,2362
@@ -40,39 +40,40 @@ airflow/providers/standard/hooks/package_index.py,sha256=BgPZB9z0UKV1jO-QERdxZTw
 airflow/providers/standard/hooks/subprocess.py,sha256=bO6xV9JBfQ_iZAdV1JiycHSqd_HWYgKUOSGCd32H2Js,4667
 airflow/providers/standard/models/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/standard/operators/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/standard/operators/bash.py,sha256=
+airflow/providers/standard/operators/bash.py,sha256=IP9-Ji7dL9FSoWGA95DZEIz4P1K1kylKL1BzZY2-Bdc,11060
 airflow/providers/standard/operators/branch.py,sha256=NtQVkB63nvZCnFoYlfhM-fG1c9zakEDIid20KfuoLW4,4247
-airflow/providers/standard/operators/datetime.py,sha256=
+airflow/providers/standard/operators/datetime.py,sha256=G7wBc9bn0YSJqNQCgEBPiu_Qmsn9Ic4mt3W1aoU0N2A,4849
 airflow/providers/standard/operators/empty.py,sha256=flxN2BhuHegEOiiAcJR9QOuMO8PcxQu353Q9p-Yk82s,1342
-airflow/providers/standard/operators/hitl.py,sha256=
-airflow/providers/standard/operators/latest_only.py,sha256=
-airflow/providers/standard/operators/python.py,sha256=
+airflow/providers/standard/operators/hitl.py,sha256=BRMrUsdDjc4snZdXdaMD6u15gvjn1AqDWb7A-JbVrIg,18669
+airflow/providers/standard/operators/latest_only.py,sha256=CjlffWV9bK2UZ8L5KTPLNq3EG_EF6tQl7CLT4uz-eBg,5292
+airflow/providers/standard/operators/python.py,sha256=AFxRb68rgLQbmJ-k5fruGSTNFQRM_mD4w5OD3gThreg,55071
 airflow/providers/standard/operators/smooth.py,sha256=WamRqmeSm6BcGCCBAqBEVYIRk4ZXbeI_Q7OjPgLfnUI,1400
-airflow/providers/standard/operators/trigger_dagrun.py,sha256=
+airflow/providers/standard/operators/trigger_dagrun.py,sha256=MHejvbWj6QCK24-U9OsWM4ZS6iQ2L8voONpslluuLtE,19330
 airflow/providers/standard/operators/weekday.py,sha256=s8C6T-x9Hvkj4YQWCguTRyhiAqdJYCBr12rPm2qYC3M,4957
 airflow/providers/standard/sensors/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/standard/sensors/bash.py,sha256=
-airflow/providers/standard/sensors/date_time.py,sha256=
-airflow/providers/standard/sensors/external_task.py,sha256=
-airflow/providers/standard/sensors/filesystem.py,sha256=
+airflow/providers/standard/sensors/bash.py,sha256=EkG1zl2yoUirWAg03BjEpdDTSJbo1ugE-AxhE5s40Ks,4792
+airflow/providers/standard/sensors/date_time.py,sha256=WEbbniUQZU1pGURDieEGWZcsEy0RRxjoKPIxCYdsObg,5838
+airflow/providers/standard/sensors/external_task.py,sha256=wIYceWOOpw5pePy_UdEvIGr1pGVF_fk71NF9pHlFShE,30825
+airflow/providers/standard/sensors/filesystem.py,sha256=fTI6FsBXOI9NE2zlipGAKJE1kvQ5gLaNJf5dOi1pgE8,5316
 airflow/providers/standard/sensors/python.py,sha256=eBfy0QRgsQHw4H4pZ4u7DNzu7ZdMH7TtPQWOdP4wWqA,3221
-airflow/providers/standard/sensors/time.py,sha256=
-airflow/providers/standard/sensors/time_delta.py,sha256=
+airflow/providers/standard/sensors/time.py,sha256=qgHENTcppShQUtxGJrvPeFPGoDK41zCB3TbLJLpBJzQ,4436
+airflow/providers/standard/sensors/time_delta.py,sha256=7sqmAcaCtnfZeSW6DTSNQbf6TEVH2Doe58OqLHhOusA,7468
 airflow/providers/standard/sensors/weekday.py,sha256=Jb_QPJNN7_Os1X0Y-MA0-J_tv-rtWjZcU0C_5edo8X0,4291
 airflow/providers/standard/triggers/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/standard/triggers/external_task.py,sha256=
+airflow/providers/standard/triggers/external_task.py,sha256=jmHxCg96NPudD-HytsFiGsaI75_X6Cj1qlc5drG8KYU,11808
 airflow/providers/standard/triggers/file.py,sha256=mkZuOBNMHON9DQSBRO1NIqcNNjxGM5dbYOQ1Cfsm-BQ,4877
-airflow/providers/standard/triggers/hitl.py,sha256=
+airflow/providers/standard/triggers/hitl.py,sha256=z8Mf9S9WQTKIagMdrjWm1TcqC9GY5dKmQHHDbNCO7fs,8535
 airflow/providers/standard/triggers/temporal.py,sha256=-Zxu96xqt40bhsyzFSK2gv-Ddb2GNr2UogeIoyBX684,4468
 airflow/providers/standard/utils/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/standard/utils/
-airflow/providers/standard/utils/
+airflow/providers/standard/utils/openlineage.py,sha256=B5oON4f9C1Pl69Sl7nMdL9G4aZWXd8s-BjabxoQwbgo,7530
+airflow/providers/standard/utils/python_virtualenv.py,sha256=wUKF6l7fLXD_OKKMcxkQ9O7yoTc52HzbFwT9ezrQjf4,9644
+airflow/providers/standard/utils/python_virtualenv_script.jinja2,sha256=yg4FngvJFX7f3KKv7qbWiuNnq7L-_9hemstywJls1uM,3403
 airflow/providers/standard/utils/sensor_helper.py,sha256=ZcJeWAGymwUma7R6U7pQXhmQLC2UEeiyjQOrH4uFxt0,5407
-airflow/providers/standard/utils/skipmixin.py,sha256=
+airflow/providers/standard/utils/skipmixin.py,sha256=gVmICO2CjH6faJPhzVC8_NkwwnhcEhnULifGJF1tVtg,8046
 airflow/providers/standard/utils/weekday.py,sha256=ySDrIkWv-lqqxURo9E98IGInDqERec2O4y9o2hQTGiQ,2685
-apache_airflow_providers_standard-1.
-apache_airflow_providers_standard-1.
-apache_airflow_providers_standard-1.
-apache_airflow_providers_standard-1.
-apache_airflow_providers_standard-1.
-apache_airflow_providers_standard-1.
+apache_airflow_providers_standard-1.10.3.dist-info/entry_points.txt,sha256=mW2YRh3mVdZdaP5-iGSNgmcCh3YYdALIn28BCLBZZ40,104
+apache_airflow_providers_standard-1.10.3.dist-info/licenses/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
+apache_airflow_providers_standard-1.10.3.dist-info/licenses/NOTICE,sha256=_cWHznIoUSbLCY_KfmKqetlKlsoH0c2VBjmZjElAzuc,168
+apache_airflow_providers_standard-1.10.3.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
+apache_airflow_providers_standard-1.10.3.dist-info/METADATA,sha256=ERiN8OnMbiovVzLR-NdKko5hbWglP7O7m89WsZ9bZ3I,5662
+apache_airflow_providers_standard-1.10.3.dist-info/RECORD,,

File without changes
File without changes