apache-airflow-providers-standard 0.3.0rc1__py3-none-any.whl → 0.4.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- airflow/providers/standard/__init__.py +1 -1
- airflow/providers/standard/decorators/__init__.py +16 -0
- airflow/providers/standard/decorators/bash.py +121 -0
- airflow/providers/standard/decorators/branch_external_python.py +63 -0
- airflow/providers/standard/decorators/branch_python.py +62 -0
- airflow/providers/standard/decorators/branch_virtualenv.py +62 -0
- airflow/providers/standard/decorators/external_python.py +70 -0
- airflow/providers/standard/decorators/python.py +86 -0
- airflow/providers/standard/decorators/python_virtualenv.py +67 -0
- airflow/providers/standard/decorators/sensor.py +83 -0
- airflow/providers/standard/decorators/short_circuit.py +65 -0
- airflow/providers/standard/get_provider_info.py +29 -5
- airflow/providers/standard/operators/latest_only.py +34 -2
- airflow/providers/standard/operators/trigger_dagrun.py +50 -15
- airflow/providers/standard/sensors/external_task.py +7 -3
- airflow/providers/standard/triggers/external_task.py +82 -17
- {apache_airflow_providers_standard-0.3.0rc1.dist-info → apache_airflow_providers_standard-0.4.0.dist-info}/METADATA +7 -7
- {apache_airflow_providers_standard-0.3.0rc1.dist-info → apache_airflow_providers_standard-0.4.0.dist-info}/RECORD +20 -10
- {apache_airflow_providers_standard-0.3.0rc1.dist-info → apache_airflow_providers_standard-0.4.0.dist-info}/WHEEL +1 -1
- {apache_airflow_providers_standard-0.3.0rc1.dist-info → apache_airflow_providers_standard-0.4.0.dist-info}/entry_points.txt +0 -0

airflow/providers/standard/__init__.py

```diff
@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version
 
 __all__ = ["__version__"]
 
-__version__ = "0.
+__version__ = "0.4.0"
 
 if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
     "2.9.0"
```

airflow/providers/standard/decorators/__init__.py

```diff
@@ -0,0 +1,16 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
```

airflow/providers/standard/decorators/bash.py

```diff
@@ -0,0 +1,121 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+from __future__ import annotations
+
+import warnings
+from collections.abc import Collection, Mapping, Sequence
+from typing import TYPE_CHECKING, Any, Callable, ClassVar
+
+from airflow.providers.standard.version_compat import AIRFLOW_V_3_0_PLUS
+
+if AIRFLOW_V_3_0_PLUS:
+    from airflow.sdk.bases.decorator import DecoratedOperator, TaskDecorator, task_decorator_factory
+else:
+    from airflow.decorators.base import (  # type: ignore[no-redef]
+        DecoratedOperator,
+        TaskDecorator,
+        task_decorator_factory,
+    )
+
+from airflow.providers.standard.operators.bash import BashOperator
+from airflow.sdk.definitions._internal.types import SET_DURING_EXECUTION
+from airflow.utils.context import context_merge
+from airflow.utils.operator_helpers import determine_kwargs
+
+if TYPE_CHECKING:
+    from airflow.sdk.definitions.context import Context
+
+
+class _BashDecoratedOperator(DecoratedOperator, BashOperator):
+    """
+    Wraps a Python callable and uses the callable return value as the Bash command to be executed.
+
+    :param python_callable: A reference to an object that is callable.
+    :param op_kwargs: A dictionary of keyword arguments that will get unpacked
+        in your function (templated).
+    :param op_args: A list of positional arguments that will get unpacked when
+        calling your callable (templated).
+    """
+
+    template_fields: Sequence[str] = (*DecoratedOperator.template_fields, *BashOperator.template_fields)
+    template_fields_renderers: ClassVar[dict[str, str]] = {
+        **DecoratedOperator.template_fields_renderers,
+        **BashOperator.template_fields_renderers,
+    }
+
+    custom_operator_name: str = "@task.bash"
+    overwrite_rtif_after_execution: bool = True
+
+    def __init__(
+        self,
+        *,
+        python_callable: Callable,
+        op_args: Collection[Any] | None = None,
+        op_kwargs: Mapping[str, Any] | None = None,
+        **kwargs,
+    ) -> None:
+        if kwargs.pop("multiple_outputs", None):
+            warnings.warn(
+                f"`multiple_outputs=True` is not supported in {self.custom_operator_name} tasks. Ignoring.",
+                UserWarning,
+                stacklevel=3,
+            )
+
+        super().__init__(
+            python_callable=python_callable,
+            op_args=op_args,
+            op_kwargs=op_kwargs,
+            bash_command=SET_DURING_EXECUTION,
+            multiple_outputs=False,
+            **kwargs,
+        )
+
+    def execute(self, context: Context) -> Any:
+        context_merge(context, self.op_kwargs)
+        kwargs = determine_kwargs(self.python_callable, self.op_args, context)
+
+        self.bash_command = self.python_callable(*self.op_args, **kwargs)
+
+        if not isinstance(self.bash_command, str) or self.bash_command.strip() == "":
+            raise TypeError("The returned value from the TaskFlow callable must be a non-empty string.")
+
+        self._is_inline_cmd = self._is_inline_command(bash_command=self.bash_command)
+        context["ti"].render_templates()  # type: ignore[attr-defined]
+
+        return super().execute(context)
+
+
+def bash_task(
+    python_callable: Callable | None = None,
+    **kwargs,
+) -> TaskDecorator:
+    """
+    Wrap a function into a BashOperator.
+
+    Accepts kwargs for operator kwargs. Can be reused in a single DAG. This function is only used
+    during type checking or auto-completion.
+
+    :param python_callable: Function to decorate.
+
+    :meta private:
+    """
+    return task_decorator_factory(
+        python_callable=python_callable,
+        decorated_operator_class=_BashDecoratedOperator,
+        **kwargs,
+    )
```
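
For context, the `@task.bash` decorator that this module backs follows the usual TaskFlow pattern: the wrapped callable runs first, and its string return value becomes the operator's `bash_command`. A minimal usage sketch (the DAG id and command are illustrative, not taken from the diff):

```python
from airflow.decorators import dag, task


@dag(dag_id="bash_decorator_demo")
def bash_decorator_demo():
    @task.bash
    def list_tmp(depth: int = 1) -> str:
        # The return value becomes bash_command; per execute() above,
        # a non-string or empty string raises TypeError at runtime.
        return f"find /tmp -maxdepth {depth} -type f"

    list_tmp()


bash_decorator_demo()
```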

airflow/providers/standard/decorators/branch_external_python.py

```diff
@@ -0,0 +1,63 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Callable
+
+from airflow.providers.standard.version_compat import AIRFLOW_V_3_0_PLUS
+
+if AIRFLOW_V_3_0_PLUS:
+    from airflow.sdk.bases.decorator import task_decorator_factory
+else:
+    from airflow.decorators.base import task_decorator_factory  # type: ignore[no-redef]
+
+from airflow.providers.standard.decorators.python import _PythonDecoratedOperator
+from airflow.providers.standard.operators.python import BranchExternalPythonOperator
+
+if TYPE_CHECKING:
+    from airflow.sdk.bases.decorator import TaskDecorator
+
+
+class _BranchExternalPythonDecoratedOperator(_PythonDecoratedOperator, BranchExternalPythonOperator):
+    """Wraps a Python callable and captures args/kwargs when called for execution."""
+
+    template_fields = BranchExternalPythonOperator.template_fields
+    custom_operator_name: str = "@task.branch_external_python"
+
+
+def branch_external_python_task(
+    python_callable: Callable | None = None, multiple_outputs: bool | None = None, **kwargs
+) -> TaskDecorator:
+    """
+    Wrap a python function into a BranchExternalPythonOperator.
+
+    For more information on how to use this operator, take a look at the guide:
+    :ref:`concepts:branching`
+
+    Accepts kwargs for operator kwarg. Can be reused in a single DAG.
+
+    :param python_callable: Function to decorate
+    :param multiple_outputs: if set, function return value will be
+        unrolled to multiple XCom values. Dict will unroll to xcom values with keys as XCom keys.
+        Defaults to False.
+    """
+    return task_decorator_factory(
+        python_callable=python_callable,
+        multiple_outputs=multiple_outputs,
+        decorated_operator_class=_BranchExternalPythonDecoratedOperator,
+        **kwargs,
+    )
```

airflow/providers/standard/decorators/branch_python.py

```diff
@@ -0,0 +1,62 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Callable
+
+from airflow.providers.standard.version_compat import AIRFLOW_V_3_0_PLUS
+
+if AIRFLOW_V_3_0_PLUS:
+    from airflow.sdk.bases.decorator import task_decorator_factory
+else:
+    from airflow.decorators.base import task_decorator_factory  # type: ignore[no-redef]
+from airflow.providers.standard.decorators.python import _PythonDecoratedOperator
+from airflow.providers.standard.operators.python import BranchPythonOperator
+
+if TYPE_CHECKING:
+    from airflow.sdk.bases.decorator import TaskDecorator
+
+
+class _BranchPythonDecoratedOperator(_PythonDecoratedOperator, BranchPythonOperator):
+    """Wraps a Python callable and captures args/kwargs when called for execution."""
+
+    template_fields = BranchPythonOperator.template_fields
+    custom_operator_name: str = "@task.branch"
+
+
+def branch_task(
+    python_callable: Callable | None = None, multiple_outputs: bool | None = None, **kwargs
+) -> TaskDecorator:
+    """
+    Wrap a python function into a BranchPythonOperator.
+
+    For more information on how to use this operator, take a look at the guide:
+    :ref:`concepts:branching`
+
+    Accepts kwargs for operator kwarg. Can be reused in a single DAG.
+
+    :param python_callable: Function to decorate
+    :param multiple_outputs: if set, function return value will be
+        unrolled to multiple XCom values. Dict will unroll to xcom values with keys as XCom keys.
+        Defaults to False.
+    """
+    return task_decorator_factory(
+        python_callable=python_callable,
+        multiple_outputs=multiple_outputs,
+        decorated_operator_class=_BranchPythonDecoratedOperator,
+        **kwargs,
+    )
```
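
As a reminder of what `branch_task` wires up: a `@task.branch` callable must return the task id(s) to follow, and every other direct downstream task is skipped. A minimal sketch with illustrative task names:

```python
import random

from airflow.decorators import dag, task
from airflow.providers.standard.operators.empty import EmptyOperator


@dag(dag_id="branch_decorator_demo")
def branch_decorator_demo():
    fast = EmptyOperator(task_id="fast_path")
    slow = EmptyOperator(task_id="slow_path")

    @task.branch
    def choose() -> str:
        # Return the task_id of the branch to follow; the other is skipped.
        return "fast_path" if random.random() < 0.5 else "slow_path"

    choose() >> [fast, slow]


branch_decorator_demo()
```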

airflow/providers/standard/decorators/branch_virtualenv.py

```diff
@@ -0,0 +1,62 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Callable
+
+from airflow.providers.standard.version_compat import AIRFLOW_V_3_0_PLUS
+
+if AIRFLOW_V_3_0_PLUS:
+    from airflow.sdk.bases.decorator import task_decorator_factory
+else:
+    from airflow.decorators.base import task_decorator_factory  # type: ignore[no-redef]
+from airflow.providers.standard.decorators.python import _PythonDecoratedOperator
+from airflow.providers.standard.operators.python import BranchPythonVirtualenvOperator
+
+if TYPE_CHECKING:
+    from airflow.sdk.bases.decorator import TaskDecorator
+
+
+class _BranchPythonVirtualenvDecoratedOperator(_PythonDecoratedOperator, BranchPythonVirtualenvOperator):
+    """Wraps a Python callable and captures args/kwargs when called for execution."""
+
+    template_fields = BranchPythonVirtualenvOperator.template_fields
+    custom_operator_name: str = "@task.branch_virtualenv"
+
+
+def branch_virtualenv_task(
+    python_callable: Callable | None = None, multiple_outputs: bool | None = None, **kwargs
+) -> TaskDecorator:
+    """
+    Wrap a python function into a BranchPythonVirtualenvOperator.
+
+    For more information on how to use this operator, take a look at the guide:
+    :ref:`concepts:branching`
+
+    Accepts kwargs for operator kwarg. Can be reused in a single DAG.
+
+    :param python_callable: Function to decorate
+    :param multiple_outputs: if set, function return value will be
+        unrolled to multiple XCom values. Dict will unroll to xcom values with keys as XCom keys.
+        Defaults to False.
+    """
+    return task_decorator_factory(
+        python_callable=python_callable,
+        multiple_outputs=multiple_outputs,
+        decorated_operator_class=_BranchPythonVirtualenvDecoratedOperator,
+        **kwargs,
+    )
```

airflow/providers/standard/decorators/external_python.py

```diff
@@ -0,0 +1,70 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Callable
+
+from airflow.providers.standard.version_compat import AIRFLOW_V_3_0_PLUS
+
+if AIRFLOW_V_3_0_PLUS:
+    from airflow.sdk.bases.decorator import task_decorator_factory
+else:
+    from airflow.decorators.base import task_decorator_factory  # type: ignore[no-redef]
+from airflow.providers.standard.decorators.python import _PythonDecoratedOperator
+from airflow.providers.standard.operators.python import ExternalPythonOperator
+
+if TYPE_CHECKING:
+    from airflow.sdk.bases.decorator import TaskDecorator
+
+
+class _PythonExternalDecoratedOperator(_PythonDecoratedOperator, ExternalPythonOperator):
+    """Wraps a Python callable and captures args/kwargs when called for execution."""
+
+    template_fields = ExternalPythonOperator.template_fields
+    custom_operator_name: str = "@task.external_python"
+
+
+def external_python_task(
+    python: str | None = None,
+    python_callable: Callable | None = None,
+    multiple_outputs: bool | None = None,
+    **kwargs,
+) -> TaskDecorator:
+    """
+    Wrap a callable into an Airflow operator to run via a Python virtual environment.
+
+    Accepts kwargs for operator kwarg. Can be reused in a single DAG.
+
+    This function is only used during type checking or auto-completion.
+
+    :meta private:
+
+    :param python: Full path string (file-system specific) that points to a Python binary inside
+        a virtualenv that should be used (in ``VENV/bin`` folder). Should be absolute path
+        (so usually start with "/" or "X:/" depending on the filesystem/os used).
+    :param python_callable: Function to decorate
+    :param multiple_outputs: If set to True, the decorated function's return value will be unrolled to
+        multiple XCom values. Dict will unroll to XCom values with its keys as XCom keys.
+        Defaults to False.
+    """
+    return task_decorator_factory(
+        python=python,
+        python_callable=python_callable,
+        multiple_outputs=multiple_outputs,
+        decorated_operator_class=_PythonExternalDecoratedOperator,
+        **kwargs,
+    )
```
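
`@task.external_python` runs the callable in a pre-existing interpreter instead of building a venv per run. A usage sketch, assuming a pre-built environment at the illustrative path `/opt/venvs/etl/bin/python`:

```python
from airflow.decorators import dag, task


@dag(dag_id="external_python_demo")
def external_python_demo():
    # `python` must be an absolute path to the interpreter inside an existing venv.
    @task.external_python(python="/opt/venvs/etl/bin/python")
    def transform(rows: list[dict]) -> int:
        # Runs inside the external interpreter; imports resolve against that env.
        return len(rows)

    transform(rows=[{"id": 1}, {"id": 2}])


external_python_demo()
```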

airflow/providers/standard/decorators/python.py

```diff
@@ -0,0 +1,86 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+from collections.abc import Sequence
+from typing import TYPE_CHECKING, Callable
+
+from airflow.providers.standard.operators.python import PythonOperator
+from airflow.providers.standard.version_compat import AIRFLOW_V_3_0_PLUS
+
+if AIRFLOW_V_3_0_PLUS:
+    from airflow.sdk.bases.decorator import DecoratedOperator, task_decorator_factory
+else:
+    from airflow.decorators.base import DecoratedOperator, task_decorator_factory  # type: ignore[no-redef]
+
+if TYPE_CHECKING:
+    from airflow.sdk.bases.decorator import TaskDecorator
+
+
+class _PythonDecoratedOperator(DecoratedOperator, PythonOperator):
+    """
+    Wraps a Python callable and captures args/kwargs when called for execution.
+
+    :param python_callable: A reference to an object that is callable
+    :param op_kwargs: a dictionary of keyword arguments that will get unpacked
+        in your function (templated)
+    :param op_args: a list of positional arguments that will get unpacked when
+        calling your callable (templated)
+    :param multiple_outputs: If set to True, the decorated function's return value will be unrolled to
+        multiple XCom values. Dict will unroll to XCom values with its keys as XCom keys. Defaults to False.
+    """
+
+    template_fields: Sequence[str] = ("templates_dict", "op_args", "op_kwargs")
+    template_fields_renderers = {"templates_dict": "json", "op_args": "py", "op_kwargs": "py"}
+
+    custom_operator_name: str = "@task"
+
+    def __init__(self, *, python_callable, op_args, op_kwargs, **kwargs) -> None:
+        kwargs_to_upstream = {
+            "python_callable": python_callable,
+            "op_args": op_args,
+            "op_kwargs": op_kwargs,
+        }
+        super().__init__(
+            kwargs_to_upstream=kwargs_to_upstream,
+            python_callable=python_callable,
+            op_args=op_args,
+            op_kwargs=op_kwargs,
+            **kwargs,
+        )
+
+
+def python_task(
+    python_callable: Callable | None = None,
+    multiple_outputs: bool | None = None,
+    **kwargs,
+) -> TaskDecorator:
+    """
+    Wrap a function into an Airflow operator.
+
+    Accepts kwargs for operator kwarg. Can be reused in a single DAG.
+
+    :param python_callable: Function to decorate
+    :param multiple_outputs: If set to True, the decorated function's return value will be unrolled to
+        multiple XCom values. Dict will unroll to XCom values with its keys as XCom keys. Defaults to False.
+    """
+    return task_decorator_factory(
+        python_callable=python_callable,
+        multiple_outputs=multiple_outputs,
+        decorated_operator_class=_PythonDecoratedOperator,
+        **kwargs,
+    )
```
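
The `multiple_outputs` flag documented above fans a returned dict out into one XCom per key. A minimal sketch:

```python
from airflow.decorators import dag, task


@dag(dag_id="python_decorator_demo")
def python_decorator_demo():
    @task(multiple_outputs=True)
    def stats(values: list[int]) -> dict[str, int]:
        # With multiple_outputs=True, each dict key is pushed as its own XCom.
        return {"total": sum(values), "count": len(values)}

    result = stats([1, 2, 3])

    @task
    def report(total: int, count: int) -> None:
        print(f"{count} values, total {total}")

    # Individual keys are consumable as separate XComArg references.
    report(total=result["total"], count=result["count"])


python_decorator_demo()
```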

airflow/providers/standard/decorators/python_virtualenv.py

```diff
@@ -0,0 +1,67 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Callable
+
+from airflow.providers.standard.version_compat import AIRFLOW_V_3_0_PLUS
+
+if AIRFLOW_V_3_0_PLUS:
+    from airflow.sdk.bases.decorator import task_decorator_factory
+else:
+    from airflow.decorators.base import task_decorator_factory  # type: ignore[no-redef]
+
+
+from airflow.providers.standard.decorators.python import _PythonDecoratedOperator
+from airflow.providers.standard.operators.python import PythonVirtualenvOperator
+
+if TYPE_CHECKING:
+    from airflow.sdk.bases.decorator import TaskDecorator
+
+
+class _PythonVirtualenvDecoratedOperator(_PythonDecoratedOperator, PythonVirtualenvOperator):
+    """Wraps a Python callable and captures args/kwargs when called for execution."""
+
+    template_fields = PythonVirtualenvOperator.template_fields
+    custom_operator_name: str = "@task.virtualenv"
+
+
+def virtualenv_task(
+    python_callable: Callable | None = None,
+    multiple_outputs: bool | None = None,
+    **kwargs,
+) -> TaskDecorator:
+    """
+    Wrap a callable into an Airflow operator to run via a Python virtual environment.
+
+    Accepts kwargs for operator kwarg. Can be reused in a single DAG.
+
+    This function is only used during type checking or auto-completion.
+
+    :meta private:
+
+    :param python_callable: Function to decorate
+    :param multiple_outputs: If set to True, the decorated function's return value will be unrolled to
+        multiple XCom values. Dict will unroll to XCom values with its keys as XCom keys.
+        Defaults to False.
+    """
+    return task_decorator_factory(
+        python_callable=python_callable,
+        multiple_outputs=multiple_outputs,
+        decorated_operator_class=_PythonVirtualenvDecoratedOperator,
+        **kwargs,
+    )
```
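
`@task.virtualenv` builds a fresh environment per task run, so the callable's imports resolve inside it. A sketch (pinned requirement and URL are illustrative):

```python
from airflow.decorators import dag, task


@dag(dag_id="virtualenv_decorator_demo")
def virtualenv_decorator_demo():
    # A new virtualenv is created for this task; requirements are illustrative.
    @task.virtualenv(requirements=["requests==2.32.3"], system_site_packages=False)
    def fetch_status(url: str) -> int:
        import requests  # imported inside the venv, not the worker env

        return requests.get(url, timeout=10).status_code

    fetch_status("https://example.com")


virtualenv_decorator_demo()
```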

airflow/providers/standard/decorators/sensor.py

```diff
@@ -0,0 +1,83 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+from __future__ import annotations
+
+from collections.abc import Sequence
+from typing import TYPE_CHECKING, Callable, ClassVar
+
+from airflow.providers.standard.version_compat import AIRFLOW_V_3_0_PLUS
+
+if AIRFLOW_V_3_0_PLUS:
+    from airflow.sdk.bases.decorator import get_unique_task_id, task_decorator_factory
+else:
+    from airflow.decorators.base import get_unique_task_id, task_decorator_factory  # type: ignore[no-redef]
+
+
+from airflow.providers.standard.sensors.python import PythonSensor
+
+if TYPE_CHECKING:
+    from airflow.sdk.bases.decorator import TaskDecorator
+
+
+class DecoratedSensorOperator(PythonSensor):
+    """
+    Wraps a Python callable and captures args/kwargs when called for execution.
+
+    :param python_callable: A reference to an object that is callable
+    :param task_id: task Id
+    :param op_args: a list of positional arguments that will get unpacked when
+        calling your callable (templated)
+    :param op_kwargs: a dictionary of keyword arguments that will get unpacked
+        in your function (templated)
+    :param kwargs_to_upstream: For certain operators, we might need to upstream certain arguments
+        that would otherwise be absorbed by the DecoratedOperator (for example python_callable for the
+        PythonOperator). This gives a user the option to upstream kwargs as needed.
+    """
+
+    template_fields: Sequence[str] = ("op_args", "op_kwargs")
+    template_fields_renderers: ClassVar[dict[str, str]] = {"op_args": "py", "op_kwargs": "py"}
+
+    custom_operator_name = "@task.sensor"
+
+    # since we won't mutate the arguments, we should just do the shallow copy
+    # there are some cases we can't deepcopy the objects (e.g protobuf).
+    shallow_copy_attrs: Sequence[str] = ("python_callable",)
+
+    def __init__(
+        self,
+        *,
+        task_id: str,
+        **kwargs,
+    ) -> None:
+        kwargs["task_id"] = get_unique_task_id(task_id, kwargs.get("dag"), kwargs.get("task_group"))
+        super().__init__(**kwargs)
+
+
+def sensor_task(python_callable: Callable | None = None, **kwargs) -> TaskDecorator:
+    """
+    Wrap a function into an Airflow operator.
+
+    Accepts kwargs for operator kwarg. Can be reused in a single DAG.
+    :param python_callable: Function to decorate
+    """
+    return task_decorator_factory(
+        python_callable=python_callable,
+        multiple_outputs=False,
+        decorated_operator_class=DecoratedSensorOperator,
+        **kwargs,
+    )
```
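
Since `DecoratedSensorOperator` subclasses `PythonSensor`, the wrapped callable acts as the poke method: a truthy return completes the sensor, a falsy one pokes again. A sketch (the marker path is illustrative):

```python
import os

from airflow.decorators import dag, task


@dag(dag_id="sensor_decorator_demo")
def sensor_decorator_demo():
    @task.sensor(poke_interval=30, timeout=3600, mode="reschedule")
    def wait_for_marker(path: str = "/tmp/marker") -> bool:
        # Called repeatedly until it returns True or the timeout is hit.
        return os.path.exists(path)

    wait_for_marker()


sensor_decorator_demo()
```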

airflow/providers/standard/decorators/short_circuit.py

```diff
@@ -0,0 +1,65 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Callable
+
+from airflow.providers.standard.version_compat import AIRFLOW_V_3_0_PLUS
+
+if AIRFLOW_V_3_0_PLUS:
+    from airflow.sdk.bases.decorator import task_decorator_factory
+else:
+    from airflow.decorators.base import task_decorator_factory  # type: ignore[no-redef]
+
+from airflow.providers.standard.decorators.python import _PythonDecoratedOperator
+from airflow.providers.standard.operators.python import ShortCircuitOperator
+
+if TYPE_CHECKING:
+    from airflow.sdk.bases.decorator import TaskDecorator
+
+
+class _ShortCircuitDecoratedOperator(_PythonDecoratedOperator, ShortCircuitOperator):
+    """Wraps a Python callable and captures args/kwargs when called for execution."""
+
+    template_fields = ShortCircuitOperator.template_fields
+    custom_operator_name: str = "@task.short_circuit"
+
+
+def short_circuit_task(
+    python_callable: Callable | None = None,
+    multiple_outputs: bool | None = None,
+    **kwargs,
+) -> TaskDecorator:
+    """
+    Wrap a function into a ShortCircuitOperator.
+
+    Accepts kwargs for operator kwarg. Can be reused in a single DAG.
+
+    This function is only used during type checking or auto-completion.
+
+    :param python_callable: Function to decorate
+    :param multiple_outputs: If set to True, the decorated function's return value will be unrolled to
+        multiple XCom values. Dict will unroll to XCom values with its keys as XCom keys. Defaults to False.
+
+    :meta private:
+    """
+    return task_decorator_factory(
+        python_callable=python_callable,
+        multiple_outputs=multiple_outputs,
+        decorated_operator_class=_ShortCircuitDecoratedOperator,
+        **kwargs,
+    )
```
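
`@task.short_circuit` gates everything downstream on the callable's truthiness. A sketch with an illustrative weekday check:

```python
import datetime

from airflow.decorators import dag, task
from airflow.providers.standard.operators.empty import EmptyOperator


@dag(dag_id="short_circuit_demo")
def short_circuit_demo():
    @task.short_circuit
    def is_weekday() -> bool:
        # Falsy return skips all downstream tasks; truthy lets them run.
        return datetime.datetime.now().weekday() < 5

    is_weekday() >> EmptyOperator(task_id="weekday_only_work")


short_circuit_demo()
```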

airflow/providers/standard/get_provider_info.py

```diff
@@ -26,9 +26,6 @@ def get_provider_info():
         "package-name": "apache-airflow-providers-standard",
         "name": "Standard",
         "description": "Airflow Standard Provider\n",
-        "state": "ready",
-        "source-date-epoch": 1743477899,
-        "versions": ["0.3.0", "0.2.0", "0.1.1", "0.1.0", "0.0.3", "0.0.2", "0.0.1"],
         "integrations": [
             {
                 "integration-name": "Standard",
@@ -106,6 +103,33 @@ def get_provider_info():
                 },
             }
         },
-        "
-
+        "task-decorators": [
+            {"class-name": "airflow.providers.standard.decorators.python.python_task", "name": "python"},
+            {"class-name": "airflow.providers.standard.decorators.bash.bash_task", "name": "bash"},
+            {
+                "class-name": "airflow.providers.standard.decorators.branch_external_python.branch_external_python_task",
+                "name": "branch_external_python",
+            },
+            {
+                "class-name": "airflow.providers.standard.decorators.branch_python.branch_task",
+                "name": "branch",
+            },
+            {
+                "class-name": "airflow.providers.standard.decorators.branch_virtualenv.branch_virtualenv_task",
+                "name": "branch_virtualenv",
+            },
+            {
+                "class-name": "airflow.providers.standard.decorators.external_python.external_python_task",
+                "name": "external_python",
+            },
+            {
+                "class-name": "airflow.providers.standard.decorators.python_virtualenv.virtualenv_task",
+                "name": "virtualenv",
+            },
+            {"class-name": "airflow.providers.standard.decorators.sensor.sensor_task", "name": "sensor"},
+            {
+                "class-name": "airflow.providers.standard.decorators.short_circuit.short_circuit_task",
+                "name": "short_circuit",
+            },
+        ],
     }
```
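
These `task-decorators` entries are what expose the provider-supplied decorators under the common `@task` namespace, so once the provider is installed the registered names resolve without importing the provider modules directly. A sketch of the effect (assuming the provider is installed):

```python
from airflow.decorators import task

# Each registered "name" becomes an attribute of the shared @task
# namespace, dispatching to the listed "class-name" factory.
bash_decorator = task.bash
branch_decorator = task.branch
short_circuit_decorator = task.short_circuit
```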

airflow/providers/standard/operators/latest_only.py

```diff
@@ -25,10 +25,12 @@ from typing import TYPE_CHECKING
 import pendulum
 
 from airflow.providers.standard.operators.branch import BaseBranchOperator
+from airflow.providers.standard.version_compat import AIRFLOW_V_3_0_PLUS
 from airflow.utils.types import DagRunType
 
 if TYPE_CHECKING:
     from airflow.models import DAG, DagRun
+    from airflow.timetables.base import DagRunInfo
 
 try:
     from airflow.sdk.definitions.context import Context
@@ -46,6 +48,10 @@ class LatestOnlyOperator(BaseBranchOperator):
 
     Note that downstream tasks are never skipped if the given DAG_Run is
     marked as externally triggered.
+
+    Note that when used with timetables that produce zero-length or point-in-time data intervals
+    (e.g., ``DeltaTriggerTimetable``), this operator assumes each run is the latest
+    and does not skip downstream tasks.
     """
 
     ui_color = "#e9ffdb"  # nyanza
@@ -58,8 +64,7 @@ class LatestOnlyOperator(BaseBranchOperator):
             self.log.info("Manually triggered DAG_Run: allowing execution to proceed.")
             return list(context["task"].get_direct_relative_ids(upstream=False))
 
-
-        next_info = dag.next_dagrun_info(dag.get_run_data_interval(dag_run), restricted=False)
+        next_info = self._get_next_run_info(context, dag_run)
         now = pendulum.now("UTC")
 
         if next_info is None:
@@ -74,6 +79,15 @@ class LatestOnlyOperator(BaseBranchOperator):
             now,
         )
 
+        if left_window == right_window:
+            self.log.info(
+                "Zero-length interval [%s, %s) from timetable (%s); treating current run as latest.",
+                left_window,
+                right_window,
+                self.dag.timetable.__class__,
+            )
+            return list(context["task"].get_direct_relative_ids(upstream=False))
+
         if not left_window < now <= right_window:
             self.log.info("Not latest execution, skipping downstream.")
             # we return an empty list, thus the parent BaseBranchOperator
@@ -82,3 +96,21 @@ class LatestOnlyOperator(BaseBranchOperator):
         else:
             self.log.info("Latest, allowing execution to proceed.")
             return list(context["task"].get_direct_relative_ids(upstream=False))
+
+    def _get_next_run_info(self, context: Context, dag_run: DagRun) -> DagRunInfo | None:
+        dag: DAG = context["dag"]  # type: ignore[assignment]
+
+        if AIRFLOW_V_3_0_PLUS:
+            from airflow.timetables.base import DataInterval, TimeRestriction
+
+            time_restriction = TimeRestriction(earliest=None, latest=None, catchup=True)
+            current_interval = DataInterval(start=dag_run.data_interval_start, end=dag_run.data_interval_end)
+
+            next_info = dag.timetable.next_dagrun_info(
+                last_automated_data_interval=current_interval,
+                restriction=time_restriction,
+            )
+
+        else:
+            next_info = dag.next_dagrun_info(dag.get_run_data_interval(dag_run), restricted=False)
+        return next_info
```
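
The behavioural change above matters mainly for backfills: `LatestOnlyOperator` skips its downstream tasks unless the current run's data interval contains `now()`, and zero-length intervals are now treated as always-latest. A minimal usage sketch:

```python
import datetime

from airflow import DAG
from airflow.providers.standard.operators.empty import EmptyOperator
from airflow.providers.standard.operators.latest_only import LatestOnlyOperator

with DAG(
    dag_id="latest_only_demo",
    schedule="@hourly",
    start_date=datetime.datetime(2025, 1, 1),
    catchup=True,
) as dag:
    latest_only = LatestOnlyOperator(task_id="latest_only")
    # Skipped on catchup/backfill runs, executed only for the latest interval.
    notify = EmptyOperator(task_id="notify")
    latest_only >> notify
```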

airflow/providers/standard/operators/trigger_dagrun.py

```diff
@@ -41,9 +41,9 @@ from airflow.models.dagrun import DagRun
 from airflow.providers.standard.triggers.external_task import DagStateTrigger
 from airflow.providers.standard.version_compat import AIRFLOW_V_3_0_PLUS
 from airflow.utils import timezone
-from airflow.utils.session import provide_session
+from airflow.utils.session import NEW_SESSION, provide_session
 from airflow.utils.state import DagRunState
-from airflow.utils.types import DagRunType
+from airflow.utils.types import NOTSET, ArgNotSet, DagRunType
 
 XCOM_LOGICAL_DATE_ISO = "trigger_logical_date_iso"
 XCOM_RUN_ID = "trigger_run_id"
@@ -153,7 +153,7 @@ class TriggerDagRunOperator(BaseOperator):
         trigger_dag_id: str,
         trigger_run_id: str | None = None,
         conf: dict | None = None,
-        logical_date: str | datetime.datetime | None =
+        logical_date: str | datetime.datetime | None | ArgNotSet = NOTSET,
         reset_dag_run: bool = False,
         wait_for_completion: bool = False,
         poke_interval: int = 60,
@@ -180,19 +180,23 @@
         self.failed_states = [DagRunState.FAILED]
         self.skip_when_already_exists = skip_when_already_exists
         self._defer = deferrable
-
-        if logical_date is
-
+        self.logical_date = logical_date
+        if logical_date is NOTSET:
+            self.logical_date = NOTSET
+        elif logical_date is None or isinstance(logical_date, (str, datetime.datetime)):
+            self.logical_date = logical_date
+        else:
             raise TypeError(
-                f"Expected str
+                f"Expected str, datetime.datetime, or None for parameter 'logical_date'. Got {type(logical_date).__name__}"
             )
 
-        self.logical_date = logical_date
-
     def execute(self, context: Context):
-        if self.logical_date is
-
-
+        if self.logical_date is NOTSET:
+            # If no logical_date is provided we will set utcnow()
+            parsed_logical_date = timezone.utcnow()
+        elif self.logical_date is None or isinstance(self.logical_date, datetime.datetime):
+            parsed_logical_date = self.logical_date  # type: ignore
+        elif isinstance(self.logical_date, str):
             parsed_logical_date = timezone.parse(self.logical_date)
 
         try:
@@ -231,10 +235,9 @@
                 allowed_states=self.allowed_states,
                 failed_states=self.failed_states,
                 poke_interval=self.poke_interval,
+                deferrable=self._defer,
             )
 
-        # TODO: Support deferral
-
     def _trigger_dag_af_2(self, context, run_id, parsed_logical_date):
         try:
             dag_run = trigger_dag(
@@ -304,8 +307,40 @@
             self.log.info("%s finished with allowed state %s", self.trigger_dag_id, state)
             return
 
+    def execute_complete(self, context: Context, event: tuple[str, dict[str, Any]]):
+        if AIRFLOW_V_3_0_PLUS:
+            self._trigger_dag_run_af_3_execute_complete(event=event)
+        else:
+            self._trigger_dag_run_af_2_execute_complete(event=event)
+
+    def _trigger_dag_run_af_3_execute_complete(self, event: tuple[str, dict[str, Any]]):
+        run_ids = event[1]["run_ids"]
+        event_data = event[1]
+        failed_run_id_conditions = []
+
+        for run_id in run_ids:
+            state = event_data.get(run_id)
+            if state in self.failed_states:
+                failed_run_id_conditions.append(run_id)
+                continue
+            if state in self.allowed_states:
+                self.log.info(
+                    "%s finished with allowed state %s for run_id %s",
+                    self.trigger_dag_id,
+                    state,
+                    run_id,
+                )
+
+        if failed_run_id_conditions:
+            raise AirflowException(
+                f"{self.trigger_dag_id} failed with failed states {self.failed_states} for run_ids"
+                f" {failed_run_id_conditions}"
+            )
+
     @provide_session
-    def
+    def _trigger_dag_run_af_2_execute_complete(
+        self, event: tuple[str, dict[str, Any]], session: Session = NEW_SESSION
+    ):
         # This logical_date is parsed from the return trigger event
         provided_logical_date = event[1]["execution_dates"][0]
         try:
```
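
With `deferrable` now forwarded to `DagStateTrigger` (the old `# TODO: Support deferral` is gone), waiting on the child run can release the worker slot instead of poking in-process. A usage sketch; `target_dag` and the conf payload are illustrative:

```python
from airflow.providers.standard.operators.trigger_dagrun import TriggerDagRunOperator

trigger = TriggerDagRunOperator(
    task_id="trigger_child",
    trigger_dag_id="target_dag",  # illustrative child DAG id
    conf={"source": "parent"},
    # logical_date left unset -> NOTSET sentinel -> utcnow() at execute time
    wait_for_completion=True,
    deferrable=True,  # wait via DagStateTrigger rather than a worker poke loop
    allowed_states=["success"],
)
```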

airflow/providers/standard/sensors/external_task.py

```diff
@@ -346,8 +346,6 @@ class ExternalTaskSensor(BaseSensorOperator):
         """Calculate the normalized count based on the type of check."""
         if self.external_task_ids:
             return count / len(self.external_task_ids)
-        elif self.external_task_group_id:
-            return count / len(dttm_filter)
         else:
             return count
 
@@ -421,16 +419,22 @@
         if not self.deferrable:
             super().execute(context)
         else:
+            dttm_filter = self._get_dttm_filter(context)
+            logical_or_execution_dates = (
+                {"logical_dates": dttm_filter} if AIRFLOW_V_3_0_PLUS else {"execution_date": dttm_filter}
+            )
             self.defer(
                 timeout=self.execution_timeout,
                 trigger=WorkflowTrigger(
                     external_dag_id=self.external_dag_id,
                     external_task_group_id=self.external_task_group_id,
                     external_task_ids=self.external_task_ids,
-                    logical_dates=self._get_dttm_filter(context),
                     allowed_states=self.allowed_states,
+                    failed_states=self.failed_states,
+                    skipped_states=self.skipped_states,
                     poke_interval=self.poll_interval,
                     soft_fail=self.soft_fail,
+                    **logical_or_execution_dates,
                 ),
                 method_name="execute_complete",
            )
```
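
On Airflow 3 the deferral path now passes `logical_dates` (plus failed/skipped states) to `WorkflowTrigger`, while pre-3.0 keeps `execution_date`. Usage is unchanged; a sketch with illustrative ids:

```python
from airflow.providers.standard.sensors.external_task import ExternalTaskSensor

wait_upstream = ExternalTaskSensor(
    task_id="wait_for_upstream",
    external_dag_id="upstream_dag",  # illustrative
    external_task_id="publish",      # illustrative
    allowed_states=["success"],
    failed_states=["failed"],
    deferrable=True,  # defers to WorkflowTrigger with the version-appropriate date kwarg
)
```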

airflow/providers/standard/triggers/external_task.py

```diff
@@ -50,6 +50,7 @@ class WorkflowTrigger(BaseTrigger):
     :param allowed_states: States considered as successful for external tasks.
     :param poke_interval: The interval (in seconds) for poking the external tasks.
     :param soft_fail: If True, the trigger will not fail the entire dag on external task failure.
+    :param logical_dates: A list of logical dates for the external dag.
     """
 
     def __init__(
@@ -57,6 +58,7 @@ class WorkflowTrigger(BaseTrigger):
         external_dag_id: str,
         run_ids: list[str] | None = None,
         execution_dates: list[datetime] | None = None,
+        logical_dates: list[datetime] | None = None,
         external_task_ids: typing.Collection[str] | None = None,
         external_task_group_id: str | None = None,
         failed_states: typing.Iterable[str] | None = None,
@@ -76,6 +78,7 @@ class WorkflowTrigger(BaseTrigger):
         self.poke_interval = poke_interval
         self.soft_fail = soft_fail
         self.execution_dates = execution_dates
+        self.logical_dates = logical_dates
         super().__init__(**kwargs)
 
     def serialize(self) -> tuple[str, dict[str, Any]]:
@@ -92,6 +95,7 @@ class WorkflowTrigger(BaseTrigger):
         }
         if AIRFLOW_V_3_0_PLUS:
             data["run_ids"] = self.run_ids
+            data["logical_dates"] = self.logical_dates
         else:
             data["execution_dates"] = self.execution_dates
 
@@ -99,9 +103,16 @@ class WorkflowTrigger(BaseTrigger):
 
     async def run(self) -> typing.AsyncIterator[TriggerEvent]:
         """Check periodically tasks, task group or dag status."""
+        if AIRFLOW_V_3_0_PLUS:
+            get_count_func = self._get_count_af_3
+            run_id_or_dates = (self.run_ids or self.logical_dates) or []
+        else:
+            get_count_func = self._get_count
+            run_id_or_dates = self.execution_dates or []
+
         while True:
             if self.failed_states:
-                failed_count = await
+                failed_count = await get_count_func(self.failed_states)
                 if failed_count > 0:
                     yield TriggerEvent({"status": "failed"})
                     return
@@ -109,18 +120,43 @@ class WorkflowTrigger(BaseTrigger):
                 yield TriggerEvent({"status": "success"})
                 return
             if self.skipped_states:
-                skipped_count = await
+                skipped_count = await get_count_func(self.skipped_states)
                 if skipped_count > 0:
                     yield TriggerEvent({"status": "skipped"})
                     return
-            allowed_count = await
-
-            if allowed_count == len(
+            allowed_count = await get_count_func(self.allowed_states)
+
+            if allowed_count == len(run_id_or_dates):  # type: ignore[arg-type]
                 yield TriggerEvent({"status": "success"})
                 return
             self.log.info("Sleeping for %s seconds", self.poke_interval)
             await asyncio.sleep(self.poke_interval)
 
+    async def _get_count_af_3(self, states):
+        from airflow.sdk.execution_time.task_runner import RuntimeTaskInstance
+
+        if self.external_task_ids or self.external_task_group_id:
+            count = await sync_to_async(RuntimeTaskInstance.get_ti_count)(
+                dag_id=self.external_dag_id,
+                task_ids=self.external_task_ids,
+                task_group_id=self.external_task_group_id,
+                logical_dates=self.logical_dates,
+                run_ids=self.run_ids,
+                states=states,
+            )
+        else:
+            count = await sync_to_async(RuntimeTaskInstance.get_dr_count)(
+                dag_id=self.external_dag_id,
+                logical_dates=self.logical_dates,
+                run_ids=self.run_ids,
+                states=states,
+            )
+
+        if self.external_task_ids:
+            return count / len(self.external_task_ids)
+        else:
+            return count
+
     @sync_to_async
     def _get_count(self, states: typing.Iterable[str] | None) -> int:
         """
@@ -170,29 +206,58 @@ class DagStateTrigger(BaseTrigger):
             "dag_id": self.dag_id,
             "states": self.states,
             "poll_interval": self.poll_interval,
+            "run_ids": self.run_ids,
+            "execution_dates": self.execution_dates,
         }
 
-        if AIRFLOW_V_3_0_PLUS:
-            data["run_ids"] = self.run_ids
-        else:
-            data["execution_dates"] = self.execution_dates
-
         return "airflow.providers.standard.triggers.external_task.DagStateTrigger", data
 
     async def run(self) -> typing.AsyncIterator[TriggerEvent]:
         """Check periodically if the dag run exists, and has hit one of the states yet, or not."""
+        runs_ids_or_dates = 0
+        if self.run_ids:
+            runs_ids_or_dates = len(self.run_ids)
+        elif self.execution_dates:
+            runs_ids_or_dates = len(self.execution_dates)
+
+        if AIRFLOW_V_3_0_PLUS:
+            event = await self.validate_count_dags_af_3(runs_ids_or_dates_len=runs_ids_or_dates)
+            yield TriggerEvent(event)
+            return
+        else:
+            while True:
+                num_dags = await self.count_dags()  # type: ignore[call-arg]
+                if num_dags == runs_ids_or_dates:
+                    yield TriggerEvent(self.serialize())
+                    return
+                await asyncio.sleep(self.poll_interval)
+
+    async def validate_count_dags_af_3(self, runs_ids_or_dates_len: int = 0) -> tuple[str, dict[str, Any]]:
+        from airflow.sdk.execution_time.task_runner import RuntimeTaskInstance
+
+        cls_path, data = self.serialize()
+
         while True:
-
-
-
-
-
-
+            num_dags = await sync_to_async(RuntimeTaskInstance.get_dr_count)(
+                dag_id=self.dag_id,
+                run_ids=self.run_ids,
+                states=self.states,  # type: ignore[arg-type]
+                logical_dates=self.execution_dates,
+            )
+            if num_dags == runs_ids_or_dates_len:
+                if isinstance(self.run_ids, list):
+                    for run_id in self.run_ids:
+                        state = await sync_to_async(RuntimeTaskInstance.get_dagrun_state)(
+                            dag_id=self.dag_id,
+                            run_id=run_id,
+                        )
+                        data[run_id] = state
+                return cls_path, data
             await asyncio.sleep(self.poll_interval)
 
     @sync_to_async
     @provide_session
-    def count_dags(self, *, session: Session = NEW_SESSION) -> int
+    def count_dags(self, *, session: Session = NEW_SESSION) -> int:
         """Count how many dag runs in the database match our criteria."""
         _dag_run_date_condition = (
             DagRun.run_id.in_(self.run_ids)
```
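
On Airflow 3 the trigger's event payload now carries `run_ids` and, once the counts match, a per-run state map keyed by run_id; `execute_complete` in `trigger_dagrun.py` reads exactly that shape. A sketch of the payload as implied by the code above (all values illustrative):

```python
# (class path, data) tuple yielded by DagStateTrigger.validate_count_dags_af_3
event = (
    "airflow.providers.standard.triggers.external_task.DagStateTrigger",
    {
        "dag_id": "target_dag",
        "states": ["success", "failed"],
        "poll_interval": 60,
        "run_ids": ["manual__2025-01-01T00:00:00+00:00"],
        "execution_dates": None,
        # per-run states appended before the event is returned
        "manual__2025-01-01T00:00:00+00:00": "success",
    },
)

# How the operator side consumes it:
run_ids = event[1]["run_ids"]
states = {run_id: event[1].get(run_id) for run_id in run_ids}
```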

{apache_airflow_providers_standard-0.3.0rc1.dist-info → apache_airflow_providers_standard-0.4.0.dist-info}/METADATA

```diff
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: apache-airflow-providers-standard
-Version: 0.
+Version: 0.4.0
 Summary: Provider package apache-airflow-providers-standard for Apache Airflow
 Keywords: airflow-provider,standard,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -20,10 +20,10 @@ Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Topic :: System :: Monitoring
-Requires-Dist: apache-airflow>=2.9.
+Requires-Dist: apache-airflow>=2.9.0
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-standard/0.
-Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-standard/0.
+Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-standard/0.4.0/changelog.html
+Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-standard/0.4.0
 Project-URL: Mastodon, https://fosstodon.org/@airflow
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
@@ -54,7 +54,7 @@ Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/
 
 Package ``apache-airflow-providers-standard``
 
-Release: ``0.
+Release: ``0.4.0``
 
 
 Airflow Standard Provider
@@ -67,7 +67,7 @@ This is a provider package for ``standard`` provider. All classes for this provi
 are in ``airflow.providers.standard`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-standard/0.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-standard/0.4.0/>`_.
 
 Installation
 ------------
@@ -88,5 +88,5 @@ PIP package Version required
 ================== ==================
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-standard/0.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-standard/0.4.0/changelog.html>`_.
 
```

{apache_airflow_providers_standard-0.3.0rc1.dist-info → apache_airflow_providers_standard-0.4.0.dist-info}/RECORD

```diff
@@ -1,7 +1,17 @@
 airflow/providers/standard/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
-airflow/providers/standard/__init__.py,sha256=
-airflow/providers/standard/get_provider_info.py,sha256=
+airflow/providers/standard/__init__.py,sha256=qhDil5br-GkCarIJ4gSk1fMsAIsX1W5Yi7YNNmB_ktE,1495
+airflow/providers/standard/get_provider_info.py,sha256=96C-S4JowGsq9zyHVtmzZWm2VdugMee-0XZIvvgBqZI,6198
 airflow/providers/standard/version_compat.py,sha256=aHg90_DtgoSnQvILFICexMyNlHlALBdaeWqkX3dFDug,1605
+airflow/providers/standard/decorators/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
+airflow/providers/standard/decorators/bash.py,sha256=J13t48yrRv7XpDV8_QWtI0IXbqNiqxW9Ct0ngmrQAdE,4396
+airflow/providers/standard/decorators/branch_external_python.py,sha256=M6JwUxKQj8KOHZL8cHtKmn37leEz4DKoGDrexNH2aAA,2557
+airflow/providers/standard/decorators/branch_python.py,sha256=MbtK3Zv5fKynrhv0PFeOzPQQ1IiW9PcurGq1bqotLqo,2476
+airflow/providers/standard/decorators/branch_virtualenv.py,sha256=m_3VE88QcuMj_yFg5h6MANVsfz1cKg9TyBbJ_t6zBA8,2558
+airflow/providers/standard/decorators/external_python.py,sha256=FxrsihfFy9oVjbyLugtFLDq7VkOdVXJScH0kuua1ApE,2877
+airflow/providers/standard/decorators/python.py,sha256=9Fdk8CRQJ7HQzhKT1Qh-CzfbX0_aw12ccjbh6fdBRdc,3442
+airflow/providers/standard/decorators/python_virtualenv.py,sha256=Xhul1iA0mJlN5N1EZl1LWIs90pUhS6bawQtVSpQhqEg,2565
+airflow/providers/standard/decorators/sensor.py,sha256=04PPtcDhSr_Wa4LJct2eiBczb8JEAzjiSos2CqBu3-4,3230
+airflow/providers/standard/decorators/short_circuit.py,sha256=3_6UHDhloPMT3fGeHFDBjf3rScXQm4wtfx59n-n__Ys,2506
 airflow/providers/standard/hooks/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/standard/hooks/filesystem.py,sha256=fDZwW_EYD8z1QXnReqI7gIwSbDPZNTKtqQvgktiP02o,2870
 airflow/providers/standard/hooks/package_index.py,sha256=U7_s_02-wwz9kTkzKr3JAhVQj2spuntWd_GmjfpV-y4,3769
@@ -11,22 +21,22 @@ airflow/providers/standard/operators/bash.py,sha256=dPpaAbLRPyVo1207npt0Fr9Shdzv
 airflow/providers/standard/operators/branch.py,sha256=C_AUd7TSo_U52GiWsrR7rJIsRU5KKfrybBFw84brm_c,4070
 airflow/providers/standard/operators/datetime.py,sha256=bYDdbfAyAlEXRRHjOgB06UhgDum6SPdd5I3u-ylPSaw,5005
 airflow/providers/standard/operators/empty.py,sha256=C7_uLWJK6kExzlNc7xdMo8VAQ_ONWITvEQ2FImrMepM,1324
-airflow/providers/standard/operators/latest_only.py,sha256=
+airflow/providers/standard/operators/latest_only.py,sha256=BVsPtkrQtaZQvBWlTAxlrJPfNThNOZWw7Vf_6we7g4o,4780
 airflow/providers/standard/operators/python.py,sha256=l0aj8d9Cwg_B8snBZA815QKy8MKhRvISfbmHEteTGTk,50106
 airflow/providers/standard/operators/smooth.py,sha256=d3OV38EzV_wlfMYN3JGWGwyzsFonx8VbqgGfXSw0_bM,1382
-airflow/providers/standard/operators/trigger_dagrun.py,sha256=
+airflow/providers/standard/operators/trigger_dagrun.py,sha256=WJuT1jUkLSh4JxdUtIYVefEPQZWKU7JYGjRqye5KkFs,15775
 airflow/providers/standard/operators/weekday.py,sha256=Qg7LhXYtybVSGZn8uQqF-r7RB7zOXfe3R6vSGVa_rJk,5083
 airflow/providers/standard/sensors/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/standard/sensors/bash.py,sha256=afyz1m-1qzAp1fE5ta71rXhpTrKcCH7bNfwUU2Hv7GQ,5025
 airflow/providers/standard/sensors/date_time.py,sha256=hRUuLaNgqDh4jqaIaD8zdyq2BUXkpWM2NzJN5YkwTJI,6077
-airflow/providers/standard/sensors/external_task.py,sha256=
+airflow/providers/standard/sensors/external_task.py,sha256=F0hvtcHFx8HQLLNvdepDx2Y9kjn8zbCyLTx1Uw7BbRA,27261
 airflow/providers/standard/sensors/filesystem.py,sha256=rfupSeHtFGdAcL6cw3H6u6ttBxogSThYiPqsUKgABMU,6029
 airflow/providers/standard/sensors/python.py,sha256=kvgpHN8hiyxJPlw9HsVpna0X6NRt0iTDvFFjqt3KFtQ,3405
 airflow/providers/standard/sensors/time.py,sha256=Pc9BZqqTQy3Qqz7uME9yF4qmWsXYCzAoAlsmwgpAraY,5007
 airflow/providers/standard/sensors/time_delta.py,sha256=1OlDMIwNYXhBeeE8TmfsAMIFIOur4BMlDWe0L_JScZc,6633
 airflow/providers/standard/sensors/weekday.py,sha256=HzV21T3XhrQgfsR6svl6uWlJNPSnTbAHbQKd0jifIUU,4467
 airflow/providers/standard/triggers/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/standard/triggers/external_task.py,sha256
+airflow/providers/standard/triggers/external_task.py,sha256=L55jWUWjChw7VWAF_SKI-4DsgMwHKidXpcHIs8FTo1w,11094
 airflow/providers/standard/triggers/file.py,sha256=2i8-RwSjEgdOwQNcHCqLmSdpE3Ehqg4GQJ8nE3-fHxo,4886
 airflow/providers/standard/triggers/temporal.py,sha256=Aub7Cp3HsPdeardF2jp-Z5nIRwzqtK9-aOlWtfKQfcg,4809
 airflow/providers/standard/utils/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
@@ -35,7 +45,7 @@ airflow/providers/standard/utils/python_virtualenv_script.jinja2,sha256=3Z334hVq
 airflow/providers/standard/utils/sensor_helper.py,sha256=vrCdz4lY3Iy8Mom5KuyNidg-IAyngMRqWhStEXVsyT0,4692
 airflow/providers/standard/utils/skipmixin.py,sha256=XkhDozcXUHZ7C6AxzEW8ZYrqbra1oJGGR3ZieNQ-N0M,7791
 airflow/providers/standard/utils/weekday.py,sha256=ySDrIkWv-lqqxURo9E98IGInDqERec2O4y9o2hQTGiQ,2685
-apache_airflow_providers_standard-0.
-apache_airflow_providers_standard-0.
-apache_airflow_providers_standard-0.
-apache_airflow_providers_standard-0.
+apache_airflow_providers_standard-0.4.0.dist-info/entry_points.txt,sha256=mW2YRh3mVdZdaP5-iGSNgmcCh3YYdALIn28BCLBZZ40,104
+apache_airflow_providers_standard-0.4.0.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
+apache_airflow_providers_standard-0.4.0.dist-info/METADATA,sha256=zncp6uT5_LYwv21S9dg9lubVy3-okXYK7VLkRjXqOYE,3786
+apache_airflow_providers_standard-0.4.0.dist-info/RECORD,,
```