apache-airflow-providers-standard 0.2.0rc1__py3-none-any.whl → 0.3.0rc2__py3-none-any.whl
This diff shows the published contents of the two package versions as they appear in their public registry. It is provided for informational purposes only.
- airflow/providers/standard/__init__.py +1 -1
- airflow/providers/standard/decorators/__init__.py +16 -0
- airflow/providers/standard/decorators/bash.py +111 -0
- airflow/providers/standard/decorators/branch_external_python.py +57 -0
- airflow/providers/standard/decorators/branch_python.py +57 -0
- airflow/providers/standard/decorators/branch_virtualenv.py +57 -0
- airflow/providers/standard/decorators/external_python.py +65 -0
- airflow/providers/standard/decorators/python.py +81 -0
- airflow/providers/standard/decorators/python_virtualenv.py +60 -0
- airflow/providers/standard/decorators/sensor.py +76 -0
- airflow/providers/standard/decorators/short_circuit.py +59 -0
- airflow/providers/standard/get_provider_info.py +31 -2
- airflow/providers/standard/operators/bash.py +1 -1
- airflow/providers/standard/operators/datetime.py +6 -1
- airflow/providers/standard/operators/python.py +3 -5
- airflow/providers/standard/operators/weekday.py +5 -2
- airflow/providers/standard/sensors/external_task.py +99 -28
- airflow/providers/standard/triggers/external_task.py +1 -1
- airflow/providers/standard/utils/sensor_helper.py +11 -4
- {apache_airflow_providers_standard-0.2.0rc1.dist-info → apache_airflow_providers_standard-0.3.0rc2.dist-info}/METADATA +6 -6
- {apache_airflow_providers_standard-0.2.0rc1.dist-info → apache_airflow_providers_standard-0.3.0rc2.dist-info}/RECORD +23 -13
- {apache_airflow_providers_standard-0.2.0rc1.dist-info → apache_airflow_providers_standard-0.3.0rc2.dist-info}/WHEEL +0 -0
- {apache_airflow_providers_standard-0.2.0rc1.dist-info → apache_airflow_providers_standard-0.3.0rc2.dist-info}/entry_points.txt +0 -0

airflow/providers/standard/__init__.py

@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version
 
 __all__ = ["__version__"]
 
-__version__ = "0.2.0"
+__version__ = "0.3.0"
 
 if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
     "2.9.0"

airflow/providers/standard/decorators/__init__.py (new file)

@@ -0,0 +1,16 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.

airflow/providers/standard/decorators/bash.py (new file)

@@ -0,0 +1,111 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+from __future__ import annotations
+
+import warnings
+from collections.abc import Collection, Mapping, Sequence
+from typing import TYPE_CHECKING, Any, Callable, ClassVar
+
+from airflow.decorators.base import DecoratedOperator, TaskDecorator, task_decorator_factory
+from airflow.providers.standard.operators.bash import BashOperator
+from airflow.sdk.definitions._internal.types import SET_DURING_EXECUTION
+from airflow.utils.context import context_merge
+from airflow.utils.operator_helpers import determine_kwargs
+
+if TYPE_CHECKING:
+    from airflow.sdk.definitions.context import Context
+
+
+class _BashDecoratedOperator(DecoratedOperator, BashOperator):
+    """
+    Wraps a Python callable and uses the callable return value as the Bash command to be executed.
+
+    :param python_callable: A reference to an object that is callable.
+    :param op_kwargs: A dictionary of keyword arguments that will get unpacked
+        in your function (templated).
+    :param op_args: A list of positional arguments that will get unpacked when
+        calling your callable (templated).
+    """
+
+    template_fields: Sequence[str] = (*DecoratedOperator.template_fields, *BashOperator.template_fields)
+    template_fields_renderers: ClassVar[dict[str, str]] = {
+        **DecoratedOperator.template_fields_renderers,
+        **BashOperator.template_fields_renderers,
+    }
+
+    custom_operator_name: str = "@task.bash"
+    overwrite_rtif_after_execution: bool = True
+
+    def __init__(
+        self,
+        *,
+        python_callable: Callable,
+        op_args: Collection[Any] | None = None,
+        op_kwargs: Mapping[str, Any] | None = None,
+        **kwargs,
+    ) -> None:
+        if kwargs.pop("multiple_outputs", None):
+            warnings.warn(
+                f"`multiple_outputs=True` is not supported in {self.custom_operator_name} tasks. Ignoring.",
+                UserWarning,
+                stacklevel=3,
+            )
+
+        super().__init__(
+            python_callable=python_callable,
+            op_args=op_args,
+            op_kwargs=op_kwargs,
+            bash_command=SET_DURING_EXECUTION,
+            multiple_outputs=False,
+            **kwargs,
+        )
+
+    def execute(self, context: Context) -> Any:
+        context_merge(context, self.op_kwargs)
+        kwargs = determine_kwargs(self.python_callable, self.op_args, context)
+
+        self.bash_command = self.python_callable(*self.op_args, **kwargs)
+
+        if not isinstance(self.bash_command, str) or self.bash_command.strip() == "":
+            raise TypeError("The returned value from the TaskFlow callable must be a non-empty string.")
+
+        self._is_inline_cmd = self._is_inline_command(bash_command=self.bash_command)
+        context["ti"].render_templates()  # type: ignore[attr-defined]
+
+        return super().execute(context)
+
+
+def bash_task(
+    python_callable: Callable | None = None,
+    **kwargs,
+) -> TaskDecorator:
+    """
+    Wrap a function into a BashOperator.
+
+    Accepts kwargs for operator kwargs. Can be reused in a single DAG. This function is only used only used
+    during type checking or auto-completion.
+
+    :param python_callable: Function to decorate.
+
+    :meta private:
+    """
+    return task_decorator_factory(
+        python_callable=python_callable,
+        decorated_operator_class=_BashDecoratedOperator,
+        **kwargs,
+    )
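
For orientation, a minimal sketch of how the new @task.bash decorator is used; the DAG and task names are illustrative, not taken from the package:

    from datetime import datetime

    from airflow.decorators import dag, task


    @dag(schedule=None, start_date=datetime(2024, 1, 1))
    def example_bash():
        @task.bash
        def list_tmp() -> str:
            # The callable's return value becomes the Bash command to execute;
            # a non-string or empty-string return raises a TypeError at runtime.
            return "ls /tmp"

        list_tmp()


    example_bash()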

airflow/providers/standard/decorators/branch_external_python.py (new file)

@@ -0,0 +1,57 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Callable
+
+from airflow.decorators.base import task_decorator_factory
+from airflow.providers.standard.decorators.python import _PythonDecoratedOperator
+from airflow.providers.standard.operators.python import BranchExternalPythonOperator
+
+if TYPE_CHECKING:
+    from airflow.decorators.base import TaskDecorator
+
+
+class _BranchExternalPythonDecoratedOperator(_PythonDecoratedOperator, BranchExternalPythonOperator):
+    """Wraps a Python callable and captures args/kwargs when called for execution."""
+
+    template_fields = BranchExternalPythonOperator.template_fields
+    custom_operator_name: str = "@task.branch_external_python"
+
+
+def branch_external_python_task(
+    python_callable: Callable | None = None, multiple_outputs: bool | None = None, **kwargs
+) -> TaskDecorator:
+    """
+    Wrap a python function into a BranchExternalPythonOperator.
+
+    For more information on how to use this operator, take a look at the guide:
+    :ref:`concepts:branching`
+
+    Accepts kwargs for operator kwarg. Can be reused in a single DAG.
+
+    :param python_callable: Function to decorate
+    :param multiple_outputs: if set, function return value will be
+        unrolled to multiple XCom values. Dict will unroll to xcom values with keys as XCom keys.
+        Defaults to False.
+    """
+    return task_decorator_factory(
+        python_callable=python_callable,
+        multiple_outputs=multiple_outputs,
+        decorated_operator_class=_BranchExternalPythonDecoratedOperator,
+        **kwargs,
+    )

airflow/providers/standard/decorators/branch_python.py (new file)

@@ -0,0 +1,57 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Callable
+
+from airflow.decorators.base import task_decorator_factory
+from airflow.providers.standard.decorators.python import _PythonDecoratedOperator
+from airflow.providers.standard.operators.python import BranchPythonOperator
+
+if TYPE_CHECKING:
+    from airflow.decorators.base import TaskDecorator
+
+
+class _BranchPythonDecoratedOperator(_PythonDecoratedOperator, BranchPythonOperator):
+    """Wraps a Python callable and captures args/kwargs when called for execution."""
+
+    template_fields = BranchPythonOperator.template_fields
+    custom_operator_name: str = "@task.branch"
+
+
+def branch_task(
+    python_callable: Callable | None = None, multiple_outputs: bool | None = None, **kwargs
+) -> TaskDecorator:
+    """
+    Wrap a python function into a BranchPythonOperator.
+
+    For more information on how to use this operator, take a look at the guide:
+    :ref:`concepts:branching`
+
+    Accepts kwargs for operator kwarg. Can be reused in a single DAG.
+
+    :param python_callable: Function to decorate
+    :param multiple_outputs: if set, function return value will be
+        unrolled to multiple XCom values. Dict will unroll to xcom values with keys as XCom keys.
+        Defaults to False.
+    """
+    return task_decorator_factory(
+        python_callable=python_callable,
+        multiple_outputs=multiple_outputs,
+        decorated_operator_class=_BranchPythonDecoratedOperator,
+        **kwargs,
+    )
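
As a usage sketch (all task names here are hypothetical), a @task.branch callable returns the task_id(s) that should continue; every other downstream task is skipped:

    import random
    from datetime import datetime

    from airflow.decorators import dag, task


    @dag(schedule=None, start_date=datetime(2024, 1, 1))
    def example_branch():
        @task.branch
        def choose() -> str:
            # Return the task_id of the branch to follow.
            return "fast_path" if random.random() < 0.5 else "slow_path"

        @task
        def fast_path():
            print("fast")

        @task
        def slow_path():
            print("slow")

        choose() >> [fast_path(), slow_path()]


    example_branch()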

airflow/providers/standard/decorators/branch_virtualenv.py (new file)

@@ -0,0 +1,57 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Callable
+
+from airflow.decorators.base import task_decorator_factory
+from airflow.providers.standard.decorators.python import _PythonDecoratedOperator
+from airflow.providers.standard.operators.python import BranchPythonVirtualenvOperator
+
+if TYPE_CHECKING:
+    from airflow.decorators.base import TaskDecorator
+
+
+class _BranchPythonVirtualenvDecoratedOperator(_PythonDecoratedOperator, BranchPythonVirtualenvOperator):
+    """Wraps a Python callable and captures args/kwargs when called for execution."""
+
+    template_fields = BranchPythonVirtualenvOperator.template_fields
+    custom_operator_name: str = "@task.branch_virtualenv"
+
+
+def branch_virtualenv_task(
+    python_callable: Callable | None = None, multiple_outputs: bool | None = None, **kwargs
+) -> TaskDecorator:
+    """
+    Wrap a python function into a BranchPythonVirtualenvOperator.
+
+    For more information on how to use this operator, take a look at the guide:
+    :ref:`concepts:branching`
+
+    Accepts kwargs for operator kwarg. Can be reused in a single DAG.
+
+    :param python_callable: Function to decorate
+    :param multiple_outputs: if set, function return value will be
+        unrolled to multiple XCom values. Dict will unroll to xcom values with keys as XCom keys.
+        Defaults to False.
+    """
+    return task_decorator_factory(
+        python_callable=python_callable,
+        multiple_outputs=multiple_outputs,
+        decorated_operator_class=_BranchPythonVirtualenvDecoratedOperator,
+        **kwargs,
+    )

airflow/providers/standard/decorators/external_python.py (new file)

@@ -0,0 +1,65 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Callable
+
+from airflow.decorators.base import task_decorator_factory
+from airflow.providers.standard.decorators.python import _PythonDecoratedOperator
+from airflow.providers.standard.operators.python import ExternalPythonOperator
+
+if TYPE_CHECKING:
+    from airflow.decorators.base import TaskDecorator
+
+
+class _PythonExternalDecoratedOperator(_PythonDecoratedOperator, ExternalPythonOperator):
+    """Wraps a Python callable and captures args/kwargs when called for execution."""
+
+    template_fields = ExternalPythonOperator.template_fields
+    custom_operator_name: str = "@task.external_python"
+
+
+def external_python_task(
+    python: str | None = None,
+    python_callable: Callable | None = None,
+    multiple_outputs: bool | None = None,
+    **kwargs,
+) -> TaskDecorator:
+    """
+    Wrap a callable into an Airflow operator to run via a Python virtual environment.
+
+    Accepts kwargs for operator kwarg. Can be reused in a single DAG.
+
+    This function is only used during type checking or auto-completion.
+
+    :meta private:
+
+    :param python: Full path string (file-system specific) that points to a Python binary inside
+        a virtualenv that should be used (in ``VENV/bin`` folder). Should be absolute path
+        (so usually start with "/" or "X:/" depending on the filesystem/os used).
+    :param python_callable: Function to decorate
+    :param multiple_outputs: If set to True, the decorated function's return value will be unrolled to
+        multiple XCom values. Dict will unroll to XCom values with its keys as XCom keys.
+        Defaults to False.
+    """
+    return task_decorator_factory(
+        python=python,
+        python_callable=python_callable,
+        multiple_outputs=multiple_outputs,
+        decorated_operator_class=_PythonExternalDecoratedOperator,
+        **kwargs,
+    )
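
A sketch of @task.external_python usage; the interpreter path is hypothetical and must point at an existing virtualenv's Python binary:

    from airflow.decorators import task


    @task.external_python(python="/opt/venvs/py311/bin/python")
    def run_in_existing_env():
        # Executes in the pre-built environment at the given path,
        # not in the worker's own interpreter.
        import sys

        print(sys.executable)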

airflow/providers/standard/decorators/python.py (new file)

@@ -0,0 +1,81 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+from collections.abc import Sequence
+from typing import TYPE_CHECKING, Callable
+
+from airflow.decorators.base import DecoratedOperator, task_decorator_factory
+from airflow.providers.standard.operators.python import PythonOperator
+
+if TYPE_CHECKING:
+    from airflow.decorators.base import TaskDecorator
+
+
+class _PythonDecoratedOperator(DecoratedOperator, PythonOperator):
+    """
+    Wraps a Python callable and captures args/kwargs when called for execution.
+
+    :param python_callable: A reference to an object that is callable
+    :param op_kwargs: a dictionary of keyword arguments that will get unpacked
+        in your function (templated)
+    :param op_args: a list of positional arguments that will get unpacked when
+        calling your callable (templated)
+    :param multiple_outputs: If set to True, the decorated function's return value will be unrolled to
+        multiple XCom values. Dict will unroll to XCom values with its keys as XCom keys. Defaults to False.
+    """
+
+    template_fields: Sequence[str] = ("templates_dict", "op_args", "op_kwargs")
+    template_fields_renderers = {"templates_dict": "json", "op_args": "py", "op_kwargs": "py"}
+
+    custom_operator_name: str = "@task"
+
+    def __init__(self, *, python_callable, op_args, op_kwargs, **kwargs) -> None:
+        kwargs_to_upstream = {
+            "python_callable": python_callable,
+            "op_args": op_args,
+            "op_kwargs": op_kwargs,
+        }
+        super().__init__(
+            kwargs_to_upstream=kwargs_to_upstream,
+            python_callable=python_callable,
+            op_args=op_args,
+            op_kwargs=op_kwargs,
+            **kwargs,
+        )
+
+
+def python_task(
+    python_callable: Callable | None = None,
+    multiple_outputs: bool | None = None,
+    **kwargs,
+) -> TaskDecorator:
+    """
+    Wrap a function into an Airflow operator.
+
+    Accepts kwargs for operator kwarg. Can be reused in a single DAG.
+
+    :param python_callable: Function to decorate
+    :param multiple_outputs: If set to True, the decorated function's return value will be unrolled to
+        multiple XCom values. Dict will unroll to XCom values with its keys as XCom keys. Defaults to False.
+    """
+    return task_decorator_factory(
+        python_callable=python_callable,
+        multiple_outputs=multiple_outputs,
+        decorated_operator_class=_PythonDecoratedOperator,
+        **kwargs,
+    )
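
The plain @task decorator is the workhorse of the set; with multiple_outputs=True a dict return value is unrolled into one XCom entry per key. A sketch with invented names:

    from airflow.decorators import task


    @task(multiple_outputs=True)
    def load_stats() -> dict[str, int]:
        # Each key becomes its own XCom entry.
        return {"rows": 1000, "errors": 3}


    @task
    def report(rows: int):
        print(f"loaded {rows} rows")


    # Inside a DAG definition, each dict key is addressable individually:
    # stats = load_stats()
    # report(stats["rows"])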

airflow/providers/standard/decorators/python_virtualenv.py (new file)

@@ -0,0 +1,60 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Callable
+
+from airflow.decorators.base import task_decorator_factory
+from airflow.providers.standard.decorators.python import _PythonDecoratedOperator
+from airflow.providers.standard.operators.python import PythonVirtualenvOperator
+
+if TYPE_CHECKING:
+    from airflow.decorators.base import TaskDecorator
+
+
+class _PythonVirtualenvDecoratedOperator(_PythonDecoratedOperator, PythonVirtualenvOperator):
+    """Wraps a Python callable and captures args/kwargs when called for execution."""
+
+    template_fields = PythonVirtualenvOperator.template_fields
+    custom_operator_name: str = "@task.virtualenv"
+
+
+def virtualenv_task(
+    python_callable: Callable | None = None,
+    multiple_outputs: bool | None = None,
+    **kwargs,
+) -> TaskDecorator:
+    """
+    Wrap a callable into an Airflow operator to run via a Python virtual environment.
+
+    Accepts kwargs for operator kwarg. Can be reused in a single DAG.
+
+    This function is only used only used during type checking or auto-completion.
+
+    :meta private:
+
+    :param python_callable: Function to decorate
+    :param multiple_outputs: If set to True, the decorated function's return value will be unrolled to
+        multiple XCom values. Dict will unroll to XCom values with its keys as XCom keys.
+        Defaults to False.
+    """
+    return task_decorator_factory(
+        python_callable=python_callable,
+        multiple_outputs=multiple_outputs,
+        decorated_operator_class=_PythonVirtualenvDecoratedOperator,
+        **kwargs,
+    )
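
A sketch of @task.virtualenv usage; the requirements pin is illustrative. The callable runs in a freshly created virtualenv, so its imports must live inside the function body:

    from airflow.decorators import task


    @task.virtualenv(requirements=["requests==2.32.3"], system_site_packages=False)
    def fetch_status() -> int:
        import requests  # imported inside the isolated environment

        return requests.get("https://example.com").status_code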

airflow/providers/standard/decorators/sensor.py (new file)

@@ -0,0 +1,76 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+from __future__ import annotations
+
+from collections.abc import Sequence
+from typing import TYPE_CHECKING, Callable, ClassVar
+
+from airflow.decorators.base import get_unique_task_id, task_decorator_factory
+from airflow.providers.standard.sensors.python import PythonSensor
+
+if TYPE_CHECKING:
+    from airflow.decorators.base import TaskDecorator
+
+
+class DecoratedSensorOperator(PythonSensor):
+    """
+    Wraps a Python callable and captures args/kwargs when called for execution.
+
+    :param python_callable: A reference to an object that is callable
+    :param task_id: task Id
+    :param op_args: a list of positional arguments that will get unpacked when
+        calling your callable (templated)
+    :param op_kwargs: a dictionary of keyword arguments that will get unpacked
+        in your function (templated)
+    :param kwargs_to_upstream: For certain operators, we might need to upstream certain arguments
+        that would otherwise be absorbed by the DecoratedOperator (for example python_callable for the
+        PythonOperator). This gives a user the option to upstream kwargs as needed.
+    """
+
+    template_fields: Sequence[str] = ("op_args", "op_kwargs")
+    template_fields_renderers: ClassVar[dict[str, str]] = {"op_args": "py", "op_kwargs": "py"}
+
+    custom_operator_name = "@task.sensor"
+
+    # since we won't mutate the arguments, we should just do the shallow copy
+    # there are some cases we can't deepcopy the objects (e.g protobuf).
+    shallow_copy_attrs: Sequence[str] = ("python_callable",)
+
+    def __init__(
+        self,
+        *,
+        task_id: str,
+        **kwargs,
+    ) -> None:
+        kwargs["task_id"] = get_unique_task_id(task_id, kwargs.get("dag"), kwargs.get("task_group"))
+        super().__init__(**kwargs)
+
+
+def sensor_task(python_callable: Callable | None = None, **kwargs) -> TaskDecorator:
+    """
+    Wrap a function into an Airflow operator.
+
+    Accepts kwargs for operator kwarg. Can be reused in a single DAG.
+    :param python_callable: Function to decorate
+    """
+    return task_decorator_factory(
+        python_callable=python_callable,
+        multiple_outputs=False,
+        decorated_operator_class=DecoratedSensorOperator,
+        **kwargs,
+    )
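
DecoratedSensorOperator builds on PythonSensor, so a decorated sensor callable simply returns a truthy or falsy poke result. A sketch (the file path is hypothetical):

    from airflow.decorators import task


    @task.sensor(poke_interval=30, timeout=600, mode="reschedule")
    def wait_for_flag() -> bool:
        from pathlib import Path

        # Re-poked every 30 seconds until it returns True or the sensor times out.
        return Path("/tmp/ready.flag").exists()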

airflow/providers/standard/decorators/short_circuit.py (new file)

@@ -0,0 +1,59 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Callable
+
+from airflow.decorators.base import task_decorator_factory
+from airflow.providers.standard.decorators.python import _PythonDecoratedOperator
+from airflow.providers.standard.operators.python import ShortCircuitOperator
+
+if TYPE_CHECKING:
+    from airflow.decorators.base import TaskDecorator
+
+
+class _ShortCircuitDecoratedOperator(_PythonDecoratedOperator, ShortCircuitOperator):
+    """Wraps a Python callable and captures args/kwargs when called for execution."""
+
+    template_fields = ShortCircuitOperator.template_fields
+    custom_operator_name: str = "@task.short_circuit"
+
+
+def short_circuit_task(
+    python_callable: Callable | None = None,
+    multiple_outputs: bool | None = None,
+    **kwargs,
+) -> TaskDecorator:
+    """
+    Wrap a function into an ShortCircuitOperator.
+
+    Accepts kwargs for operator kwarg. Can be reused in a single DAG.
+
+    This function is only used only used during type checking or auto-completion.
+
+    :param python_callable: Function to decorate
+    :param multiple_outputs: If set to True, the decorated function's return value will be unrolled to
+        multiple XCom values. Dict will unroll to XCom values with its keys as XCom keys. Defaults to False.
+
+    :meta private:
+    """
+    return task_decorator_factory(
+        python_callable=python_callable,
+        multiple_outputs=multiple_outputs,
+        decorated_operator_class=_ShortCircuitDecoratedOperator,
+        **kwargs,
+    )
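
A sketch of @task.short_circuit: when the callable returns a falsy value, all downstream tasks are skipped (names invented):

    from airflow.decorators import task


    @task.short_circuit
    def is_weekday() -> bool:
        from datetime import datetime

        # False on Saturday/Sunday short-circuits the rest of the DAG run.
        return datetime.now().weekday() < 5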

airflow/providers/standard/get_provider_info.py

@@ -27,8 +27,8 @@ def get_provider_info():
         "name": "Standard",
         "description": "Airflow Standard Provider\n",
         "state": "ready",
-        "source-date-epoch":
-        "versions": ["0.2.0", "0.1.1", "0.1.0", "0.0.3", "0.0.2", "0.0.1"],
+        "source-date-epoch": 1743477899,
+        "versions": ["0.3.0", "0.2.0", "0.1.1", "0.1.0", "0.0.3", "0.0.2", "0.0.1"],
         "integrations": [
             {
                 "integration-name": "Standard",
@@ -106,6 +106,35 @@ def get_provider_info():
                 },
             }
         },
+        "task-decorators": [
+            {"class-name": "airflow.providers.standard.decorators.python.python_task", "name": "python"},
+            {"class-name": "airflow.providers.standard.decorators.bash.bash_task", "name": "bash"},
+            {
+                "class-name": "airflow.providers.standard.decorators.branch_external_python.branch_external_python_task",
+                "name": "branch_external_python",
+            },
+            {
+                "class-name": "airflow.providers.standard.decorators.branch_python.branch_task",
+                "name": "branch",
+            },
+            {
+                "class-name": "airflow.providers.standard.decorators.branch_virtualenv.branch_virtualenv_task",
+                "name": "branch_virtualenv",
+            },
+            {
+                "class-name": "airflow.providers.standard.decorators.external_python.external_python_task",
+                "name": "external_python",
+            },
+            {
+                "class-name": "airflow.providers.standard.decorators.python_virtualenv.virtualenv_task",
+                "name": "virtualenv",
+            },
+            {"class-name": "airflow.providers.standard.decorators.sensor.sensor_task", "name": "sensor"},
+            {
+                "class-name": "airflow.providers.standard.decorators.short_circuit.short_circuit_task",
+                "name": "short_circuit",
+            },
+        ],
         "dependencies": ["apache-airflow>=2.9.0"],
         "devel-dependencies": [],
     }
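
These task-decorators entries are how Airflow core discovers the new names: the providers manager reads each provider's metadata and attaches every listed factory under the shared task namespace. A small sketch of inspecting the mapping directly (the function and key come from this file; the loop itself is just illustrative):

    from airflow.providers.standard.get_provider_info import get_provider_info

    for spec in get_provider_info()["task-decorators"]:
        print(f'@task.{spec["name"]} -> {spec["class-name"]}')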

airflow/providers/standard/operators/bash.py

@@ -253,7 +253,7 @@ class BashOperator(BaseOperator):
         """
         with working_directory(cwd=self.cwd) as cwd:
             with tempfile.NamedTemporaryFile(mode="w", dir=cwd, suffix=".sh") as file:
-                file.write(cast(str, self.bash_command))
+                file.write(cast("str", self.bash_command))
                 file.flush()
 
                 bash_script = os.path.basename(file.name)

airflow/providers/standard/operators/datetime.py

@@ -77,9 +77,14 @@ class BranchDateTimeOperator(BaseBranchOperator):
 
     def choose_branch(self, context: Context) -> str | Iterable[str]:
         if self.use_task_logical_date:
-            now = context
+            now = context.get("logical_date")
+            if not now:
+                dag_run = context.get("dag_run")
+                now = dag_run.run_after  # type: ignore[union-attr, assignment]
         else:
             now = timezone.coerce_datetime(timezone.utcnow())
+        if TYPE_CHECKING:
+            assert isinstance(now, datetime.datetime)
         lower, upper = target_times_as_dates(now, self.target_lower, self.target_upper)
         lower = timezone.coerce_datetime(lower, self.dag.timezone)
         upper = timezone.coerce_datetime(upper, self.dag.timezone)
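
BranchDateTimeOperator here, and BranchDayOfWeekOperator further down, now share the same fallback: prefer the run's logical_date and, when a run has none, fall back to dag_run.run_after. Isolated as a sketch (the helper itself is not part of the package; the attribute names mirror the diff):

    def _resolve_now(context):
        now = context.get("logical_date")
        if not now:
            # e.g. runs created without a logical date
            now = context.get("dag_run").run_after
        return now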

airflow/providers/standard/operators/python.py

@@ -56,7 +56,7 @@ if AIRFLOW_V_3_0_PLUS:
     from airflow.providers.standard.utils.skipmixin import SkipMixin
 else:
     from airflow.models.skipmixin import SkipMixin
-    from airflow.operators.branch import BranchMixIn
+    from airflow.operators.branch import BranchMixIn  # type: ignore[no-redef]
 
 
 log = logging.getLogger(__name__)
@@ -460,8 +460,7 @@ class _BasePythonVirtualenvOperator(PythonOperator, metaclass=ABCMeta):
         serializer = serializer or "pickle"
         if serializer not in _SERIALIZERS:
             msg = (
-                f"Unsupported serializer {serializer!r}. "
-                f"Expected one of {', '.join(map(repr, _SERIALIZERS))}"
+                f"Unsupported serializer {serializer!r}. Expected one of {', '.join(map(repr, _SERIALIZERS))}"
             )
             raise AirflowException(msg)
 
@@ -1142,7 +1141,6 @@ def _get_current_context() -> Mapping[str, Any]:
 
     if not _CURRENT_CONTEXT:
         raise RuntimeError(
-            "Current context was requested but no context was found! "
-            "Are you running within an Airflow task?"
+            "Current context was requested but no context was found! Are you running within an Airflow task?"
         )
     return _CURRENT_CONTEXT[-1]

airflow/providers/standard/operators/weekday.py

@@ -116,10 +116,13 @@ class BranchDayOfWeekOperator(BaseBranchOperator):
 
     def choose_branch(self, context: Context) -> str | Iterable[str]:
         if self.use_task_logical_date:
-            now = context
+            now = context.get("logical_date")
+            if not now:
+                dag_run = context.get("dag_run")
+                now = dag_run.run_after  # type: ignore[union-attr, assignment]
         else:
             now = timezone.make_naive(timezone.utcnow(), self.dag.timezone)
 
-        if now.isoweekday() in self._week_day_num:
+        if now.isoweekday() in self._week_day_num:  # type: ignore[union-attr]
             return self.follow_task_ids_if_true
         return self.follow_task_ids_if_false

airflow/providers/standard/sensors/external_task.py

@@ -31,21 +31,26 @@ from airflow.providers.standard.operators.empty import EmptyOperator
 from airflow.providers.standard.triggers.external_task import WorkflowTrigger
 from airflow.providers.standard.utils.sensor_helper import _get_count, _get_external_task_group_task_ids
 from airflow.providers.standard.version_compat import AIRFLOW_V_3_0_PLUS
-from airflow.sensors.base import BaseSensorOperator
 from airflow.utils.file import correct_maybe_zipped
 from airflow.utils.session import NEW_SESSION, provide_session
 from airflow.utils.state import State, TaskInstanceState
 
+if AIRFLOW_V_3_0_PLUS:
+    from airflow.sdk.bases.sensor import BaseSensorOperator
+else:
+    from airflow.sensors.base import BaseSensorOperator
+
 if TYPE_CHECKING:
     from sqlalchemy.orm import Session
 
-    from airflow.models.baseoperator import BaseOperator
     from airflow.models.taskinstancekey import TaskInstanceKey
 
 try:
+    from airflow.sdk import BaseOperator
     from airflow.sdk.definitions.context import Context
 except ImportError:
     # TODO: Remove once provider drops support for Airflow 2
+    from airflow.models.baseoperator import BaseOperator
     from airflow.utils.context import Context
@@ -65,15 +70,16 @@ class ExternalDagLink(BaseOperatorLink):
     name = "External DAG"
 
     def get_link(self, operator: BaseOperator, *, ti_key: TaskInstanceKey) -> str:
-        from airflow.models.renderedtifields import RenderedTaskInstanceFields
-
         if TYPE_CHECKING:
             assert isinstance(operator, (ExternalTaskMarker, ExternalTaskSensor))
 
-
-
-
-
+        external_dag_id = operator.external_dag_id
+
+        if not AIRFLOW_V_3_0_PLUS:
+            from airflow.models.renderedtifields import RenderedTaskInstanceFields
+
+            if template_fields := RenderedTaskInstanceFields.get_templated_fields(ti_key):
+                external_dag_id: str = template_fields.get("external_dag_id", operator.external_dag_id)  # type: ignore[no-redef]
 
         if AIRFLOW_V_3_0_PLUS:
             from airflow.utils.helpers import build_airflow_dagrun_url
@@ -245,16 +251,22 @@ class ExternalTaskSensor(BaseSensorOperator):
         self.poll_interval = poll_interval
 
     def _get_dttm_filter(self, context):
+        logical_date = context.get("logical_date")
+        if logical_date is None:
+            dag_run = context.get("dag_run")
+            if TYPE_CHECKING:
+                assert dag_run
+
+            logical_date = dag_run.run_after
         if self.execution_delta:
-            dttm =
+            dttm = logical_date - self.execution_delta
         elif self.execution_date_fn:
             dttm = self._handle_execution_date_fn(context=context)
         else:
-            dttm =
+            dttm = logical_date
         return dttm if isinstance(dttm, list) else [dttm]
 
-
-    def poke(self, context: Context, session: Session = NEW_SESSION) -> bool:
+    def poke(self, context: Context) -> bool:
         # delay check to poke rather than __init__ in case it was supplied as XComArgs
         if self.external_task_ids and len(self.external_task_ids) > len(set(self.external_task_ids)):
             raise ValueError("Duplicate task_ids passed in external_task_ids parameter")
@@ -285,15 +297,62 @@ class ExternalTaskSensor(BaseSensorOperator):
             serialized_dttm_filter,
         )
 
-
-
-
+        if AIRFLOW_V_3_0_PLUS:
+            return self._poke_af3(context, dttm_filter)
+        else:
+            return self._poke_af2(dttm_filter)
+
+    def _poke_af3(self, context: Context, dttm_filter: list[datetime.datetime]) -> bool:
+        self._has_checked_existence = True
+        ti = context["ti"]
+
+        def _get_count(states: list[str]) -> int:
+            if self.external_task_ids:
+                return ti.get_ti_count(
+                    dag_id=self.external_dag_id,
+                    task_ids=self.external_task_ids,  # type: ignore[arg-type]
+                    logical_dates=dttm_filter,
+                    states=states,
+                )
+            elif self.external_task_group_id:
+                return ti.get_ti_count(
+                    dag_id=self.external_dag_id,
+                    task_group_id=self.external_task_group_id,
+                    logical_dates=dttm_filter,
+                    states=states,
+                )
+            else:
+                return ti.get_dr_count(
+                    dag_id=self.external_dag_id,
+                    logical_dates=dttm_filter,
+                    states=states,
+                )
 
-        count_failed = -1
         if self.failed_states:
-
+            count = _get_count(self.failed_states)
+            count_failed = self._calculate_count(count, dttm_filter)
+            self._handle_failed_states(count_failed)
 
-
+        if self.skipped_states:
+            count = _get_count(self.skipped_states)
+            count_skipped = self._calculate_count(count, dttm_filter)
+            self._handle_skipped_states(count_skipped)
+
+        count = _get_count(self.allowed_states)
+        count_allowed = self._calculate_count(count, dttm_filter)
+        return count_allowed == len(dttm_filter)
+
+    def _calculate_count(self, count: int, dttm_filter: list[datetime.datetime]) -> float | int:
+        """Calculate the normalized count based on the type of check."""
+        if self.external_task_ids:
+            return count / len(self.external_task_ids)
+        elif self.external_task_group_id:
+            return count / len(dttm_filter)
+        else:
+            return count
+
+    def _handle_failed_states(self, count_failed: float | int) -> None:
+        """Handle failed states and raise appropriate exceptions."""
         if count_failed > 0:
             if self.external_task_ids:
                 if self.soft_fail:
@@ -315,7 +374,6 @@ class ExternalTaskSensor(BaseSensorOperator):
                         f"The external task_group '{self.external_task_group_id}' "
                         f"in DAG '{self.external_dag_id}' failed."
                     )
-
             else:
                 if self.soft_fail:
                     raise AirflowSkipException(
@@ -323,12 +381,8 @@ class ExternalTaskSensor(BaseSensorOperator):
                     )
                 raise AirflowException(f"The external DAG {self.external_dag_id} failed.")
 
-
-
-        count_skipped = self.get_count(dttm_filter, session, self.skipped_states)
-
-        # Skip if anything in the list has skipped. Note if we are checking multiple tasks and one skips
-        # before another errors, we'll skip first.
+    def _handle_skipped_states(self, count_skipped: float | int) -> None:
+        """Handle skipped states and raise appropriate exceptions."""
         if count_skipped > 0:
             if self.external_task_ids:
                 raise AirflowSkipException(
@@ -346,7 +400,19 @@ class ExternalTaskSensor(BaseSensorOperator):
                     "Skipping."
                 )
 
-
+    @provide_session
+    def _poke_af2(self, dttm_filter: list[datetime.datetime], session: Session = NEW_SESSION) -> bool:
+        if self.check_existence and not self._has_checked_existence:
+            self._check_for_existence(session=session)
+
+        if self.failed_states:
+            count_failed = self.get_count(dttm_filter, session, self.failed_states)
+            self._handle_failed_states(count_failed)
+
+        if self.skipped_states:
+            count_skipped = self.get_count(dttm_filter, session, self.skipped_states)
+            self._handle_skipped_states(count_skipped)
+
         count_allowed = self.get_count(dttm_filter, session, self.allowed_states)
         return count_allowed == len(dttm_filter)
 
@@ -398,8 +464,7 @@ class ExternalTaskSensor(BaseSensorOperator):
             for external_task_id in self.external_task_ids:
                 if not refreshed_dag_info.has_task(external_task_id):
                     raise AirflowException(
-                        f"The external task {external_task_id} in "
-                        f"DAG {self.external_dag_id} does not exist."
+                        f"The external task {external_task_id} in DAG {self.external_dag_id} does not exist."
                     )
 
         if self.external_task_group_id:
@@ -482,6 +547,9 @@ class ExternalTaskMarker(EmptyOperator):
     """
 
     template_fields = ["external_dag_id", "external_task_id", "logical_date"]
+    if not AIRFLOW_V_3_0_PLUS:
+        template_fields.append("execution_date")
+
     ui_color = "#4db7db"
     operator_extra_links = [ExternalDagLink()]
 
@@ -509,6 +577,9 @@ class ExternalTaskMarker(EmptyOperator):
                 f"Expected str or datetime.datetime type for logical_date. Got {type(logical_date)}"
             )
 
+        if not AIRFLOW_V_3_0_PLUS:
+            self.execution_date = self.logical_date
+
         if recursion_depth <= 0:
             raise ValueError("recursion_depth should be a positive integer")
         self.recursion_depth = recursion_depth
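
The ExternalTaskSensor refactor above only splits the poke path by Airflow version; the constructor arguments it relies on are unchanged. A typical instantiation still looks like this (DAG and task ids hypothetical, inside a DAG definition):

    from airflow.providers.standard.sensors.external_task import ExternalTaskSensor

    wait = ExternalTaskSensor(
        task_id="wait_for_upstream",
        external_dag_id="upstream_dag",
        external_task_ids=["load"],
        allowed_states=["success"],
        failed_states=["failed"],
        poke_interval=60,
    )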

airflow/providers/standard/utils/sensor_helper.py

@@ -21,6 +21,7 @@ from typing import TYPE_CHECKING, cast
 from sqlalchemy import func, select, tuple_
 
 from airflow.models import DagBag, DagRun, TaskInstance
+from airflow.providers.standard.version_compat import AIRFLOW_V_3_0_PLUS
 from airflow.utils.session import NEW_SESSION, provide_session
 
 if TYPE_CHECKING:
@@ -71,10 +72,12 @@ def _get_count(
                     tuple_(TI.task_id, TI.map_index).in_(external_task_group_task_ids)
                 )
             )
-
+            / len(external_task_group_task_ids)
+            * len(dttm_filter)
+        )
     else:
         count = session.scalar(_count_stmt(DR, states, dttm_filter, external_dag_id))
-    return cast(int, count)
+    return cast("int", count)
 
 
 def _count_stmt(model, states, dttm_filter, external_dag_id) -> Executable:
@@ -86,8 +89,10 @@ def _count_stmt(model, states, dttm_filter, external_dag_id) -> Executable:
     :param dttm_filter: date time filter for logical date
    :param external_dag_id: The ID of the external DAG.
     """
+    date_field = model.logical_date if AIRFLOW_V_3_0_PLUS else model.execution_date
+
     return select(func.count()).where(
-        model.dag_id == external_dag_id, model.state.in_(states),
+        model.dag_id == external_dag_id, model.state.in_(states), date_field.in_(dttm_filter)
     )
 
 
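
The AIRFLOW_V_3_0_PLUS flag imported above comes from the provider's version_compat module. That file is not shown in this diff, but such flags are conventionally derived the same way as the version gate in this provider's __init__.py, along these lines (a sketch, not the packaged source):

    import packaging.version

    from airflow import __version__ as airflow_version

    AIRFLOW_V_3_0_PLUS = packaging.version.parse(
        packaging.version.parse(airflow_version).base_version
    ) >= packaging.version.parse("3.0.0")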
@@ -104,11 +109,13 @@ def _get_external_task_group_task_ids(dttm_filter, external_task_group_id, exter
     task_group = refreshed_dag_info.task_group_dict.get(external_task_group_id)
 
     if task_group:
+        date_field = TaskInstance.logical_date if AIRFLOW_V_3_0_PLUS else TaskInstance.execution_date
+
         group_tasks = session.scalars(
             select(TaskInstance).filter(
                 TaskInstance.dag_id == external_dag_id,
                 TaskInstance.task_id.in_(task.task_id for task in task_group),
-
+                date_field.in_(dttm_filter),
             )
         )
 

apache_airflow_providers_standard-0.3.0rc2.dist-info/METADATA

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: apache-airflow-providers-standard
-Version: 0.2.0rc1
+Version: 0.3.0rc2
 Summary: Provider package apache-airflow-providers-standard for Apache Airflow
 Keywords: airflow-provider,standard,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -22,8 +22,8 @@ Classifier: Programming Language :: Python :: 3.12
 Classifier: Topic :: System :: Monitoring
 Requires-Dist: apache-airflow>=2.9.0rc0
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-standard/0.2.0/changelog.html
-Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-standard/0.2.0
+Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-standard/0.3.0/changelog.html
+Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-standard/0.3.0
 Project-URL: Mastodon, https://fosstodon.org/@airflow
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
@@ -54,7 +54,7 @@ Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/
 
 Package ``apache-airflow-providers-standard``
 
-Release: ``0.2.0``
+Release: ``0.3.0``
 
 
 Airflow Standard Provider
@@ -67,7 +67,7 @@ This is a provider package for ``standard`` provider. All classes for this provi
 are in ``airflow.providers.standard`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-standard/0.2.0/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-standard/0.3.0/>`_.
 
 Installation
 ------------
@@ -88,5 +88,5 @@ PIP package Version required
 ================== ==================
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-standard/0.2.0/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-standard/0.3.0/changelog.html>`_.
 

apache_airflow_providers_standard-0.3.0rc2.dist-info/RECORD

@@ -1,41 +1,51 @@
 airflow/providers/standard/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
-airflow/providers/standard/__init__.py,sha256=
-airflow/providers/standard/get_provider_info.py,sha256=
+airflow/providers/standard/__init__.py,sha256=84Hgrj5AurK3EjrkTXn2CChxmmQY0FCSNo6nzNwAxT0,1495
+airflow/providers/standard/get_provider_info.py,sha256=L-tNd8ZwiC77cY0mDXdUyp29VQu0h5ChdXy-fdwBxnQ,6435
 airflow/providers/standard/version_compat.py,sha256=aHg90_DtgoSnQvILFICexMyNlHlALBdaeWqkX3dFDug,1605
+airflow/providers/standard/decorators/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
+airflow/providers/standard/decorators/bash.py,sha256=dknHzPFfVwiCrcC0FzMlGKoOMi66EaA9uKR3JSsitno,4128
+airflow/providers/standard/decorators/branch_external_python.py,sha256=-z6JmLQrTzqQg2yqsdA91VPip52JeWp0Wnd9JDX_DeI,2352
+airflow/providers/standard/decorators/branch_python.py,sha256=ornGzksOiTSbvAiCgthkZw4iJaMsNCnKBasWdOS8wfA,2272
+airflow/providers/standard/decorators/branch_virtualenv.py,sha256=jcpxyoX86zXvzEJ8eIIf177EZZrt5TxoZbEum8blxI0,2354
+airflow/providers/standard/decorators/external_python.py,sha256=6_K9kjLQJQFwcxqfW51BP9BwMZDrI9ihomsR4xftavk,2673
+airflow/providers/standard/decorators/python.py,sha256=f-pl62ilgX45zvW6seCXKI0FoV3nypbWmjOIQauo6Y0,3219
+airflow/providers/standard/decorators/python_virtualenv.py,sha256=CKzMtaQr9nK-e9APm7jtXmIdc-Qc-xIV13PymdbEJAM,2359
+airflow/providers/standard/decorators/sensor.py,sha256=N2sKQl6xPop0gKnYWhtqnjl5yzSZa_56MQ7I5HrnG4Y,3004
+airflow/providers/standard/decorators/short_circuit.py,sha256=xo4h8eoZ9UXJ_8IhEhvlWat_Q_w1Y6bJmEXcAvsKZlY,2301
 airflow/providers/standard/hooks/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/standard/hooks/filesystem.py,sha256=fDZwW_EYD8z1QXnReqI7gIwSbDPZNTKtqQvgktiP02o,2870
 airflow/providers/standard/hooks/package_index.py,sha256=U7_s_02-wwz9kTkzKr3JAhVQj2spuntWd_GmjfpV-y4,3769
 airflow/providers/standard/hooks/subprocess.py,sha256=GAmdF69jwUcpc7DH5I42GnJRs6NMQvHwFhimWpIdTU4,4920
 airflow/providers/standard/operators/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/standard/operators/bash.py,sha256=
+airflow/providers/standard/operators/bash.py,sha256=dPpaAbLRPyVo1207npt0Fr9ShdzvKsT0r9-sjYON6JI,11407
 airflow/providers/standard/operators/branch.py,sha256=C_AUd7TSo_U52GiWsrR7rJIsRU5KKfrybBFw84brm_c,4070
-airflow/providers/standard/operators/datetime.py,sha256=
+airflow/providers/standard/operators/datetime.py,sha256=bYDdbfAyAlEXRRHjOgB06UhgDum6SPdd5I3u-ylPSaw,5005
 airflow/providers/standard/operators/empty.py,sha256=C7_uLWJK6kExzlNc7xdMo8VAQ_ONWITvEQ2FImrMepM,1324
 airflow/providers/standard/operators/latest_only.py,sha256=OdUbeJA0_HuqxPFo8zFefkysUWtGpvdvoVEfIy0yJPo,3377
-airflow/providers/standard/operators/python.py,sha256=
+airflow/providers/standard/operators/python.py,sha256=l0aj8d9Cwg_B8snBZA815QKy8MKhRvISfbmHEteTGTk,50106
 airflow/providers/standard/operators/smooth.py,sha256=d3OV38EzV_wlfMYN3JGWGwyzsFonx8VbqgGfXSw0_bM,1382
 airflow/providers/standard/operators/trigger_dagrun.py,sha256=xXGVZOaIB8Ru2tALmmS-IWjzKhA3dFhiOpa3GTuKxeQ,14231
-airflow/providers/standard/operators/weekday.py,sha256=
+airflow/providers/standard/operators/weekday.py,sha256=Qg7LhXYtybVSGZn8uQqF-r7RB7zOXfe3R6vSGVa_rJk,5083
 airflow/providers/standard/sensors/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/standard/sensors/bash.py,sha256=afyz1m-1qzAp1fE5ta71rXhpTrKcCH7bNfwUU2Hv7GQ,5025
 airflow/providers/standard/sensors/date_time.py,sha256=hRUuLaNgqDh4jqaIaD8zdyq2BUXkpWM2NzJN5YkwTJI,6077
-airflow/providers/standard/sensors/external_task.py,sha256=
+airflow/providers/standard/sensors/external_task.py,sha256=LI8kYU8SNSfaq93MKgwyqEQF3-tFn9-2CvtEhjovb7M,27033
 airflow/providers/standard/sensors/filesystem.py,sha256=rfupSeHtFGdAcL6cw3H6u6ttBxogSThYiPqsUKgABMU,6029
 airflow/providers/standard/sensors/python.py,sha256=kvgpHN8hiyxJPlw9HsVpna0X6NRt0iTDvFFjqt3KFtQ,3405
 airflow/providers/standard/sensors/time.py,sha256=Pc9BZqqTQy3Qqz7uME9yF4qmWsXYCzAoAlsmwgpAraY,5007
 airflow/providers/standard/sensors/time_delta.py,sha256=1OlDMIwNYXhBeeE8TmfsAMIFIOur4BMlDWe0L_JScZc,6633
 airflow/providers/standard/sensors/weekday.py,sha256=HzV21T3XhrQgfsR6svl6uWlJNPSnTbAHbQKd0jifIUU,4467
 airflow/providers/standard/triggers/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/standard/triggers/external_task.py,sha256
+airflow/providers/standard/triggers/external_task.py,sha256=-80zAq7pPbKElcS2sNgi3rE2rXPvEZe3Sj4nsJdxLGU,8478
 airflow/providers/standard/triggers/file.py,sha256=2i8-RwSjEgdOwQNcHCqLmSdpE3Ehqg4GQJ8nE3-fHxo,4886
 airflow/providers/standard/triggers/temporal.py,sha256=Aub7Cp3HsPdeardF2jp-Z5nIRwzqtK9-aOlWtfKQfcg,4809
 airflow/providers/standard/utils/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/standard/utils/python_virtualenv.py,sha256=FR3241l5Obuo2BBwwBs-s87pRpCLyJnh3sUtHxrgRuM,7759
 airflow/providers/standard/utils/python_virtualenv_script.jinja2,sha256=3Z334hVq6hQ9EHkOoGnAHc2_XNkZQkOJGxZArDKLc-c,2770
-airflow/providers/standard/utils/sensor_helper.py,sha256=
+airflow/providers/standard/utils/sensor_helper.py,sha256=vrCdz4lY3Iy8Mom5KuyNidg-IAyngMRqWhStEXVsyT0,4692
 airflow/providers/standard/utils/skipmixin.py,sha256=XkhDozcXUHZ7C6AxzEW8ZYrqbra1oJGGR3ZieNQ-N0M,7791
 airflow/providers/standard/utils/weekday.py,sha256=ySDrIkWv-lqqxURo9E98IGInDqERec2O4y9o2hQTGiQ,2685
-apache_airflow_providers_standard-0.
-apache_airflow_providers_standard-0.
-apache_airflow_providers_standard-0.
-apache_airflow_providers_standard-0.
+apache_airflow_providers_standard-0.3.0rc2.dist-info/entry_points.txt,sha256=mW2YRh3mVdZdaP5-iGSNgmcCh3YYdALIn28BCLBZZ40,104
+apache_airflow_providers_standard-0.3.0rc2.dist-info/WHEEL,sha256=_2ozNFCLWc93bK4WKHCO-eDUENDlo-dgc9cU3qokYO4,82
+apache_airflow_providers_standard-0.3.0rc2.dist-info/METADATA,sha256=Nkhhr0r7FcXIMtjVOpZIYFUHH2NBrM8h8siVojyP398,3792
+apache_airflow_providers_standard-0.3.0rc2.dist-info/RECORD,,

The WHEEL and entry_points.txt files are unchanged.