apache-airflow-providers-standard 0.2.0b1__tar.gz → 0.3.0__tar.gz

This diff compares two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.

Potentially problematic release: this version of apache-airflow-providers-standard might be problematic.

Files changed (50)
  1. {apache_airflow_providers_standard-0.2.0b1 → apache_airflow_providers_standard-0.3.0}/PKG-INFO +6 -6
  2. {apache_airflow_providers_standard-0.2.0b1 → apache_airflow_providers_standard-0.3.0}/README.rst +3 -3
  3. {apache_airflow_providers_standard-0.2.0b1 → apache_airflow_providers_standard-0.3.0}/pyproject.toml +5 -4
  4. {apache_airflow_providers_standard-0.2.0b1 → apache_airflow_providers_standard-0.3.0}/src/airflow/providers/standard/__init__.py +1 -1
  5. apache_airflow_providers_standard-0.3.0/src/airflow/providers/standard/decorators/bash.py +111 -0
  6. apache_airflow_providers_standard-0.3.0/src/airflow/providers/standard/decorators/branch_external_python.py +57 -0
  7. apache_airflow_providers_standard-0.3.0/src/airflow/providers/standard/decorators/branch_python.py +57 -0
  8. apache_airflow_providers_standard-0.3.0/src/airflow/providers/standard/decorators/branch_virtualenv.py +57 -0
  9. apache_airflow_providers_standard-0.3.0/src/airflow/providers/standard/decorators/external_python.py +65 -0
  10. apache_airflow_providers_standard-0.3.0/src/airflow/providers/standard/decorators/python.py +81 -0
  11. apache_airflow_providers_standard-0.3.0/src/airflow/providers/standard/decorators/python_virtualenv.py +60 -0
  12. apache_airflow_providers_standard-0.3.0/src/airflow/providers/standard/decorators/sensor.py +76 -0
  13. apache_airflow_providers_standard-0.3.0/src/airflow/providers/standard/decorators/short_circuit.py +59 -0
  14. {apache_airflow_providers_standard-0.2.0b1 → apache_airflow_providers_standard-0.3.0}/src/airflow/providers/standard/get_provider_info.py +31 -2
  15. {apache_airflow_providers_standard-0.2.0b1 → apache_airflow_providers_standard-0.3.0}/src/airflow/providers/standard/operators/bash.py +12 -49
  16. {apache_airflow_providers_standard-0.2.0b1 → apache_airflow_providers_standard-0.3.0}/src/airflow/providers/standard/operators/datetime.py +6 -1
  17. {apache_airflow_providers_standard-0.2.0b1 → apache_airflow_providers_standard-0.3.0}/src/airflow/providers/standard/operators/python.py +26 -19
  18. {apache_airflow_providers_standard-0.2.0b1 → apache_airflow_providers_standard-0.3.0}/src/airflow/providers/standard/operators/weekday.py +5 -2
  19. {apache_airflow_providers_standard-0.2.0b1 → apache_airflow_providers_standard-0.3.0}/src/airflow/providers/standard/sensors/external_task.py +99 -28
  20. {apache_airflow_providers_standard-0.2.0b1 → apache_airflow_providers_standard-0.3.0}/src/airflow/providers/standard/triggers/external_task.py +1 -1
  21. apache_airflow_providers_standard-0.3.0/src/airflow/providers/standard/utils/__init__.py +16 -0
  22. {apache_airflow_providers_standard-0.2.0b1 → apache_airflow_providers_standard-0.3.0}/src/airflow/providers/standard/utils/python_virtualenv_script.jinja2 +5 -0
  23. {apache_airflow_providers_standard-0.2.0b1 → apache_airflow_providers_standard-0.3.0}/src/airflow/providers/standard/utils/sensor_helper.py +11 -4
  24. {apache_airflow_providers_standard-0.2.0b1 → apache_airflow_providers_standard-0.3.0}/src/airflow/providers/standard/LICENSE +0 -0
  25. {apache_airflow_providers_standard-0.2.0b1/src/airflow/providers/standard/hooks → apache_airflow_providers_standard-0.3.0/src/airflow/providers/standard/decorators}/__init__.py +0 -0
  26. {apache_airflow_providers_standard-0.2.0b1/src/airflow/providers/standard/operators → apache_airflow_providers_standard-0.3.0/src/airflow/providers/standard/hooks}/__init__.py +0 -0
  27. {apache_airflow_providers_standard-0.2.0b1 → apache_airflow_providers_standard-0.3.0}/src/airflow/providers/standard/hooks/filesystem.py +0 -0
  28. {apache_airflow_providers_standard-0.2.0b1 → apache_airflow_providers_standard-0.3.0}/src/airflow/providers/standard/hooks/package_index.py +0 -0
  29. {apache_airflow_providers_standard-0.2.0b1 → apache_airflow_providers_standard-0.3.0}/src/airflow/providers/standard/hooks/subprocess.py +0 -0
  30. {apache_airflow_providers_standard-0.2.0b1/src/airflow/providers/standard/sensors → apache_airflow_providers_standard-0.3.0/src/airflow/providers/standard/operators}/__init__.py +0 -0
  31. {apache_airflow_providers_standard-0.2.0b1 → apache_airflow_providers_standard-0.3.0}/src/airflow/providers/standard/operators/branch.py +0 -0
  32. {apache_airflow_providers_standard-0.2.0b1 → apache_airflow_providers_standard-0.3.0}/src/airflow/providers/standard/operators/empty.py +0 -0
  33. {apache_airflow_providers_standard-0.2.0b1 → apache_airflow_providers_standard-0.3.0}/src/airflow/providers/standard/operators/latest_only.py +0 -0
  34. {apache_airflow_providers_standard-0.2.0b1 → apache_airflow_providers_standard-0.3.0}/src/airflow/providers/standard/operators/smooth.py +0 -0
  35. {apache_airflow_providers_standard-0.2.0b1 → apache_airflow_providers_standard-0.3.0}/src/airflow/providers/standard/operators/trigger_dagrun.py +0 -0
  36. {apache_airflow_providers_standard-0.2.0b1/src/airflow/providers/standard/triggers → apache_airflow_providers_standard-0.3.0/src/airflow/providers/standard/sensors}/__init__.py +0 -0
  37. {apache_airflow_providers_standard-0.2.0b1 → apache_airflow_providers_standard-0.3.0}/src/airflow/providers/standard/sensors/bash.py +0 -0
  38. {apache_airflow_providers_standard-0.2.0b1 → apache_airflow_providers_standard-0.3.0}/src/airflow/providers/standard/sensors/date_time.py +0 -0
  39. {apache_airflow_providers_standard-0.2.0b1 → apache_airflow_providers_standard-0.3.0}/src/airflow/providers/standard/sensors/filesystem.py +0 -0
  40. {apache_airflow_providers_standard-0.2.0b1 → apache_airflow_providers_standard-0.3.0}/src/airflow/providers/standard/sensors/python.py +0 -0
  41. {apache_airflow_providers_standard-0.2.0b1 → apache_airflow_providers_standard-0.3.0}/src/airflow/providers/standard/sensors/time.py +0 -0
  42. {apache_airflow_providers_standard-0.2.0b1 → apache_airflow_providers_standard-0.3.0}/src/airflow/providers/standard/sensors/time_delta.py +0 -0
  43. {apache_airflow_providers_standard-0.2.0b1 → apache_airflow_providers_standard-0.3.0}/src/airflow/providers/standard/sensors/weekday.py +0 -0
  44. {apache_airflow_providers_standard-0.2.0b1/src/airflow/providers/standard/utils → apache_airflow_providers_standard-0.3.0/src/airflow/providers/standard/triggers}/__init__.py +0 -0
  45. {apache_airflow_providers_standard-0.2.0b1 → apache_airflow_providers_standard-0.3.0}/src/airflow/providers/standard/triggers/file.py +0 -0
  46. {apache_airflow_providers_standard-0.2.0b1 → apache_airflow_providers_standard-0.3.0}/src/airflow/providers/standard/triggers/temporal.py +0 -0
  47. {apache_airflow_providers_standard-0.2.0b1 → apache_airflow_providers_standard-0.3.0}/src/airflow/providers/standard/utils/python_virtualenv.py +0 -0
  48. {apache_airflow_providers_standard-0.2.0b1 → apache_airflow_providers_standard-0.3.0}/src/airflow/providers/standard/utils/skipmixin.py +0 -0
  49. {apache_airflow_providers_standard-0.2.0b1 → apache_airflow_providers_standard-0.3.0}/src/airflow/providers/standard/utils/weekday.py +0 -0
  50. {apache_airflow_providers_standard-0.2.0b1 → apache_airflow_providers_standard-0.3.0}/src/airflow/providers/standard/version_compat.py +0 -0
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: apache-airflow-providers-standard
- Version: 0.2.0b1
+ Version: 0.3.0
  Summary: Provider package apache-airflow-providers-standard for Apache Airflow
  Keywords: airflow-provider,standard,airflow,integration
  Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -22,8 +22,8 @@ Classifier: Programming Language :: Python :: 3.12
  Classifier: Topic :: System :: Monitoring
  Requires-Dist: apache-airflow>=2.9.0
  Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
- Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-standard/0.2.0b1/changelog.html
- Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-standard/0.2.0b1
+ Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-standard/0.3.0/changelog.html
+ Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-standard/0.3.0
  Project-URL: Mastodon, https://fosstodon.org/@airflow
  Project-URL: Slack Chat, https://s.apache.org/airflow-slack
  Project-URL: Source Code, https://github.com/apache/airflow
@@ -54,7 +54,7 @@ Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/

  Package ``apache-airflow-providers-standard``

- Release: ``0.2.0b1``
+ Release: ``0.3.0``


  Airflow Standard Provider
@@ -67,7 +67,7 @@ This is a provider package for ``standard`` provider. All classes for this provi
  are in ``airflow.providers.standard`` python package.

  You can find package information and changelog for the provider
- in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-standard/0.2.0b1/>`_.
+ in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-standard/0.3.0/>`_.

  Installation
  ------------
@@ -88,5 +88,5 @@ PIP package Version required
  ================== ==================

  The changelog for the provider package can be found in the
- `changelog <https://airflow.apache.org/docs/apache-airflow-providers-standard/0.2.0b1/changelog.html>`_.
+ `changelog <https://airflow.apache.org/docs/apache-airflow-providers-standard/0.3.0/changelog.html>`_.

@@ -23,7 +23,7 @@

  Package ``apache-airflow-providers-standard``

- Release: ``0.2.0b1``
+ Release: ``0.3.0``


  Airflow Standard Provider
@@ -36,7 +36,7 @@ This is a provider package for ``standard`` provider. All classes for this provi
  are in ``airflow.providers.standard`` python package.

  You can find package information and changelog for the provider
- in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-standard/0.2.0b1/>`_.
+ in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-standard/0.3.0/>`_.

  Installation
  ------------
@@ -57,4 +57,4 @@ PIP package Version required
  ================== ==================

  The changelog for the provider package can be found in the
- `changelog <https://airflow.apache.org/docs/apache-airflow-providers-standard/0.2.0b1/changelog.html>`_.
+ `changelog <https://airflow.apache.org/docs/apache-airflow-providers-standard/0.3.0/changelog.html>`_.
@@ -20,12 +20,12 @@
  # IF YOU WANT TO MODIFY THIS FILE EXCEPT DEPENDENCIES, YOU SHOULD MODIFY THE TEMPLATE
  # `pyproject_TEMPLATE.toml.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY
  [build-system]
- requires = ["flit_core==3.11.0"]
+ requires = ["flit_core==3.12.0"]
  build-backend = "flit_core.buildapi"

  [project]
  name = "apache-airflow-providers-standard"
- version = "0.2.0b1"
+ version = "0.3.0"
  description = "Provider package apache-airflow-providers-standard for Apache Airflow"
  readme = "README.rst"
  authors = [
@@ -66,6 +66,7 @@ dev = [
      "apache-airflow-task-sdk",
      "apache-airflow-devel-common",
      # Additional devel dependencies (do not remove this line and add extra development dependencies)
+     "apache-airflow-providers-mysql",
  ]

  [tool.uv.sources]
@@ -79,8 +80,8 @@ apache-airflow-providers-fab = {workspace = true}
  apache-airflow-providers-standard = {workspace = true}

  [project.urls]
- "Documentation" = "https://airflow.apache.org/docs/apache-airflow-providers-standard/0.2.0b1"
- "Changelog" = "https://airflow.apache.org/docs/apache-airflow-providers-standard/0.2.0b1/changelog.html"
+ "Documentation" = "https://airflow.apache.org/docs/apache-airflow-providers-standard/0.3.0"
+ "Changelog" = "https://airflow.apache.org/docs/apache-airflow-providers-standard/0.3.0/changelog.html"
  "Bug Tracker" = "https://github.com/apache/airflow/issues"
  "Source Code" = "https://github.com/apache/airflow"
  "Slack Chat" = "https://s.apache.org/airflow-slack"
@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version

  __all__ = ["__version__"]

- __version__ = "0.2.0b1"
+ __version__ = "0.3.0"

  if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
      "2.9.0"
@@ -0,0 +1,111 @@
+ # Licensed to the Apache Software Foundation (ASF) under one
+ # or more contributor license agreements. See the NOTICE file
+ # distributed with this work for additional information
+ # regarding copyright ownership. The ASF licenses this file
+ # to you under the Apache License, Version 2.0 (the
+ # "License"); you may not use this file except in compliance
+ # with the License. You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing,
+ # software distributed under the License is distributed on an
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ # KIND, either express or implied. See the License for the
+ # specific language governing permissions and limitations
+ # under the License.
+
+ from __future__ import annotations
+
+ import warnings
+ from collections.abc import Collection, Mapping, Sequence
+ from typing import TYPE_CHECKING, Any, Callable, ClassVar
+
+ from airflow.decorators.base import DecoratedOperator, TaskDecorator, task_decorator_factory
+ from airflow.providers.standard.operators.bash import BashOperator
+ from airflow.sdk.definitions._internal.types import SET_DURING_EXECUTION
+ from airflow.utils.context import context_merge
+ from airflow.utils.operator_helpers import determine_kwargs
+
+ if TYPE_CHECKING:
+     from airflow.sdk.definitions.context import Context
+
+
+ class _BashDecoratedOperator(DecoratedOperator, BashOperator):
+     """
+     Wraps a Python callable and uses the callable return value as the Bash command to be executed.
+
+     :param python_callable: A reference to an object that is callable.
+     :param op_kwargs: A dictionary of keyword arguments that will get unpacked
+         in your function (templated).
+     :param op_args: A list of positional arguments that will get unpacked when
+         calling your callable (templated).
+     """
+
+     template_fields: Sequence[str] = (*DecoratedOperator.template_fields, *BashOperator.template_fields)
+     template_fields_renderers: ClassVar[dict[str, str]] = {
+         **DecoratedOperator.template_fields_renderers,
+         **BashOperator.template_fields_renderers,
+     }
+
+     custom_operator_name: str = "@task.bash"
+     overwrite_rtif_after_execution: bool = True
+
+     def __init__(
+         self,
+         *,
+         python_callable: Callable,
+         op_args: Collection[Any] | None = None,
+         op_kwargs: Mapping[str, Any] | None = None,
+         **kwargs,
+     ) -> None:
+         if kwargs.pop("multiple_outputs", None):
+             warnings.warn(
+                 f"`multiple_outputs=True` is not supported in {self.custom_operator_name} tasks. Ignoring.",
+                 UserWarning,
+                 stacklevel=3,
+             )
+
+         super().__init__(
+             python_callable=python_callable,
+             op_args=op_args,
+             op_kwargs=op_kwargs,
+             bash_command=SET_DURING_EXECUTION,
+             multiple_outputs=False,
+             **kwargs,
+         )
+
+     def execute(self, context: Context) -> Any:
+         context_merge(context, self.op_kwargs)
+         kwargs = determine_kwargs(self.python_callable, self.op_args, context)
+
+         self.bash_command = self.python_callable(*self.op_args, **kwargs)
+
+         if not isinstance(self.bash_command, str) or self.bash_command.strip() == "":
+             raise TypeError("The returned value from the TaskFlow callable must be a non-empty string.")
+
+         self._is_inline_cmd = self._is_inline_command(bash_command=self.bash_command)
+         context["ti"].render_templates()  # type: ignore[attr-defined]
+
+         return super().execute(context)
+
+
+ def bash_task(
+     python_callable: Callable | None = None,
+     **kwargs,
+ ) -> TaskDecorator:
+     """
+     Wrap a function into a BashOperator.
+
+     Accepts kwargs for operator kwargs. Can be reused in a single DAG. This function is only used
+     during type checking or auto-completion.
+
+     :param python_callable: Function to decorate.
+
+     :meta private:
+     """
+     return task_decorator_factory(
+         python_callable=python_callable,
+         decorated_operator_class=_BashDecoratedOperator,
+         **kwargs,
+     )
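
The new decorators/bash.py module backs the @task.bash TaskFlow decorator: the decorated callable must return a non-empty string, which becomes the bash_command executed by BashOperator. A minimal usage sketch (the DAG and task names here are illustrative, not part of the package):

    from airflow.decorators import dag, task

    @dag(schedule=None)
    def bash_example():
        @task.bash
        def list_tmp() -> str:
            # The returned string becomes the bash_command run by BashOperator.
            return "ls -l /tmp"

        list_tmp()

    bash_example()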
@@ -0,0 +1,57 @@
+ # Licensed to the Apache Software Foundation (ASF) under one
+ # or more contributor license agreements. See the NOTICE file
+ # distributed with this work for additional information
+ # regarding copyright ownership. The ASF licenses this file
+ # to you under the Apache License, Version 2.0 (the
+ # "License"); you may not use this file except in compliance
+ # with the License. You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing,
+ # software distributed under the License is distributed on an
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ # KIND, either express or implied. See the License for the
+ # specific language governing permissions and limitations
+ # under the License.
+ from __future__ import annotations
+
+ from typing import TYPE_CHECKING, Callable
+
+ from airflow.decorators.base import task_decorator_factory
+ from airflow.providers.standard.decorators.python import _PythonDecoratedOperator
+ from airflow.providers.standard.operators.python import BranchExternalPythonOperator
+
+ if TYPE_CHECKING:
+     from airflow.decorators.base import TaskDecorator
+
+
+ class _BranchExternalPythonDecoratedOperator(_PythonDecoratedOperator, BranchExternalPythonOperator):
+     """Wraps a Python callable and captures args/kwargs when called for execution."""
+
+     template_fields = BranchExternalPythonOperator.template_fields
+     custom_operator_name: str = "@task.branch_external_python"
+
+
+ def branch_external_python_task(
+     python_callable: Callable | None = None, multiple_outputs: bool | None = None, **kwargs
+ ) -> TaskDecorator:
+     """
+     Wrap a python function into a BranchExternalPythonOperator.
+
+     For more information on how to use this operator, take a look at the guide:
+     :ref:`concepts:branching`
+
+     Accepts kwargs for operator kwarg. Can be reused in a single DAG.
+
+     :param python_callable: Function to decorate
+     :param multiple_outputs: if set, function return value will be
+         unrolled to multiple XCom values. Dict will unroll to xcom values with keys as XCom keys.
+         Defaults to False.
+     """
+     return task_decorator_factory(
+         python_callable=python_callable,
+         multiple_outputs=multiple_outputs,
+         decorated_operator_class=_BranchExternalPythonDecoratedOperator,
+         **kwargs,
+     )
@@ -0,0 +1,57 @@
+ # Licensed to the Apache Software Foundation (ASF) under one
+ # or more contributor license agreements. See the NOTICE file
+ # distributed with this work for additional information
+ # regarding copyright ownership. The ASF licenses this file
+ # to you under the Apache License, Version 2.0 (the
+ # "License"); you may not use this file except in compliance
+ # with the License. You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing,
+ # software distributed under the License is distributed on an
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ # KIND, either express or implied. See the License for the
+ # specific language governing permissions and limitations
+ # under the License.
+ from __future__ import annotations
+
+ from typing import TYPE_CHECKING, Callable
+
+ from airflow.decorators.base import task_decorator_factory
+ from airflow.providers.standard.decorators.python import _PythonDecoratedOperator
+ from airflow.providers.standard.operators.python import BranchPythonOperator
+
+ if TYPE_CHECKING:
+     from airflow.decorators.base import TaskDecorator
+
+
+ class _BranchPythonDecoratedOperator(_PythonDecoratedOperator, BranchPythonOperator):
+     """Wraps a Python callable and captures args/kwargs when called for execution."""
+
+     template_fields = BranchPythonOperator.template_fields
+     custom_operator_name: str = "@task.branch"
+
+
+ def branch_task(
+     python_callable: Callable | None = None, multiple_outputs: bool | None = None, **kwargs
+ ) -> TaskDecorator:
+     """
+     Wrap a python function into a BranchPythonOperator.
+
+     For more information on how to use this operator, take a look at the guide:
+     :ref:`concepts:branching`
+
+     Accepts kwargs for operator kwarg. Can be reused in a single DAG.
+
+     :param python_callable: Function to decorate
+     :param multiple_outputs: if set, function return value will be
+         unrolled to multiple XCom values. Dict will unroll to xcom values with keys as XCom keys.
+         Defaults to False.
+     """
+     return task_decorator_factory(
+         python_callable=python_callable,
+         multiple_outputs=multiple_outputs,
+         decorated_operator_class=_BranchPythonDecoratedOperator,
+         **kwargs,
+     )
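
decorators/branch_python.py provides the @task.branch decorator, whose callable returns the task_id (or list of task_ids) of the downstream path to follow. A minimal sketch, with illustrative DAG and task names:

    from airflow.decorators import dag, task

    @dag(schedule=None)
    def branch_example():
        @task.branch
        def choose(value: int = 3) -> str:
            # Return the task_id of the downstream task to run; the other branch is skipped.
            return "even_path" if value % 2 == 0 else "odd_path"

        @task(task_id="even_path")
        def even():
            print("value was even")

        @task(task_id="odd_path")
        def odd():
            print("value was odd")

        choose() >> [even(), odd()]

    branch_example()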
@@ -0,0 +1,57 @@
+ # Licensed to the Apache Software Foundation (ASF) under one
+ # or more contributor license agreements. See the NOTICE file
+ # distributed with this work for additional information
+ # regarding copyright ownership. The ASF licenses this file
+ # to you under the Apache License, Version 2.0 (the
+ # "License"); you may not use this file except in compliance
+ # with the License. You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing,
+ # software distributed under the License is distributed on an
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ # KIND, either express or implied. See the License for the
+ # specific language governing permissions and limitations
+ # under the License.
+ from __future__ import annotations
+
+ from typing import TYPE_CHECKING, Callable
+
+ from airflow.decorators.base import task_decorator_factory
+ from airflow.providers.standard.decorators.python import _PythonDecoratedOperator
+ from airflow.providers.standard.operators.python import BranchPythonVirtualenvOperator
+
+ if TYPE_CHECKING:
+     from airflow.decorators.base import TaskDecorator
+
+
+ class _BranchPythonVirtualenvDecoratedOperator(_PythonDecoratedOperator, BranchPythonVirtualenvOperator):
+     """Wraps a Python callable and captures args/kwargs when called for execution."""
+
+     template_fields = BranchPythonVirtualenvOperator.template_fields
+     custom_operator_name: str = "@task.branch_virtualenv"
+
+
+ def branch_virtualenv_task(
+     python_callable: Callable | None = None, multiple_outputs: bool | None = None, **kwargs
+ ) -> TaskDecorator:
+     """
+     Wrap a python function into a BranchPythonVirtualenvOperator.
+
+     For more information on how to use this operator, take a look at the guide:
+     :ref:`concepts:branching`
+
+     Accepts kwargs for operator kwarg. Can be reused in a single DAG.
+
+     :param python_callable: Function to decorate
+     :param multiple_outputs: if set, function return value will be
+         unrolled to multiple XCom values. Dict will unroll to xcom values with keys as XCom keys.
+         Defaults to False.
+     """
+     return task_decorator_factory(
+         python_callable=python_callable,
+         multiple_outputs=multiple_outputs,
+         decorated_operator_class=_BranchPythonVirtualenvDecoratedOperator,
+         **kwargs,
+     )
@@ -0,0 +1,65 @@
+ # Licensed to the Apache Software Foundation (ASF) under one
+ # or more contributor license agreements. See the NOTICE file
+ # distributed with this work for additional information
+ # regarding copyright ownership. The ASF licenses this file
+ # to you under the Apache License, Version 2.0 (the
+ # "License"); you may not use this file except in compliance
+ # with the License. You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing,
+ # software distributed under the License is distributed on an
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ # KIND, either express or implied. See the License for the
+ # specific language governing permissions and limitations
+ # under the License.
+ from __future__ import annotations
+
+ from typing import TYPE_CHECKING, Callable
+
+ from airflow.decorators.base import task_decorator_factory
+ from airflow.providers.standard.decorators.python import _PythonDecoratedOperator
+ from airflow.providers.standard.operators.python import ExternalPythonOperator
+
+ if TYPE_CHECKING:
+     from airflow.decorators.base import TaskDecorator
+
+
+ class _PythonExternalDecoratedOperator(_PythonDecoratedOperator, ExternalPythonOperator):
+     """Wraps a Python callable and captures args/kwargs when called for execution."""
+
+     template_fields = ExternalPythonOperator.template_fields
+     custom_operator_name: str = "@task.external_python"
+
+
+ def external_python_task(
+     python: str | None = None,
+     python_callable: Callable | None = None,
+     multiple_outputs: bool | None = None,
+     **kwargs,
+ ) -> TaskDecorator:
+     """
+     Wrap a callable into an Airflow operator to run via a Python virtual environment.
+
+     Accepts kwargs for operator kwarg. Can be reused in a single DAG.
+
+     This function is only used during type checking or auto-completion.
+
+     :meta private:
+
+     :param python: Full path string (file-system specific) that points to a Python binary inside
+         a virtualenv that should be used (in ``VENV/bin`` folder). Should be absolute path
+         (so usually start with "/" or "X:/" depending on the filesystem/os used).
+     :param python_callable: Function to decorate
+     :param multiple_outputs: If set to True, the decorated function's return value will be unrolled to
+         multiple XCom values. Dict will unroll to XCom values with its keys as XCom keys.
+         Defaults to False.
+     """
+     return task_decorator_factory(
+         python=python,
+         python_callable=python_callable,
+         multiple_outputs=multiple_outputs,
+         decorated_operator_class=_PythonExternalDecoratedOperator,
+         **kwargs,
+     )
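
decorators/external_python.py backs @task.external_python, which runs the callable under a pre-existing virtualenv interpreter given by the python path. A minimal sketch; the interpreter path below is a placeholder for an environment that already exists on the worker:

    from airflow.decorators import dag, task

    @dag(schedule=None)
    def external_python_example():
        # /opt/venvs/reporting/bin/python is an illustrative path to an existing venv interpreter.
        @task.external_python(python="/opt/venvs/reporting/bin/python")
        def which_python() -> str:
            import sys
            return sys.executable

        which_python()

    external_python_example()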
@@ -0,0 +1,81 @@
+ # Licensed to the Apache Software Foundation (ASF) under one
+ # or more contributor license agreements. See the NOTICE file
+ # distributed with this work for additional information
+ # regarding copyright ownership. The ASF licenses this file
+ # to you under the Apache License, Version 2.0 (the
+ # "License"); you may not use this file except in compliance
+ # with the License. You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing,
+ # software distributed under the License is distributed on an
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ # KIND, either express or implied. See the License for the
+ # specific language governing permissions and limitations
+ # under the License.
+ from __future__ import annotations
+
+ from collections.abc import Sequence
+ from typing import TYPE_CHECKING, Callable
+
+ from airflow.decorators.base import DecoratedOperator, task_decorator_factory
+ from airflow.providers.standard.operators.python import PythonOperator
+
+ if TYPE_CHECKING:
+     from airflow.decorators.base import TaskDecorator
+
+
+ class _PythonDecoratedOperator(DecoratedOperator, PythonOperator):
+     """
+     Wraps a Python callable and captures args/kwargs when called for execution.
+
+     :param python_callable: A reference to an object that is callable
+     :param op_kwargs: a dictionary of keyword arguments that will get unpacked
+         in your function (templated)
+     :param op_args: a list of positional arguments that will get unpacked when
+         calling your callable (templated)
+     :param multiple_outputs: If set to True, the decorated function's return value will be unrolled to
+         multiple XCom values. Dict will unroll to XCom values with its keys as XCom keys. Defaults to False.
+     """
+
+     template_fields: Sequence[str] = ("templates_dict", "op_args", "op_kwargs")
+     template_fields_renderers = {"templates_dict": "json", "op_args": "py", "op_kwargs": "py"}
+
+     custom_operator_name: str = "@task"
+
+     def __init__(self, *, python_callable, op_args, op_kwargs, **kwargs) -> None:
+         kwargs_to_upstream = {
+             "python_callable": python_callable,
+             "op_args": op_args,
+             "op_kwargs": op_kwargs,
+         }
+         super().__init__(
+             kwargs_to_upstream=kwargs_to_upstream,
+             python_callable=python_callable,
+             op_args=op_args,
+             op_kwargs=op_kwargs,
+             **kwargs,
+         )
+
+
+ def python_task(
+     python_callable: Callable | None = None,
+     multiple_outputs: bool | None = None,
+     **kwargs,
+ ) -> TaskDecorator:
+     """
+     Wrap a function into an Airflow operator.
+
+     Accepts kwargs for operator kwarg. Can be reused in a single DAG.
+
+     :param python_callable: Function to decorate
+     :param multiple_outputs: If set to True, the decorated function's return value will be unrolled to
+         multiple XCom values. Dict will unroll to XCom values with its keys as XCom keys. Defaults to False.
+     """
+     return task_decorator_factory(
+         python_callable=python_callable,
+         multiple_outputs=multiple_outputs,
+         decorated_operator_class=_PythonDecoratedOperator,
+         **kwargs,
+     )
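
decorators/python.py holds _PythonDecoratedOperator, the base class behind the plain @task decorator and the parent of the other decorated operators above. A minimal sketch of @task and multiple_outputs, with illustrative names:

    from airflow.decorators import dag, task

    @dag(schedule=None)
    def python_example():
        @task
        def add(x: int, y: int) -> int:
            return x + y

        @task(multiple_outputs=True)
        def stats(total: int) -> dict:
            # With multiple_outputs=True, each dict key is pushed as its own XCom value.
            return {"total": total, "double": total * 2}

        stats(add(1, 2))

    python_example()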
@@ -0,0 +1,60 @@
+ # Licensed to the Apache Software Foundation (ASF) under one
+ # or more contributor license agreements. See the NOTICE file
+ # distributed with this work for additional information
+ # regarding copyright ownership. The ASF licenses this file
+ # to you under the Apache License, Version 2.0 (the
+ # "License"); you may not use this file except in compliance
+ # with the License. You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing,
+ # software distributed under the License is distributed on an
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ # KIND, either express or implied. See the License for the
+ # specific language governing permissions and limitations
+ # under the License.
+ from __future__ import annotations
+
+ from typing import TYPE_CHECKING, Callable
+
+ from airflow.decorators.base import task_decorator_factory
+ from airflow.providers.standard.decorators.python import _PythonDecoratedOperator
+ from airflow.providers.standard.operators.python import PythonVirtualenvOperator
+
+ if TYPE_CHECKING:
+     from airflow.decorators.base import TaskDecorator
+
+
+ class _PythonVirtualenvDecoratedOperator(_PythonDecoratedOperator, PythonVirtualenvOperator):
+     """Wraps a Python callable and captures args/kwargs when called for execution."""
+
+     template_fields = PythonVirtualenvOperator.template_fields
+     custom_operator_name: str = "@task.virtualenv"
+
+
+ def virtualenv_task(
+     python_callable: Callable | None = None,
+     multiple_outputs: bool | None = None,
+     **kwargs,
+ ) -> TaskDecorator:
+     """
+     Wrap a callable into an Airflow operator to run via a Python virtual environment.
+
+     Accepts kwargs for operator kwarg. Can be reused in a single DAG.
+
+     This function is only used during type checking or auto-completion.
+
+     :meta private:
+
+     :param python_callable: Function to decorate
+     :param multiple_outputs: If set to True, the decorated function's return value will be unrolled to
+         multiple XCom values. Dict will unroll to XCom values with its keys as XCom keys.
+         Defaults to False.
+     """
+     return task_decorator_factory(
+         python_callable=python_callable,
+         multiple_outputs=multiple_outputs,
+         decorated_operator_class=_PythonVirtualenvDecoratedOperator,
+         **kwargs,
+     )
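
decorators/python_virtualenv.py backs @task.virtualenv, which creates a fresh virtualenv for the task and runs the callable inside it. A minimal sketch; the pinned requirement and function name are only examples, and requirements/system_site_packages are PythonVirtualenvOperator kwargs passed through the decorator:

    from airflow.decorators import dag, task

    @dag(schedule=None)
    def virtualenv_example():
        @task.virtualenv(requirements=["colorama==0.4.6"], system_site_packages=False)
        def colorama_version() -> str:
            # Imports resolve inside the freshly created virtualenv.
            import colorama
            return colorama.__version__

        colorama_version()

    virtualenv_example()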