apache-airflow-providers-edge3 1.0.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (39)
  1. airflow/providers/edge3/LICENSE +201 -0
  2. airflow/providers/edge3/__init__.py +39 -0
  3. airflow/providers/edge3/cli/__init__.py +16 -0
  4. airflow/providers/edge3/cli/api_client.py +206 -0
  5. airflow/providers/edge3/cli/dataclasses.py +95 -0
  6. airflow/providers/edge3/cli/edge_command.py +689 -0
  7. airflow/providers/edge3/example_dags/__init__.py +16 -0
  8. airflow/providers/edge3/example_dags/integration_test.py +164 -0
  9. airflow/providers/edge3/example_dags/win_notepad.py +83 -0
  10. airflow/providers/edge3/example_dags/win_test.py +342 -0
  11. airflow/providers/edge3/executors/__init__.py +22 -0
  12. airflow/providers/edge3/executors/edge_executor.py +367 -0
  13. airflow/providers/edge3/get_provider_info.py +99 -0
  14. airflow/providers/edge3/models/__init__.py +16 -0
  15. airflow/providers/edge3/models/edge_job.py +94 -0
  16. airflow/providers/edge3/models/edge_logs.py +73 -0
  17. airflow/providers/edge3/models/edge_worker.py +230 -0
  18. airflow/providers/edge3/openapi/__init__.py +19 -0
  19. airflow/providers/edge3/openapi/edge_worker_api_v1.yaml +808 -0
  20. airflow/providers/edge3/plugins/__init__.py +16 -0
  21. airflow/providers/edge3/plugins/edge_executor_plugin.py +229 -0
  22. airflow/providers/edge3/plugins/templates/edge_worker_hosts.html +175 -0
  23. airflow/providers/edge3/plugins/templates/edge_worker_jobs.html +69 -0
  24. airflow/providers/edge3/version_compat.py +36 -0
  25. airflow/providers/edge3/worker_api/__init__.py +17 -0
  26. airflow/providers/edge3/worker_api/app.py +43 -0
  27. airflow/providers/edge3/worker_api/auth.py +135 -0
  28. airflow/providers/edge3/worker_api/datamodels.py +190 -0
  29. airflow/providers/edge3/worker_api/routes/__init__.py +16 -0
  30. airflow/providers/edge3/worker_api/routes/_v2_compat.py +135 -0
  31. airflow/providers/edge3/worker_api/routes/_v2_routes.py +237 -0
  32. airflow/providers/edge3/worker_api/routes/health.py +28 -0
  33. airflow/providers/edge3/worker_api/routes/jobs.py +162 -0
  34. airflow/providers/edge3/worker_api/routes/logs.py +133 -0
  35. airflow/providers/edge3/worker_api/routes/worker.py +224 -0
  36. apache_airflow_providers_edge3-1.0.0.dist-info/METADATA +117 -0
  37. apache_airflow_providers_edge3-1.0.0.dist-info/RECORD +39 -0
  38. apache_airflow_providers_edge3-1.0.0.dist-info/WHEEL +4 -0
  39. apache_airflow_providers_edge3-1.0.0.dist-info/entry_points.txt +6 -0
@@ -0,0 +1,16 @@
+ # Licensed to the Apache Software Foundation (ASF) under one
+ # or more contributor license agreements. See the NOTICE file
+ # distributed with this work for additional information
+ # regarding copyright ownership. The ASF licenses this file
+ # to you under the Apache License, Version 2.0 (the
+ # "License"); you may not use this file except in compliance
+ # with the License. You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing,
+ # software distributed under the License is distributed on an
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ # KIND, either express or implied. See the License for the
+ # specific language governing permissions and limitations
+ # under the License.
@@ -0,0 +1,164 @@
+ #
+ # Licensed to the Apache Software Foundation (ASF) under one
+ # or more contributor license agreements. See the NOTICE file
+ # distributed with this work for additional information
+ # regarding copyright ownership. The ASF licenses this file
+ # to you under the Apache License, Version 2.0 (the
+ # "License"); you may not use this file except in compliance
+ # with the License. You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing,
+ # software distributed under the License is distributed on an
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ # KIND, either express or implied. See the License for the
+ # specific language governing permissions and limitations
+ # under the License.
+ """
+ This DAG exercises all critical functions as an integration test.
+
+ The DAG should work in all standard setups without error.
+ """
+
+ from __future__ import annotations
+
+ from datetime import datetime
+ from time import sleep
+
+ from airflow.decorators import task, task_group
+ from airflow.exceptions import AirflowNotFoundException
+ from airflow.hooks.base import BaseHook
+ from airflow.models.dag import DAG
+ from airflow.models.variable import Variable
+ from airflow.providers.common.compat.standard.operators import PythonOperator
+ from airflow.providers.standard.operators.empty import EmptyOperator
+ from airflow.sdk import Param
+ from airflow.utils.trigger_rule import TriggerRule
+
+ try:
+     from airflow.providers.standard.operators.bash import BashOperator
+ except ImportError:
+     from airflow.operators.bash import BashOperator  # type: ignore[no-redef,attr-defined]
+
+ with DAG(
+     dag_id="integration_test",
+     dag_display_name="Integration Test",
+     description=__doc__.partition(".")[0],
+     doc_md=__doc__,
+     schedule=None,
+     start_date=datetime(2025, 1, 1),
+     tags=["example", "edge", "integration test"],
+     params={
+         "mapping_count": Param(
+             4,
+             type="integer",
+             title="Mapping Count",
+             description="Amount of tasks that should be mapped",
+         ),
+     },
+ ) as dag:
+
+     @task
+     def my_setup():
+         print("Assume this is a setup task")
+
+     @task
+     def mapping_from_params(**context) -> list[int]:
+         mapping_count: int = context["params"]["mapping_count"]
+         return list(range(1, mapping_count + 1))
+
+     @task
+     def add_one(x: int):
+         return x + 1
+
+     @task
+     def sum_it(values):
+         total = sum(values)
+         print(f"Total was {total}")
+
+     @task_group(prefix_group_id=False)
+     def mapping_task_group():
+         added_values = add_one.expand(x=mapping_from_params())
+         sum_it(added_values)
+
+     @task.branch
+     def branching():
+         return ["bash", "virtualenv", "variable", "connection", "classic_bash", "classic_python"]
+
+     @task.bash
+     def bash():
+         return "echo hello world"
+
+     @task.virtualenv(requirements="numpy")
+     def virtualenv():
+         import numpy
+
+         print(f"Welcome to virtualenv with numpy version {numpy.__version__}.")
+
+     @task
+     def variable():
+         Variable.set("integration_test_key", "value")
+         if Variable.get("integration_test_key") != "value":
+             raise ValueError("Variable not set as expected.")
+         Variable.delete("integration_test_key")
+
+     @task
+     def connection():
+         try:
+             conn = BaseHook.get_connection("integration_test")
+             print(f"Got connection {conn}")
+         except AirflowNotFoundException:
+             print("Connection not found... but also OK.")
+
+     @task_group(prefix_group_id=False)
+     def standard_tasks_group():
+         classic_bash = BashOperator(
+             task_id="classic_bash", bash_command="echo Parameter is {{ params.mapping_count }}"
+         )
+
+         empty = EmptyOperator(task_id="not_executed")
+
+         def python_call():
+             print("Hello world")
+
+         classic_py = PythonOperator(task_id="classic_python", python_callable=python_call)
+
+         branching() >> [bash(), virtualenv(), variable(), connection(), classic_bash, classic_py, empty]
+
+     @task
+     def plan_to_fail():
+         print("This task is supposed to fail")
+         raise ValueError("This task is supposed to fail")
+
+     @task(retries=1, retry_delay=5.0)
+     def needs_retry(**context):
+         print("This task is supposed to fail on the first attempt")
+         if context["ti"].try_number == 1:
+             raise ValueError("This task is supposed to fail")
+
+     @task(trigger_rule=TriggerRule.ONE_SUCCESS)
+     def capture_fail():
+         print("all good, we accept the fail and report OK")
+
+     @task_group(prefix_group_id=False)
+     def failure_tests_group():
+         [plan_to_fail(), needs_retry()] >> capture_fail()
+
+     @task
+     def long_running():
+         print("This task runs for 15 minutes")
+         for i in range(15):
+             sleep(60)
+             print(f"Running for {i + 1} minutes now.")
+         print("Long running task completed.")
+
+     @task
+     def my_teardown():
+         print("Assume this is a teardown task")
+
+     (
+         my_setup().as_setup()
+         >> [mapping_task_group(), standard_tasks_group(), failure_tests_group(), long_running()]
+         >> my_teardown().as_teardown()
+     )
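
The integration_test DAG above drives its dynamic task mapping from the "mapping_count" Param. As a hedged illustration (not part of the package, and assuming Airflow's dag.test() debug utility is available in the target version), the DAG module could be exercised locally with a different mapping count passed as run configuration:

    # Hypothetical snippet appended to the DAG file for local debugging only.
    if __name__ == "__main__":
        # run_conf overrides the "mapping_count" Param for this single test run.
        dag.test(run_conf={"mapping_count": 8})
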
@@ -0,0 +1,83 @@
+ #
+ # Licensed to the Apache Software Foundation (ASF) under one
+ # or more contributor license agreements. See the NOTICE file
+ # distributed with this work for additional information
+ # regarding copyright ownership. The ASF licenses this file
+ # to you under the Apache License, Version 2.0 (the
+ # "License"); you may not use this file except in compliance
+ # with the License. You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing,
+ # software distributed under the License is distributed on an
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ # KIND, either express or implied. See the License for the
+ # specific language governing permissions and limitations
+ # under the License.
+ """
+ This DAG demonstrates how to interact with a Windows worker via Notepad.
+
+ The DAG is created in conjunction with the documentation in
+ https://github.com/apache/airflow/blob/main/providers/edge3/docs/install_on_windows.rst
+ and serves as a PoC test for the Windows worker.
+ """
+
+ from __future__ import annotations
+
+ from collections.abc import Sequence
+ from datetime import datetime
+ from pathlib import Path
+ from subprocess import check_call
+ from tempfile import gettempdir
+ from typing import TYPE_CHECKING, Any
+
+ from airflow.models import BaseOperator
+ from airflow.models.dag import DAG
+ from airflow.sdk import Param
+
+ if TYPE_CHECKING:
+     from airflow.utils.context import Context
+
+
+ class NotepadOperator(BaseOperator):
+     """Example operator implementation which starts Notepad.exe on Windows."""
+
+     template_fields: Sequence[str] = ("text",)
+
+     def __init__(self, text: str, **kwargs):
+         self.text = text
+         super().__init__(**kwargs)
+
+     def execute(self, context: Context) -> Any:
+         tmp_file = Path(gettempdir()) / "airflow_test.txt"
+         with open(tmp_file, "w", encoding="utf8") as textfile:
+             textfile.write(self.text)
+         check_call(["notepad.exe", tmp_file])
+         with open(tmp_file, encoding="utf8") as textfile:
+             return textfile.read()
+
+
+ with DAG(
+     dag_id="win_notepad",
+     dag_display_name="Windows Notepad",
+     description=__doc__.partition(".")[0],
+     doc_md=__doc__,
+     schedule=None,
+     start_date=datetime(2024, 7, 1),
+     tags=["edge", "Windows"],
+     default_args={"queue": "windows"},
+     params={
+         "notepad_text": Param(
+             "This is a text as proposal generated by Airflow DAG. Change it and save and it will get to XCom.",
+             title="Notepad Text",
+             description="Add some text that should be filled into Notepad at start.",
+             type="string",
+             format="multiline",
+         ),
+     },
+ ) as dag:
+     npo = NotepadOperator(
+         task_id="notepad",
+         text="{{ params.notepad_text }}",
+     )
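
win_notepad.py routes its task to the Windows Edge worker through default_args={"queue": "windows"}. As a hedged sketch (an assumption about per-task queue routing via the standard BaseOperator "queue" argument, not something shown in this diff), the same routing could also be expressed on an individual task; the task_id below is made up for illustration:

    # Illustrative only: explicit per-task queue instead of default_args.
    npo_explicit = NotepadOperator(
        task_id="notepad_explicit_queue",
        text="{{ params.notepad_text }}",
        queue="windows",  # must match a queue the Windows Edge worker listens on
    )
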
@@ -0,0 +1,342 @@
+ #
+ # Licensed to the Apache Software Foundation (ASF) under one
+ # or more contributor license agreements. See the NOTICE file
+ # distributed with this work for additional information
+ # regarding copyright ownership. The ASF licenses this file
+ # to you under the Apache License, Version 2.0 (the
+ # "License"); you may not use this file except in compliance
+ # with the License. You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing,
+ # software distributed under the License is distributed on an
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ # KIND, either express or implied. See the License for the
+ # specific language governing permissions and limitations
+ # under the License.
+ """
+ This DAG runs some tests to check a worker on Windows.
+
+ The DAG is created in conjunction with the documentation in
+ https://github.com/apache/airflow/blob/main/providers/edge3/docs/install_on_windows.rst
+ and serves as a PoC test for the Windows worker.
+ """
+
+ from __future__ import annotations
+
+ import os
+ from collections.abc import Container, Sequence
+ from datetime import datetime
+ from subprocess import STDOUT, Popen
+ from time import sleep
+ from typing import TYPE_CHECKING, Any, Callable
+
+ from airflow.decorators import task, task_group
+ from airflow.exceptions import AirflowException, AirflowNotFoundException, AirflowSkipException
+ from airflow.hooks.base import BaseHook
+ from airflow.models import BaseOperator
+ from airflow.models.dag import DAG
+ from airflow.models.variable import Variable
+ from airflow.providers.standard.operators.empty import EmptyOperator
+ from airflow.sdk import Param
+ from airflow.sdk.execution_time.context import context_to_airflow_vars
+ from airflow.utils.trigger_rule import TriggerRule
+ from airflow.utils.types import ArgNotSet
+
+ if TYPE_CHECKING:
+     try:
+         from airflow.sdk.types import RuntimeTaskInstanceProtocol as TaskInstance
+     except ImportError:
+         from airflow.models import TaskInstance  # type: ignore[assignment, no-redef]
+     from airflow.utils.context import Context
+
+ try:
+     from airflow.operators.python import PythonOperator  # type: ignore
+ except ImportError:
+     from airflow.providers.common.compat.standard.operators import PythonOperator
+
+
+ class CmdOperator(BaseOperator):
+     r"""Execute a command or batch of commands.
+
+     This operator is forked from BashOperator to execute any process on Windows.
+
+     If BaseOperator.do_xcom_push is True, the last line written to stdout
+     will also be pushed to an XCom when the command completes.
+
+     :param command: The command, set of commands or reference to a
+         BAT script (must be '.bat') to be executed. (templated)
+     :param env: If env is not None, it must be a dict that defines the
+         environment variables for the new process; these are used instead
+         of inheriting the current process environment, which is the default
+         behavior. (templated)
+     :param append_env: If False (default), only the environment variables passed via ``env`` are used
+         and the current process environment is not inherited. If True, the environment variables of the
+         current process are inherited, and variables passed by the user either update the existing
+         inherited environment variables or are appended to them.
+     :param skip_on_exit_code: If task exits with this exit code, leave the task
+         in ``skipped`` state (default: 99). If set to ``None``, any non-zero
+         exit code will be treated as a failure.
+     :param cwd: Working directory to execute the command in (templated).
+         If None (default), the command is run in a temporary directory.
+         To use current DAG folder as the working directory,
+         you might set template ``{{ task.dag.folder }}``.
+     :param output_processor: Function to further process the output of the script / command
+         (default is ``lambda output: output``).
+
+     Airflow will evaluate the exit code of the command. In general, a non-zero exit code will result in
+     task failure and zero will result in task success.
+     Exit code ``99`` (or another set in ``skip_on_exit_code``)
+     will throw an :class:`airflow.exceptions.AirflowSkipException`, which will leave the task in ``skipped``
+     state. You can have all non-zero exit codes be treated as a failure by setting ``skip_on_exit_code=None``.
+
+     .. list-table::
+        :widths: 25 25
+        :header-rows: 1
+
+        * - Exit code
+          - Behavior
+        * - 0
+          - success
+        * - `skip_on_exit_code` (default: 99)
+          - raise :class:`airflow.exceptions.AirflowSkipException`
+        * - otherwise
+          - raise :class:`airflow.exceptions.AirflowException`
+
+     .. warning::
+
+         Care should be taken with "user" input or when using Jinja templates in the
+         ``command``, as this command operator does not perform any escaping or
+         sanitization of the command.
+
+         This applies mostly to using "dag_run" conf, as that can be submitted via
+         users in the Web UI. Most of the default template variables are not at
+         risk.
+
+     """
+
+     template_fields: Sequence[str] = ("command", "env", "cwd")
+     template_fields_renderers = {"command": "bash", "env": "json"}
+     template_ext: Sequence[str] = (".bat",)
+
+     subprocess: Popen | None = None
+
+     def __init__(
+         self,
+         *,
+         command: list[str] | str | ArgNotSet,
+         env: dict[str, str] | None = None,
+         append_env: bool = False,
+         skip_on_exit_code: int | Container[int] | None = 99,
+         cwd: str | None = None,
+         output_processor: Callable[[str], Any] = lambda result: result,
+         **kwargs,
+     ) -> None:
+         super().__init__(**kwargs)
+         self.command = command.split(" ") if isinstance(command, str) else command
+         self.env = env
+         self.skip_on_exit_code = (
+             skip_on_exit_code
+             if isinstance(skip_on_exit_code, Container)
+             else [skip_on_exit_code]
+             if skip_on_exit_code is not None
+             else []
+         )
+         self.cwd = cwd
+         self.append_env = append_env
+         self.output_processor = output_processor
+
+         # When using the @task.command decorator, the command is not known until the underlying Python
+         # callable is executed and therefore set to NOTSET initially. This flag is useful during execution to
+         # determine whether the command value needs to be re-rendered.
+         self._init_command_not_set = isinstance(self.command, ArgNotSet)
+
+     @staticmethod
+     def refresh_command(ti: TaskInstance) -> None:
+         """Rewrite the underlying rendered command value for a task instance in the metadatabase.
+
+         TaskInstance.get_rendered_template_fields() cannot be used because this will retrieve the
+         RenderedTaskInstanceFields from the metadatabase which doesn't have the runtime-evaluated command
+         value.
+
+         :meta private:
+         """
+         from airflow.models.renderedtifields import RenderedTaskInstanceFields
+
+         RenderedTaskInstanceFields._update_runtime_evaluated_template_fields(ti)
+
+     def get_env(self, context):
+         """Build the set of environment variables to be exposed for the command."""
+         system_env = os.environ.copy()
+         env = self.env
+         if env is None:
+             env = system_env
+         elif self.append_env:
+             system_env.update(env)
+             env = system_env
+
+         airflow_context_vars = context_to_airflow_vars(context, in_env_var_format=True)
+         self.log.debug(
+             "Exporting env vars: %s",
+             " ".join(f"{k}={v!r}" for k, v in airflow_context_vars.items()),
+         )
+         env.update(airflow_context_vars)
+         return env
+
+     def execute(self, context: Context):
+         if self.cwd is not None:
+             if not os.path.exists(self.cwd):
+                 raise AirflowException(f"Can not find the cwd: {self.cwd}")
+             if not os.path.isdir(self.cwd):
+                 raise AirflowException(f"The cwd {self.cwd} must be a directory")
+         env = self.get_env(context)
+
+         # Because the command value is evaluated at runtime using the @task.command decorator, the
+         # RenderedTaskInstanceField data needs to be rewritten and the command value re-rendered -- the
+         # latter because the returned command from the decorated callable could contain a Jinja expression.
+         # Both will ensure the correct command is executed and that the Rendered Template view in the UI
+         # displays the executed command (otherwise it will display as an ArgNotSet type).
+         if self._init_command_not_set:
+             ti = context["ti"]
+             self.refresh_command(ti)
+
+         self.subprocess = Popen(
+             args=self.command,  # type: ignore # here we assume the arg has been replaced by a string array
+             shell=True,
+             env=env,
+             stderr=STDOUT,
+             text=True,
+             cwd=self.cwd,
+         )
+         outs, _ = self.subprocess.communicate()
+         exit_code = self.subprocess.returncode
+         if exit_code in self.skip_on_exit_code:
+             raise AirflowSkipException(f"Command returned exit code {exit_code}. Skipping.")
+         if exit_code != 0:
+             raise AirflowException(f"Command failed. The command returned a non-zero exit code {exit_code}.")
+
+         return self.output_processor(outs)
+
+     def on_kill(self) -> None:
+         if self.subprocess:
+             self.subprocess.kill()
+
+
+ with DAG(
+     dag_id="win_test",
+     dag_display_name="Windows Test",
+     description=__doc__.partition(".")[0],
+     doc_md=__doc__,
+     schedule=None,
+     start_date=datetime(2025, 1, 1),
+     tags=["edge", "Windows"],
+     default_args={"queue": "windows"},
+     params={
+         "mapping_count": Param(
+             4,
+             type="integer",
+             title="Mapping Count",
+             description="Amount of tasks that should be mapped",
+         ),
+     },
+ ) as dag:
+
+     @task
+     def my_setup():
+         print("Assume this is a setup task")
+
+     @task
+     def mapping_from_params(**context) -> list[int]:
+         mapping_count: int = context["params"]["mapping_count"]
+         return list(range(1, mapping_count + 1))
+
+     @task
+     def add_one(x: int):
+         return x + 1
+
+     @task
+     def sum_it(values):
+         total = sum(values)
+         print(f"Total was {total}")
+
+     @task_group(prefix_group_id=False)
+     def mapping_task_group():
+         added_values = add_one.expand(x=mapping_from_params())
+         sum_it(added_values)
+
+     @task_group(prefix_group_id=False)
+     def standard_tasks_group():
+         @task.branch
+         def branching():
+             return ["virtualenv", "variable", "connection", "command", "classic_python"]
+
+         @task.virtualenv(requirements="numpy")
+         def virtualenv():
+             import numpy  # type: ignore
+
+             print(f"Welcome to virtualenv with numpy version {numpy.__version__}.")
+
+         @task
+         def variable():
+             print("Creating a new variable...")
+             Variable.set("integration_test_key", "value")
+             print(f"For the moment the variable is set to {Variable.get('integration_test_key')}")
+             print("Deleting variable...")
+             Variable.delete("integration_test_key")
+
+         @task
+         def connection():
+             try:
+                 conn = BaseHook.get_connection("integration_test")
+                 print(f"Got connection {conn}")
+             except AirflowNotFoundException:
+                 print("Connection 'integration_test' not found... but also OK.")
+
+         command = CmdOperator(task_id="command", command="echo Parameter is {{params.mapping_count}}")
+
+         def python_call():
+             print("Hello world")
+
+         classic_py = PythonOperator(task_id="classic_python", python_callable=python_call)
+
+         empty = EmptyOperator(task_id="not_executed")
+
+         branching() >> [virtualenv(), variable(), connection(), command, classic_py, empty]
+
+     @task_group(prefix_group_id=False)
+     def failure_tests_group():
+         @task
+         def plan_to_fail():
+             print("This task is supposed to fail")
+             raise ValueError("This task is supposed to fail")
+
+         @task(retries=1, retry_delay=5.0)
+         def needs_retry(**context):
+             print("This task is supposed to fail on the first attempt")
+             if context["ti"].try_number == 1:
+                 raise ValueError("This task is supposed to fail")
+
+         @task(trigger_rule=TriggerRule.ONE_SUCCESS)
+         def capture_fail():
+             print("all good, we accept the fail and report OK")
+
+         [plan_to_fail(), needs_retry()] >> capture_fail()
+
+     @task
+     def long_running():
+         print("This task runs for 15 minutes")
+         for i in range(15):
+             sleep(60)
+             print(f"Running for {i + 1} minutes now.")
+         print("Long running task completed.")
+
+     @task
+     def my_teardown():
+         print("Assume this is a teardown task")
+
+     (
+         my_setup().as_setup()
+         >> [mapping_task_group(), standard_tasks_group(), failure_tests_group(), long_running()]
+         >> my_teardown().as_teardown()
+     )
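
The CmdOperator above treats exit code 0 as success, any code listed in ``skip_on_exit_code`` as a skip, everything else as a failure, and passes the captured output through ``output_processor`` before returning it (which is what gets pushed to XCom when do_xcom_push is enabled, per its docstring). A hedged usage sketch follows; the task_id and command are made up for illustration and are not part of the package:

    # Illustrative only: a CmdOperator that skips on exit code 5 and trims the returned value.
    list_temp = CmdOperator(
        task_id="list_temp",
        command="cmd /c dir %TEMP%",
        skip_on_exit_code=[5],  # exit code 5 leaves the task in "skipped" state instead of failing
        output_processor=lambda out: out.strip() if out else out,  # tidy the value before it is returned
    )
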
@@ -0,0 +1,22 @@
+ # Licensed to the Apache Software Foundation (ASF) under one
+ # or more contributor license agreements. See the NOTICE file
+ # distributed with this work for additional information
+ # regarding copyright ownership. The ASF licenses this file
+ # to you under the Apache License, Version 2.0 (the
+ # "License"); you may not use this file except in compliance
+ # with the License. You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing,
+ # software distributed under the License is distributed on an
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ # KIND, either express or implied. See the License for the
+ # specific language governing permissions and limitations
+ # under the License.
+
+ from __future__ import annotations
+
+ from airflow.providers.edge3.executors.edge_executor import EdgeExecutor
+
+ __all__ = ["EdgeExecutor"]
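
The executors subpackage re-exports EdgeExecutor as the provider's executor entry point. As a hedged sketch of how it would typically be enabled on the scheduler side (an assumption based on standard Airflow executor configuration, not shown in this diff), the core executor setting would point at the class:

    [core]
    executor = airflow.providers.edge3.executors.EdgeExecutor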