apache-airflow-providers-edge3 2.0.0rc1__py3-none-any.whl → 3.0.1rc1__py3-none-any.whl
This diff compares the contents of two publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.
- airflow/providers/edge3/__init__.py +1 -1
- airflow/providers/edge3/cli/api_client.py +30 -28
- airflow/providers/edge3/cli/dataclasses.py +3 -10
- airflow/providers/edge3/cli/definition.py +261 -0
- airflow/providers/edge3/cli/edge_command.py +8 -206
- airflow/providers/edge3/cli/worker.py +226 -198
- airflow/providers/edge3/example_dags/win_notepad.py +1 -1
- airflow/providers/edge3/executors/edge_executor.py +24 -49
- airflow/providers/edge3/get_provider_info.py +1 -0
- airflow/providers/edge3/models/edge_job.py +1 -2
- airflow/providers/edge3/models/edge_worker.py +61 -16
- airflow/providers/edge3/plugins/edge_executor_plugin.py +1 -1
- airflow/providers/edge3/plugins/www/dist/main.umd.cjs +8 -8
- airflow/providers/edge3/plugins/www/package.json +32 -27
- airflow/providers/edge3/plugins/www/pnpm-lock.yaml +1625 -1716
- airflow/providers/edge3/plugins/www/src/global.d.ts +24 -0
- airflow/providers/edge3/plugins/www/src/layouts/NavTabs.tsx +25 -3
- airflow/providers/edge3/plugins/www/src/main.tsx +6 -1
- airflow/providers/edge3/plugins/www/src/theme.ts +1 -1
- airflow/providers/edge3/worker_api/datamodels.py +12 -1
- airflow/providers/edge3/worker_api/routes/jobs.py +21 -8
- airflow/providers/edge3/worker_api/routes/logs.py +1 -1
- airflow/providers/edge3/worker_api/routes/worker.py +16 -3
- {apache_airflow_providers_edge3-2.0.0rc1.dist-info → apache_airflow_providers_edge3-3.0.1rc1.dist-info}/METADATA +14 -10
- {apache_airflow_providers_edge3-2.0.0rc1.dist-info → apache_airflow_providers_edge3-3.0.1rc1.dist-info}/RECORD +29 -29
- {apache_airflow_providers_edge3-2.0.0rc1.dist-info → apache_airflow_providers_edge3-3.0.1rc1.dist-info}/licenses/NOTICE +1 -1
- airflow/providers/edge3/plugins/templates/edge_worker_hosts.html +0 -175
- airflow/providers/edge3/plugins/templates/edge_worker_jobs.html +0 -69
- {apache_airflow_providers_edge3-2.0.0rc1.dist-info → apache_airflow_providers_edge3-3.0.1rc1.dist-info}/WHEEL +0 -0
- {apache_airflow_providers_edge3-2.0.0rc1.dist-info → apache_airflow_providers_edge3-3.0.1rc1.dist-info}/entry_points.txt +0 -0
- {apache_airflow_providers_edge3-2.0.0rc1.dist-info → apache_airflow_providers_edge3-3.0.1rc1.dist-info}/licenses/LICENSE +0 -0
airflow/providers/edge3/__init__.py
@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version

 __all__ = ["__version__"]

-__version__ = "
+__version__ = "3.0.1"

 if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
     "3.0.0"
airflow/providers/edge3/cli/api_client.py
@@ -16,7 +16,6 @@
 # under the License.
 from __future__ import annotations

-import json
 import logging
 import os
 from datetime import datetime
@@ -26,7 +25,7 @@ from pathlib import Path
 from typing import TYPE_CHECKING, Any
 from urllib.parse import quote, urljoin

-import
+from aiohttp import ClientConnectionError, ClientResponseError, ServerTimeoutError, request
 from retryhttp import retry, wait_retry_after
 from tenacity import before_sleep_log, wait_random_exponential

@@ -44,11 +43,11 @@ from airflow.providers.edge3.worker_api.datamodels import (
     WorkerSetStateReturn,
     WorkerStateBody,
 )
-from airflow.utils.state import TaskInstanceState  # noqa: TC001

 if TYPE_CHECKING:
-    from airflow.
+    from airflow.providers.common.compat.sdk import TaskInstanceKey
     from airflow.providers.edge3.models.edge_worker import EdgeWorkerState
+    from airflow.utils.state import TaskInstanceState

 logger = logging.getLogger(__name__)

@@ -92,39 +91,42 @@ def jwt_generator() -> JWTGenerator:
     wait_timeouts=_default_wait,
     wait_rate_limited=wait_retry_after(fallback=_default_wait),  # No infinite timeout on HTTP 429
     before_sleep=before_sleep_log(logger, logging.WARNING),
+    network_errors=ClientConnectionError,
+    timeouts=ServerTimeoutError,
 )
-def _make_generic_request(method: str, rest_path: str, data: str | None = None) -> Any:
+async def _make_generic_request(method: str, rest_path: str, data: str | None = None) -> Any:
     authorization = jwt_generator().generate({"method": rest_path})
     api_url = conf.get("edge", "api_url")
+    content_type = {"Content-Type": "application/json"} if data else {}
     headers = {
-
+        **content_type,
         "Accept": "application/json",
         "Authorization": authorization,
     }
     api_endpoint = urljoin(api_url, rest_path)
-
-
-
-
-
+    async with request(method, url=api_endpoint, data=data, headers=headers) as response:
+        response.raise_for_status()
+        if response.status == HTTPStatus.NO_CONTENT:
+            return None
+        return await response.json()


-def worker_register(
+async def worker_register(
     hostname: str, state: EdgeWorkerState, queues: list[str] | None, sysinfo: dict
 ) -> WorkerRegistrationReturn:
     """Register worker with the Edge API."""
     try:
-        result = _make_generic_request(
+        result = await _make_generic_request(
             "POST",
             f"worker/{quote(hostname)}",
             WorkerStateBody(state=state, jobs_active=0, queues=queues, sysinfo=sysinfo).model_dump_json(
                 exclude_unset=True
             ),
         )
-    except
-        if e.
+    except ClientResponseError as e:
+        if e.status == HTTPStatus.BAD_REQUEST:
             raise EdgeWorkerVersionException(str(e))
-        if e.
+        if e.status == HTTPStatus.CONFLICT:
             raise EdgeWorkerDuplicateException(
                 f"A worker with the name '{hostname}' is already active. "
                 "Please ensure worker names are unique, or stop the existing worker before starting a new one."
@@ -133,7 +135,7 @@ def worker_register(
     return WorkerRegistrationReturn(**result)


-def worker_set_state(
+async def worker_set_state(
     hostname: str,
     state: EdgeWorkerState,
     jobs_active: int,
@@ -143,7 +145,7 @@ def worker_set_state(
 ) -> WorkerSetStateReturn:
     """Update the state of the worker in the central site and thereby implicitly heartbeat."""
     try:
-        result = _make_generic_request(
+        result = await _make_generic_request(
             "PATCH",
             f"worker/{quote(hostname)}",
             WorkerStateBody(
@@ -154,16 +156,16 @@ def worker_set_state(
                 maintenance_comments=maintenance_comments,
             ).model_dump_json(exclude_unset=True),
         )
-    except
-        if e.
+    except ClientResponseError as e:
+        if e.status == HTTPStatus.BAD_REQUEST:
             raise EdgeWorkerVersionException(str(e))
         raise e
     return WorkerSetStateReturn(**result)


-def jobs_fetch(hostname: str, queues: list[str] | None, free_concurrency: int) -> EdgeJobFetched | None:
+async def jobs_fetch(hostname: str, queues: list[str] | None, free_concurrency: int) -> EdgeJobFetched | None:
     """Fetch a job to execute on the edge worker."""
-    result = _make_generic_request(
+    result = await _make_generic_request(
         "POST",
         f"jobs/fetch/{quote(hostname)}",
         WorkerQueuesBody(queues=queues, free_concurrency=free_concurrency).model_dump_json(
@@ -175,17 +177,17 @@ def jobs_fetch(hostname: str, queues: list[str] | None, free_concurrency: int) -> EdgeJobFetched | None:
     return None


-def jobs_set_state(key: TaskInstanceKey, state: TaskInstanceState) -> None:
+async def jobs_set_state(key: TaskInstanceKey, state: TaskInstanceState) -> None:
     """Set the state of a job."""
-    _make_generic_request(
+    await _make_generic_request(
         "PATCH",
         f"jobs/state/{key.dag_id}/{key.task_id}/{key.run_id}/{key.try_number}/{key.map_index}/{state}",
     )


-def logs_logfile_path(task: TaskInstanceKey) -> Path:
+async def logs_logfile_path(task: TaskInstanceKey) -> Path:
     """Elaborate the path and filename to expect from task execution."""
-    result = _make_generic_request(
+    result = await _make_generic_request(
         "GET",
         f"logs/logfile_path/{task.dag_id}/{task.task_id}/{task.run_id}/{task.try_number}/{task.map_index}",
     )
@@ -193,13 +195,13 @@ def logs_logfile_path(task: TaskInstanceKey) -> Path:
     return Path(base_log_folder, result)


-def logs_push(
+async def logs_push(
     task: TaskInstanceKey,
     log_chunk_time: datetime,
     log_chunk_data: str,
 ) -> None:
     """Push an incremental log chunk from Edge Worker to central site."""
-    _make_generic_request(
+    await _make_generic_request(
         "POST",
         f"logs/push/{task.dag_id}/{task.task_id}/{task.run_id}/{task.try_number}/{task.map_index}",
         PushLogsBody(log_chunk_time=log_chunk_time, log_chunk_data=log_chunk_data).model_dump_json(
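
Note on usage: with this change every function in api_client.py is a coroutine, so callers must await them from a running event loop. Below is a minimal sketch of registering a worker and polling for a job; the hostname, queue list, sysinfo payload and the chosen EdgeWorkerState member are illustrative values, not part of the diff.

    import asyncio

    from airflow.providers.edge3.cli import api_client
    from airflow.providers.edge3.models.edge_worker import EdgeWorkerState


    async def register_and_poll() -> None:
        # worker_register is now a coroutine, so it must be awaited
        registration = await api_client.worker_register(
            hostname="edge-host-1",                 # illustrative hostname
            state=EdgeWorkerState.STARTING,         # illustrative state member
            queues=["default"],                     # illustrative queue list
            sysinfo={"concurrency": 8},             # illustrative sysinfo payload
        )
        print(registration)

        # jobs_fetch returns None when no job is queued for the given queues
        job = await api_client.jobs_fetch(hostname="edge-host-1", queues=["default"], free_concurrency=8)
        if job:
            print(job)


    if __name__ == "__main__":
        asyncio.run(register_and_poll())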
airflow/providers/edge3/cli/dataclasses.py
@@ -23,8 +23,6 @@ from pathlib import Path
 from typing import TYPE_CHECKING

 if TYPE_CHECKING:
-    from psutil import Popen
-
     from airflow.providers.edge3.models.edge_worker import EdgeWorkerState
     from airflow.providers.edge3.worker_api.datamodels import EdgeJobFetched

@@ -74,22 +72,17 @@ class Job:
     """Holds all information for a task/job to be executed as bundle."""

     edge_job: EdgeJobFetched
-    process:
+    process: Process
     logfile: Path
-    logsize: int
+    logsize: int = 0
     """Last size of log file, point of last chunk push."""

     @property
     def is_running(self) -> bool:
         """Check if the job is still running."""
-
-            self.process.poll()
-            return self.process.returncode is None
-        return self.process.exitcode is None
+        return self.process.is_alive()

     @property
     def is_success(self) -> bool:
         """Check if the job was successful."""
-        if hasattr(self.process, "returncode"):
-            return self.process.returncode == 0
         return self.process.exitcode == 0
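
Note on usage: the Job dataclass now tracks a process handle with the multiprocessing-style API (is_alive() / exitcode) instead of psutil's Popen (poll() / returncode). The sketch below reproduces the same liveness and success checks under the assumption that the handle is a multiprocessing.Process (the diff only shows the annotation "process: Process"); DemoJob and the sleep target are illustrative, not the provider's actual classes.

    from __future__ import annotations

    import time
    from dataclasses import dataclass
    from multiprocessing import Process
    from pathlib import Path


    @dataclass
    class DemoJob:
        process: Process
        logfile: Path
        logsize: int = 0  # last pushed log offset, as in the diff

        @property
        def is_running(self) -> bool:
            # is_alive() is True while the child process has not terminated
            return self.process.is_alive()

        @property
        def is_success(self) -> bool:
            # exitcode is None while running, 0 on a clean exit
            return self.process.exitcode == 0


    if __name__ == "__main__":
        p = Process(target=time.sleep, args=(0.1,))
        p.start()
        job = DemoJob(process=p, logfile=Path("/tmp/demo.log"))
        print(job.is_running)  # True right after start
        p.join()
        print(job.is_success)  # True once sleep exits with code 0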
airflow/providers/edge3/cli/definition.py (new file)
@@ -0,0 +1,261 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from airflow.cli.cli_config import ARG_PID, ARG_VERBOSE, ActionCommand, Arg, GroupCommand, lazy_load_command
+from airflow.configuration import conf
+
+if TYPE_CHECKING:
+    import argparse
+
+
+ARG_CONCURRENCY = Arg(
+    ("-c", "--concurrency"),
+    type=int,
+    help="The number of worker processes",
+    default=conf.getint("edge", "worker_concurrency", fallback=8),
+)
+ARG_QUEUES = Arg(
+    ("-q", "--queues"),
+    help="Comma delimited list of queues to serve, serve all queues if not provided.",
+)
+ARG_EDGE_HOSTNAME = Arg(
+    ("-H", "--edge-hostname"),
+    help="Set the hostname of worker if you have multiple workers on a single machine",
+)
+ARG_REQUIRED_EDGE_HOSTNAME = Arg(
+    ("-H", "--edge-hostname"),
+    help="Set the hostname of worker if you have multiple workers on a single machine",
+    required=True,
+)
+ARG_MAINTENANCE = Arg(("maintenance",), help="Desired maintenance state", choices=("on", "off"))
+ARG_MAINTENANCE_COMMENT = Arg(
+    ("-c", "--comments"),
+    help="Maintenance comments to report reason. Required if maintenance is turned on.",
+)
+ARG_REQUIRED_MAINTENANCE_COMMENT = Arg(
+    ("-c", "--comments"),
+    help="Maintenance comments to report reason. Required if enabling maintenance",
+    required=True,
+)
+ARG_QUEUES_MANAGE = Arg(
+    ("-q", "--queues"),
+    help="Comma delimited list of queues to add or remove.",
+    required=True,
+)
+ARG_WAIT_MAINT = Arg(
+    ("-w", "--wait"),
+    default=False,
+    help="Wait until edge worker has reached desired state.",
+    action="store_true",
+)
+ARG_WAIT_STOP = Arg(
+    ("-w", "--wait"),
+    default=False,
+    help="Wait until edge worker is shut down.",
+    action="store_true",
+)
+ARG_OUTPUT = Arg(
+    (
+        "-o",
+        "--output",
+    ),
+    help="Output format. Allowed values: json, yaml, plain, table (default: table)",
+    metavar="(table, json, yaml, plain)",
+    choices=("table", "json", "yaml", "plain"),
+    default="table",
+)
+ARG_STATE = Arg(
+    (
+        "-s",
+        "--state",
+    ),
+    nargs="+",
+    help="State of the edge worker",
+)
+
+ARG_DAEMON = Arg(
+    ("-D", "--daemon"), help="Daemonize instead of running in the foreground", action="store_true"
+)
+ARG_UMASK = Arg(
+    ("-u", "--umask"),
+    help="Set the umask of edge worker in daemon mode",
+)
+ARG_STDERR = Arg(("--stderr",), help="Redirect stderr to this file if run in daemon mode")
+ARG_STDOUT = Arg(("--stdout",), help="Redirect stdout to this file if run in daemon mode")
+ARG_LOG_FILE = Arg(("-l", "--log-file"), help="Location of the log file if run in daemon mode")
+ARG_YES = Arg(
+    ("-y", "--yes"),
+    help="Skip confirmation prompt and proceed with shutdown",
+    action="store_true",
+    default=False,
+)
+
+EDGE_COMMANDS: list[ActionCommand] = [
+    ActionCommand(
+        name="worker",
+        help="Start Airflow Edge Worker.",
+        func=lazy_load_command("airflow.providers.edge3.cli.edge_command.worker"),
+        args=(
+            ARG_CONCURRENCY,
+            ARG_QUEUES,
+            ARG_EDGE_HOSTNAME,
+            ARG_PID,
+            ARG_VERBOSE,
+            ARG_DAEMON,
+            ARG_STDOUT,
+            ARG_STDERR,
+            ARG_LOG_FILE,
+            ARG_UMASK,
+        ),
+    ),
+    ActionCommand(
+        name="status",
+        help="Check for Airflow Local Edge Worker status.",
+        func=lazy_load_command("airflow.providers.edge3.cli.edge_command.status"),
+        args=(
+            ARG_PID,
+            ARG_VERBOSE,
+        ),
+    ),
+    ActionCommand(
+        name="maintenance",
+        help="Set or Unset maintenance mode of local edge worker.",
+        func=lazy_load_command("airflow.providers.edge3.cli.edge_command.maintenance"),
+        args=(
+            ARG_MAINTENANCE,
+            ARG_MAINTENANCE_COMMENT,
+            ARG_WAIT_MAINT,
+            ARG_PID,
+            ARG_VERBOSE,
+        ),
+    ),
+    ActionCommand(
+        name="stop",
+        help="Stop a running local Airflow Edge Worker.",
+        func=lazy_load_command("airflow.providers.edge3.cli.edge_command.stop"),
+        args=(
+            ARG_WAIT_STOP,
+            ARG_PID,
+            ARG_VERBOSE,
+        ),
+    ),
+    ActionCommand(
+        name="list-workers",
+        help="Query the db to list all registered edge workers.",
+        func=lazy_load_command("airflow.providers.edge3.cli.edge_command.list_edge_workers"),
+        args=(
+            ARG_OUTPUT,
+            ARG_STATE,
+        ),
+    ),
+    ActionCommand(
+        name="remote-edge-worker-request-maintenance",
+        help="Put remote edge worker on maintenance.",
+        func=lazy_load_command("airflow.providers.edge3.cli.edge_command.put_remote_worker_on_maintenance"),
+        args=(
+            ARG_REQUIRED_EDGE_HOSTNAME,
+            ARG_REQUIRED_MAINTENANCE_COMMENT,
+        ),
+    ),
+    ActionCommand(
+        name="remote-edge-worker-exit-maintenance",
+        help="Remove remote edge worker from maintenance.",
+        func=lazy_load_command(
+            "airflow.providers.edge3.cli.edge_command.remove_remote_worker_from_maintenance"
+        ),
+        args=(ARG_REQUIRED_EDGE_HOSTNAME,),
+    ),
+    ActionCommand(
+        name="remote-edge-worker-update-maintenance-comment",
+        help="Update maintenance comments of the remote edge worker.",
+        func=lazy_load_command(
+            "airflow.providers.edge3.cli.edge_command.remote_worker_update_maintenance_comment"
+        ),
+        args=(
+            ARG_REQUIRED_EDGE_HOSTNAME,
+            ARG_REQUIRED_MAINTENANCE_COMMENT,
+        ),
+    ),
+    ActionCommand(
+        name="remove-remote-edge-worker",
+        help="Remove remote edge worker entry from db.",
+        func=lazy_load_command("airflow.providers.edge3.cli.edge_command.remove_remote_worker"),
+        args=(ARG_REQUIRED_EDGE_HOSTNAME,),
+    ),
+    ActionCommand(
+        name="shutdown-remote-edge-worker",
+        help="Initiate the shutdown of the remote edge worker.",
+        func=lazy_load_command("airflow.providers.edge3.cli.edge_command.remote_worker_request_shutdown"),
+        args=(ARG_REQUIRED_EDGE_HOSTNAME,),
+    ),
+    ActionCommand(
+        name="add-worker-queues",
+        help="Add queues to an edge worker.",
+        func=lazy_load_command("airflow.providers.edge3.cli.edge_command.add_worker_queues"),
+        args=(
+            ARG_REQUIRED_EDGE_HOSTNAME,
+            ARG_QUEUES_MANAGE,
+        ),
+    ),
+    ActionCommand(
+        name="remove-worker-queues",
+        help="Remove queues from an edge worker.",
+        func=lazy_load_command("airflow.providers.edge3.cli.edge_command.remove_worker_queues"),
+        args=(
+            ARG_REQUIRED_EDGE_HOSTNAME,
+            ARG_QUEUES_MANAGE,
+        ),
+    ),
+    ActionCommand(
+        name="shutdown-all-workers",
+        help="Request graceful shutdown of all edge workers.",
+        func=lazy_load_command("airflow.providers.edge3.cli.edge_command.shutdown_all_workers"),
+        args=(ARG_YES,),
+    ),
+]
+
+
+def get_edge_cli_commands() -> list[GroupCommand]:
+    return [
+        GroupCommand(
+            name="edge",
+            help="Edge Worker components",
+            description=(
+                "Start and manage Edge Worker. Works only when using EdgeExecutor. For more information, "
+                "see https://airflow.apache.org/docs/apache-airflow-providers-edge3/stable/edge_executor.html"
+            ),
+            subcommands=EDGE_COMMANDS,
+        ),
+    ]
+
+
+def get_parser() -> argparse.ArgumentParser:
+    """
+    Generate documentation; used by Sphinx.
+
+    :meta private:
+    """
+    from airflow.cli.cli_parser import AirflowHelpFormatter, DefaultHelpParser, _add_command
+
+    parser = DefaultHelpParser(prog="airflow", formatter_class=AirflowHelpFormatter)
+    subparsers = parser.add_subparsers(dest="subcommand", metavar="GROUP_OR_COMMAND")
+    for group_command in get_edge_cli_commands():
+        _add_command(subparsers, group_command)
+    return parser