apache-airflow-providers-edge3 1.3.0rc1__py3-none-any.whl → 1.4.1__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package, as published to their public registry. It is provided for informational purposes only and reflects the changes between the package versions exactly as they appear in that registry.
- airflow/providers/edge3/__init__.py +1 -1
- airflow/providers/edge3/cli/api_client.py +9 -3
- airflow/providers/edge3/cli/worker.py +16 -10
- airflow/providers/edge3/example_dags/win_test.py +1 -1
- airflow/providers/edge3/executors/edge_executor.py +37 -6
- airflow/providers/edge3/models/edge_job.py +18 -15
- airflow/providers/edge3/models/edge_logs.py +11 -8
- airflow/providers/edge3/models/edge_worker.py +44 -26
- airflow/providers/edge3/openapi/v2-edge-generated.yaml +13 -33
- airflow/providers/edge3/plugins/edge_executor_plugin.py +26 -19
- airflow/providers/edge3/plugins/www/dist/main.umd.cjs +14 -66
- airflow/providers/edge3/plugins/www/openapi-gen/requests/schemas.gen.ts +4 -4
- airflow/providers/edge3/plugins/www/openapi-gen/requests/services.gen.ts +10 -10
- airflow/providers/edge3/plugins/www/openapi-gen/requests/types.gen.ts +21 -21
- airflow/providers/edge3/plugins/www/package.json +26 -26
- airflow/providers/edge3/plugins/www/pnpm-lock.yaml +1637 -1683
- airflow/providers/edge3/plugins/www/src/components/ui/Alert.tsx +0 -1
- airflow/providers/edge3/plugins/www/src/layouts/EdgeLayout.tsx +9 -10
- airflow/providers/edge3/plugins/www/src/layouts/NavTabs.tsx +1 -8
- airflow/providers/edge3/plugins/www/src/main.tsx +5 -4
- airflow/providers/edge3/plugins/www/src/pages/JobsPage.tsx +7 -8
- airflow/providers/edge3/plugins/www/src/theme.ts +0 -1
- airflow/providers/edge3/plugins/www/src/utils/index.ts +0 -1
- airflow/providers/edge3/plugins/www/vite.config.ts +2 -1
- airflow/providers/edge3/version_compat.py +5 -0
- airflow/providers/edge3/worker_api/routes/_v2_compat.py +1 -0
- airflow/providers/edge3/worker_api/routes/jobs.py +5 -6
- airflow/providers/edge3/worker_api/routes/ui.py +8 -3
- airflow/providers/edge3/worker_api/routes/worker.py +9 -5
- {apache_airflow_providers_edge3-1.3.0rc1.dist-info → apache_airflow_providers_edge3-1.4.1.dist-info}/METADATA +35 -14
- {apache_airflow_providers_edge3-1.3.0rc1.dist-info → apache_airflow_providers_edge3-1.4.1.dist-info}/RECORD +33 -34
- airflow/providers/edge3/plugins/www/src/utils/tokenHandler.ts +0 -51
- {apache_airflow_providers_edge3-1.3.0rc1.dist-info → apache_airflow_providers_edge3-1.4.1.dist-info}/WHEEL +0 -0
- {apache_airflow_providers_edge3-1.3.0rc1.dist-info → apache_airflow_providers_edge3-1.4.1.dist-info}/entry_points.txt +0 -0
--- airflow/providers/edge3/__init__.py
+++ airflow/providers/edge3/__init__.py
@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version
 
 __all__ = ["__version__"]
 
-__version__ = "1.
+__version__ = "1.4.1"
 
 if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
     "2.10.0"
--- airflow/providers/edge3/cli/api_client.py
+++ airflow/providers/edge3/cli/api_client.py
@@ -53,12 +53,18 @@ logger = logging.getLogger(__name__)
 # Note: Given defaults make attempts after 1, 3, 7, 15, 31seconds, 1:03, 2:07, 3:37 and fails after 5:07min
 # So far there is no other config facility in Task SDK we use ENV for the moment
 # TODO: Consider these env variables jointly in task sdk together with task_sdk/src/airflow/sdk/api/client.py
-API_RETRIES = int(
+API_RETRIES = int(
+    os.getenv("AIRFLOW__EDGE__API_RETRIES", os.getenv("AIRFLOW__WORKERS__API_RETRIES", str(10)))
+)
 API_RETRY_WAIT_MIN = float(
-    os.getenv(
+    os.getenv(
+        "AIRFLOW__EDGE__API_RETRY_WAIT_MIN", os.getenv("AIRFLOW__WORKERS__API_RETRY_WAIT_MIN", str(1.0))
+    )
 )
 API_RETRY_WAIT_MAX = float(
-    os.getenv(
+    os.getenv(
+        "AIRFLOW__EDGE__API_RETRY_WAIT_MAX", os.getenv("AIRFLOW__WORKERS__API_RETRY_WAIT_MAX", str(90.0))
+    )
 )
 
 
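Note on the retry settings above: the lookup now prefers an edge-specific environment variable, falls back to the older workers-scoped one, and only then uses the hard-coded default. A minimal sketch of that precedence (the exported values below are made up for illustration):

```python
import os

# Illustrative values only -- the EDGE-scoped variable wins, the WORKERS-scoped
# one is the fallback, and the literal default (10) applies when neither is set.
os.environ["AIRFLOW__WORKERS__API_RETRIES"] = "5"
os.environ["AIRFLOW__EDGE__API_RETRIES"] = "20"

api_retries = int(
    os.getenv("AIRFLOW__EDGE__API_RETRIES", os.getenv("AIRFLOW__WORKERS__API_RETRIES", str(10)))
)
assert api_retries == 20
```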
--- airflow/providers/edge3/cli/worker.py
+++ airflow/providers/edge3/cli/worker.py
@@ -21,19 +21,20 @@ import os
 import signal
 import sys
 from datetime import datetime
+from functools import cache
 from http import HTTPStatus
 from multiprocessing import Process
 from pathlib import Path
 from subprocess import Popen
 from time import sleep
 from typing import TYPE_CHECKING
-from urllib.parse import urlparse
 
 from lockfile.pidlockfile import remove_existing_pidfile
 from requests import HTTPError
 
 from airflow import __version__ as airflow_version
 from airflow.configuration import conf
+from airflow.providers.common.compat.sdk import timezone
 from airflow.providers.edge3 import __version__ as edge_provider_version
 from airflow.providers.edge3.cli.api_client import (
     jobs_fetch,
@@ -52,7 +53,6 @@ from airflow.providers.edge3.cli.signalling import (
 )
 from airflow.providers.edge3.models.edge_worker import EdgeWorkerState, EdgeWorkerVersionException
 from airflow.providers.edge3.version_compat import AIRFLOW_V_3_0_PLUS
-from airflow.utils import timezone
 from airflow.utils.net import getfqdn
 from airflow.utils.state import TaskInstanceState
 
@@ -176,7 +176,19 @@ class EdgeWorker:
         return EdgeWorkerState.IDLE
 
     @staticmethod
-
+    @cache
+    def _execution_api_server_url() -> str:
+        """Get the execution api server url from config or environment."""
+        api_url = conf.get("edge", "api_url")
+        execution_api_server_url = conf.get("core", "execution_api_server_url", fallback="")
+        if not execution_api_server_url and api_url:
+            # Derive execution api url from edge api url as fallback
+            execution_api_server_url = api_url.replace("edge_worker/v1/rpcapi", "execution")
+        logger.info("Using execution api server url: %s", execution_api_server_url)
+        return execution_api_server_url
+
+    @staticmethod
+    def _run_job_via_supervisor(workload, execution_api_server_url) -> int:
         from airflow.sdk.execution_time.supervisor import supervise
 
         # Ignore ctrl-c in this process -- we don't want to kill _this_ one. we let tasks run to completion
@@ -186,12 +198,6 @@
         setproctitle(f"airflow edge worker: {workload.ti.key}")
 
         try:
-            api_url = conf.get("edge", "api_url")
-            execution_api_server_url = conf.get("core", "execution_api_server_url", fallback="")
-            if not execution_api_server_url:
-                parsed = urlparse(api_url)
-                execution_api_server_url = f"{parsed.scheme}://{parsed.netloc}/execution/"
-
             supervise(
                 # This is the "wrong" ti type, but it duck types the same. TODO: Create a protocol for this.
                 # Same like in airflow/executors/local_executor.py:_execute_work()
@@ -215,7 +221,7 @@
         workload: ExecuteTask = edge_job.command
         process = Process(
             target=EdgeWorker._run_job_via_supervisor,
-            kwargs={"workload": workload},
+            kwargs={"workload": workload, "execution_api_server_url": EdgeWorker._execution_api_server_url()},
        )
         process.start()
         base_log_folder = conf.get("logging", "base_log_folder", fallback="NOT AVAILABLE")
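Note on the worker changes above: `_execution_api_server_url()` replaces the per-job urlparse fallback with a cached, string-replace derivation from the edge API URL. A minimal sketch of that derivation, using a made-up URL:

```python
# Hypothetical [edge] api_url value, for illustration only.
api_url = "https://airflow.example.com/edge_worker/v1/rpcapi"

# Fallback used when [core] execution_api_server_url is unset: swap the edge
# RPC path for the Task Execution API path instead of rebuilding the URL
# from scheme and netloc as the old code did.
execution_api_server_url = api_url.replace("edge_worker/v1/rpcapi", "execution")
assert execution_api_server_url == "https://airflow.example.com/execution"
```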
--- airflow/providers/edge3/example_dags/win_test.py
+++ airflow/providers/edge3/example_dags/win_test.py
@@ -67,7 +67,7 @@ if TYPE_CHECKING:
 try:
     from airflow.operators.python import PythonOperator
 except ImportError:
-    from airflow.providers.common.compat.standard.operators import PythonOperator
+    from airflow.providers.common.compat.standard.operators import PythonOperator  # type: ignore[no-redef]
 
 
 class CmdOperator(BaseOperator):
--- airflow/providers/edge3/executors/edge_executor.py
+++ airflow/providers/edge3/executors/edge_executor.py
@@ -30,16 +30,17 @@ from sqlalchemy.orm import Session
 from airflow.cli.cli_config import GroupCommand
 from airflow.configuration import conf
 from airflow.executors.base_executor import BaseExecutor
-from airflow.models.taskinstance import TaskInstance
+from airflow.models.taskinstance import TaskInstance
+from airflow.providers.common.compat.sdk import timezone
 from airflow.providers.edge3.cli.edge_command import EDGE_COMMANDS
 from airflow.providers.edge3.models.edge_job import EdgeJobModel
 from airflow.providers.edge3.models.edge_logs import EdgeLogsModel
 from airflow.providers.edge3.models.edge_worker import EdgeWorkerModel, EdgeWorkerState, reset_metrics
 from airflow.providers.edge3.version_compat import AIRFLOW_V_3_0_PLUS
 from airflow.stats import Stats
-from airflow.utils import timezone
 from airflow.utils.db import DBLocks, create_global_lock
 from airflow.utils.session import NEW_SESSION, provide_session
+from airflow.utils.state import TaskInstanceState
 
 if TYPE_CHECKING:
     import argparse
@@ -68,7 +69,8 @@ class EdgeExecutor(BaseExecutor):
         """
         Check if already existing table matches the newest table schema.
 
-        workaround till Airflow
+        workaround till support for Airflow 2.x is dropped,
+        then it is possible to use alembic also for provider distributions.
         """
         inspector = inspect(engine)
         edge_job_columns = None
@@ -78,7 +80,7 @@
             edge_job_columns = [column["name"] for column in edge_job_schema]
             for column in edge_job_schema:
                 if column["name"] == "command":
-                    edge_job_command_len = column["type"].length
+                    edge_job_command_len = column["type"].length  # type: ignore[attr-defined]
 
         # version 0.6.0rc1 added new column concurrency_slots
         if edge_job_columns and "concurrency_slots" not in edge_job_columns:
@@ -284,7 +286,7 @@
                 map_index=job.map_index,
                 session=session,
             )
-            job.state = ti.state if ti else TaskInstanceState.REMOVED
+            job.state = ti.state if ti and ti.state else TaskInstanceState.REMOVED
 
             if job.state != TaskInstanceState.RUNNING:
                 # Edge worker does not backport emitted Airflow metrics, so export some metrics
@@ -352,7 +354,7 @@
                     del self.last_reported_state[job.key]
                    self.fail(job.key)
                 else:
-                    self.last_reported_state[job.key] = job.state
+                    self.last_reported_state[job.key] = TaskInstanceState(job.state)
             if (
                 job.state == TaskInstanceState.SUCCESS
                 and job.last_update_t < (datetime.now() - timedelta(minutes=job_success_purge)).timestamp()
@@ -399,6 +401,35 @@
     def terminate(self):
         """Terminate the executor is not doing anything."""
 
+    @provide_session
+    def revoke_task(self, *, ti: TaskInstance, session: Session = NEW_SESSION):
+        """
+        Revoke a task instance from the executor.
+
+        This method removes the task from the executor's internal state and deletes
+        the corresponding EdgeJobModel record to prevent edge workers from picking it up.
+
+        :param ti: Task instance to revoke
+        :param session: Database session
+        """
+        # Remove from executor's internal state
+        self.running.discard(ti.key)
+        self.queued_tasks.pop(ti.key, None)
+        if ti.key in self.last_reported_state:
+            del self.last_reported_state[ti.key]
+
+        # Delete the job from the database to prevent edge workers from picking it up
+        session.execute(
+            delete(EdgeJobModel).where(
+                EdgeJobModel.dag_id == ti.dag_id,
+                EdgeJobModel.task_id == ti.task_id,
+                EdgeJobModel.run_id == ti.run_id,
+                EdgeJobModel.map_index == ti.map_index,
+                EdgeJobModel.try_number == ti.try_number,
+            )
+        )
+        self.log.info("Revoked task instance %s from EdgeExecutor", ti.key)
+
     def try_adopt_task_instances(self, tis: Sequence[TaskInstance]) -> Sequence[TaskInstance]:
         """
         Try to adopt running task instances that have been abandoned by a SchedulerJob dying.
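Note on the executor changes above: the new `revoke_task()` keys its DELETE on the full task-instance identity so only the matching queued edge job is removed. A sketch of that statement in isolation (the helper name and the duck-typed `ti` argument are illustrative, not part of the diff):

```python
from sqlalchemy import delete

from airflow.providers.edge3.models.edge_job import EdgeJobModel


def build_revoke_statement(ti):
    """Build the DELETE issued by revoke_task(); ti only needs dag_id, task_id,
    run_id, map_index and try_number attributes."""
    return delete(EdgeJobModel).where(
        EdgeJobModel.dag_id == ti.dag_id,
        EdgeJobModel.task_id == ti.task_id,
        EdgeJobModel.run_id == ti.run_id,
        EdgeJobModel.map_index == ti.map_index,
        EdgeJobModel.try_number == ti.try_number,
    )
```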
--- airflow/providers/edge3/models/edge_job.py
+++ airflow/providers/edge3/models/edge_job.py
@@ -19,16 +19,17 @@ from __future__ import annotations
 from datetime import datetime
 
 from sqlalchemy import (
-    Column,
     Index,
     Integer,
     String,
     text,
 )
+from sqlalchemy.orm import Mapped
 
 from airflow.models.base import Base, StringID
 from airflow.models.taskinstancekey import TaskInstanceKey
-from airflow.
+from airflow.providers.common.compat.sdk import timezone
+from airflow.providers.common.compat.sqlalchemy.orm import mapped_column
 from airflow.utils.log.logging_mixin import LoggingMixin
 from airflow.utils.sqlalchemy import UtcDateTime
 
@@ -41,18 +42,20 @@ class EdgeJobModel(Base, LoggingMixin):
     """
 
     __tablename__ = "edge_job"
-    dag_id =
-    task_id =
-    run_id =
-    map_index
-
-
-
-
-
-
-
-
+    dag_id: Mapped[str] = mapped_column(StringID(), primary_key=True, nullable=False)
+    task_id: Mapped[str] = mapped_column(StringID(), primary_key=True, nullable=False)
+    run_id: Mapped[str] = mapped_column(StringID(), primary_key=True, nullable=False)
+    map_index: Mapped[int] = mapped_column(
+        Integer, primary_key=True, nullable=False, server_default=text("-1")
+    )
+    try_number: Mapped[int] = mapped_column(Integer, primary_key=True, default=0)
+    state: Mapped[str] = mapped_column(String(20))
+    queue: Mapped[str] = mapped_column(String(256))
+    concurrency_slots: Mapped[int] = mapped_column(Integer)
+    command: Mapped[str] = mapped_column(String(2048))
+    queued_dttm: Mapped[datetime | None] = mapped_column(UtcDateTime)
+    edge_worker: Mapped[str | None] = mapped_column(String(64))
+    last_update: Mapped[datetime | None] = mapped_column(UtcDateTime)
 
     def __init__(
         self,
@@ -91,4 +94,4 @@
 
     @property
     def last_update_t(self) -> float:
-        return self.last_update.timestamp()
+        return self.last_update.timestamp() if self.last_update else datetime.now().timestamp()
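Note on the model changes above (and the analogous ones in edge_logs.py and edge_worker.py): plain `Column` attributes are replaced by SQLAlchemy 2.0-style typed mappings. A standalone sketch of the pattern with a toy table, importing `mapped_column` from `sqlalchemy.orm` directly where the provider goes through its common.compat shim:

```python
from __future__ import annotations

from sqlalchemy import Integer, String
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column


class Base(DeclarativeBase):
    pass


class ToyJob(Base):
    """Toy model illustrating the Mapped[...] / mapped_column() style."""

    __tablename__ = "toy_job"

    # The annotation documents the Python type (and Optional-ness) for type
    # checkers, while mapped_column() keeps the SQL type and column options.
    dag_id: Mapped[str] = mapped_column(String(250), primary_key=True)
    try_number: Mapped[int] = mapped_column(Integer, primary_key=True, default=0)
    edge_worker: Mapped[str | None] = mapped_column(String(64))
```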
--- airflow/providers/edge3/models/edge_logs.py
+++ airflow/providers/edge3/models/edge_logs.py
@@ -19,14 +19,15 @@ from __future__ import annotations
 from datetime import datetime
 
 from sqlalchemy import (
-    Column,
     Integer,
     Text,
     text,
 )
 from sqlalchemy.dialects.mysql import MEDIUMTEXT
+from sqlalchemy.orm import Mapped
 
 from airflow.models.base import Base, StringID
+from airflow.providers.common.compat.sqlalchemy.orm import mapped_column
 from airflow.utils.log.logging_mixin import LoggingMixin
 from airflow.utils.sqlalchemy import UtcDateTime
 
@@ -45,13 +46,15 @@ class EdgeLogsModel(Base, LoggingMixin):
     """
 
     __tablename__ = "edge_logs"
-    dag_id =
-    task_id =
-    run_id =
-    map_index
-
-
-
+    dag_id: Mapped[str] = mapped_column(StringID(), primary_key=True, nullable=False)
+    task_id: Mapped[str] = mapped_column(StringID(), primary_key=True, nullable=False)
+    run_id: Mapped[str] = mapped_column(StringID(), primary_key=True, nullable=False)
+    map_index: Mapped[int] = mapped_column(
+        Integer, primary_key=True, nullable=False, server_default=text("-1")
+    )
+    try_number: Mapped[int] = mapped_column(Integer, primary_key=True, default=0)
+    log_chunk_time: Mapped[datetime] = mapped_column(UtcDateTime, primary_key=True, nullable=False)
+    log_chunk_data: Mapped[str] = mapped_column(Text().with_variant(MEDIUMTEXT(), "mysql"), nullable=False)
 
     def __init__(
         self,
--- airflow/providers/edge3/models/edge_worker.py
+++ airflow/providers/edge3/models/edge_worker.py
@@ -23,18 +23,22 @@ from datetime import datetime
 from enum import Enum
 from typing import TYPE_CHECKING
 
-from sqlalchemy import
+from sqlalchemy import Integer, String, delete, select
+from sqlalchemy.orm import Mapped
 
 from airflow.exceptions import AirflowException
 from airflow.models.base import Base
+from airflow.providers.common.compat.sdk import timezone
+from airflow.providers.common.compat.sqlalchemy.orm import mapped_column
 from airflow.stats import Stats
-from airflow.utils import timezone
 from airflow.utils.log.logging_mixin import LoggingMixin
 from airflow.utils.providers_configuration_loader import providers_configuration_loaded
 from airflow.utils.session import NEW_SESSION, provide_session
 from airflow.utils.sqlalchemy import UtcDateTime
 
 if TYPE_CHECKING:
+    from collections.abc import Sequence
+
     from sqlalchemy.orm import Session
 
 logger = logging.getLogger(__name__)
@@ -79,29 +83,29 @@ class EdgeWorkerModel(Base, LoggingMixin):
     """A Edge Worker instance which reports the state and health."""
 
     __tablename__ = "edge_worker"
-    worker_name =
-    state =
-    maintenance_comment =
-    _queues =
-    first_online =
-    last_update =
-    jobs_active =
-    jobs_taken =
-    jobs_success =
-    jobs_failed =
-    sysinfo =
+    worker_name: Mapped[str] = mapped_column(String(64), primary_key=True, nullable=False)
+    state: Mapped[EdgeWorkerState] = mapped_column(String(20))
+    maintenance_comment: Mapped[str | None] = mapped_column(String(1024))
+    _queues: Mapped[str | None] = mapped_column("queues", String(256))
+    first_online: Mapped[datetime | None] = mapped_column(UtcDateTime)
+    last_update: Mapped[datetime | None] = mapped_column(UtcDateTime)
+    jobs_active: Mapped[int] = mapped_column(Integer, default=0)
+    jobs_taken: Mapped[int] = mapped_column(Integer, default=0)
+    jobs_success: Mapped[int] = mapped_column(Integer, default=0)
+    jobs_failed: Mapped[int] = mapped_column(Integer, default=0)
+    sysinfo: Mapped[str | None] = mapped_column(String(256))
 
     def __init__(
         self,
         worker_name: str,
-        state: str,
+        state: str | EdgeWorkerState,
         queues: list[str] | None,
         first_online: datetime | None = None,
         last_update: datetime | None = None,
         maintenance_comment: str | None = None,
     ):
         self.worker_name = worker_name
-        self.state = state
+        self.state = EdgeWorkerState(state)
         self.queues = queues
         self.first_online = first_online or timezone.utcnow()
         self.last_update = last_update
@@ -139,14 +143,14 @@
             queues.remove(queue_name)
         self.queues = queues
 
-    def update_state(self, state: str) -> None:
+    def update_state(self, state: str | EdgeWorkerState) -> None:
         """Update state field."""
-        self.state = state
+        self.state = EdgeWorkerState(state)
 
 
 def set_metrics(
     worker_name: str,
-    state: EdgeWorkerState,
+    state: str | EdgeWorkerState,
     jobs_active: int,
     concurrency: int,
     free_concurrency: int,
@@ -204,7 +208,7 @@ def reset_metrics(worker_name: str) -> None:
 @provide_session
 def _fetch_edge_hosts_from_db(
     hostname: str | None = None, states: list | None = None, session: Session = NEW_SESSION
-) ->
+) -> Sequence[EdgeWorkerModel]:
     query = select(EdgeWorkerModel)
     if states:
         query = query.where(EdgeWorkerModel.state.in_(states))
@@ -226,7 +230,9 @@ def request_maintenance(
 ) -> None:
     """Write maintenance request to the db."""
     query = select(EdgeWorkerModel).where(EdgeWorkerModel.worker_name == worker_name)
-    worker: EdgeWorkerModel = session.scalar(query)
+    worker: EdgeWorkerModel | None = session.scalar(query)
+    if not worker:
+        raise ValueError(f"Edge Worker {worker_name} not found in list of registered workers")
     worker.state = EdgeWorkerState.MAINTENANCE_REQUEST
     worker.maintenance_comment = maintenance_comment
 
@@ -235,7 +241,9 @@ def request_maintenance(
 def exit_maintenance(worker_name: str, session: Session = NEW_SESSION) -> None:
     """Write maintenance exit to the db."""
     query = select(EdgeWorkerModel).where(EdgeWorkerModel.worker_name == worker_name)
-    worker: EdgeWorkerModel = session.scalar(query)
+    worker: EdgeWorkerModel | None = session.scalar(query)
+    if not worker:
+        raise ValueError(f"Edge Worker {worker_name} not found in list of registered workers")
     worker.state = EdgeWorkerState.MAINTENANCE_EXIT
     worker.maintenance_comment = None
 
@@ -244,7 +252,9 @@ def exit_maintenance(worker_name: str, session: Session = NEW_SESSION) -> None:
 def remove_worker(worker_name: str, session: Session = NEW_SESSION) -> None:
     """Remove a worker that is offline or just gone from DB."""
     query = select(EdgeWorkerModel).where(EdgeWorkerModel.worker_name == worker_name)
-    worker: EdgeWorkerModel = session.scalar(query)
+    worker: EdgeWorkerModel | None = session.scalar(query)
+    if not worker:
+        raise ValueError(f"Edge Worker {worker_name} not found in list of registered workers")
     if worker.state in (
         EdgeWorkerState.OFFLINE,
         EdgeWorkerState.OFFLINE_MAINTENANCE,
@@ -263,7 +273,9 @@ def change_maintenance_comment(
 ) -> None:
     """Write maintenance comment in the db."""
     query = select(EdgeWorkerModel).where(EdgeWorkerModel.worker_name == worker_name)
-    worker: EdgeWorkerModel = session.scalar(query)
+    worker: EdgeWorkerModel | None = session.scalar(query)
+    if not worker:
+        raise ValueError(f"Edge Worker {worker_name} not found in list of registered workers")
     if worker.state in (
         EdgeWorkerState.MAINTENANCE_MODE,
         EdgeWorkerState.MAINTENANCE_PENDING,
@@ -281,7 +293,9 @@ def change_maintenance_comment(
 def request_shutdown(worker_name: str, session: Session = NEW_SESSION) -> None:
     """Request to shutdown the edge worker."""
     query = select(EdgeWorkerModel).where(EdgeWorkerModel.worker_name == worker_name)
-    worker: EdgeWorkerModel = session.scalar(query)
+    worker: EdgeWorkerModel | None = session.scalar(query)
+    if not worker:
+        raise ValueError(f"Edge Worker {worker_name} not found in list of registered workers")
     if worker.state not in (
         EdgeWorkerState.OFFLINE,
         EdgeWorkerState.OFFLINE_MAINTENANCE,
@@ -294,7 +308,9 @@ def request_shutdown(worker_name: str, session: Session = NEW_SESSION) -> None:
 def add_worker_queues(worker_name: str, queues: list[str], session: Session = NEW_SESSION) -> None:
     """Add queues to an edge worker."""
     query = select(EdgeWorkerModel).where(EdgeWorkerModel.worker_name == worker_name)
-    worker: EdgeWorkerModel = session.scalar(query)
+    worker: EdgeWorkerModel | None = session.scalar(query)
+    if not worker:
+        raise ValueError(f"Edge Worker {worker_name} not found in list of registered workers")
     if worker.state in (
         EdgeWorkerState.OFFLINE,
         EdgeWorkerState.OFFLINE_MAINTENANCE,
@@ -310,7 +326,9 @@ def add_worker_queues(worker_name: str, queues: list[str], session: Session = NE
 def remove_worker_queues(worker_name: str, queues: list[str], session: Session = NEW_SESSION) -> None:
     """Remove queues from an edge worker."""
     query = select(EdgeWorkerModel).where(EdgeWorkerModel.worker_name == worker_name)
-    worker: EdgeWorkerModel = session.scalar(query)
+    worker: EdgeWorkerModel | None = session.scalar(query)
+    if not worker:
+        raise ValueError(f"Edge Worker {worker_name} not found in list of registered workers")
     if worker.state in (
         EdgeWorkerState.OFFLINE,
         EdgeWorkerState.OFFLINE_MAINTENANCE,
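Note on the edge_worker model changes above: state values are now always coerced into the `EdgeWorkerState` enum, and every lookup by worker name raises a `ValueError` when the worker is unknown instead of failing later with an `AttributeError` on `None`. A sketch of the coercion idea with a stand-in enum (member values here are illustrative, not taken from the provider):

```python
from __future__ import annotations

from enum import Enum


class ToyWorkerState(str, Enum):
    """Stand-in for EdgeWorkerState; the member values are illustrative."""

    IDLE = "idle"
    OFFLINE = "offline"


def update_state(current: str | ToyWorkerState) -> ToyWorkerState:
    # Same idea as EdgeWorkerModel.update_state(): accept either the raw string
    # read back from the DB column or an enum member, and always store the enum.
    return ToyWorkerState(current)


assert update_state("idle") is ToyWorkerState.IDLE
assert update_state(ToyWorkerState.OFFLINE) is ToyWorkerState.OFFLINE
```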
--- airflow/providers/edge3/openapi/v2-edge-generated.yaml
+++ airflow/providers/edge3/openapi/v2-edge-generated.yaml
@@ -142,9 +142,7 @@ paths:
           description: Successful Response
           content:
             application/json:
-              schema:
-                type: 'null'
-                title: Response State
+              schema: {}
         '400':
           content:
             application/json:
@@ -319,9 +317,7 @@
           description: Successful Response
           content:
             application/json:
-              schema:
-                type: 'null'
-                title: Response Push Logs
+              schema: {}
         '400':
           content:
             application/json:
@@ -514,9 +510,7 @@
           description: Successful Response
           content:
             application/json:
-              schema:
-                type: 'null'
-                title: Response Update Queues
+              schema: {}
         '400':
           content:
             application/json:
@@ -614,9 +608,7 @@
           description: Successful Response
           content:
             application/json:
-              schema:
-                type: 'null'
-                title: Response Request Worker Maintenance
+              schema: {}
         '422':
           description: Validation Error
           content:
@@ -650,9 +642,7 @@
           description: Successful Response
           content:
             application/json:
-              schema:
-                type: 'null'
-                title: Response Update Worker Maintenance
+              schema: {}
         '422':
           description: Validation Error
           content:
@@ -680,9 +670,7 @@
           description: Successful Response
           content:
             application/json:
-              schema:
-                type: 'null'
-                title: Response Exit Worker Maintenance
+              schema: {}
         '422':
           description: Validation Error
           content:
@@ -711,9 +699,7 @@
           description: Successful Response
           content:
             application/json:
-              schema:
-                type: 'null'
-                title: Response Request Worker Shutdown
+              schema: {}
         '422':
           description: Validation Error
           content:
@@ -742,9 +728,7 @@
           description: Successful Response
           content:
             application/json:
-              schema:
-                type: 'null'
-                title: Response Delete Worker
+              schema: {}
         '422':
           description: Validation Error
           content:
@@ -779,9 +763,7 @@
           description: Successful Response
           content:
             application/json:
-              schema:
-                type: 'null'
-                title: Response Add Worker Queue
+              schema: {}
         '422':
           description: Validation Error
           content:
@@ -815,9 +797,7 @@
           description: Successful Response
           content:
             application/json:
-              schema:
-                type: 'null'
-                title: Response Remove Worker Queue
+              schema: {}
         '422':
           description: Validation Error
           content:
@@ -906,8 +886,6 @@ components:
         token:
           type: string
           title: Token
-        ti:
-          $ref: '#/components/schemas/TaskInstance'
         dag_rel_path:
           type: string
           format: path
@@ -919,6 +897,8 @@
           - type: string
           - type: 'null'
           title: Log Path
+        ti:
+          $ref: '#/components/schemas/TaskInstance'
         type:
           type: string
           const: ExecuteTask
@@ -927,10 +907,10 @@
       type: object
       required:
       - token
-      - ti
      - dag_rel_path
       - bundle_info
       - log_path
+      - ti
       title: ExecuteTask
       description: Execute the given Task.
     HTTPExceptionResponse: