apache-airflow-providers-edge3 1.1.1rc1__py3-none-any.whl → 1.1.2rc2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version
29
29
 
30
30
  __all__ = ["__version__"]
31
31
 
32
- __version__ = "1.1.1"
32
+ __version__ = "1.1.2"
33
33
 
34
34
  if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
35
35
  "2.10.0"
@@ -64,7 +64,7 @@ EDGE_WORKER_HEADER = "\n".join(
64
64
  @providers_configuration_loaded
65
65
  def force_use_internal_api_on_edge_worker():
66
66
  """
67
- Ensure that the environment is configured for the internal API without needing to declare it outside.
67
+ Ensure the environment is configured for the internal API without explicit declaration.
68
68
 
69
69
  This is only required for an Edge worker and must to be done before the Click CLI wrapper is initiated.
70
70
  That is because the CLI wrapper will attempt to establish a DB connection, which will fail before the
@@ -351,6 +351,48 @@ def remote_worker_request_shutdown(args) -> None:
351
351
  logger.info("Requested shutdown of Edge Worker host %s by %s.", args.edge_hostname, getuser())
352
352
 
353
353
 
354
+ @cli_utils.action_cli(check_db=False)
355
+ @providers_configuration_loaded
356
+ def add_worker_queues(args) -> None:
357
+ """Add queues to an edge worker."""
358
+ _check_valid_db_connection()
359
+ _check_if_registered_edge_host(hostname=args.edge_hostname)
360
+ from airflow.providers.edge3.models.edge_worker import add_worker_queues
361
+
362
+ queues = args.queues.split(",") if args.queues else []
363
+ if not queues:
364
+ raise SystemExit("Error: No queues specified to add.")
365
+
366
+ try:
367
+ add_worker_queues(args.edge_hostname, queues)
368
+ logger.info("Added queues %s to Edge Worker host %s by %s.", queues, args.edge_hostname, getuser())
369
+ except TypeError as e:
370
+ logger.error(str(e))
371
+ raise SystemExit
372
+
373
+
374
+ @cli_utils.action_cli(check_db=False)
375
+ @providers_configuration_loaded
376
+ def remove_worker_queues(args) -> None:
377
+ """Remove queues from an edge worker."""
378
+ _check_valid_db_connection()
379
+ _check_if_registered_edge_host(hostname=args.edge_hostname)
380
+ from airflow.providers.edge3.models.edge_worker import remove_worker_queues
381
+
382
+ queues = args.queues.split(",") if args.queues else []
383
+ if not queues:
384
+ raise SystemExit("Error: No queues specified to remove.")
385
+
386
+ try:
387
+ remove_worker_queues(args.edge_hostname, queues)
388
+ logger.info(
389
+ "Removed queues %s from Edge Worker host %s by %s.", queues, args.edge_hostname, getuser()
390
+ )
391
+ except TypeError as e:
392
+ logger.error(str(e))
393
+ raise SystemExit
394
+
395
+
354
396
  ARG_CONCURRENCY = Arg(
355
397
  ("-c", "--concurrency"),
356
398
  type=int,
@@ -380,6 +422,11 @@ ARG_REQUIRED_MAINTENANCE_COMMENT = Arg(
380
422
  help="Maintenance comments to report reason. Required if enabling maintenance",
381
423
  required=True,
382
424
  )
425
+ ARG_QUEUES_MANAGE = Arg(
426
+ ("-q", "--queues"),
427
+ help="Comma delimited list of queues to add or remove.",
428
+ required=True,
429
+ )
383
430
  ARG_WAIT_MAINT = Arg(
384
431
  ("-w", "--wait"),
385
432
  default=False,
@@ -516,4 +563,22 @@ EDGE_COMMANDS: list[ActionCommand] = [
516
563
  func=remote_worker_request_shutdown,
517
564
  args=(ARG_REQUIRED_EDGE_HOSTNAME,),
518
565
  ),
566
+ ActionCommand(
567
+ name="add-worker-queues",
568
+ help=add_worker_queues.__doc__,
569
+ func=add_worker_queues,
570
+ args=(
571
+ ARG_REQUIRED_EDGE_HOSTNAME,
572
+ ARG_QUEUES_MANAGE,
573
+ ),
574
+ ),
575
+ ActionCommand(
576
+ name="remove-worker-queues",
577
+ help=remove_worker_queues.__doc__,
578
+ func=remove_worker_queues,
579
+ args=(
580
+ ARG_REQUIRED_EDGE_HOSTNAME,
581
+ ARG_QUEUES_MANAGE,
582
+ ),
583
+ ),
519
584
  ]
@@ -26,6 +26,7 @@ from pathlib import Path
26
26
  from subprocess import Popen
27
27
  from time import sleep
28
28
  from typing import TYPE_CHECKING
29
+ from urllib.parse import urlparse
29
30
 
30
31
  from lockfile.pidlockfile import remove_existing_pidfile
31
32
  from requests import HTTPError
@@ -186,11 +187,13 @@ class EdgeWorker:
186
187
  setproctitle(f"airflow edge worker: {workload.ti.key}")
187
188
 
188
189
  try:
189
- base_url = conf.get("api", "base_url", fallback="/")
190
- # If it's a relative URL, use localhost:8080 as the default
191
- if base_url.startswith("/"):
192
- base_url = f"http://localhost:8080{base_url}"
193
- default_execution_api_server = f"{base_url.rstrip('/')}/execution/"
190
+ api_url = conf.get("edge", "api_url")
191
+ execution_api_server_url = conf.get("core", "execution_api_server_url", fallback="")
192
+ if not execution_api_server_url:
193
+ parsed = urlparse(api_url)
194
+ execution_api_server_url = f"{parsed.scheme}://{parsed.netloc}/execution/"
195
+
196
+ logger.info("Worker starting up server=execution_api_server_url=%s", execution_api_server_url)
194
197
 
195
198
  supervise(
196
199
  # This is the "wrong" ti type, but it duck types the same. TODO: Create a protocol for this.
@@ -199,9 +202,7 @@ class EdgeWorker:
199
202
  dag_rel_path=workload.dag_rel_path,
200
203
  bundle_info=workload.bundle_info,
201
204
  token=workload.token,
202
- server=conf.get(
203
- "core", "execution_api_server_url", fallback=default_execution_api_server
204
- ),
205
+ server=execution_api_server_url,
205
206
  log_path=workload.log_path,
206
207
  )
207
208
  return 0
@@ -27,7 +27,11 @@ from datetime import datetime
27
27
  from time import sleep
28
28
 
29
29
  from airflow.exceptions import AirflowNotFoundException
30
- from airflow.hooks.base import BaseHook
30
+
31
+ try:
32
+ from airflow.sdk import BaseHook
33
+ except ImportError:
34
+ from airflow.hooks.base import BaseHook # type: ignore[attr-defined,no-redef]
31
35
  from airflow.utils.trigger_rule import TriggerRule
32
36
 
33
37
  try:
@@ -37,13 +41,13 @@ try:
37
41
  from airflow.sdk import DAG, Param, Variable, task, task_group
38
42
  except ImportError:
39
43
  # Airflow 2.10 compat
40
- from airflow.decorators import task, task_group # type: ignore[no-redef,attr-defined]
41
- from airflow.models.dag import DAG # type: ignore[no-redef,attr-defined,assignment]
42
- from airflow.models.param import Param # type: ignore[no-redef,attr-defined]
43
- from airflow.models.variable import Variable # type: ignore[no-redef,attr-defined]
44
- from airflow.operators.bash import BashOperator # type: ignore[no-redef,attr-defined]
45
- from airflow.operators.empty import EmptyOperator # type: ignore[no-redef,attr-defined]
46
- from airflow.operators.python import PythonOperator # type: ignore[no-redef,attr-defined]
44
+ from airflow.decorators import task, task_group # type: ignore[attr-defined,no-redef]
45
+ from airflow.models.dag import DAG # type: ignore[assignment]
46
+ from airflow.models.param import Param # type: ignore[no-redef]
47
+ from airflow.models.variable import Variable
48
+ from airflow.operators.bash import BashOperator # type: ignore[no-redef]
49
+ from airflow.operators.empty import EmptyOperator # type: ignore[no-redef]
50
+ from airflow.operators.python import PythonOperator # type: ignore[no-redef]
47
51
 
48
52
  with DAG(
49
53
  dag_id="integration_test",
@@ -26,19 +26,27 @@ and serves as a PoC test for the Windows worker.
26
26
  from __future__ import annotations
27
27
 
28
28
  import os
29
- from collections.abc import Container, Sequence
29
+ from collections.abc import Callable, Container, Sequence
30
30
  from datetime import datetime
31
31
  from subprocess import STDOUT, Popen
32
32
  from time import sleep
33
- from typing import TYPE_CHECKING, Any, Callable
33
+ from typing import TYPE_CHECKING, Any
34
34
 
35
- from airflow.decorators import task, task_group
35
+ try:
36
+ from airflow.sdk import task, task_group
37
+ except ImportError:
38
+ # Airflow 2 path
39
+ from airflow.decorators import task, task_group # type: ignore[attr-defined,no-redef]
36
40
  from airflow.exceptions import AirflowException, AirflowNotFoundException, AirflowSkipException
37
- from airflow.hooks.base import BaseHook
38
41
  from airflow.models import BaseOperator
39
42
  from airflow.models.dag import DAG
40
43
  from airflow.models.variable import Variable
41
44
  from airflow.providers.standard.operators.empty import EmptyOperator
45
+
46
+ try:
47
+ from airflow.sdk import BaseHook
48
+ except ImportError:
49
+ from airflow.hooks.base import BaseHook # type: ignore[attr-defined,no-redef]
42
50
  from airflow.sdk import Param
43
51
  from airflow.sdk.execution_time.context import context_to_airflow_vars
44
52
  from airflow.utils.trigger_rule import TriggerRule
@@ -48,11 +56,11 @@ if TYPE_CHECKING:
48
56
  try:
49
57
  from airflow.sdk.types import RuntimeTaskInstanceProtocol as TaskInstance
50
58
  except ImportError:
51
- from airflow.models import TaskInstance # type: ignore[assignment, no-redef]
59
+ from airflow.models import TaskInstance # type: ignore[assignment]
52
60
  from airflow.utils.context import Context
53
61
 
54
62
  try:
55
- from airflow.operators.python import PythonOperator # type: ignore
63
+ from airflow.operators.python import PythonOperator
56
64
  except ImportError:
57
65
  from airflow.providers.common.compat.standard.operators import PythonOperator
58
66
 
@@ -273,7 +281,7 @@ with DAG(
273
281
 
274
282
  @task.virtualenv(requirements="numpy")
275
283
  def virtualenv():
276
- import numpy # type: ignore
284
+ import numpy
277
285
 
278
286
  print(f"Welcome to virtualenv with numpy version {numpy.__version__}.")
279
287
 
@@ -21,7 +21,7 @@ import contextlib
21
21
  from collections.abc import Sequence
22
22
  from copy import deepcopy
23
23
  from datetime import datetime, timedelta
24
- from typing import TYPE_CHECKING, Any, Optional
24
+ from typing import TYPE_CHECKING, Any
25
25
 
26
26
  from sqlalchemy import delete, inspect, text
27
27
  from sqlalchemy.exc import NoSuchTableError
@@ -30,7 +30,6 @@ from sqlalchemy.orm import Session
30
30
  from airflow.cli.cli_config import GroupCommand
31
31
  from airflow.configuration import conf
32
32
  from airflow.executors.base_executor import BaseExecutor
33
- from airflow.models.abstractoperator import DEFAULT_QUEUE
34
33
  from airflow.models.taskinstance import TaskInstance, TaskInstanceState
35
34
  from airflow.providers.edge3.cli.edge_command import EDGE_COMMANDS
36
35
  from airflow.providers.edge3.models.edge_job import EdgeJobModel
@@ -52,9 +51,10 @@ if TYPE_CHECKING:
52
51
  # TODO: Airflow 2 type hints; remove when Airflow 2 support is removed
53
52
  CommandType = Sequence[str]
54
53
  # Task tuple to send to be executed
55
- TaskTuple = tuple[TaskInstanceKey, CommandType, Optional[str], Optional[Any]]
54
+ TaskTuple = tuple[TaskInstanceKey, CommandType, str | None, Any | None]
56
55
 
57
56
  PARALLELISM: int = conf.getint("core", "PARALLELISM")
57
+ DEFAULT_QUEUE: str = conf.get_mandatory_value("operators", "default_queue")
58
58
 
59
59
 
60
60
  class EdgeExecutor(BaseExecutor):
@@ -72,6 +72,7 @@ class EdgeExecutor(BaseExecutor):
72
72
  """
73
73
  inspector = inspect(engine)
74
74
  edge_job_columns = None
75
+ edge_job_command_len = None
75
76
  with contextlib.suppress(NoSuchTableError):
76
77
  edge_job_schema = inspector.get_columns("edge_job")
77
78
  edge_job_columns = [column["name"] for column in edge_job_schema]
@@ -113,7 +114,7 @@ class EdgeExecutor(BaseExecutor):
113
114
 
114
115
  def _process_tasks(self, task_tuples: list[TaskTuple]) -> None:
115
116
  """
116
- Temponary overwrite of _process_tasks function.
117
+ Temporary overwrite of _process_tasks function.
117
118
 
118
119
  Idea is to not change the interface of the execute_async function in BaseExecutor as it will be changed in Airflow 3.
119
120
  Edge worker needs task_instance in execute_async but BaseExecutor deletes this out of the self.queued_tasks.
@@ -132,7 +133,7 @@ class EdgeExecutor(BaseExecutor):
132
133
  session: Session = NEW_SESSION,
133
134
  ) -> None:
134
135
  """Execute asynchronously. Airflow 2.10 entry point to execute a task."""
135
- # Use of a temponary trick to get task instance, will be changed with Airflow 3.0.0
136
+ # Use of a temporary trick to get task instance, will be changed with Airflow 3.0.0
136
137
  # code works together with _process_tasks overwrite to get task instance.
137
138
  # TaskInstance in fourth element
138
139
  task_instance = self.edge_queued_tasks[key][3] # type: ignore[index]
@@ -109,7 +109,7 @@ class EdgeWorkerModel(Base, LoggingMixin):
109
109
  super().__init__()
110
110
 
111
111
  @property
112
- def sysinfo_json(self) -> dict:
112
+ def sysinfo_json(self) -> dict | None:
113
113
  return json.loads(self.sysinfo) if self.sysinfo else None
114
114
 
115
115
  @property
@@ -283,3 +283,37 @@ def request_shutdown(worker_name: str, session: Session = NEW_SESSION) -> None:
283
283
  EdgeWorkerState.UNKNOWN,
284
284
  ):
285
285
  worker.state = EdgeWorkerState.SHUTDOWN_REQUEST
286
+
287
+
288
+ @provide_session
289
+ def add_worker_queues(worker_name: str, queues: list[str], session: Session = NEW_SESSION) -> None:
290
+ """Add queues to an edge worker."""
291
+ query = select(EdgeWorkerModel).where(EdgeWorkerModel.worker_name == worker_name)
292
+ worker: EdgeWorkerModel = session.scalar(query)
293
+ if worker.state in (
294
+ EdgeWorkerState.OFFLINE,
295
+ EdgeWorkerState.OFFLINE_MAINTENANCE,
296
+ EdgeWorkerState.UNKNOWN,
297
+ ):
298
+ error_message = f"Cannot add queues to edge worker {worker_name} as it is in {worker.state} state!"
299
+ logger.error(error_message)
300
+ raise TypeError(error_message)
301
+ worker.add_queues(queues)
302
+
303
+
304
+ @provide_session
305
+ def remove_worker_queues(worker_name: str, queues: list[str], session: Session = NEW_SESSION) -> None:
306
+ """Remove queues from an edge worker."""
307
+ query = select(EdgeWorkerModel).where(EdgeWorkerModel.worker_name == worker_name)
308
+ worker: EdgeWorkerModel = session.scalar(query)
309
+ if worker.state in (
310
+ EdgeWorkerState.OFFLINE,
311
+ EdgeWorkerState.OFFLINE_MAINTENANCE,
312
+ EdgeWorkerState.UNKNOWN,
313
+ ):
314
+ error_message = (
315
+ f"Cannot remove queues from edge worker {worker_name} as it is in {worker.state} state!"
316
+ )
317
+ logger.error(error_message)
318
+ raise TypeError(error_message)
319
+ worker.remove_queues(queues)
@@ -17,184 +17,195 @@
17
17
 
18
18
  from __future__ import annotations
19
19
 
20
- import re
21
- from datetime import datetime, timedelta
22
- from pathlib import Path
20
+ import sys
23
21
  from typing import TYPE_CHECKING, Any
24
22
 
25
- from flask import Blueprint, redirect, request, url_for
26
- from flask_appbuilder import BaseView, expose
27
- from markupsafe import Markup
28
- from sqlalchemy import select
29
-
30
23
  from airflow.configuration import conf
31
24
  from airflow.exceptions import AirflowConfigException
32
- from airflow.models.taskinstance import TaskInstanceState
33
25
  from airflow.plugins_manager import AirflowPlugin
34
26
  from airflow.providers.edge3.version_compat import AIRFLOW_V_3_0_PLUS
35
- from airflow.utils.state import State
36
-
37
- if AIRFLOW_V_3_0_PLUS:
38
- from airflow.api_fastapi.auth.managers.models.resource_details import AccessView
39
- from airflow.providers.fab.www.auth import has_access_view
40
-
41
- else:
42
- from airflow.auth.managers.models.resource_details import AccessView # type: ignore[no-redef]
43
- from airflow.www.auth import has_access_view # type: ignore[no-redef]
44
27
  from airflow.utils.session import NEW_SESSION, provide_session
45
- from airflow.utils.yaml import safe_load
46
28
 
47
29
  if TYPE_CHECKING:
48
30
  from sqlalchemy.orm import Session
49
31
 
32
+ if AIRFLOW_V_3_0_PLUS:
33
+ from airflow.utils.db import DBLocks, create_global_lock
50
34
 
51
- def _get_airflow_2_api_endpoint() -> Blueprint:
52
- from airflow.www.constants import SWAGGER_BUNDLE, SWAGGER_ENABLED
53
- from airflow.www.extensions.init_views import _CustomErrorRequestBodyValidator, _LazyResolver
54
-
55
- folder = Path(__file__).parents[1].resolve() # this is airflow/providers/edge3/
56
- with folder.joinpath("openapi", "edge_worker_api_v1.yaml").open() as f:
57
- specification = safe_load(f)
58
- from connexion import FlaskApi
59
-
60
- bp = FlaskApi(
61
- specification=specification,
62
- resolver=_LazyResolver(),
63
- base_path="/edge_worker/v1",
64
- strict_validation=True,
65
- options={"swagger_ui": SWAGGER_ENABLED, "swagger_path": SWAGGER_BUNDLE.__fspath__()},
66
- validate_responses=True,
67
- validator_map={"body": _CustomErrorRequestBodyValidator},
68
- ).blueprint
69
- # Need to exempt CSRF to make API usable
70
- from airflow.www.app import csrf
71
-
72
- csrf.exempt(bp)
73
- return bp
74
-
75
-
76
- def _get_api_endpoint() -> dict[str, Any]:
77
- from airflow.providers.edge3.worker_api.app import create_edge_worker_api_app
78
-
79
- return {
80
- "app": create_edge_worker_api_app(),
81
- "url_prefix": "/edge_worker/v1",
82
- "name": "Airflow Edge Worker API",
83
- }
84
-
85
-
86
- def _state_token(state):
87
- """Return a formatted string with HTML for a given State."""
88
- color = State.color(state)
89
- fg_color = State.color_fg(state)
90
- return Markup(
91
- """
92
- <span class="label" style="color:{fg_color}; background-color:{color};"
93
- title="Current State: {state}">{state}</span>
94
- """
95
- ).format(color=color, state=state, fg_color=fg_color)
96
-
97
-
98
- def modify_maintenance_comment_on_update(maintenance_comment: str | None, username: str) -> str:
99
- if maintenance_comment:
100
- if re.search(
101
- r"^\[[-\d:\s]+\] - .+ put node into maintenance mode\r?\nComment:.*", maintenance_comment
102
- ):
103
- return re.sub(
104
- r"^\[[-\d:\s]+\] - .+ put node into maintenance mode\r?\nComment:",
105
- f"[{datetime.now().strftime('%Y-%m-%d %H:%M')}] - {username} updated maintenance mode\nComment:",
106
- maintenance_comment,
107
- )
108
- if re.search(r"^\[[-\d:\s]+\] - .+ updated maintenance mode\r?\nComment:.*", maintenance_comment):
109
- return re.sub(
110
- r"^\[[-\d:\s]+\] - .+ updated maintenance mode\r?\nComment:",
111
- f"[{datetime.now().strftime('%Y-%m-%d %H:%M')}] - {username} updated maintenance mode\nComment:",
112
- maintenance_comment,
113
- )
114
- return f"[{datetime.now().strftime('%Y-%m-%d %H:%M')}] - {username} updated maintenance mode\nComment: {maintenance_comment}"
115
- return f"[{datetime.now().strftime('%Y-%m-%d %H:%M')}] - {username} updated maintenance mode\nComment:"
116
-
117
-
118
- # registers airflow/providers/edge3/plugins/templates as a Jinja template folder
119
- template_bp = Blueprint(
120
- "template_blueprint",
121
- __name__,
122
- template_folder="templates",
123
- )
124
-
125
-
126
- class EdgeWorkerJobs(BaseView):
127
- """Simple view to show Edge Worker jobs."""
128
-
129
- default_view = "jobs"
130
-
131
- @expose("/jobs")
132
- @has_access_view(AccessView.JOBS)
133
35
  @provide_session
134
- def jobs(self, session: Session = NEW_SESSION):
135
- from airflow.providers.edge3.models.edge_job import EdgeJobModel
136
-
137
- jobs = session.scalars(select(EdgeJobModel).order_by(EdgeJobModel.queued_dttm)).all()
138
- html_states = {
139
- str(state): _state_token(str(state)) for state in TaskInstanceState.__members__.values()
36
+ def _get_api_endpoint(session: Session = NEW_SESSION) -> dict[str, Any]:
37
+ # Ensure all required DB models are created before starting the API
38
+ with create_global_lock(session=session, lock=DBLocks.MIGRATIONS):
39
+ engine = session.get_bind().engine
40
+ from airflow.providers.edge3.models.edge_job import EdgeJobModel
41
+ from airflow.providers.edge3.models.edge_logs import EdgeLogsModel
42
+ from airflow.providers.edge3.models.edge_worker import EdgeWorkerModel
43
+
44
+ EdgeJobModel.metadata.create_all(engine)
45
+ EdgeLogsModel.metadata.create_all(engine)
46
+ EdgeWorkerModel.metadata.create_all(engine)
47
+
48
+ from airflow.providers.edge3.worker_api.app import create_edge_worker_api_app
49
+
50
+ return {
51
+ "app": create_edge_worker_api_app(),
52
+ "url_prefix": "/edge_worker/v1",
53
+ "name": "Airflow Edge Worker API",
140
54
  }
141
- return self.render_template("edge_worker_jobs.html", jobs=jobs, html_states=html_states)
142
-
143
-
144
- class EdgeWorkerHosts(BaseView):
145
- """Simple view to show Edge Worker status."""
146
-
147
- default_view = "status"
148
55
 
149
- @expose("/status")
150
- @has_access_view(AccessView.JOBS)
151
- @provide_session
152
- def status(self, session: Session = NEW_SESSION):
153
- from airflow.providers.edge3.models.edge_worker import EdgeWorkerModel
154
-
155
- hosts = session.scalars(select(EdgeWorkerModel).order_by(EdgeWorkerModel.worker_name)).all()
156
- five_min_ago = datetime.now() - timedelta(minutes=5)
157
- return self.render_template("edge_worker_hosts.html", hosts=hosts, five_min_ago=five_min_ago)
158
-
159
- @expose("/status/maintenance/<string:worker_name>/on", methods=["POST"])
160
- @has_access_view(AccessView.JOBS)
161
- def worker_to_maintenance(self, worker_name: str):
162
- from flask_login import current_user
163
-
164
- from airflow.providers.edge3.models.edge_worker import request_maintenance
165
-
166
- maintenance_comment = request.form.get("maintenance_comment")
167
- maintenance_comment = f"[{datetime.now().strftime('%Y-%m-%d %H:%M')}] - {current_user.username} put node into maintenance mode\nComment: {maintenance_comment}"
168
- request_maintenance(worker_name, maintenance_comment)
169
- return redirect(url_for("EdgeWorkerHosts.status"))
170
-
171
- @expose("/status/maintenance/<string:worker_name>/off", methods=["POST"])
172
- @has_access_view(AccessView.JOBS)
173
- def remove_worker_from_maintenance(self, worker_name: str):
174
- from airflow.providers.edge3.models.edge_worker import exit_maintenance
175
-
176
- exit_maintenance(worker_name)
177
- return redirect(url_for("EdgeWorkerHosts.status"))
178
-
179
- @expose("/status/maintenance/<string:worker_name>/remove", methods=["POST"])
180
- @has_access_view(AccessView.JOBS)
181
- def remove_worker(self, worker_name: str):
182
- from airflow.providers.edge3.models.edge_worker import remove_worker
183
-
184
- remove_worker(worker_name)
185
- return redirect(url_for("EdgeWorkerHosts.status"))
186
-
187
- @expose("/status/maintenance/<string:worker_name>/change_comment", methods=["POST"])
188
- @has_access_view(AccessView.JOBS)
189
- def change_maintenance_comment(self, worker_name: str):
190
- from flask_login import current_user
191
-
192
- from airflow.providers.edge3.models.edge_worker import change_maintenance_comment
193
-
194
- maintenance_comment = request.form.get("maintenance_comment")
195
- maintenance_comment = modify_maintenance_comment_on_update(maintenance_comment, current_user.username)
196
- change_maintenance_comment(worker_name, maintenance_comment)
197
- return redirect(url_for("EdgeWorkerHosts.status"))
56
+ else:
57
+ # This is for back-compatibility with Airflow 2.x and we only make this
58
+ # to prevent dependencies and breaking imports in Airflow 3.x
59
+ import re
60
+ from datetime import datetime, timedelta
61
+ from pathlib import Path
62
+
63
+ from flask import Blueprint, redirect, request, url_for
64
+ from flask_appbuilder import BaseView, expose
65
+ from markupsafe import Markup
66
+ from sqlalchemy import select
67
+
68
+ from airflow.auth.managers.models.resource_details import AccessView
69
+ from airflow.models.taskinstance import TaskInstanceState
70
+ from airflow.utils.state import State
71
+ from airflow.utils.yaml import safe_load
72
+ from airflow.www.auth import has_access_view
73
+
74
+ def _get_airflow_2_api_endpoint() -> Blueprint:
75
+ from airflow.www.app import csrf
76
+ from airflow.www.constants import SWAGGER_BUNDLE, SWAGGER_ENABLED
77
+ from airflow.www.extensions.init_views import _CustomErrorRequestBodyValidator, _LazyResolver
78
+
79
+ folder = Path(__file__).parents[1].resolve() # this is airflow/providers/edge3/
80
+ with folder.joinpath("openapi", "edge_worker_api_v1.yaml").open() as f:
81
+ specification = safe_load(f)
82
+ from connexion import FlaskApi
83
+
84
+ bp = FlaskApi(
85
+ specification=specification,
86
+ resolver=_LazyResolver(),
87
+ base_path="/edge_worker/v1",
88
+ strict_validation=True,
89
+ options={"swagger_ui": SWAGGER_ENABLED, "swagger_path": SWAGGER_BUNDLE.__fspath__()},
90
+ validate_responses=True,
91
+ validator_map={"body": _CustomErrorRequestBodyValidator},
92
+ ).blueprint
93
+ # Need to exempt CSRF to make API usable
94
+ csrf.exempt(bp)
95
+ return bp
96
+
97
+ def _state_token(state):
98
+ """Return a formatted string with HTML for a given State."""
99
+ color = State.color(state)
100
+ fg_color = State.color_fg(state)
101
+ return Markup(
102
+ """
103
+ <span class="label" style="color:{fg_color}; background-color:{color};"
104
+ title="Current State: {state}">{state}</span>
105
+ """
106
+ ).format(color=color, state=state, fg_color=fg_color)
107
+
108
+ def modify_maintenance_comment_on_update(maintenance_comment: str | None, username: str) -> str:
109
+ if maintenance_comment:
110
+ if re.search(
111
+ r"^\[[-\d:\s]+\] - .+ put node into maintenance mode\r?\nComment:.*", maintenance_comment
112
+ ):
113
+ return re.sub(
114
+ r"^\[[-\d:\s]+\] - .+ put node into maintenance mode\r?\nComment:",
115
+ f"[{datetime.now().strftime('%Y-%m-%d %H:%M')}] - {username} updated maintenance mode\nComment:",
116
+ maintenance_comment,
117
+ )
118
+ if re.search(r"^\[[-\d:\s]+\] - .+ updated maintenance mode\r?\nComment:.*", maintenance_comment):
119
+ return re.sub(
120
+ r"^\[[-\d:\s]+\] - .+ updated maintenance mode\r?\nComment:",
121
+ f"[{datetime.now().strftime('%Y-%m-%d %H:%M')}] - {username} updated maintenance mode\nComment:",
122
+ maintenance_comment,
123
+ )
124
+ return f"[{datetime.now().strftime('%Y-%m-%d %H:%M')}] - {username} updated maintenance mode\nComment: {maintenance_comment}"
125
+ return (
126
+ f"[{datetime.now().strftime('%Y-%m-%d %H:%M')}] - {username} updated maintenance mode\nComment:"
127
+ )
128
+
129
+ # registers airflow/providers/edge3/plugins/templates as a Jinja template folder
130
+ template_bp = Blueprint(
131
+ "template_blueprint",
132
+ __name__,
133
+ template_folder="templates",
134
+ )
135
+
136
+ class EdgeWorkerJobs(BaseView):
137
+ """Simple view to show Edge Worker jobs."""
138
+
139
+ default_view = "jobs"
140
+
141
+ @expose("/jobs")
142
+ @has_access_view(AccessView.JOBS)
143
+ @provide_session
144
+ def jobs(self, session: Session = NEW_SESSION):
145
+ from airflow.providers.edge3.models.edge_job import EdgeJobModel
146
+
147
+ jobs = session.scalars(select(EdgeJobModel).order_by(EdgeJobModel.queued_dttm)).all()
148
+ html_states = {
149
+ str(state): _state_token(str(state)) for state in TaskInstanceState.__members__.values()
150
+ }
151
+ return self.render_template("edge_worker_jobs.html", jobs=jobs, html_states=html_states)
152
+
153
+ class EdgeWorkerHosts(BaseView):
154
+ """Simple view to show Edge Worker status."""
155
+
156
+ default_view = "status"
157
+
158
+ @expose("/status")
159
+ @has_access_view(AccessView.JOBS)
160
+ @provide_session
161
+ def status(self, session: Session = NEW_SESSION):
162
+ from airflow.providers.edge3.models.edge_worker import EdgeWorkerModel
163
+
164
+ hosts = session.scalars(select(EdgeWorkerModel).order_by(EdgeWorkerModel.worker_name)).all()
165
+ five_min_ago = datetime.now() - timedelta(minutes=5)
166
+ return self.render_template("edge_worker_hosts.html", hosts=hosts, five_min_ago=five_min_ago)
167
+
168
+ @expose("/status/maintenance/<string:worker_name>/on", methods=["POST"])
169
+ @has_access_view(AccessView.JOBS)
170
+ def worker_to_maintenance(self, worker_name: str):
171
+ from flask_login import current_user
172
+
173
+ from airflow.providers.edge3.models.edge_worker import request_maintenance
174
+
175
+ maintenance_comment = request.form.get("maintenance_comment")
176
+ maintenance_comment = f"[{datetime.now().strftime('%Y-%m-%d %H:%M')}] - {current_user.username} put node into maintenance mode\nComment: {maintenance_comment}"
177
+ request_maintenance(worker_name, maintenance_comment)
178
+ return redirect(url_for("EdgeWorkerHosts.status"))
179
+
180
+ @expose("/status/maintenance/<string:worker_name>/off", methods=["POST"])
181
+ @has_access_view(AccessView.JOBS)
182
+ def remove_worker_from_maintenance(self, worker_name: str):
183
+ from airflow.providers.edge3.models.edge_worker import exit_maintenance
184
+
185
+ exit_maintenance(worker_name)
186
+ return redirect(url_for("EdgeWorkerHosts.status"))
187
+
188
+ @expose("/status/maintenance/<string:worker_name>/remove", methods=["POST"])
189
+ @has_access_view(AccessView.JOBS)
190
+ def remove_worker(self, worker_name: str):
191
+ from airflow.providers.edge3.models.edge_worker import remove_worker
192
+
193
+ remove_worker(worker_name)
194
+ return redirect(url_for("EdgeWorkerHosts.status"))
195
+
196
+ @expose("/status/maintenance/<string:worker_name>/change_comment", methods=["POST"])
197
+ @has_access_view(AccessView.JOBS)
198
+ def change_maintenance_comment(self, worker_name: str):
199
+ from flask_login import current_user
200
+
201
+ from airflow.providers.edge3.models.edge_worker import change_maintenance_comment
202
+
203
+ maintenance_comment = request.form.get("maintenance_comment")
204
+ maintenance_comment = modify_maintenance_comment_on_update(
205
+ maintenance_comment, current_user.username
206
+ )
207
+ change_maintenance_comment(worker_name, maintenance_comment)
208
+ return redirect(url_for("EdgeWorkerHosts.status"))
198
209
 
199
210
 
200
211
  # Check if EdgeExecutor is actually loaded
@@ -203,27 +214,37 @@ try:
203
214
  except AirflowConfigException:
204
215
  EDGE_EXECUTOR_ACTIVE = False
205
216
 
217
+ # Load the API endpoint only on api-server (Airflow 3.x) or webserver (Airflow 2.x)
218
+ # todo(jscheffl): Remove this check when the discussion in
219
+ # https://lists.apache.org/thread/w170czq6r7bslkqp1tk6bjjjo0789wgl
220
+ # resulted in a proper API to selective initialize. Maybe backcompat-shim
221
+ # is also needed to support Airflow-versions prior the rework.
222
+ if AIRFLOW_V_3_0_PLUS:
223
+ RUNNING_ON_APISERVER = (len(sys.argv) > 1 and sys.argv[1] in ["api-server"]) or (
224
+ len(sys.argv) > 2 and sys.argv[2] == "airflow-core/src/airflow/api_fastapi/main.py"
225
+ )
226
+ else:
227
+ RUNNING_ON_APISERVER = "gunicorn" in sys.argv[0] and "airflow-webserver" in sys.argv
228
+
206
229
 
207
230
  class EdgeExecutorPlugin(AirflowPlugin):
208
231
  """EdgeExecutor Plugin - provides API endpoints for Edge Workers in Webserver."""
209
232
 
210
233
  name = "edge_executor"
211
- if EDGE_EXECUTOR_ACTIVE:
212
- appbuilder_views = [
213
- {
214
- "name": "Edge Worker Jobs",
215
- "category": "Admin",
216
- "view": EdgeWorkerJobs(),
217
- },
218
- {
219
- "name": "Edge Worker Hosts",
220
- "category": "Admin",
221
- "view": EdgeWorkerHosts(),
222
- },
223
- ]
224
-
234
+ if EDGE_EXECUTOR_ACTIVE and RUNNING_ON_APISERVER:
225
235
  if AIRFLOW_V_3_0_PLUS:
226
236
  fastapi_apps = [_get_api_endpoint()]
227
- flask_blueprints = [template_bp]
228
237
  else:
238
+ appbuilder_views = [
239
+ {
240
+ "name": "Edge Worker Jobs",
241
+ "category": "Admin",
242
+ "view": EdgeWorkerJobs(),
243
+ },
244
+ {
245
+ "name": "Edge Worker Hosts",
246
+ "category": "Admin",
247
+ "view": EdgeWorkerHosts(),
248
+ },
249
+ ]
229
250
  flask_blueprints = [_get_airflow_2_api_endpoint(), template_bp]
@@ -58,7 +58,7 @@ if AIRFLOW_V_3_0_PLUS:
58
58
 
59
59
  else:
60
60
  # Airflow 2.10 compatibility
61
- from airflow.utils.jwt_signer import JWTSigner # type: ignore
61
+ from airflow.utils.jwt_signer import JWTSigner
62
62
 
63
63
  @cache
64
64
  def jwt_signer() -> JWTSigner:
@@ -34,8 +34,8 @@ if AIRFLOW_V_3_0_PLUS:
34
34
  def parse_command(command: str) -> ExecuteTask:
35
35
  return ExecuteTask.model_validate_json(command)
36
36
  else:
37
- # Mock the external dependnecies
38
- from typing import Callable
37
+ # Mock the external dependencies
38
+ from collections.abc import Callable
39
39
 
40
40
  from connexion import ProblemException
41
41
 
@@ -127,7 +127,7 @@ else:
127
127
 
128
128
  # In Airflow 3 with AIP-72 we get workload addressed by ExecuteTask
129
129
  # But in Airflow 2.10 it is a command line array
130
- ExecuteTask = list[str] # type: ignore[no-redef,assignment,misc]
130
+ ExecuteTask = list[str] # type: ignore[assignment,misc]
131
131
 
132
132
  def parse_command(command: str) -> ExecuteTask:
133
133
  from ast import literal_eval
@@ -66,7 +66,7 @@ def rpcapi_v2(body: dict[str, Any]) -> APIResponse:
66
66
  # Note: Except the method map this _was_ a 100% copy of internal API module
67
67
  # airflow.api_internal.endpoints.rpc_api_endpoint.internal_airflow_api()
68
68
  # As of rework for FastAPI in Airflow 3.0, this is updated and to be removed in the future.
69
- from airflow.api_internal.endpoints.rpc_api_endpoint import ( # type: ignore[attr-defined]
69
+ from airflow.api_internal.endpoints.rpc_api_endpoint import (
70
70
  # Note: This is just for compatibility with Airflow 2.10, not working for Airflow 3 / main as removed
71
71
  initialize_method_map,
72
72
  )
@@ -1,11 +1,11 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: apache-airflow-providers-edge3
3
- Version: 1.1.1rc1
3
+ Version: 1.1.2rc2
4
4
  Summary: Provider package apache-airflow-providers-edge3 for Apache Airflow
5
5
  Keywords: airflow-provider,edge3,airflow,integration
6
6
  Author-email: Apache Software Foundation <dev@airflow.apache.org>
7
7
  Maintainer-email: Apache Software Foundation <dev@airflow.apache.org>
8
- Requires-Python: ~=3.9
8
+ Requires-Python: >=3.10
9
9
  Description-Content-Type: text/x-rst
10
10
  Classifier: Development Status :: 5 - Production/Stable
11
11
  Classifier: Environment :: Console
@@ -15,18 +15,17 @@ Classifier: Intended Audience :: System Administrators
15
15
  Classifier: Framework :: Apache Airflow
16
16
  Classifier: Framework :: Apache Airflow :: Provider
17
17
  Classifier: License :: OSI Approved :: Apache Software License
18
- Classifier: Programming Language :: Python :: 3.9
19
18
  Classifier: Programming Language :: Python :: 3.10
20
19
  Classifier: Programming Language :: Python :: 3.11
21
20
  Classifier: Programming Language :: Python :: 3.12
21
+ Classifier: Programming Language :: Python :: 3.13
22
22
  Classifier: Topic :: System :: Monitoring
23
23
  Requires-Dist: apache-airflow>=2.10.0rc1
24
- Requires-Dist: apache-airflow-providers-fab>=1.5.3rc1
25
24
  Requires-Dist: pydantic>=2.11.0
26
25
  Requires-Dist: retryhttp>=1.2.0,!=1.3.0
27
26
  Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
28
- Project-URL: Changelog, https://airflow.staged.apache.org/docs/apache-airflow-providers-edge3/1.1.1/changelog.html
29
- Project-URL: Documentation, https://airflow.staged.apache.org/docs/apache-airflow-providers-edge3/1.1.1
27
+ Project-URL: Changelog, https://airflow.staged.apache.org/docs/apache-airflow-providers-edge3/1.1.2/changelog.html
28
+ Project-URL: Documentation, https://airflow.staged.apache.org/docs/apache-airflow-providers-edge3/1.1.2
30
29
  Project-URL: Mastodon, https://fosstodon.org/@airflow
31
30
  Project-URL: Slack Chat, https://s.apache.org/airflow-slack
32
31
  Project-URL: Source Code, https://github.com/apache/airflow
@@ -57,8 +56,9 @@ Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/
57
56
 
58
57
  Package ``apache-airflow-providers-edge3``
59
58
 
60
- Release: ``1.1.1``
59
+ Release: ``1.1.2``
61
60
 
61
+ Release Date: ``|PypiReleaseDate|``
62
62
 
63
63
  Handle edge workers on remote sites via HTTP(s) connection and orchestrates work over distributed sites.
64
64
 
@@ -82,7 +82,7 @@ This is a provider package for ``edge3`` provider. All classes for this provider
82
82
  are in ``airflow.providers.edge3`` python package.
83
83
 
84
84
  You can find package information and changelog for the provider
85
- in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-edge3/1.1.1/>`_.
85
+ in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-edge3/1.1.2/>`_.
86
86
 
87
87
  Installation
88
88
  ------------
@@ -91,39 +91,19 @@ You can install this package on top of an existing Airflow 2 installation (see `
91
91
  for the minimum Airflow version supported) via
92
92
  ``pip install apache-airflow-providers-edge3``
93
93
 
94
- The package supports the following python versions: 3.9,3.10,3.11,3.12
94
+ The package supports the following python versions: 3.10,3.11,3.12,3.13
95
95
 
96
96
  Requirements
97
97
  ------------
98
98
 
99
- ================================ ===================
100
- PIP package Version required
101
- ================================ ===================
102
- ``apache-airflow`` ``>=2.10.0``
103
- ``apache-airflow-providers-fab`` ``>=1.5.3``
104
- ``pydantic`` ``>=2.11.0``
105
- ``retryhttp`` ``>=1.2.0,!=1.3.0``
106
- ================================ ===================
107
-
108
- Cross provider package dependencies
109
- -----------------------------------
110
-
111
- Those are dependencies that might be needed in order to use all the features of the package.
112
- You need to install the specified providers in order to use them.
113
-
114
- You can install such cross-provider dependencies when installing from PyPI. For example:
115
-
116
- .. code-block:: bash
117
-
118
- pip install apache-airflow-providers-edge3[fab]
119
-
120
-
121
- ============================================================================================== =======
122
- Dependent package Extra
123
- ============================================================================================== =======
124
- `apache-airflow-providers-fab <https://airflow.apache.org/docs/apache-airflow-providers-fab>`_ ``fab``
125
- ============================================================================================== =======
99
+ ================== ===================
100
+ PIP package Version required
101
+ ================== ===================
102
+ ``apache-airflow`` ``>=2.10.0``
103
+ ``pydantic`` ``>=2.11.0``
104
+ ``retryhttp`` ``>=1.2.0,!=1.3.0``
105
+ ================== ===================
126
106
 
127
107
  The changelog for the provider package can be found in the
128
- `changelog <https://airflow.apache.org/docs/apache-airflow-providers-edge3/1.1.1/changelog.html>`_.
108
+ `changelog <https://airflow.apache.org/docs/apache-airflow-providers-edge3/1.1.2/changelog.html>`_.
129
109
 
@@ -1,41 +1,41 @@
1
1
  airflow/providers/edge3/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
2
- airflow/providers/edge3/__init__.py,sha256=lWNqnEBB7q4ggTGo6urdvAwUV0msxvtkIHymUhwdweg,1494
2
+ airflow/providers/edge3/__init__.py,sha256=cSvc8gMIo0Zq7u48rdO-1fw66BpyQg8IRkn6osMkTlY,1494
3
3
  airflow/providers/edge3/get_provider_info.py,sha256=Ek27-dB4UALHUFYoYjtoQIGq0p7zeHcEgmELHvpVmCU,6836
4
4
  airflow/providers/edge3/version_compat.py,sha256=j5PCtXvZ71aBjixu-EFTNtVDPsngzzs7os0ZQDgFVDk,1536
5
5
  airflow/providers/edge3/cli/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
6
6
  airflow/providers/edge3/cli/api_client.py,sha256=334KHVB4eMSzRpQ5emS56o-RTUJQprxf5Q3xQldCHDQ,7440
7
7
  airflow/providers/edge3/cli/dataclasses.py,sha256=JUuvvmzSVWvG9uOEfzLIiXrTZ-HbESvu50jkPpVIYVw,2895
8
- airflow/providers/edge3/cli/edge_command.py,sha256=Yggt-hBcf3rq7cMelsP0Jx9QfSkR07YU229ZIc7pZYY,18276
8
+ airflow/providers/edge3/cli/edge_command.py,sha256=qfHjVxA8QJ6zD8Eb8Ipieso4wo51_3MBaTtuSSIEOI4,20339
9
9
  airflow/providers/edge3/cli/signalling.py,sha256=sf4S6j6OoP0bLkda3UlCmlZabjv5wsMypy3kAvx56Z0,3220
10
- airflow/providers/edge3/cli/worker.py,sha256=AqyvJyH5hn4dumt5iP_XEdiJ94F8KaYz4rVlZQR-y8E,17212
10
+ airflow/providers/edge3/cli/worker.py,sha256=x1SX5J2mFgfUTERKv5m8lDxNnoss3BV4F7Jgv6u8rPg,17259
11
11
  airflow/providers/edge3/example_dags/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
12
- airflow/providers/edge3/example_dags/integration_test.py,sha256=kevxwDePjTRqLWJGV-nKEQQWb7qb-y2oedm3mdqi6-8,6127
12
+ airflow/providers/edge3/example_dags/integration_test.py,sha256=4iXvv379nFPxF1_gfbqb5NW1z-24MscYWLDxDwTNbPo,6120
13
13
  airflow/providers/edge3/example_dags/win_notepad.py,sha256=zYcrKqODN4KLZQ-5wNnZQQskrDd5LA-nKJNgKQDntSE,2832
14
- airflow/providers/edge3/example_dags/win_test.py,sha256=GegWqjvbsSdbsA_f3S9_FRYftVO0pggXwQQggB9Vvz4,13220
14
+ airflow/providers/edge3/example_dags/win_test.py,sha256=wJCeij-iPQOnWZlDU7Oqcv6mkE8cVe03rOFiB8We_oQ,13418
15
15
  airflow/providers/edge3/executors/__init__.py,sha256=y830gGSKCvjOcLwLuCDp84NCrHWWB9RSSH1qvJpFhyY,923
16
- airflow/providers/edge3/executors/edge_executor.py,sha256=bCdLaknCo3LIuwpYw1u68xMedDXvVaqG4ZG0QKeM7xU,16474
16
+ airflow/providers/edge3/executors/edge_executor.py,sha256=arcq959o4l5x8yjt6f4ck1pDSDFzkGiAh4jGQ_jmqso,16512
17
17
  airflow/providers/edge3/models/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
18
18
  airflow/providers/edge3/models/edge_job.py,sha256=3D5HAzcVkyI2bxl3pVbbRxjIz--Tnr_eNFiw2oI6gEQ,3167
19
19
  airflow/providers/edge3/models/edge_logs.py,sha256=bNstp7gR54O2vbxzz4NTL0erbifFbGUjZ-YOM0I4sqk,2768
20
- airflow/providers/edge3/models/edge_worker.py,sha256=4qr5K-QU5yTd2p3AtdVhWlbvToxFFBWk2qIRFpJToWo,10749
20
+ airflow/providers/edge3/models/edge_worker.py,sha256=z3Rru2iFqaFVyvJ6vBQ4-W0T9MenDHip6bJHfLqI6K4,12102
21
21
  airflow/providers/edge3/openapi/__init__.py,sha256=0O-WvmDx8GeKSoECpHYrbe0hW-LgjlKny3VqTCpBQeQ,927
22
22
  airflow/providers/edge3/openapi/edge_worker_api_v1.yaml,sha256=GAE2IdOXmcUueNy5KFkLBgNpoWnOjnHT9TrW5NZEWpI,24938
23
23
  airflow/providers/edge3/plugins/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
24
- airflow/providers/edge3/plugins/edge_executor_plugin.py,sha256=wvveKcIUP9QkdTFIQmxq4GS15M8KnbHkDfKjlpvdvwk,9051
24
+ airflow/providers/edge3/plugins/edge_executor_plugin.py,sha256=cJcjmk-4OYBISpeGFXGvi78GPZ5EFgymzBm6FWvMMFA,10992
25
25
  airflow/providers/edge3/plugins/templates/edge_worker_hosts.html,sha256=0_P2yfZwpy3Kvqd3GBvu_PgmmKCUbso3ieW8aYa76iU,8997
26
26
  airflow/providers/edge3/plugins/templates/edge_worker_jobs.html,sha256=bZ-6ysmIy6j4eR_TPHiqbgb3qpNMKCcEEB-SpxuxNgc,2831
27
27
  airflow/providers/edge3/worker_api/__init__.py,sha256=nnPvxWGTEKZ9YyB1Yd7P9IvDOenK01LVHm22Owwxj3g,839
28
28
  airflow/providers/edge3/worker_api/app.py,sha256=Dda2VjkzgBtbQbSWSVEAoqd22RlqvBMyiPau65uKkv4,2006
29
- airflow/providers/edge3/worker_api/auth.py,sha256=XVTfL-c0JYUhpVkKdqqaxZACZXqvnPp_3W6q2TK0Bjc,4883
29
+ airflow/providers/edge3/worker_api/auth.py,sha256=nmwfUz-nokUKyQp-UKwlMn-i2U5AXzq7c8Xfrt_bGeU,4867
30
30
  airflow/providers/edge3/worker_api/datamodels.py,sha256=FAiXqnrSN8zH4YE2fUMjXfXcH9cHlhRh4uZvvr936Ys,6696
31
31
  airflow/providers/edge3/worker_api/routes/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
32
- airflow/providers/edge3/worker_api/routes/_v2_compat.py,sha256=Q4b2Io0yoK5V_hbgk6fiFviTeT6CFbFMOGYRZgLEeR4,4543
33
- airflow/providers/edge3/worker_api/routes/_v2_routes.py,sha256=-WAofvXJpOYpTyh983cxMarA5FkPlH7LMhZuSHE-qPg,10830
32
+ airflow/providers/edge3/worker_api/routes/_v2_compat.py,sha256=PuzSL9dMuji_MsluCoPdCvYboqJD0h4zERcIoB5kJvI,4543
33
+ airflow/providers/edge3/worker_api/routes/_v2_routes.py,sha256=xcbf6RdOHx5zOl9JlIAW9nQhiy3ju-EIyq1tbXGJSYc,10800
34
34
  airflow/providers/edge3/worker_api/routes/health.py,sha256=XxqIppnRA138Q6mAHCdyL2JvoeeganUiI-TXyXSPTGo,1075
35
35
  airflow/providers/edge3/worker_api/routes/jobs.py,sha256=UK1w6nXEUadOLwE9abZ4jHH4KtbvXcwaAF0EnwSa3y4,5733
36
36
  airflow/providers/edge3/worker_api/routes/logs.py,sha256=uk0SZ5hAimj3sAcq1FYCDu0AXYNeTeyjZDGBvw-986E,4945
37
37
  airflow/providers/edge3/worker_api/routes/worker.py,sha256=BGARu1RZ74lW9X-ltuMYbbVXczm_MZdqHaai2MhDWtY,8969
38
- apache_airflow_providers_edge3-1.1.1rc1.dist-info/entry_points.txt,sha256=7WUIGfd3o9NvvbK5trbZxNXTgYGc6pqg74wZPigbx5o,206
39
- apache_airflow_providers_edge3-1.1.1rc1.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
40
- apache_airflow_providers_edge3-1.1.1rc1.dist-info/METADATA,sha256=FSMB2NwsD5pBBbwSBx3BQVnvPXLrLp9YVnJQR1BTf_o,5876
41
- apache_airflow_providers_edge3-1.1.1rc1.dist-info/RECORD,,
38
+ apache_airflow_providers_edge3-1.1.2rc2.dist-info/entry_points.txt,sha256=7WUIGfd3o9NvvbK5trbZxNXTgYGc6pqg74wZPigbx5o,206
39
+ apache_airflow_providers_edge3-1.1.2rc2.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
40
+ apache_airflow_providers_edge3-1.1.2rc2.dist-info/METADATA,sha256=khGdhmJivDTg4Jpo-QLB4KGFCN0W62SKy6eUTTn42SU,4799
41
+ apache_airflow_providers_edge3-1.1.2rc2.dist-info/RECORD,,