apache-airflow-providers-edge3 1.1.1__py3-none-any.whl → 1.1.2rc1__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the packages exactly as they appear in that registry.
@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version
 
  __all__ = ["__version__"]
 
- __version__ = "1.1.1"
+ __version__ = "1.1.2"
 
  if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
      "2.10.0"
@@ -64,7 +64,7 @@ EDGE_WORKER_HEADER = "\n".join(
  @providers_configuration_loaded
  def force_use_internal_api_on_edge_worker():
      """
-     Ensure that the environment is configured for the internal API without needing to declare it outside.
+     Ensure the environment is configured for the internal API without explicit declaration.
 
      This is only required for an Edge worker and must to be done before the Click CLI wrapper is initiated.
      That is because the CLI wrapper will attempt to establish a DB connection, which will fail before the
@@ -26,6 +26,7 @@ from pathlib import Path
  from subprocess import Popen
  from time import sleep
  from typing import TYPE_CHECKING
+ from urllib.parse import urlparse
 
  from lockfile.pidlockfile import remove_existing_pidfile
  from requests import HTTPError
@@ -186,11 +187,13 @@ class EdgeWorker:
          setproctitle(f"airflow edge worker: {workload.ti.key}")
 
          try:
-             base_url = conf.get("api", "base_url", fallback="/")
-             # If it's a relative URL, use localhost:8080 as the default
-             if base_url.startswith("/"):
-                 base_url = f"http://localhost:8080{base_url}"
-             default_execution_api_server = f"{base_url.rstrip('/')}/execution/"
+             api_url = conf.get("edge", "api_url")
+             execution_api_server_url = conf.get("core", "execution_api_server_url", fallback=...)
+             if execution_api_server_url is ...:
+                 parsed = urlparse(api_url)
+                 execution_api_server_url = f"{parsed.scheme}://{parsed.netloc}/execution/"
+
+             logger.info("Worker starting up server=execution_api_server_url=%s", execution_api_server_url)
 
              supervise(
                  # This is the "wrong" ti type, but it duck types the same. TODO: Create a protocol for this.
@@ -199,9 +202,7 @@ class EdgeWorker:
                  dag_rel_path=workload.dag_rel_path,
                  bundle_info=workload.bundle_info,
                  token=workload.token,
-                 server=conf.get(
-                     "core", "execution_api_server_url", fallback=default_execution_api_server
-                 ),
+                 server=execution_api_server_url,
                  log_path=workload.log_path,
              )
              return 0
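
The worker now derives its Task Execution API endpoint from the Edge API URL whenever ``[core] execution_api_server_url`` is unset, instead of assuming a relative base URL on localhost:8080. A minimal standalone sketch of that fallback derivation, using a hypothetical Edge API URL (only the derivation itself is taken from the hunks above):

    from urllib.parse import urlparse

    def derive_execution_api_server_url(api_url: str) -> str:
        """Keep the scheme and host of the Edge API URL, swap the path for the execution API prefix."""
        parsed = urlparse(api_url)
        return f"{parsed.scheme}://{parsed.netloc}/execution/"

    # Hypothetical example value for [edge] api_url:
    print(derive_execution_api_server_url("https://airflow.example.com/edge_worker/v1/rpcapi"))
    # -> https://airflow.example.com/execution/
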
@@ -27,7 +27,11 @@ from datetime import datetime
  from time import sleep
 
  from airflow.exceptions import AirflowNotFoundException
- from airflow.hooks.base import BaseHook
+
+ try:
+     from airflow.sdk import BaseHook
+ except ImportError:
+     from airflow.hooks.base import BaseHook  # type: ignore[attr-defined,no-redef]
  from airflow.utils.trigger_rule import TriggerRule
 
  try:
@@ -26,19 +26,23 @@ and serves as a PoC test for the Windows worker.
  from __future__ import annotations
 
  import os
- from collections.abc import Container, Sequence
+ from collections.abc import Callable, Container, Sequence
  from datetime import datetime
  from subprocess import STDOUT, Popen
  from time import sleep
- from typing import TYPE_CHECKING, Any, Callable
+ from typing import TYPE_CHECKING, Any
 
  from airflow.decorators import task, task_group
  from airflow.exceptions import AirflowException, AirflowNotFoundException, AirflowSkipException
- from airflow.hooks.base import BaseHook
  from airflow.models import BaseOperator
  from airflow.models.dag import DAG
  from airflow.models.variable import Variable
  from airflow.providers.standard.operators.empty import EmptyOperator
+
+ try:
+     from airflow.sdk import BaseHook
+ except ImportError:
+     from airflow.hooks.base import BaseHook  # type: ignore[attr-defined,no-redef]
  from airflow.sdk import Param
  from airflow.sdk.execution_time.context import context_to_airflow_vars
  from airflow.utils.trigger_rule import TriggerRule
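
Both example DAGs switch to the same compatibility pattern: import ``BaseHook`` from the Task SDK when it is available (Airflow 3) and fall back to the legacy location on Airflow 2. A small, self-contained sketch of the pattern; the helper function below is hypothetical:

    try:
        from airflow.sdk import BaseHook  # Airflow 3 / Task SDK location
    except ImportError:
        from airflow.hooks.base import BaseHook  # Airflow 2 fallback

    def connection_host(conn_id: str) -> str | None:
        # Hypothetical helper: works the same against either import.
        return BaseHook.get_connection(conn_id).host
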
@@ -21,7 +21,7 @@ import contextlib
  from collections.abc import Sequence
  from copy import deepcopy
  from datetime import datetime, timedelta
- from typing import TYPE_CHECKING, Any, Optional
+ from typing import TYPE_CHECKING, Any
 
  from sqlalchemy import delete, inspect, text
  from sqlalchemy.exc import NoSuchTableError
@@ -52,7 +52,7 @@ if TYPE_CHECKING:
      # TODO: Airflow 2 type hints; remove when Airflow 2 support is removed
      CommandType = Sequence[str]
      # Task tuple to send to be executed
-     TaskTuple = tuple[TaskInstanceKey, CommandType, Optional[str], Optional[Any]]
+     TaskTuple = tuple[TaskInstanceKey, CommandType, str | None, Any | None]
 
  PARALLELISM: int = conf.getint("core", "PARALLELISM")
 
@@ -72,6 +72,7 @@ class EdgeExecutor(BaseExecutor):
          """
          inspector = inspect(engine)
          edge_job_columns = None
+         edge_job_command_len = None
          with contextlib.suppress(NoSuchTableError):
              edge_job_schema = inspector.get_columns("edge_job")
              edge_job_columns = [column["name"] for column in edge_job_schema]
@@ -113,7 +114,7 @@ class EdgeExecutor(BaseExecutor):
 
      def _process_tasks(self, task_tuples: list[TaskTuple]) -> None:
          """
-         Temponary overwrite of _process_tasks function.
+         Temporary overwrite of _process_tasks function.
 
          Idea is to not change the interface of the execute_async function in BaseExecutor as it will be changed in Airflow 3.
          Edge worker needs task_instance in execute_async but BaseExecutor deletes this out of the self.queued_tasks.
@@ -132,7 +133,7 @@ class EdgeExecutor(BaseExecutor):
          session: Session = NEW_SESSION,
      ) -> None:
          """Execute asynchronously. Airflow 2.10 entry point to execute a task."""
-         # Use of a temponary trick to get task instance, will be changed with Airflow 3.0.0
+         # Use of a temporary trick to get task instance, will be changed with Airflow 3.0.0
          # code works together with _process_tasks overwrite to get task instance.
          # TaskInstance in fourth element
          task_instance = self.edge_queued_tasks[key][3]  # type: ignore[index]
@@ -17,184 +17,194 @@
 
  from __future__ import annotations
 
- import re
- from datetime import datetime, timedelta
- from pathlib import Path
  from typing import TYPE_CHECKING, Any
 
- from flask import Blueprint, redirect, request, url_for
- from flask_appbuilder import BaseView, expose
- from markupsafe import Markup
- from sqlalchemy import select
-
  from airflow.configuration import conf
  from airflow.exceptions import AirflowConfigException
- from airflow.models.taskinstance import TaskInstanceState
  from airflow.plugins_manager import AirflowPlugin
  from airflow.providers.edge3.version_compat import AIRFLOW_V_3_0_PLUS
- from airflow.utils.state import State
-
- if AIRFLOW_V_3_0_PLUS:
-     from airflow.api_fastapi.auth.managers.models.resource_details import AccessView
-     from airflow.providers.fab.www.auth import has_access_view
-
- else:
-     from airflow.auth.managers.models.resource_details import AccessView  # type: ignore[no-redef]
-     from airflow.www.auth import has_access_view  # type: ignore[no-redef]
  from airflow.utils.session import NEW_SESSION, provide_session
- from airflow.utils.yaml import safe_load
 
  if TYPE_CHECKING:
      from sqlalchemy.orm import Session
 
+ if AIRFLOW_V_3_0_PLUS:
+     from airflow.utils.db import DBLocks, create_global_lock
 
- def _get_airflow_2_api_endpoint() -> Blueprint:
-     from airflow.www.constants import SWAGGER_BUNDLE, SWAGGER_ENABLED
-     from airflow.www.extensions.init_views import _CustomErrorRequestBodyValidator, _LazyResolver
-
-     folder = Path(__file__).parents[1].resolve()  # this is airflow/providers/edge3/
-     with folder.joinpath("openapi", "edge_worker_api_v1.yaml").open() as f:
-         specification = safe_load(f)
-     from connexion import FlaskApi
-
-     bp = FlaskApi(
-         specification=specification,
-         resolver=_LazyResolver(),
-         base_path="/edge_worker/v1",
-         strict_validation=True,
-         options={"swagger_ui": SWAGGER_ENABLED, "swagger_path": SWAGGER_BUNDLE.__fspath__()},
-         validate_responses=True,
-         validator_map={"body": _CustomErrorRequestBodyValidator},
-     ).blueprint
-     # Need to exempt CSRF to make API usable
-     from airflow.www.app import csrf
-
-     csrf.exempt(bp)
-     return bp
-
-
- def _get_api_endpoint() -> dict[str, Any]:
-     from airflow.providers.edge3.worker_api.app import create_edge_worker_api_app
-
-     return {
-         "app": create_edge_worker_api_app(),
-         "url_prefix": "/edge_worker/v1",
-         "name": "Airflow Edge Worker API",
-     }
-
-
- def _state_token(state):
-     """Return a formatted string with HTML for a given State."""
-     color = State.color(state)
-     fg_color = State.color_fg(state)
-     return Markup(
-         """
-         <span class="label" style="color:{fg_color}; background-color:{color};"
-             title="Current State: {state}">{state}</span>
-         """
-     ).format(color=color, state=state, fg_color=fg_color)
-
-
- def modify_maintenance_comment_on_update(maintenance_comment: str | None, username: str) -> str:
-     if maintenance_comment:
-         if re.search(
-             r"^\[[-\d:\s]+\] - .+ put node into maintenance mode\r?\nComment:.*", maintenance_comment
-         ):
-             return re.sub(
-                 r"^\[[-\d:\s]+\] - .+ put node into maintenance mode\r?\nComment:",
-                 f"[{datetime.now().strftime('%Y-%m-%d %H:%M')}] - {username} updated maintenance mode\nComment:",
-                 maintenance_comment,
-             )
-         if re.search(r"^\[[-\d:\s]+\] - .+ updated maintenance mode\r?\nComment:.*", maintenance_comment):
-             return re.sub(
-                 r"^\[[-\d:\s]+\] - .+ updated maintenance mode\r?\nComment:",
-                 f"[{datetime.now().strftime('%Y-%m-%d %H:%M')}] - {username} updated maintenance mode\nComment:",
-                 maintenance_comment,
-             )
-         return f"[{datetime.now().strftime('%Y-%m-%d %H:%M')}] - {username} updated maintenance mode\nComment: {maintenance_comment}"
-     return f"[{datetime.now().strftime('%Y-%m-%d %H:%M')}] - {username} updated maintenance mode\nComment:"
-
-
- # registers airflow/providers/edge3/plugins/templates as a Jinja template folder
- template_bp = Blueprint(
-     "template_blueprint",
-     __name__,
-     template_folder="templates",
- )
-
-
- class EdgeWorkerJobs(BaseView):
-     """Simple view to show Edge Worker jobs."""
-
-     default_view = "jobs"
-
-     @expose("/jobs")
-     @has_access_view(AccessView.JOBS)
      @provide_session
-     def jobs(self, session: Session = NEW_SESSION):
-         from airflow.providers.edge3.models.edge_job import EdgeJobModel
-
-         jobs = session.scalars(select(EdgeJobModel).order_by(EdgeJobModel.queued_dttm)).all()
-         html_states = {
-             str(state): _state_token(str(state)) for state in TaskInstanceState.__members__.values()
+     def _get_api_endpoint(session: Session = NEW_SESSION) -> dict[str, Any]:
+         # Ensure all required DB modeals are created before starting the API
+         with create_global_lock(session=session, lock=DBLocks.MIGRATIONS):
+             engine = session.get_bind().engine
+             from airflow.providers.edge3.models.edge_job import EdgeJobModel
+             from airflow.providers.edge3.models.edge_logs import EdgeLogsModel
+             from airflow.providers.edge3.models.edge_worker import EdgeWorkerModel
+
+             EdgeJobModel.metadata.create_all(engine)
+             EdgeLogsModel.metadata.create_all(engine)
+             EdgeWorkerModel.metadata.create_all(engine)
+
+         from airflow.providers.edge3.worker_api.app import create_edge_worker_api_app
+
+         return {
+             "app": create_edge_worker_api_app(),
+             "url_prefix": "/edge_worker/v1",
+             "name": "Airflow Edge Worker API",
          }
-         return self.render_template("edge_worker_jobs.html", jobs=jobs, html_states=html_states)
-
-
- class EdgeWorkerHosts(BaseView):
-     """Simple view to show Edge Worker status."""
 
-     default_view = "status"
-
-     @expose("/status")
-     @has_access_view(AccessView.JOBS)
-     @provide_session
-     def status(self, session: Session = NEW_SESSION):
-         from airflow.providers.edge3.models.edge_worker import EdgeWorkerModel
-
-         hosts = session.scalars(select(EdgeWorkerModel).order_by(EdgeWorkerModel.worker_name)).all()
-         five_min_ago = datetime.now() - timedelta(minutes=5)
-         return self.render_template("edge_worker_hosts.html", hosts=hosts, five_min_ago=five_min_ago)
-
-     @expose("/status/maintenance/<string:worker_name>/on", methods=["POST"])
-     @has_access_view(AccessView.JOBS)
-     def worker_to_maintenance(self, worker_name: str):
-         from flask_login import current_user
-
-         from airflow.providers.edge3.models.edge_worker import request_maintenance
-
-         maintenance_comment = request.form.get("maintenance_comment")
-         maintenance_comment = f"[{datetime.now().strftime('%Y-%m-%d %H:%M')}] - {current_user.username} put node into maintenance mode\nComment: {maintenance_comment}"
-         request_maintenance(worker_name, maintenance_comment)
-         return redirect(url_for("EdgeWorkerHosts.status"))
-
-     @expose("/status/maintenance/<string:worker_name>/off", methods=["POST"])
-     @has_access_view(AccessView.JOBS)
-     def remove_worker_from_maintenance(self, worker_name: str):
-         from airflow.providers.edge3.models.edge_worker import exit_maintenance
-
-         exit_maintenance(worker_name)
-         return redirect(url_for("EdgeWorkerHosts.status"))
-
-     @expose("/status/maintenance/<string:worker_name>/remove", methods=["POST"])
-     @has_access_view(AccessView.JOBS)
-     def remove_worker(self, worker_name: str):
-         from airflow.providers.edge3.models.edge_worker import remove_worker
-
-         remove_worker(worker_name)
-         return redirect(url_for("EdgeWorkerHosts.status"))
-
-     @expose("/status/maintenance/<string:worker_name>/change_comment", methods=["POST"])
-     @has_access_view(AccessView.JOBS)
-     def change_maintenance_comment(self, worker_name: str):
-         from flask_login import current_user
-
-         from airflow.providers.edge3.models.edge_worker import change_maintenance_comment
-
-         maintenance_comment = request.form.get("maintenance_comment")
-         maintenance_comment = modify_maintenance_comment_on_update(maintenance_comment, current_user.username)
-         change_maintenance_comment(worker_name, maintenance_comment)
-         return redirect(url_for("EdgeWorkerHosts.status"))
+ else:
+     # This is for back-compatibility with Airflow 2.x and we only make this
+     # to prevents dependencies and breaking imports in Airflow 3.x
+     import re
+     from datetime import datetime, timedelta
+     from pathlib import Path
+
+     from flask import Blueprint, redirect, request, url_for
+     from flask_appbuilder import BaseView, expose
+     from markupsafe import Markup
+     from sqlalchemy import select
+
+     from airflow.auth.managers.models.resource_details import AccessView
+     from airflow.models.taskinstance import TaskInstanceState
+     from airflow.utils.state import State
+     from airflow.utils.yaml import safe_load
+     from airflow.www.auth import has_access_view
+
+     def _get_airflow_2_api_endpoint() -> Blueprint:
+         from airflow.www.app import csrf
+         from airflow.www.constants import SWAGGER_BUNDLE, SWAGGER_ENABLED
+         from airflow.www.extensions.init_views import _CustomErrorRequestBodyValidator, _LazyResolver
+
+         folder = Path(__file__).parents[1].resolve()  # this is airflow/providers/edge3/
+         with folder.joinpath("openapi", "edge_worker_api_v1.yaml").open() as f:
+             specification = safe_load(f)
+         from connexion import FlaskApi
+
+         bp = FlaskApi(
+             specification=specification,
+             resolver=_LazyResolver(),
+             base_path="/edge_worker/v1",
+             strict_validation=True,
+             options={"swagger_ui": SWAGGER_ENABLED, "swagger_path": SWAGGER_BUNDLE.__fspath__()},
+             validate_responses=True,
+             validator_map={"body": _CustomErrorRequestBodyValidator},
+         ).blueprint
+         # Need to exempt CSRF to make API usable
+         csrf.exempt(bp)
+         return bp
+
+     def _state_token(state):
+         """Return a formatted string with HTML for a given State."""
+         color = State.color(state)
+         fg_color = State.color_fg(state)
+         return Markup(
+             """
+             <span class="label" style="color:{fg_color}; background-color:{color};"
+                 title="Current State: {state}">{state}</span>
+             """
+         ).format(color=color, state=state, fg_color=fg_color)
+
+     def modify_maintenance_comment_on_update(maintenance_comment: str | None, username: str) -> str:
+         if maintenance_comment:
+             if re.search(
+                 r"^\[[-\d:\s]+\] - .+ put node into maintenance mode\r?\nComment:.*", maintenance_comment
+             ):
+                 return re.sub(
+                     r"^\[[-\d:\s]+\] - .+ put node into maintenance mode\r?\nComment:",
+                     f"[{datetime.now().strftime('%Y-%m-%d %H:%M')}] - {username} updated maintenance mode\nComment:",
+                     maintenance_comment,
+                 )
+             if re.search(r"^\[[-\d:\s]+\] - .+ updated maintenance mode\r?\nComment:.*", maintenance_comment):
+                 return re.sub(
+                     r"^\[[-\d:\s]+\] - .+ updated maintenance mode\r?\nComment:",
+                     f"[{datetime.now().strftime('%Y-%m-%d %H:%M')}] - {username} updated maintenance mode\nComment:",
+                     maintenance_comment,
+                 )
+             return f"[{datetime.now().strftime('%Y-%m-%d %H:%M')}] - {username} updated maintenance mode\nComment: {maintenance_comment}"
+         return (
+             f"[{datetime.now().strftime('%Y-%m-%d %H:%M')}] - {username} updated maintenance mode\nComment:"
+         )
+
+     # registers airflow/providers/edge3/plugins/templates as a Jinja template folder
+     template_bp = Blueprint(
+         "template_blueprint",
+         __name__,
+         template_folder="templates",
+     )
+
+     class EdgeWorkerJobs(BaseView):
+         """Simple view to show Edge Worker jobs."""
+
+         default_view = "jobs"
+
+         @expose("/jobs")
+         @has_access_view(AccessView.JOBS)
+         @provide_session
+         def jobs(self, session: Session = NEW_SESSION):
+             from airflow.providers.edge3.models.edge_job import EdgeJobModel
+
+             jobs = session.scalars(select(EdgeJobModel).order_by(EdgeJobModel.queued_dttm)).all()
+             html_states = {
+                 str(state): _state_token(str(state)) for state in TaskInstanceState.__members__.values()
+             }
+             return self.render_template("edge_worker_jobs.html", jobs=jobs, html_states=html_states)
+
+     class EdgeWorkerHosts(BaseView):
+         """Simple view to show Edge Worker status."""
+
+         default_view = "status"
+
+         @expose("/status")
+         @has_access_view(AccessView.JOBS)
+         @provide_session
+         def status(self, session: Session = NEW_SESSION):
+             from airflow.providers.edge3.models.edge_worker import EdgeWorkerModel
+
+             hosts = session.scalars(select(EdgeWorkerModel).order_by(EdgeWorkerModel.worker_name)).all()
+             five_min_ago = datetime.now() - timedelta(minutes=5)
+             return self.render_template("edge_worker_hosts.html", hosts=hosts, five_min_ago=five_min_ago)
+
+         @expose("/status/maintenance/<string:worker_name>/on", methods=["POST"])
+         @has_access_view(AccessView.JOBS)
+         def worker_to_maintenance(self, worker_name: str):
+             from flask_login import current_user
+
+             from airflow.providers.edge3.models.edge_worker import request_maintenance
+
+             maintenance_comment = request.form.get("maintenance_comment")
+             maintenance_comment = f"[{datetime.now().strftime('%Y-%m-%d %H:%M')}] - {current_user.username} put node into maintenance mode\nComment: {maintenance_comment}"
+             request_maintenance(worker_name, maintenance_comment)
+             return redirect(url_for("EdgeWorkerHosts.status"))
+
+         @expose("/status/maintenance/<string:worker_name>/off", methods=["POST"])
+         @has_access_view(AccessView.JOBS)
+         def remove_worker_from_maintenance(self, worker_name: str):
+             from airflow.providers.edge3.models.edge_worker import exit_maintenance
+
+             exit_maintenance(worker_name)
+             return redirect(url_for("EdgeWorkerHosts.status"))
+
+         @expose("/status/maintenance/<string:worker_name>/remove", methods=["POST"])
+         @has_access_view(AccessView.JOBS)
+         def remove_worker(self, worker_name: str):
+             from airflow.providers.edge3.models.edge_worker import remove_worker
+
+             remove_worker(worker_name)
+             return redirect(url_for("EdgeWorkerHosts.status"))
+
+         @expose("/status/maintenance/<string:worker_name>/change_comment", methods=["POST"])
+         @has_access_view(AccessView.JOBS)
+         def change_maintenance_comment(self, worker_name: str):
+             from flask_login import current_user
+
+             from airflow.providers.edge3.models.edge_worker import change_maintenance_comment
+
+             maintenance_comment = request.form.get("maintenance_comment")
+             maintenance_comment = modify_maintenance_comment_on_update(
+                 maintenance_comment, current_user.username
+             )
+             change_maintenance_comment(worker_name, maintenance_comment)
+             return redirect(url_for("EdgeWorkerHosts.status"))
 
 
  # Check if EdgeExecutor is actually loaded
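
In the Airflow 3 branch above, the provider creates its tables directly with SQLAlchemy instead of relying on Airflow migrations; ``metadata.create_all()`` skips tables that already exist, which is what makes calling it on every plugin load safe, while the global migration lock only serializes the first creation across concurrently starting components. A minimal, self-contained illustration of that idiom with a hypothetical model and an in-memory SQLite engine:

    from sqlalchemy import Column, Integer, String, create_engine
    from sqlalchemy.orm import declarative_base

    Base = declarative_base()

    class DemoJob(Base):  # hypothetical stand-in for EdgeJobModel
        __tablename__ = "demo_job"
        id = Column(Integer, primary_key=True)
        state = Column(String(20))

    engine = create_engine("sqlite://")
    Base.metadata.create_all(engine)  # creates demo_job
    Base.metadata.create_all(engine)  # no-op: checkfirst=True skips existing tables
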
@@ -209,21 +219,19 @@ class EdgeExecutorPlugin(AirflowPlugin):
 
      name = "edge_executor"
      if EDGE_EXECUTOR_ACTIVE:
-         appbuilder_views = [
-             {
-                 "name": "Edge Worker Jobs",
-                 "category": "Admin",
-                 "view": EdgeWorkerJobs(),
-             },
-             {
-                 "name": "Edge Worker Hosts",
-                 "category": "Admin",
-                 "view": EdgeWorkerHosts(),
-             },
-         ]
-
          if AIRFLOW_V_3_0_PLUS:
              fastapi_apps = [_get_api_endpoint()]
-             flask_blueprints = [template_bp]
          else:
+             appbuilder_views = [
+                 {
+                     "name": "Edge Worker Jobs",
+                     "category": "Admin",
+                     "view": EdgeWorkerJobs(),
+                 },
+                 {
+                     "name": "Edge Worker Hosts",
+                     "category": "Admin",
+                     "view": EdgeWorkerHosts(),
+                 },
+             ]
              flask_blueprints = [_get_airflow_2_api_endpoint(), template_bp]
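
The plugin class now registers the Flask-AppBuilder views only on Airflow 2, while on Airflow 3 it exposes the worker API through the plugin's ``fastapi_apps`` attribute, each entry being a dict with ``app``, ``url_prefix`` and ``name`` as shown above. A condensed, hypothetical sketch of that registration shape (the real provider builds its app with ``create_edge_worker_api_app()`` and adds table creation first):

    from fastapi import FastAPI

    from airflow.plugins_manager import AirflowPlugin

    def _example_api() -> dict:
        app = FastAPI(title="Example Edge Worker API")  # stand-in for create_edge_worker_api_app()

        @app.get("/ping")
        def ping() -> dict[str, str]:
            return {"status": "ok"}

        return {"app": app, "url_prefix": "/edge_worker/v1", "name": "Example Edge Worker API"}

    class ExampleEdgePlugin(AirflowPlugin):
        name = "example_edge_plugin"
        fastapi_apps = [_example_api()]
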
@@ -34,8 +34,8 @@ if AIRFLOW_V_3_0_PLUS:
      def parse_command(command: str) -> ExecuteTask:
          return ExecuteTask.model_validate_json(command)
  else:
-     # Mock the external dependnecies
-     from typing import Callable
+     # Mock the external dependencies
+     from collections.abc import Callable
 
      from connexion import ProblemException
 
@@ -1,11 +1,11 @@
  Metadata-Version: 2.4
  Name: apache-airflow-providers-edge3
- Version: 1.1.1
+ Version: 1.1.2rc1
  Summary: Provider package apache-airflow-providers-edge3 for Apache Airflow
  Keywords: airflow-provider,edge3,airflow,integration
  Author-email: Apache Software Foundation <dev@airflow.apache.org>
  Maintainer-email: Apache Software Foundation <dev@airflow.apache.org>
- Requires-Python: ~=3.9
+ Requires-Python: ~=3.10
  Description-Content-Type: text/x-rst
  Classifier: Development Status :: 5 - Production/Stable
  Classifier: Environment :: Console
@@ -15,18 +15,16 @@ Classifier: Intended Audience :: System Administrators
  Classifier: Framework :: Apache Airflow
  Classifier: Framework :: Apache Airflow :: Provider
  Classifier: License :: OSI Approved :: Apache Software License
- Classifier: Programming Language :: Python :: 3.9
  Classifier: Programming Language :: Python :: 3.10
  Classifier: Programming Language :: Python :: 3.11
  Classifier: Programming Language :: Python :: 3.12
  Classifier: Topic :: System :: Monitoring
- Requires-Dist: apache-airflow>=2.10.0
- Requires-Dist: apache-airflow-providers-fab>=1.5.3
+ Requires-Dist: apache-airflow>=2.10.0rc1
  Requires-Dist: pydantic>=2.11.0
  Requires-Dist: retryhttp>=1.2.0,!=1.3.0
  Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
- Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-edge3/1.1.1/changelog.html
- Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-edge3/1.1.1
+ Project-URL: Changelog, https://airflow.staged.apache.org/docs/apache-airflow-providers-edge3/1.1.2/changelog.html
+ Project-URL: Documentation, https://airflow.staged.apache.org/docs/apache-airflow-providers-edge3/1.1.2
  Project-URL: Mastodon, https://fosstodon.org/@airflow
  Project-URL: Slack Chat, https://s.apache.org/airflow-slack
  Project-URL: Source Code, https://github.com/apache/airflow
@@ -57,7 +55,7 @@ Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/
 
  Package ``apache-airflow-providers-edge3``
 
- Release: ``1.1.1``
+ Release: ``1.1.2``
 
 
  Handle edge workers on remote sites via HTTP(s) connection and orchestrates work over distributed sites.
@@ -82,7 +80,7 @@ This is a provider package for ``edge3`` provider. All classes for this provider
  are in ``airflow.providers.edge3`` python package.
 
  You can find package information and changelog for the provider
- in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-edge3/1.1.1/>`_.
+ in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-edge3/1.1.2/>`_.
 
  Installation
  ------------
@@ -91,39 +89,19 @@ You can install this package on top of an existing Airflow 2 installation (see `
  for the minimum Airflow version supported) via
  ``pip install apache-airflow-providers-edge3``
 
- The package supports the following python versions: 3.9,3.10,3.11,3.12
+ The package supports the following python versions: 3.10,3.11,3.12
 
  Requirements
  ------------
 
- ================================ ===================
- PIP package                      Version required
- ================================ ===================
- ``apache-airflow``               ``>=2.10.0``
- ``apache-airflow-providers-fab`` ``>=1.5.3``
- ``pydantic``                     ``>=2.11.0``
- ``retryhttp``                    ``>=1.2.0,!=1.3.0``
- ================================ ===================
-
- Cross provider package dependencies
- -----------------------------------
-
- Those are dependencies that might be needed in order to use all the features of the package.
- You need to install the specified providers in order to use them.
-
- You can install such cross-provider dependencies when installing from PyPI. For example:
-
- .. code-block:: bash
-
-     pip install apache-airflow-providers-edge3[fab]
-
-
- ============================================================================================== =======
- Dependent package                                                                              Extra
- ============================================================================================== =======
- `apache-airflow-providers-fab <https://airflow.apache.org/docs/apache-airflow-providers-fab>`_ ``fab``
- ============================================================================================== =======
+ ================== ===================
+ PIP package        Version required
+ ================== ===================
+ ``apache-airflow`` ``>=2.10.0``
+ ``pydantic``       ``>=2.11.0``
+ ``retryhttp``      ``>=1.2.0,!=1.3.0``
+ ================== ===================
 
  The changelog for the provider package can be found in the
- `changelog <https://airflow.apache.org/docs/apache-airflow-providers-edge3/1.1.1/changelog.html>`_.
+ `changelog <https://airflow.apache.org/docs/apache-airflow-providers-edge3/1.1.2/changelog.html>`_.
 
@@ -1,19 +1,19 @@
  airflow/providers/edge3/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
- airflow/providers/edge3/__init__.py,sha256=lWNqnEBB7q4ggTGo6urdvAwUV0msxvtkIHymUhwdweg,1494
+ airflow/providers/edge3/__init__.py,sha256=cSvc8gMIo0Zq7u48rdO-1fw66BpyQg8IRkn6osMkTlY,1494
  airflow/providers/edge3/get_provider_info.py,sha256=Ek27-dB4UALHUFYoYjtoQIGq0p7zeHcEgmELHvpVmCU,6836
  airflow/providers/edge3/version_compat.py,sha256=j5PCtXvZ71aBjixu-EFTNtVDPsngzzs7os0ZQDgFVDk,1536
  airflow/providers/edge3/cli/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
  airflow/providers/edge3/cli/api_client.py,sha256=334KHVB4eMSzRpQ5emS56o-RTUJQprxf5Q3xQldCHDQ,7440
  airflow/providers/edge3/cli/dataclasses.py,sha256=JUuvvmzSVWvG9uOEfzLIiXrTZ-HbESvu50jkPpVIYVw,2895
- airflow/providers/edge3/cli/edge_command.py,sha256=Yggt-hBcf3rq7cMelsP0Jx9QfSkR07YU229ZIc7pZYY,18276
+ airflow/providers/edge3/cli/edge_command.py,sha256=v5HFkPnSYQO2Iggvr2pqfZrkoU3WwZO3-m0jntZA2Jg,18262
  airflow/providers/edge3/cli/signalling.py,sha256=sf4S6j6OoP0bLkda3UlCmlZabjv5wsMypy3kAvx56Z0,3220
- airflow/providers/edge3/cli/worker.py,sha256=AqyvJyH5hn4dumt5iP_XEdiJ94F8KaYz4rVlZQR-y8E,17212
+ airflow/providers/edge3/cli/worker.py,sha256=nsHjMgEFd71ABSSwX51NMag4I2ZDSrXd2bIv9UjFBdA,17263
  airflow/providers/edge3/example_dags/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
- airflow/providers/edge3/example_dags/integration_test.py,sha256=kevxwDePjTRqLWJGV-nKEQQWb7qb-y2oedm3mdqi6-8,6127
+ airflow/providers/edge3/example_dags/integration_test.py,sha256=Ba55JwDK6j3x-L-C6GWKJ4NW28nJlQtm6I9OXE28vh8,6233
  airflow/providers/edge3/example_dags/win_notepad.py,sha256=zYcrKqODN4KLZQ-5wNnZQQskrDd5LA-nKJNgKQDntSE,2832
- airflow/providers/edge3/example_dags/win_test.py,sha256=GegWqjvbsSdbsA_f3S9_FRYftVO0pggXwQQggB9Vvz4,13220
+ airflow/providers/edge3/example_dags/win_test.py,sha256=i7P21nG8ND0NAgKHWsTZg6IlxtDlmspxqYdQJr30Z9w,13326
  airflow/providers/edge3/executors/__init__.py,sha256=y830gGSKCvjOcLwLuCDp84NCrHWWB9RSSH1qvJpFhyY,923
- airflow/providers/edge3/executors/edge_executor.py,sha256=bCdLaknCo3LIuwpYw1u68xMedDXvVaqG4ZG0QKeM7xU,16474
+ airflow/providers/edge3/executors/edge_executor.py,sha256=izmLzGrtSky1FPqpifuy63EoSRfB9UJys-R_AoduJjk,16494
  airflow/providers/edge3/models/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
  airflow/providers/edge3/models/edge_job.py,sha256=3D5HAzcVkyI2bxl3pVbbRxjIz--Tnr_eNFiw2oI6gEQ,3167
  airflow/providers/edge3/models/edge_logs.py,sha256=bNstp7gR54O2vbxzz4NTL0erbifFbGUjZ-YOM0I4sqk,2768
@@ -21,7 +21,7 @@ airflow/providers/edge3/models/edge_worker.py,sha256=4qr5K-QU5yTd2p3AtdVhWlbvTox
  airflow/providers/edge3/openapi/__init__.py,sha256=0O-WvmDx8GeKSoECpHYrbe0hW-LgjlKny3VqTCpBQeQ,927
  airflow/providers/edge3/openapi/edge_worker_api_v1.yaml,sha256=GAE2IdOXmcUueNy5KFkLBgNpoWnOjnHT9TrW5NZEWpI,24938
  airflow/providers/edge3/plugins/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
- airflow/providers/edge3/plugins/edge_executor_plugin.py,sha256=wvveKcIUP9QkdTFIQmxq4GS15M8KnbHkDfKjlpvdvwk,9051
+ airflow/providers/edge3/plugins/edge_executor_plugin.py,sha256=PWQQxvskPGj2EuxpQo_mI6KCgCqvP3gWvY3pa3fdcrA,10258
  airflow/providers/edge3/plugins/templates/edge_worker_hosts.html,sha256=0_P2yfZwpy3Kvqd3GBvu_PgmmKCUbso3ieW8aYa76iU,8997
  airflow/providers/edge3/plugins/templates/edge_worker_jobs.html,sha256=bZ-6ysmIy6j4eR_TPHiqbgb3qpNMKCcEEB-SpxuxNgc,2831
  airflow/providers/edge3/worker_api/__init__.py,sha256=nnPvxWGTEKZ9YyB1Yd7P9IvDOenK01LVHm22Owwxj3g,839
@@ -29,13 +29,13 @@ airflow/providers/edge3/worker_api/app.py,sha256=Dda2VjkzgBtbQbSWSVEAoqd22RlqvBM
  airflow/providers/edge3/worker_api/auth.py,sha256=XVTfL-c0JYUhpVkKdqqaxZACZXqvnPp_3W6q2TK0Bjc,4883
  airflow/providers/edge3/worker_api/datamodels.py,sha256=FAiXqnrSN8zH4YE2fUMjXfXcH9cHlhRh4uZvvr936Ys,6696
  airflow/providers/edge3/worker_api/routes/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
- airflow/providers/edge3/worker_api/routes/_v2_compat.py,sha256=Q4b2Io0yoK5V_hbgk6fiFviTeT6CFbFMOGYRZgLEeR4,4543
+ airflow/providers/edge3/worker_api/routes/_v2_compat.py,sha256=U5VuxoW6sUt1fCWfz-OwbqjUFn2omAewxv9K5Bqzdic,4552
  airflow/providers/edge3/worker_api/routes/_v2_routes.py,sha256=-WAofvXJpOYpTyh983cxMarA5FkPlH7LMhZuSHE-qPg,10830
  airflow/providers/edge3/worker_api/routes/health.py,sha256=XxqIppnRA138Q6mAHCdyL2JvoeeganUiI-TXyXSPTGo,1075
  airflow/providers/edge3/worker_api/routes/jobs.py,sha256=UK1w6nXEUadOLwE9abZ4jHH4KtbvXcwaAF0EnwSa3y4,5733
  airflow/providers/edge3/worker_api/routes/logs.py,sha256=uk0SZ5hAimj3sAcq1FYCDu0AXYNeTeyjZDGBvw-986E,4945
  airflow/providers/edge3/worker_api/routes/worker.py,sha256=BGARu1RZ74lW9X-ltuMYbbVXczm_MZdqHaai2MhDWtY,8969
- apache_airflow_providers_edge3-1.1.1.dist-info/entry_points.txt,sha256=7WUIGfd3o9NvvbK5trbZxNXTgYGc6pqg74wZPigbx5o,206
- apache_airflow_providers_edge3-1.1.1.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
- apache_airflow_providers_edge3-1.1.1.dist-info/METADATA,sha256=-qaaQo0pOtXtBB4xCchoS6ic46DA571QFYKe1LapkQ4,5853
- apache_airflow_providers_edge3-1.1.1.dist-info/RECORD,,
+ apache_airflow_providers_edge3-1.1.2rc1.dist-info/entry_points.txt,sha256=7WUIGfd3o9NvvbK5trbZxNXTgYGc6pqg74wZPigbx5o,206
+ apache_airflow_providers_edge3-1.1.2rc1.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
+ apache_airflow_providers_edge3-1.1.2rc1.dist-info/METADATA,sha256=y4M-dHQwRqbExuYM_Ctw-BbQry7x0FC2gS-cPiAZwek,4707
+ apache_airflow_providers_edge3-1.1.2rc1.dist-info/RECORD,,