apache-airflow-providers-edge3 1.1.2rc1__py3-none-any.whl → 1.1.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- airflow/providers/edge3/__init__.py +1 -1
- airflow/providers/edge3/cli/edge_command.py +65 -0
- airflow/providers/edge3/cli/worker.py +2 -2
- airflow/providers/edge3/example_dags/integration_test.py +7 -7
- airflow/providers/edge3/example_dags/win_test.py +9 -5
- airflow/providers/edge3/executors/edge_executor.py +53 -13
- airflow/providers/edge3/models/edge_worker.py +35 -1
- airflow/providers/edge3/plugins/edge_executor_plugin.py +14 -1
- airflow/providers/edge3/worker_api/auth.py +1 -1
- airflow/providers/edge3/worker_api/routes/_v2_compat.py +1 -1
- airflow/providers/edge3/worker_api/routes/_v2_routes.py +1 -1
- {apache_airflow_providers_edge3-1.1.2rc1.dist-info → apache_airflow_providers_edge3-1.1.3.dist-info}/METADATA +11 -9
- {apache_airflow_providers_edge3-1.1.2rc1.dist-info → apache_airflow_providers_edge3-1.1.3.dist-info}/RECORD +15 -15
- {apache_airflow_providers_edge3-1.1.2rc1.dist-info → apache_airflow_providers_edge3-1.1.3.dist-info}/WHEEL +0 -0
- {apache_airflow_providers_edge3-1.1.2rc1.dist-info → apache_airflow_providers_edge3-1.1.3.dist-info}/entry_points.txt +0 -0
```diff
--- airflow/providers/edge3/__init__.py
+++ airflow/providers/edge3/__init__.py
@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version
 
 __all__ = ["__version__"]
 
-__version__ = "1.1.
+__version__ = "1.1.3"
 
 if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
     "2.10.0"
```
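The gate visible at the bottom of this hunk is the provider's minimum-version check. A standalone sketch of the same comparison, using only the `packaging` library (the installed-version string below is hypothetical):

```python
# base_version strips pre-release/local suffixes such as ".dev0" or "+composer",
# so the comparison runs against the plain X.Y.Z version.
from packaging.version import parse

airflow_version = "2.10.1+composer"  # hypothetical installed Airflow version
if parse(parse(airflow_version).base_version) < parse("2.10.0"):
    raise RuntimeError("apache-airflow-providers-edge3 requires Apache Airflow 2.10.0+")
```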
```diff
--- airflow/providers/edge3/cli/edge_command.py
+++ airflow/providers/edge3/cli/edge_command.py
@@ -351,6 +351,48 @@ def remote_worker_request_shutdown(args) -> None:
     logger.info("Requested shutdown of Edge Worker host %s by %s.", args.edge_hostname, getuser())
 
 
+@cli_utils.action_cli(check_db=False)
+@providers_configuration_loaded
+def add_worker_queues(args) -> None:
+    """Add queues to an edge worker."""
+    _check_valid_db_connection()
+    _check_if_registered_edge_host(hostname=args.edge_hostname)
+    from airflow.providers.edge3.models.edge_worker import add_worker_queues
+
+    queues = args.queues.split(",") if args.queues else []
+    if not queues:
+        raise SystemExit("Error: No queues specified to add.")
+
+    try:
+        add_worker_queues(args.edge_hostname, queues)
+        logger.info("Added queues %s to Edge Worker host %s by %s.", queues, args.edge_hostname, getuser())
+    except TypeError as e:
+        logger.error(str(e))
+        raise SystemExit
+
+
+@cli_utils.action_cli(check_db=False)
+@providers_configuration_loaded
+def remove_worker_queues(args) -> None:
+    """Remove queues from an edge worker."""
+    _check_valid_db_connection()
+    _check_if_registered_edge_host(hostname=args.edge_hostname)
+    from airflow.providers.edge3.models.edge_worker import remove_worker_queues
+
+    queues = args.queues.split(",") if args.queues else []
+    if not queues:
+        raise SystemExit("Error: No queues specified to remove.")
+
+    try:
+        remove_worker_queues(args.edge_hostname, queues)
+        logger.info(
+            "Removed queues %s from Edge Worker host %s by %s.", queues, args.edge_hostname, getuser()
+        )
+    except TypeError as e:
+        logger.error(str(e))
+        raise SystemExit
+
+
 ARG_CONCURRENCY = Arg(
     ("-c", "--concurrency"),
     type=int,
@@ -380,6 +422,11 @@ ARG_REQUIRED_MAINTENANCE_COMMENT = Arg(
     help="Maintenance comments to report reason. Required if enabling maintenance",
     required=True,
 )
+ARG_QUEUES_MANAGE = Arg(
+    ("-q", "--queues"),
+    help="Comma delimited list of queues to add or remove.",
+    required=True,
+)
 ARG_WAIT_MAINT = Arg(
     ("-w", "--wait"),
     default=False,
@@ -516,4 +563,22 @@ EDGE_COMMANDS: list[ActionCommand] = [
         func=remote_worker_request_shutdown,
         args=(ARG_REQUIRED_EDGE_HOSTNAME,),
     ),
+    ActionCommand(
+        name="add-worker-queues",
+        help=add_worker_queues.__doc__,
+        func=add_worker_queues,
+        args=(
+            ARG_REQUIRED_EDGE_HOSTNAME,
+            ARG_QUEUES_MANAGE,
+        ),
+    ),
+    ActionCommand(
+        name="remove-worker-queues",
+        help=remove_worker_queues.__doc__,
+        func=remove_worker_queues,
+        args=(
+            ARG_REQUIRED_EDGE_HOSTNAME,
+            ARG_QUEUES_MANAGE,
+        ),
+    ),
 ]
```
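The two new `ActionCommand` entries surface queue management for a registered worker host. Assuming the provider's existing `edge` CLI group and an argparse-style namespace like the one `cli_utils` hands these handlers (the namespace below is a stand-in), the shared parsing and validation step boils down to:

```python
# Minimal sketch of the queue-argument handling both new commands share;
# SimpleNamespace stands in for a parsed invocation such as
# `airflow edge add-worker-queues -q gpu,etl <hostname>` (hypothetical shape).
from types import SimpleNamespace

args = SimpleNamespace(edge_hostname="edge-host-1", queues="gpu,etl")

queues = args.queues.split(",") if args.queues else []
if not queues:
    raise SystemExit("Error: No queues specified to add.")
print(queues)  # ['gpu', 'etl']
```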
```diff
--- airflow/providers/edge3/cli/worker.py
+++ airflow/providers/edge3/cli/worker.py
@@ -188,8 +188,8 @@ class EdgeWorker:
 
         try:
             api_url = conf.get("edge", "api_url")
-            execution_api_server_url = conf.get("core", "execution_api_server_url", fallback
-            if execution_api_server_url
+            execution_api_server_url = conf.get("core", "execution_api_server_url", fallback="")
+            if not execution_api_server_url:
                 parsed = urlparse(api_url)
                 execution_api_server_url = f"{parsed.scheme}://{parsed.netloc}/execution/"
 
```
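The replaced lines change the fallback behavior: when `[core] execution_api_server_url` is unset, the worker now derives it from the configured `[edge] api_url`. A self-contained sketch of that derivation (the URL is hypothetical):

```python
from urllib.parse import urlparse

api_url = "https://airflow.example.com/edge_worker/v1/rpcapi"  # hypothetical [edge] api_url
execution_api_server_url = ""  # what conf.get(..., fallback="") returns when unset

if not execution_api_server_url:
    parsed = urlparse(api_url)
    execution_api_server_url = f"{parsed.scheme}://{parsed.netloc}/execution/"

print(execution_api_server_url)  # -> https://airflow.example.com/execution/
```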
```diff
--- airflow/providers/edge3/example_dags/integration_test.py
+++ airflow/providers/edge3/example_dags/integration_test.py
@@ -41,13 +41,13 @@ try:
     from airflow.sdk import DAG, Param, Variable, task, task_group
 except ImportError:
     # Airflow 2.10 compat
-    from airflow.decorators import task, task_group  # type: ignore[no-redef]
-    from airflow.models.dag import DAG  # type: ignore[
-    from airflow.models.param import Param  # type: ignore[no-redef]
-    from airflow.models.variable import Variable
-    from airflow.operators.bash import BashOperator  # type: ignore[no-redef]
-    from airflow.operators.empty import EmptyOperator  # type: ignore[no-redef]
-    from airflow.operators.python import PythonOperator  # type: ignore[no-redef]
+    from airflow.decorators import task, task_group  # type: ignore[attr-defined,no-redef]
+    from airflow.models.dag import DAG  # type: ignore[assignment]
+    from airflow.models.param import Param  # type: ignore[no-redef]
+    from airflow.models.variable import Variable
+    from airflow.operators.bash import BashOperator  # type: ignore[no-redef]
+    from airflow.operators.empty import EmptyOperator  # type: ignore[no-redef]
+    from airflow.operators.python import PythonOperator  # type: ignore[no-redef]
 
 with DAG(
     dag_id="integration_test",
```
```diff
--- airflow/providers/edge3/example_dags/win_test.py
+++ airflow/providers/edge3/example_dags/win_test.py
@@ -32,7 +32,11 @@ from subprocess import STDOUT, Popen
 from time import sleep
 from typing import TYPE_CHECKING, Any
 
-
+try:
+    from airflow.sdk import task, task_group
+except ImportError:
+    # Airflow 2 path
+    from airflow.decorators import task, task_group  # type: ignore[attr-defined,no-redef]
 from airflow.exceptions import AirflowException, AirflowNotFoundException, AirflowSkipException
 from airflow.models import BaseOperator
 from airflow.models.dag import DAG
@@ -52,11 +56,11 @@ if TYPE_CHECKING:
     try:
         from airflow.sdk.types import RuntimeTaskInstanceProtocol as TaskInstance
     except ImportError:
-        from airflow.models import TaskInstance  # type: ignore[assignment
+        from airflow.models import TaskInstance  # type: ignore[assignment]
     from airflow.utils.context import Context
 
 try:
-    from airflow.operators.python import PythonOperator
+    from airflow.operators.python import PythonOperator
 except ImportError:
     from airflow.providers.common.compat.standard.operators import PythonOperator
 
@@ -277,7 +281,7 @@ with DAG(
 
     @task.virtualenv(requirements="numpy")
     def virtualenv():
-        import numpy
+        import numpy
 
         print(f"Welcome to virtualenv with numpy version {numpy.__version__}.")
 
@@ -297,7 +301,7 @@ with DAG(
     except AirflowNotFoundException:
         print("Connection 'integration_test' not found... but also OK.")
 
-    command = CmdOperator(task_id="command", command="echo
+    command = CmdOperator(task_id="command", command="echo Hello World")
 
     def python_call():
         print("Hello world")
```
```diff
--- airflow/providers/edge3/executors/edge_executor.py
+++ airflow/providers/edge3/executors/edge_executor.py
@@ -30,7 +30,6 @@ from sqlalchemy.orm import Session
 from airflow.cli.cli_config import GroupCommand
 from airflow.configuration import conf
 from airflow.executors.base_executor import BaseExecutor
-from airflow.models.abstractoperator import DEFAULT_QUEUE
 from airflow.models.taskinstance import TaskInstance, TaskInstanceState
 from airflow.providers.edge3.cli.edge_command import EDGE_COMMANDS
 from airflow.providers.edge3.models.edge_job import EdgeJobModel
@@ -55,6 +54,7 @@ if TYPE_CHECKING:
     TaskTuple = tuple[TaskInstanceKey, CommandType, str | None, Any | None]
 
 PARALLELISM: int = conf.getint("core", "PARALLELISM")
+DEFAULT_QUEUE: str = conf.get_mandatory_value("operators", "default_queue")
 
 
 class EdgeExecutor(BaseExecutor):
@@ -140,20 +140,40 @@ class EdgeExecutor(BaseExecutor):
             del self.edge_queued_tasks[key]
 
             self.validate_airflow_tasks_run_command(command)  # type: ignore[attr-defined]
-            session.add(
-                EdgeJobModel(
+
+            # Check if job already exists with same dag_id, task_id, run_id, map_index, try_number
+            existing_job = (
+                session.query(EdgeJobModel)
+                .filter_by(
                     dag_id=key.dag_id,
                     task_id=key.task_id,
                     run_id=key.run_id,
                     map_index=key.map_index,
                     try_number=key.try_number,
-                    state=TaskInstanceState.QUEUED,
-                    queue=queue or DEFAULT_QUEUE,
-                    concurrency_slots=task_instance.pool_slots,
-                    command=str(command),
                 )
+                .first()
             )
 
+            if existing_job:
+                existing_job.state = TaskInstanceState.QUEUED
+                existing_job.queue = queue or DEFAULT_QUEUE
+                existing_job.concurrency_slots = task_instance.pool_slots
+                existing_job.command = str(command)
+            else:
+                session.add(
+                    EdgeJobModel(
+                        dag_id=key.dag_id,
+                        task_id=key.task_id,
+                        run_id=key.run_id,
+                        map_index=key.map_index,
+                        try_number=key.try_number,
+                        state=TaskInstanceState.QUEUED,
+                        queue=queue or DEFAULT_QUEUE,
+                        concurrency_slots=task_instance.pool_slots,
+                        command=str(command),
+                    )
+                )
+
     @provide_session
     def queue_workload(
         self,
@@ -168,20 +188,40 @@ class EdgeExecutor(BaseExecutor):
 
         task_instance = workload.ti
         key = task_instance.key
-        session.add(
-            EdgeJobModel(
+
+        # Check if job already exists with same dag_id, task_id, run_id, map_index, try_number
+        existing_job = (
+            session.query(EdgeJobModel)
+            .filter_by(
                 dag_id=key.dag_id,
                 task_id=key.task_id,
                 run_id=key.run_id,
                 map_index=key.map_index,
                 try_number=key.try_number,
-                state=TaskInstanceState.QUEUED,
-                queue=task_instance.queue,
-                concurrency_slots=task_instance.pool_slots,
-                command=workload.model_dump_json(),
             )
+            .first()
         )
 
+        if existing_job:
+            existing_job.state = TaskInstanceState.QUEUED
+            existing_job.queue = task_instance.queue
+            existing_job.concurrency_slots = task_instance.pool_slots
+            existing_job.command = workload.model_dump_json()
+        else:
+            session.add(
+                EdgeJobModel(
+                    dag_id=key.dag_id,
+                    task_id=key.task_id,
+                    run_id=key.run_id,
+                    map_index=key.map_index,
+                    try_number=key.try_number,
+                    state=TaskInstanceState.QUEUED,
+                    queue=task_instance.queue,
+                    concurrency_slots=task_instance.pool_slots,
+                    command=workload.model_dump_json(),
+                )
+            )
+
     def _check_worker_liveness(self, session: Session) -> bool:
         """Reset worker state if heartbeat timed out."""
         changed = False
```
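Both `execute_async` and `queue_workload` previously did a blind `session.add()`, which could collide with a leftover row carrying the same (dag_id, task_id, run_id, map_index, try_number) identity; they now update such a row in place and insert only when none exists. A self-contained sketch of that pattern with a deliberately minimal, hypothetical model (the real `EdgeJobModel` has more columns):

```python
# Query-then-update ("upsert-like") queuing, as in the diff above.
from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()

class Job(Base):  # hypothetical stand-in for EdgeJobModel
    __tablename__ = "job"
    dag_id = Column(String, primary_key=True)
    task_id = Column(String, primary_key=True)
    try_number = Column(Integer, primary_key=True)
    state = Column(String)

def queue_job(session: Session, dag_id: str, task_id: str, try_number: int, state: str) -> None:
    # Reuse a leftover row with the same identity instead of inserting a
    # duplicate that would violate the primary key.
    existing = session.query(Job).filter_by(
        dag_id=dag_id, task_id=task_id, try_number=try_number
    ).first()
    if existing:
        existing.state = state
    else:
        session.add(Job(dag_id=dag_id, task_id=task_id, try_number=try_number, state=state))

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)
with Session(engine) as session:
    queue_job(session, "dag", "task", 1, "queued")
    queue_job(session, "dag", "task", 1, "queued")  # second call updates, no IntegrityError
    session.commit()
    print(session.query(Job).count())  # 1
```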
```diff
--- airflow/providers/edge3/models/edge_worker.py
+++ airflow/providers/edge3/models/edge_worker.py
@@ -109,7 +109,7 @@ class EdgeWorkerModel(Base, LoggingMixin):
         super().__init__()
 
     @property
-    def sysinfo_json(self) -> dict:
+    def sysinfo_json(self) -> dict | None:
         return json.loads(self.sysinfo) if self.sysinfo else None
 
     @property
@@ -283,3 +283,37 @@ def request_shutdown(worker_name: str, session: Session = NEW_SESSION) -> None:
         EdgeWorkerState.UNKNOWN,
     ):
         worker.state = EdgeWorkerState.SHUTDOWN_REQUEST
+
+
+@provide_session
+def add_worker_queues(worker_name: str, queues: list[str], session: Session = NEW_SESSION) -> None:
+    """Add queues to an edge worker."""
+    query = select(EdgeWorkerModel).where(EdgeWorkerModel.worker_name == worker_name)
+    worker: EdgeWorkerModel = session.scalar(query)
+    if worker.state in (
+        EdgeWorkerState.OFFLINE,
+        EdgeWorkerState.OFFLINE_MAINTENANCE,
+        EdgeWorkerState.UNKNOWN,
+    ):
+        error_message = f"Cannot add queues to edge worker {worker_name} as it is in {worker.state} state!"
+        logger.error(error_message)
+        raise TypeError(error_message)
+    worker.add_queues(queues)
+
+
+@provide_session
+def remove_worker_queues(worker_name: str, queues: list[str], session: Session = NEW_SESSION) -> None:
+    """Remove queues from an edge worker."""
+    query = select(EdgeWorkerModel).where(EdgeWorkerModel.worker_name == worker_name)
+    worker: EdgeWorkerModel = session.scalar(query)
+    if worker.state in (
+        EdgeWorkerState.OFFLINE,
+        EdgeWorkerState.OFFLINE_MAINTENANCE,
+        EdgeWorkerState.UNKNOWN,
+    ):
+        error_message = (
+            f"Cannot remove queues from edge worker {worker_name} as it is in {worker.state} state!"
+        )
+        logger.error(error_message)
+        raise TypeError(error_message)
+    worker.remove_queues(queues)
```
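A hedged usage sketch of the new model-level helpers: both are wrapped in `@provide_session`, so a session is injected automatically (a configured Airflow metadata database is assumed), and both raise `TypeError` for a worker in OFFLINE, OFFLINE_MAINTENANCE, or UNKNOWN state, which the CLI layer above converts into a `SystemExit`. The worker name and queue names here are hypothetical:

```python
from airflow.providers.edge3.models.edge_worker import add_worker_queues, remove_worker_queues

try:
    add_worker_queues("edge-host-1", ["gpu", "etl"])
    remove_worker_queues("edge-host-1", ["default"])
except TypeError as err:
    # Raised when the worker's current state does not allow queue changes.
    print(f"Queue change rejected: {err}")
```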
```diff
--- airflow/providers/edge3/plugins/edge_executor_plugin.py
+++ airflow/providers/edge3/plugins/edge_executor_plugin.py
@@ -17,6 +17,7 @@
 
 from __future__ import annotations
 
+import sys
 from typing import TYPE_CHECKING, Any
 
 from airflow.configuration import conf
@@ -213,12 +214,24 @@ try:
 except AirflowConfigException:
     EDGE_EXECUTOR_ACTIVE = False
 
+# Load the API endpoint only on api-server (Airflow 3.x) or webserver (Airflow 2.x)
+# todo(jscheffl): Remove this check when the discussion in
+# https://lists.apache.org/thread/w170czq6r7bslkqp1tk6bjjjo0789wgl
+# resulted in a proper API to selective initialize. Maybe backcompat-shim
+# is also needed to support Airflow-versions prior the rework.
+if AIRFLOW_V_3_0_PLUS:
+    RUNNING_ON_APISERVER = (len(sys.argv) > 1 and sys.argv[1] in ["api-server"]) or (
+        len(sys.argv) > 2 and sys.argv[2] == "airflow-core/src/airflow/api_fastapi/main.py"
+    )
+else:
+    RUNNING_ON_APISERVER = "gunicorn" in sys.argv[0] and "airflow-webserver" in sys.argv
+
 
 class EdgeExecutorPlugin(AirflowPlugin):
     """EdgeExecutor Plugin - provides API endpoints for Edge Workers in Webserver."""
 
     name = "edge_executor"
-    if EDGE_EXECUTOR_ACTIVE:
+    if EDGE_EXECUTOR_ACTIVE and RUNNING_ON_APISERVER:
         if AIRFLOW_V_3_0_PLUS:
             fastapi_apps = [_get_api_endpoint()]
         else:
```
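The added gating is a process-level heuristic over `sys.argv`, as the todo comment concedes, pending a proper selective-initialization API. A standalone sketch of how the flag evaluates under a few hypothetical command lines:

```python
def running_on_apiserver(argv: list[str], airflow_3: bool) -> bool:
    # Mirrors the diff: CLI-argument check on Airflow 3.x,
    # gunicorn/webserver process check on Airflow 2.x.
    if airflow_3:
        return (len(argv) > 1 and argv[1] in ["api-server"]) or (
            len(argv) > 2 and argv[2] == "airflow-core/src/airflow/api_fastapi/main.py"
        )
    return "gunicorn" in argv[0] and "airflow-webserver" in argv

print(running_on_apiserver(["airflow", "api-server"], airflow_3=True))                   # True
print(running_on_apiserver(["airflow", "scheduler"], airflow_3=True))                    # False
print(running_on_apiserver(["/usr/bin/gunicorn", "airflow-webserver"], airflow_3=False)) # True
```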
```diff
--- airflow/providers/edge3/worker_api/routes/_v2_compat.py
+++ airflow/providers/edge3/worker_api/routes/_v2_compat.py
@@ -127,7 +127,7 @@ else:
 
     # In Airflow 3 with AIP-72 we get workload addressed by ExecuteTask
     # But in Airflow 2.10 it is a command line array
-    ExecuteTask = list[str]  # type: ignore[
+    ExecuteTask = list[str]  # type: ignore[assignment,misc]
 
     def parse_command(command: str) -> ExecuteTask:
         from ast import literal_eval
```
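On Airflow 2.10 the "workload" is a command-line array persisted via its `str()` form, and `parse_command` (bottom of the hunk) recovers it with `ast.literal_eval`. A sketch of that round-trip (the stored string is hypothetical):

```python
from ast import literal_eval

stored = "['airflow', 'tasks', 'run', 'mydag', 'mytask', 'manual__2025-01-01']"  # hypothetical
command: list[str] = literal_eval(stored)  # safe: parses only Python literals, never executes code
print(command[:3])  # ['airflow', 'tasks', 'run']
```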
```diff
--- airflow/providers/edge3/worker_api/routes/_v2_routes.py
+++ airflow/providers/edge3/worker_api/routes/_v2_routes.py
@@ -66,7 +66,7 @@ def rpcapi_v2(body: dict[str, Any]) -> APIResponse:
     # Note: Except the method map this _was_ a 100% copy of internal API module
     # airflow.api_internal.endpoints.rpc_api_endpoint.internal_airflow_api()
     # As of rework for FastAPI in Airflow 3.0, this is updated and to be removed in the future.
-    from airflow.api_internal.endpoints.rpc_api_endpoint import (
+    from airflow.api_internal.endpoints.rpc_api_endpoint import (
         # Note: This is just for compatibility with Airflow 2.10, not working for Airflow 3 / main as removed
         initialize_method_map,
     )
```
```diff
--- apache_airflow_providers_edge3-1.1.2rc1.dist-info/METADATA
+++ apache_airflow_providers_edge3-1.1.3.dist-info/METADATA
@@ -1,11 +1,11 @@
 Metadata-Version: 2.4
 Name: apache-airflow-providers-edge3
-Version: 1.1.2rc1
+Version: 1.1.3
 Summary: Provider package apache-airflow-providers-edge3 for Apache Airflow
 Keywords: airflow-provider,edge3,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
 Maintainer-email: Apache Software Foundation <dev@airflow.apache.org>
-Requires-Python: 
+Requires-Python: >=3.10
 Description-Content-Type: text/x-rst
 Classifier: Development Status :: 5 - Production/Stable
 Classifier: Environment :: Console
@@ -18,13 +18,14 @@ Classifier: License :: OSI Approved :: Apache Software License
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
 Classifier: Topic :: System :: Monitoring
-Requires-Dist: apache-airflow>=2.10.
+Requires-Dist: apache-airflow>=2.10.0
 Requires-Dist: pydantic>=2.11.0
 Requires-Dist: retryhttp>=1.2.0,!=1.3.0
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.
-Project-URL: Documentation, https://airflow.
+Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-edge3/1.1.3/changelog.html
+Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-edge3/1.1.3
 Project-URL: Mastodon, https://fosstodon.org/@airflow
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
@@ -55,8 +56,9 @@ Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/
 
 Package ``apache-airflow-providers-edge3``
 
-Release: ``1.1.
+Release: ``1.1.3``
 
+Release Date: ``|PypiReleaseDate|``
 
 Handle edge workers on remote sites via HTTP(s) connection and orchestrates work over distributed sites.
 
@@ -80,7 +82,7 @@ This is a provider package for ``edge3`` provider. All classes for this provider
 are in ``airflow.providers.edge3`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-edge3/1.1.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-edge3/1.1.3/>`_.
 
 Installation
 ------------
@@ -89,7 +91,7 @@ You can install this package on top of an existing Airflow 2 installation (see `
 for the minimum Airflow version supported) via
 ``pip install apache-airflow-providers-edge3``
 
-The package supports the following python versions: 3.10,3.11,3.12
+The package supports the following python versions: 3.10,3.11,3.12,3.13
 
 Requirements
 ------------
@@ -103,5 +105,5 @@ PIP package Version required
 ================== ===================
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-edge3/1.1.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-edge3/1.1.3/changelog.html>`_.
 
```
```diff
--- apache_airflow_providers_edge3-1.1.2rc1.dist-info/RECORD
+++ apache_airflow_providers_edge3-1.1.3.dist-info/RECORD
@@ -1,41 +1,41 @@
 airflow/providers/edge3/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
-airflow/providers/edge3/__init__.py,sha256=
+airflow/providers/edge3/__init__.py,sha256=BjORgGO_bF2ofz3p_XipVRgv7thzIxVbHYrpwdW4qvA,1494
 airflow/providers/edge3/get_provider_info.py,sha256=Ek27-dB4UALHUFYoYjtoQIGq0p7zeHcEgmELHvpVmCU,6836
 airflow/providers/edge3/version_compat.py,sha256=j5PCtXvZ71aBjixu-EFTNtVDPsngzzs7os0ZQDgFVDk,1536
 airflow/providers/edge3/cli/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/edge3/cli/api_client.py,sha256=334KHVB4eMSzRpQ5emS56o-RTUJQprxf5Q3xQldCHDQ,7440
 airflow/providers/edge3/cli/dataclasses.py,sha256=JUuvvmzSVWvG9uOEfzLIiXrTZ-HbESvu50jkPpVIYVw,2895
-airflow/providers/edge3/cli/edge_command.py,sha256=
+airflow/providers/edge3/cli/edge_command.py,sha256=qfHjVxA8QJ6zD8Eb8Ipieso4wo51_3MBaTtuSSIEOI4,20339
 airflow/providers/edge3/cli/signalling.py,sha256=sf4S6j6OoP0bLkda3UlCmlZabjv5wsMypy3kAvx56Z0,3220
-airflow/providers/edge3/cli/worker.py,sha256=
+airflow/providers/edge3/cli/worker.py,sha256=x1SX5J2mFgfUTERKv5m8lDxNnoss3BV4F7Jgv6u8rPg,17259
 airflow/providers/edge3/example_dags/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/edge3/example_dags/integration_test.py,sha256=
+airflow/providers/edge3/example_dags/integration_test.py,sha256=4iXvv379nFPxF1_gfbqb5NW1z-24MscYWLDxDwTNbPo,6120
 airflow/providers/edge3/example_dags/win_notepad.py,sha256=zYcrKqODN4KLZQ-5wNnZQQskrDd5LA-nKJNgKQDntSE,2832
-airflow/providers/edge3/example_dags/win_test.py,sha256=
+airflow/providers/edge3/example_dags/win_test.py,sha256=aZ5UbgrDsxmxPgxE6OXZV-LBhRgH0QR7xCt5mcxT7uQ,13392
 airflow/providers/edge3/executors/__init__.py,sha256=y830gGSKCvjOcLwLuCDp84NCrHWWB9RSSH1qvJpFhyY,923
-airflow/providers/edge3/executors/edge_executor.py,sha256=
+airflow/providers/edge3/executors/edge_executor.py,sha256=fsGOiJNp6RNT1xGFtV8G0Y-nUfT2daBtZeJBh2MZAZ4,18013
 airflow/providers/edge3/models/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/edge3/models/edge_job.py,sha256=3D5HAzcVkyI2bxl3pVbbRxjIz--Tnr_eNFiw2oI6gEQ,3167
 airflow/providers/edge3/models/edge_logs.py,sha256=bNstp7gR54O2vbxzz4NTL0erbifFbGUjZ-YOM0I4sqk,2768
-airflow/providers/edge3/models/edge_worker.py,sha256=
+airflow/providers/edge3/models/edge_worker.py,sha256=z3Rru2iFqaFVyvJ6vBQ4-W0T9MenDHip6bJHfLqI6K4,12102
 airflow/providers/edge3/openapi/__init__.py,sha256=0O-WvmDx8GeKSoECpHYrbe0hW-LgjlKny3VqTCpBQeQ,927
 airflow/providers/edge3/openapi/edge_worker_api_v1.yaml,sha256=GAE2IdOXmcUueNy5KFkLBgNpoWnOjnHT9TrW5NZEWpI,24938
 airflow/providers/edge3/plugins/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/edge3/plugins/edge_executor_plugin.py,sha256=
+airflow/providers/edge3/plugins/edge_executor_plugin.py,sha256=cJcjmk-4OYBISpeGFXGvi78GPZ5EFgymzBm6FWvMMFA,10992
 airflow/providers/edge3/plugins/templates/edge_worker_hosts.html,sha256=0_P2yfZwpy3Kvqd3GBvu_PgmmKCUbso3ieW8aYa76iU,8997
 airflow/providers/edge3/plugins/templates/edge_worker_jobs.html,sha256=bZ-6ysmIy6j4eR_TPHiqbgb3qpNMKCcEEB-SpxuxNgc,2831
 airflow/providers/edge3/worker_api/__init__.py,sha256=nnPvxWGTEKZ9YyB1Yd7P9IvDOenK01LVHm22Owwxj3g,839
 airflow/providers/edge3/worker_api/app.py,sha256=Dda2VjkzgBtbQbSWSVEAoqd22RlqvBMyiPau65uKkv4,2006
-airflow/providers/edge3/worker_api/auth.py,sha256=
+airflow/providers/edge3/worker_api/auth.py,sha256=nmwfUz-nokUKyQp-UKwlMn-i2U5AXzq7c8Xfrt_bGeU,4867
 airflow/providers/edge3/worker_api/datamodels.py,sha256=FAiXqnrSN8zH4YE2fUMjXfXcH9cHlhRh4uZvvr936Ys,6696
 airflow/providers/edge3/worker_api/routes/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/edge3/worker_api/routes/_v2_compat.py,sha256=
-airflow/providers/edge3/worker_api/routes/_v2_routes.py,sha256
+airflow/providers/edge3/worker_api/routes/_v2_compat.py,sha256=PuzSL9dMuji_MsluCoPdCvYboqJD0h4zERcIoB5kJvI,4543
+airflow/providers/edge3/worker_api/routes/_v2_routes.py,sha256=xcbf6RdOHx5zOl9JlIAW9nQhiy3ju-EIyq1tbXGJSYc,10800
 airflow/providers/edge3/worker_api/routes/health.py,sha256=XxqIppnRA138Q6mAHCdyL2JvoeeganUiI-TXyXSPTGo,1075
 airflow/providers/edge3/worker_api/routes/jobs.py,sha256=UK1w6nXEUadOLwE9abZ4jHH4KtbvXcwaAF0EnwSa3y4,5733
 airflow/providers/edge3/worker_api/routes/logs.py,sha256=uk0SZ5hAimj3sAcq1FYCDu0AXYNeTeyjZDGBvw-986E,4945
 airflow/providers/edge3/worker_api/routes/worker.py,sha256=BGARu1RZ74lW9X-ltuMYbbVXczm_MZdqHaai2MhDWtY,8969
-apache_airflow_providers_edge3-1.1.
-apache_airflow_providers_edge3-1.1.
-apache_airflow_providers_edge3-1.1.
-apache_airflow_providers_edge3-1.1.
+apache_airflow_providers_edge3-1.1.3.dist-info/entry_points.txt,sha256=7WUIGfd3o9NvvbK5trbZxNXTgYGc6pqg74wZPigbx5o,206
+apache_airflow_providers_edge3-1.1.3.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
+apache_airflow_providers_edge3-1.1.3.dist-info/METADATA,sha256=Q_0GBApe8amF0E0-_69-wVMrDOc9QBYaKTNkTgsY8yA,4779
+apache_airflow_providers_edge3-1.1.3.dist-info/RECORD,,
```