apache-airflow-providers-edge3 1.1.1rc1.tar.gz → 1.1.2.tar.gz

This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
Files changed (82)
  1. {apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/PKG-INFO +18 -38
  2. {apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/README.rst +12 -31
  3. {apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/docs/architecture.rst +2 -2
  4. {apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/docs/changelog.rst +50 -0
  5. {apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/docs/deployment.rst +10 -1
  6. {apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/docs/index.rst +10 -37
  7. {apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/docs/install_on_windows.rst +8 -3
  8. {apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/provider.yaml +2 -1
  9. {apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/pyproject.toml +6 -8
  10. {apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/src/airflow/__init__.py +1 -1
  11. {apache_airflow_providers_edge3-1.1.1rc1/tests/unit → apache_airflow_providers_edge3-1.1.2/src/airflow/providers}/__init__.py +1 -1
  12. {apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/src/airflow/providers/edge3/__init__.py +1 -1
  13. {apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/src/airflow/providers/edge3/cli/edge_command.py +66 -1
  14. {apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/src/airflow/providers/edge3/cli/worker.py +9 -8
  15. {apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/src/airflow/providers/edge3/example_dags/integration_test.py +12 -8
  16. {apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/src/airflow/providers/edge3/example_dags/win_test.py +15 -7
  17. {apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/src/airflow/providers/edge3/executors/edge_executor.py +6 -5
  18. {apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/src/airflow/providers/edge3/models/edge_worker.py +35 -1
  19. apache_airflow_providers_edge3-1.1.2/src/airflow/providers/edge3/plugins/edge_executor_plugin.py +250 -0
  20. {apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/src/airflow/providers/edge3/worker_api/auth.py +1 -1
  21. {apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/src/airflow/providers/edge3/worker_api/routes/_v2_compat.py +3 -3
  22. {apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/src/airflow/providers/edge3/worker_api/routes/_v2_routes.py +1 -1
  23. {apache_airflow_providers_edge3-1.1.1rc1/src/airflow/providers → apache_airflow_providers_edge3-1.1.2/tests/unit}/__init__.py +1 -1
  24. {apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/tests/unit/edge3/cli/test_worker.py +47 -0
  25. {apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/tests/unit/edge3/executors/test_edge_executor.py +3 -3
  26. {apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/tests/unit/edge3/plugins/test_edge_executor_plugin.py +32 -4
  27. apache_airflow_providers_edge3-1.1.1rc1/src/airflow/providers/edge3/plugins/edge_executor_plugin.py +0 -229
  28. {apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/docs/cli-ref.rst +0 -0
  29. {apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/docs/commits.rst +0 -0
  30. {apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/docs/conf.py +0 -0
  31. {apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/docs/configurations-ref.rst +0 -0
  32. {apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/docs/edge_executor.rst +0 -0
  33. {apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/docs/img/distributed_architecture.svg +0 -0
  34. {apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/docs/img/edge_package.svg +0 -0
  35. {apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/docs/img/worker_hosts.png +0 -0
  36. {apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/docs/img/worker_maintenance.png +0 -0
  37. {apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/docs/installing-providers-from-sources.rst +0 -0
  38. {apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/docs/security.rst +0 -0
  39. {apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/docs/ui_plugin.rst +0 -0
  40. {apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/docs/why_edge.rst +0 -0
  41. {apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/src/airflow/providers/edge3/LICENSE +0 -0
  42. {apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/src/airflow/providers/edge3/cli/__init__.py +0 -0
  43. {apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/src/airflow/providers/edge3/cli/api_client.py +0 -0
  44. {apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/src/airflow/providers/edge3/cli/dataclasses.py +0 -0
  45. {apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/src/airflow/providers/edge3/cli/signalling.py +0 -0
  46. {apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/src/airflow/providers/edge3/example_dags/__init__.py +0 -0
  47. {apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/src/airflow/providers/edge3/example_dags/win_notepad.py +0 -0
  48. {apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/src/airflow/providers/edge3/executors/__init__.py +0 -0
  49. {apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/src/airflow/providers/edge3/get_provider_info.py +0 -0
  50. {apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/src/airflow/providers/edge3/models/__init__.py +0 -0
  51. {apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/src/airflow/providers/edge3/models/edge_job.py +0 -0
  52. {apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/src/airflow/providers/edge3/models/edge_logs.py +0 -0
  53. {apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/src/airflow/providers/edge3/openapi/__init__.py +0 -0
  54. {apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/src/airflow/providers/edge3/openapi/edge_worker_api_v1.yaml +0 -0
  55. {apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/src/airflow/providers/edge3/plugins/__init__.py +0 -0
  56. {apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/src/airflow/providers/edge3/plugins/templates/edge_worker_hosts.html +0 -0
  57. {apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/src/airflow/providers/edge3/plugins/templates/edge_worker_jobs.html +0 -0
  58. {apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/src/airflow/providers/edge3/version_compat.py +0 -0
  59. {apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/src/airflow/providers/edge3/worker_api/__init__.py +0 -0
  60. {apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/src/airflow/providers/edge3/worker_api/app.py +0 -0
  61. {apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/src/airflow/providers/edge3/worker_api/datamodels.py +0 -0
  62. {apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/src/airflow/providers/edge3/worker_api/routes/__init__.py +0 -0
  63. {apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/src/airflow/providers/edge3/worker_api/routes/health.py +0 -0
  64. {apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/src/airflow/providers/edge3/worker_api/routes/jobs.py +0 -0
  65. {apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/src/airflow/providers/edge3/worker_api/routes/logs.py +0 -0
  66. {apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/src/airflow/providers/edge3/worker_api/routes/worker.py +0 -0
  67. {apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/tests/conftest.py +0 -0
  68. {apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/tests/unit/edge3/__init__.py +0 -0
  69. {apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/tests/unit/edge3/cli/__init__.py +0 -0
  70. {apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/tests/unit/edge3/cli/test_api_client.py +0 -0
  71. {apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/tests/unit/edge3/cli/test_dataclasses.py +0 -0
  72. {apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/tests/unit/edge3/cli/test_edge_command.py +0 -0
  73. {apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/tests/unit/edge3/cli/test_signalling.py +0 -0
  74. {apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/tests/unit/edge3/executors/__init__.py +0 -0
  75. {apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/tests/unit/edge3/models/__init__.py +0 -0
  76. {apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/tests/unit/edge3/plugins/__init__.py +0 -0
  77. {apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/tests/unit/edge3/worker_api/__init__.py +0 -0
  78. {apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/tests/unit/edge3/worker_api/routes/__init__.py +0 -0
  79. {apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/tests/unit/edge3/worker_api/routes/test_health.py +0 -0
  80. {apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/tests/unit/edge3/worker_api/routes/test_jobs.py +0 -0
  81. {apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/tests/unit/edge3/worker_api/routes/test_logs.py +0 -0
  82. {apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/tests/unit/edge3/worker_api/routes/test_worker.py +0 -0

{apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/PKG-INFO

@@ -1,11 +1,11 @@
  Metadata-Version: 2.4
  Name: apache-airflow-providers-edge3
- Version: 1.1.1rc1
+ Version: 1.1.2
  Summary: Provider package apache-airflow-providers-edge3 for Apache Airflow
  Keywords: airflow-provider,edge3,airflow,integration
  Author-email: Apache Software Foundation <dev@airflow.apache.org>
  Maintainer-email: Apache Software Foundation <dev@airflow.apache.org>
- Requires-Python: ~=3.9
+ Requires-Python: >=3.10
  Description-Content-Type: text/x-rst
  Classifier: Development Status :: 5 - Production/Stable
  Classifier: Environment :: Console
@@ -15,18 +15,17 @@ Classifier: Intended Audience :: System Administrators
  Classifier: Framework :: Apache Airflow
  Classifier: Framework :: Apache Airflow :: Provider
  Classifier: License :: OSI Approved :: Apache Software License
- Classifier: Programming Language :: Python :: 3.9
  Classifier: Programming Language :: Python :: 3.10
  Classifier: Programming Language :: Python :: 3.11
  Classifier: Programming Language :: Python :: 3.12
+ Classifier: Programming Language :: Python :: 3.13
  Classifier: Topic :: System :: Monitoring
- Requires-Dist: apache-airflow>=2.10.0rc1
- Requires-Dist: apache-airflow-providers-fab>=1.5.3rc1
+ Requires-Dist: apache-airflow>=2.10.0
  Requires-Dist: pydantic>=2.11.0
  Requires-Dist: retryhttp>=1.2.0,!=1.3.0
  Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
- Project-URL: Changelog, https://airflow.staged.apache.org/docs/apache-airflow-providers-edge3/1.1.1/changelog.html
- Project-URL: Documentation, https://airflow.staged.apache.org/docs/apache-airflow-providers-edge3/1.1.1
+ Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-edge3/1.1.2/changelog.html
+ Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-edge3/1.1.2
  Project-URL: Mastodon, https://fosstodon.org/@airflow
  Project-URL: Slack Chat, https://s.apache.org/airflow-slack
  Project-URL: Source Code, https://github.com/apache/airflow
@@ -57,8 +56,9 @@ Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/

  Package ``apache-airflow-providers-edge3``

- Release: ``1.1.1``
+ Release: ``1.1.2``

+ Release Date: ``|PypiReleaseDate|``

  Handle edge workers on remote sites via HTTP(s) connection and orchestrates work over distributed sites.

@@ -82,7 +82,7 @@ This is a provider package for ``edge3`` provider. All classes for this provider
  are in ``airflow.providers.edge3`` python package.

  You can find package information and changelog for the provider
- in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-edge3/1.1.1/>`_.
+ in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-edge3/1.1.2/>`_.

  Installation
  ------------
@@ -91,39 +91,19 @@ You can install this package on top of an existing Airflow 2 installation (see `
  for the minimum Airflow version supported) via
  ``pip install apache-airflow-providers-edge3``

- The package supports the following python versions: 3.9,3.10,3.11,3.12
+ The package supports the following python versions: 3.10,3.11,3.12,3.13

  Requirements
  ------------

- ================================ ===================
- PIP package                      Version required
- ================================ ===================
- ``apache-airflow``               ``>=2.10.0``
- ``apache-airflow-providers-fab`` ``>=1.5.3``
- ``pydantic``                     ``>=2.11.0``
- ``retryhttp``                    ``>=1.2.0,!=1.3.0``
- ================================ ===================
-
- Cross provider package dependencies
- -----------------------------------
-
- Those are dependencies that might be needed in order to use all the features of the package.
- You need to install the specified providers in order to use them.
-
- You can install such cross-provider dependencies when installing from PyPI. For example:
-
- .. code-block:: bash
-
-     pip install apache-airflow-providers-edge3[fab]
-
-
- ============================================================================================== =======
- Dependent package                                                                              Extra
- ============================================================================================== =======
- `apache-airflow-providers-fab <https://airflow.apache.org/docs/apache-airflow-providers-fab>`_ ``fab``
- ============================================================================================== =======
+ ================== ===================
+ PIP package        Version required
+ ================== ===================
+ ``apache-airflow`` ``>=2.10.0``
+ ``pydantic``       ``>=2.11.0``
+ ``retryhttp``      ``>=1.2.0,!=1.3.0``
+ ================== ===================

  The changelog for the provider package can be found in the
- `changelog <https://airflow.apache.org/docs/apache-airflow-providers-edge3/1.1.1/changelog.html>`_.
+ `changelog <https://airflow.apache.org/docs/apache-airflow-providers-edge3/1.1.2/changelog.html>`_.


{apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/README.rst

@@ -23,8 +23,9 @@

  Package ``apache-airflow-providers-edge3``

- Release: ``1.1.1``
+ Release: ``1.1.2``

+ Release Date: ``|PypiReleaseDate|``

  Handle edge workers on remote sites via HTTP(s) connection and orchestrates work over distributed sites.

@@ -48,7 +49,7 @@ This is a provider package for ``edge3`` provider. All classes for this provider
  are in ``airflow.providers.edge3`` python package.

  You can find package information and changelog for the provider
- in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-edge3/1.1.1/>`_.
+ in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-edge3/1.1.2/>`_.

  Installation
  ------------
@@ -57,38 +58,18 @@ You can install this package on top of an existing Airflow 2 installation (see `
  for the minimum Airflow version supported) via
  ``pip install apache-airflow-providers-edge3``

- The package supports the following python versions: 3.9,3.10,3.11,3.12
+ The package supports the following python versions: 3.10,3.11,3.12,3.13

  Requirements
  ------------

- ================================ ===================
- PIP package                      Version required
- ================================ ===================
- ``apache-airflow``               ``>=2.10.0``
- ``apache-airflow-providers-fab`` ``>=1.5.3``
- ``pydantic``                     ``>=2.11.0``
- ``retryhttp``                    ``>=1.2.0,!=1.3.0``
- ================================ ===================
-
- Cross provider package dependencies
- -----------------------------------
-
- Those are dependencies that might be needed in order to use all the features of the package.
- You need to install the specified providers in order to use them.
-
- You can install such cross-provider dependencies when installing from PyPI. For example:
-
- .. code-block:: bash
-
-     pip install apache-airflow-providers-edge3[fab]
-
-
- ============================================================================================== =======
- Dependent package                                                                              Extra
- ============================================================================================== =======
- `apache-airflow-providers-fab <https://airflow.apache.org/docs/apache-airflow-providers-fab>`_ ``fab``
- ============================================================================================== =======
+ ================== ===================
+ PIP package        Version required
+ ================== ===================
+ ``apache-airflow`` ``>=2.10.0``
+ ``pydantic``       ``>=2.11.0``
+ ``retryhttp``      ``>=1.2.0,!=1.3.0``
+ ================== ===================

  The changelog for the provider package can be found in the
- `changelog <https://airflow.apache.org/docs/apache-airflow-providers-edge3/1.1.1/changelog.html>`_.
+ `changelog <https://airflow.apache.org/docs/apache-airflow-providers-edge3/1.1.2/changelog.html>`_.

{apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/docs/architecture.rst

@@ -141,7 +141,7 @@ The following states are used to track the worker:
  TERMINATING->OFFLINE[label="on clean shutdown if running tasks = 0"];
  }

- See also https://github.com/apache/airflow/blob/main/providers/edge3/src/airflow/providers/edge3/models/edge_worker.py#L45
+ See also :py:class:`airflow.providers.edge3.models.edge_worker.EdgeWorkerState`
  for a documentation of details of all states of the Edge Worker.

  Feature Backlog Edge Provider
@@ -171,7 +171,7 @@ The following features are known missing and will be implemented in increments:

  - Tests

-   - System tests in Github, test the deployment of the worker with a Dag execution
+   - System tests in GitHub, test the deployment of the worker with a Dag execution
    - Test/Support on Windows for Edge Worker

  - Scaling test - Check and define boundaries of workers/jobs. Today it is known to

{apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/docs/changelog.rst

@@ -27,6 +27,56 @@
  Changelog
  ---------

+ 1.1.2
+ .....
+
+ Bug Fixes
+ ~~~~~~~~~
+
+ * ``Fix UnboundLocalError for 'edge_job_command_len' (#52328)``
+ * ``Extend run detection to dev-mode to load plugin (#53576)``
+ * ``Add queue and remove queue cli commands for EdgeExecutor (#53505)``
+ * ``Ensure Edge Plugin for API endpoint is only loaded on API-Server and AF2 Webserver (#52952)``
+ * ``Fix unreachable code mypy warnings in edge3 provider (#53430)``
+ * ``Make edge3 provider compatible with mypy 1.16.1 (#53104)``
+ * ``Fix task configuration defaults for AbstractOperator (#52871)``
+
+ Misc
+ ~~~~
+
+ * ``Remove upper-binding for "python-requires" (#52980)``
+ * ``Temporarily switch to use >=,< pattern instead of '~=' (#52967)``
+ * ``Deprecate decorators from Core (#53629)``
+ * ``Add Python 3.13 support for Airflow. (#46891)``
+ * ``Cleanup type ignores in edge3 provider where possible (#53248)``
+ * ``Remove type ignore across codebase after mypy upgrade (#53243)``
+ * ``Remove deprecation in Edge for DEFAULT_QUEUE (#52954)``
+ * ``Move 'BaseHook' implementation to task SDK (#51873)``
+ * ``Force the definition of 'execution_api_server_url' based on 'api_url' (#52184)``
+ * ``Drop support for Python 3.9 (#52072)``
+ * ``Remove FAB dependency from Edge3 Provider (#51995)``
+
+ Doc-only
+ ~~~~~~~~
+
+ * ``Clean some leftovers of Python 3.9 removal - All the rest (#52432)``
+ * ``Update documentation for forcing core execution_api_server_url (#52447)``
+ * ``Fix spelling in edge provider (#52169)``
+ * ``Add docs for edge execution_api_server_url (#52082)``
+ * ``Include docs for Windows (#52004)``
+ * ``Document EdgeExecutor migration from 'internal_api_secret_key' to 'jwt_secret' (#51905)``
+ * ``Fix Edge State Model Link (#51860)``
+
+ .. Below changes are excluded from the changelog. Move them to
+    appropriate section above if needed. Do not delete the lines(!):
+    * ``Revert "Revert "Remove FAB dependency from Edge3 Provider (#51995)"" (#52000)``
+    * ``Revert "Remove FAB dependency from Edge3 Provider (#51995)" (#51998)``
+    * ``Make dag_version_id in TI non-nullable (#50825)``
+    * ``Fix spelling of GitHub brand name (#53735)``
+    * ``Replace mock.patch("utcnow") with time_machine in Edge Executor (#53670)``
+    * ``Prepare release for July 2025 1st provider wave (#52727)``
+
+
  1.1.1
  .....


{apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/docs/deployment.rst

@@ -44,12 +44,19 @@ Here are a few imperative requirements for your workers:

  Minimum Airflow configuration settings for the Edge Worker to make it running is:

+ - Section ``[api_auth]``
+
+   - ``jwt_secret``: A matching secret to that on the api-server (starting from version 3.0.0).
+
  - Section ``[core]``

+   - ``execution_api_server_url``: If not set, the base URL from ``edge.api_url`` will be used. For example,
+     when ``edge.api_url`` is set to ``https://your-hostname-and-port/edge_worker/v1/rpcapi``, it will
+     default to ``https://your-hostname-and-port/execution/``.
    - ``executor``: Executor must be set or added to be ``airflow.providers.edge3.executors.EdgeExecutor``
    - ``internal_api_secret_key``: An encryption key must be set on api-server and Edge Worker component as
      shared secret to authenticate traffic. It should be a random string like the fernet key
-     (but preferably not the same).
+     (for versions earlier than 3.0.0).

  - Section ``[edge]``

@@ -183,3 +190,5 @@ instance. The commands are:
  - ``airflow edge remote-edge-worker-exit-maintenance``: Request a remote edge worker to exit maintenance mode
  - ``airflow edge shutdown-remote-edge-worker``: Shuts down a remote edge worker gracefully
  - ``airflow edge remove-remote-edge-worker``: Remove a worker instance from the cluster
+ - ``airflow edge add-worker-queues``: Add queues to an edge worker
+ - ``airflow edge remove-worker-queues``: Remove queues from an edge worker

{apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/docs/index.rst

@@ -90,7 +90,9 @@ Additional REST API endpoints are provided to distribute tasks and manage the ed
  are provided by the API server.


- Release: 1.1.1
+ Release: 1.1.2
+
+ Release Date: ``|PypiReleaseDate|``

  Provider package
  ----------------
@@ -110,39 +112,10 @@ Requirements

  The minimum Apache Airflow version supported by this provider distribution is ``2.10.0``.

- ================================ ===================
- PIP package                      Version required
- ================================ ===================
- ``apache-airflow``               ``>=2.10.0``
- ``apache-airflow-providers-fab`` ``>=1.5.3``
- ``pydantic``                     ``>=2.11.0``
- ``retryhttp``                    ``>=1.2.0,!=1.3.0``
- ================================ ===================
-
- Cross provider package dependencies
- -----------------------------------
-
- Those are dependencies that might be needed in order to use all the features of the package.
- You need to install the specified provider distributions in order to use them.
-
- You can install such cross-provider dependencies when installing from PyPI. For example:
-
- .. code-block:: bash
-
-     pip install apache-airflow-providers-edge3[fab]
-
-
- ============================================================================================== =======
- Dependent package                                                                              Extra
- ============================================================================================== =======
- `apache-airflow-providers-fab <https://airflow.apache.org/docs/apache-airflow-providers-fab>`_ ``fab``
- ============================================================================================== =======
-
- Downloading official packages
- -----------------------------
-
- You can download officially released packages and verify their checksums and signatures from the
- `Official Apache Download site <https://downloads.apache.org/airflow/providers/>`_
-
- * `The apache-airflow-providers-edge3 1.1.1 sdist package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_edge3-1.1.1.tar.gz>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_edge3-1.1.1.tar.gz.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_edge3-1.1.1.tar.gz.sha512>`__)
- * `The apache-airflow-providers-edge3 1.1.1 wheel package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_edge3-1.1.1-py3-none-any.whl>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_edge3-1.1.1-py3-none-any.whl.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_edge3-1.1.1-py3-none-any.whl.sha512>`__)
+ ================== ===================
+ PIP package        Version required
+ ================== ===================
+ ``apache-airflow`` ``>=2.10.0``
+ ``pydantic``       ``>=2.11.0``
+ ``retryhttp``      ``>=1.2.0,!=1.3.0``
+ ================== ===================

{apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/docs/install_on_windows.rst

@@ -29,7 +29,7 @@ Install Edge Worker on Windows
  The setup was tested on Windows 10 with Python 3.12.8, 64-bit. Backend for tests was Airflow 2.10.5.
  To setup a instance of Edge Worker on Windows, you need to follow the steps below:

- 1. Install Python 3.9 or higher.
+ 1. Install Python 3.10 or higher.
  2. Create an empty folder as base to start with. In our example it is ``C:\Airflow``.
  3. Start Shell/Command Line in ``C:\Airflow`` and create a new virtual environment via: ``python -m venv venv``
  4. Activate the virtual environment via: ``venv\Scripts\activate.bat``
@@ -39,8 +39,8 @@ To setup a instance of Edge Worker on Windows, you need to follow the steps belo
     (At least the DAG files which should be executed on the edge alongside the dependencies.)
  7. Collect needed parameters from your running Airflow backend, at least the following:

+    - ``api_auth`` / ``jwt_token``: The shared secret key between the api-server and the Edge Worker
     - ``edge`` / ``api_url``: The HTTP(s) endpoint where the Edge Worker connects to
-    - ``core`` / ``internal_api_secret_key``: The shared secret key between the api-server and the Edge Worker
     - Any proxy details if applicable for your environment.

  8. Create a worker start script to prevent repeated typing. Create a new file ``start_worker.bat`` in
@@ -49,11 +49,16 @@ To setup a instance of Edge Worker on Windows, you need to follow the steps belo
  .. code-block:: bash

      @echo off
+     REM For versions 3.0.0 or later
+     set AIRFLOW__API_AUTH__JWT_SECRET=<matching the api-server...>
+     REM for versions earlier than 3.0.0
+     set AIRFLOW__CORE__INTERNAL_API_SECRET_KEY=<use this as configured centrally in api-server...>
+
+     REM For all versions
      set AIRFLOW__CORE__DAGS_FOLDER=dags
      set AIRFLOW__LOGGING__BASE_LOG_FOLDER=edge_logs
      set AIRFLOW__EDGE__API_URL=https://your-hostname-and-port/edge_worker/v1/rpcapi
      set AIRFLOW__CORE__EXECUTOR=airflow.providers.edge3.executors.edge_executor.EdgeExecutor
-     set AIRFLOW__CORE__INTERNAL_API_SECRET_KEY=<use this as configured centrally in api-server...>
      set AIRFLOW__CORE__LOAD_EXAMPLES=False
      set AIRFLOW_ENABLE_AIP_44=true
      @REM Add if needed: set http_proxy=http://my-company-proxy.com:3128

{apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/provider.yaml

@@ -33,13 +33,14 @@ description: |
    are provided by the API server.

  state: ready
- source-date-epoch: 1749896427
+ source-date-epoch: 1751473321

  # Note that those versions are maintained by release manager - do not update them manually
  # with the exception of case where other provider in sources has >= new provider version.
  # In such case adding >= NEW_VERSION and bumping to NEW_VERSION in a provider have
  # to be done in the same PR
  versions:
+   - 1.1.2
    - 1.1.1
    - 1.1.0
    - 1.0.0

{apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/pyproject.toml

@@ -25,7 +25,7 @@ build-backend = "flit_core.buildapi"

  [project]
  name = "apache-airflow-providers-edge3"
- version = "1.1.1rc1"
+ version = "1.1.2"
  description = "Provider package apache-airflow-providers-edge3 for Apache Airflow"
  readme = "README.rst"
  authors = [
@@ -44,21 +44,20 @@ classifiers = [
      "Framework :: Apache Airflow",
      "Framework :: Apache Airflow :: Provider",
      "License :: OSI Approved :: Apache Software License",
-     "Programming Language :: Python :: 3.9",
      "Programming Language :: Python :: 3.10",
      "Programming Language :: Python :: 3.11",
      "Programming Language :: Python :: 3.12",
+     "Programming Language :: Python :: 3.13",
      "Topic :: System :: Monitoring",
  ]
- requires-python = "~=3.9"
+ requires-python = ">=3.10"

  # The dependencies should be modified in place in the generated file.
  # Any change in the dependencies is preserved when the file is regenerated
  # Make sure to run ``breeze static-checks --type update-providers-dependencies --all-files``
  # After you modify the dependencies, and rebuild your Breeze CI image with ``breeze ci-image build``
  dependencies = [
-     "apache-airflow>=2.10.0rc1",
-     "apache-airflow-providers-fab>=1.5.3rc1",
+     "apache-airflow>=2.10.0",
      "pydantic>=2.11.0",
      "retryhttp>=1.2.0,!=1.3.0",
  ]
@@ -68,7 +67,6 @@ dev = [
      "apache-airflow",
      "apache-airflow-task-sdk",
      "apache-airflow-devel-common",
-     "apache-airflow-providers-fab",
      # Additional devel dependencies (do not remove this line and add extra development dependencies)
  ]

@@ -98,8 +96,8 @@ apache-airflow-providers-common-sql = {workspace = true}
  apache-airflow-providers-standard = {workspace = true}

  [project.urls]
- "Documentation" = "https://airflow.staged.apache.org/docs/apache-airflow-providers-edge3/1.1.1"
- "Changelog" = "https://airflow.staged.apache.org/docs/apache-airflow-providers-edge3/1.1.1/changelog.html"
+ "Documentation" = "https://airflow.apache.org/docs/apache-airflow-providers-edge3/1.1.2"
+ "Changelog" = "https://airflow.apache.org/docs/apache-airflow-providers-edge3/1.1.2/changelog.html"
  "Bug Tracker" = "https://github.com/apache/airflow/issues"
  "Source Code" = "https://github.com/apache/airflow"
  "Slack Chat" = "https://s.apache.org/airflow-slack"

{apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/src/airflow/__init__.py

@@ -14,4 +14,4 @@
  # KIND, either express or implied. See the License for the
  # specific language governing permissions and limitations
  # under the License.
- __path__ = __import__("pkgutil").extend_path(__path__, __name__)  # type: ignore
+ __path__ = __import__("pkgutil").extend_path(__path__, __name__)

{apache_airflow_providers_edge3-1.1.1rc1/tests/unit → apache_airflow_providers_edge3-1.1.2/src/airflow/providers}/__init__.py

@@ -14,4 +14,4 @@
  # KIND, either express or implied. See the License for the
  # specific language governing permissions and limitations
  # under the License.
- __path__ = __import__("pkgutil").extend_path(__path__, __name__)  # type: ignore
+ __path__ = __import__("pkgutil").extend_path(__path__, __name__)

{apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/src/airflow/providers/edge3/__init__.py

@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version

  __all__ = ["__version__"]

- __version__ = "1.1.1"
+ __version__ = "1.1.2"

  if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
      "2.10.0"

{apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/src/airflow/providers/edge3/cli/edge_command.py

@@ -64,7 +64,7 @@ EDGE_WORKER_HEADER = "\n".join(
  @providers_configuration_loaded
  def force_use_internal_api_on_edge_worker():
      """
-     Ensure that the environment is configured for the internal API without needing to declare it outside.
+     Ensure the environment is configured for the internal API without explicit declaration.

      This is only required for an Edge worker and must to be done before the Click CLI wrapper is initiated.
      That is because the CLI wrapper will attempt to establish a DB connection, which will fail before the
@@ -351,6 +351,48 @@ def remote_worker_request_shutdown(args) -> None:
      logger.info("Requested shutdown of Edge Worker host %s by %s.", args.edge_hostname, getuser())


+ @cli_utils.action_cli(check_db=False)
+ @providers_configuration_loaded
+ def add_worker_queues(args) -> None:
+     """Add queues to an edge worker."""
+     _check_valid_db_connection()
+     _check_if_registered_edge_host(hostname=args.edge_hostname)
+     from airflow.providers.edge3.models.edge_worker import add_worker_queues
+
+     queues = args.queues.split(",") if args.queues else []
+     if not queues:
+         raise SystemExit("Error: No queues specified to add.")
+
+     try:
+         add_worker_queues(args.edge_hostname, queues)
+         logger.info("Added queues %s to Edge Worker host %s by %s.", queues, args.edge_hostname, getuser())
+     except TypeError as e:
+         logger.error(str(e))
+         raise SystemExit
+
+
+ @cli_utils.action_cli(check_db=False)
+ @providers_configuration_loaded
+ def remove_worker_queues(args) -> None:
+     """Remove queues from an edge worker."""
+     _check_valid_db_connection()
+     _check_if_registered_edge_host(hostname=args.edge_hostname)
+     from airflow.providers.edge3.models.edge_worker import remove_worker_queues
+
+     queues = args.queues.split(",") if args.queues else []
+     if not queues:
+         raise SystemExit("Error: No queues specified to remove.")
+
+     try:
+         remove_worker_queues(args.edge_hostname, queues)
+         logger.info(
+             "Removed queues %s from Edge Worker host %s by %s.", queues, args.edge_hostname, getuser()
+         )
+     except TypeError as e:
+         logger.error(str(e))
+         raise SystemExit
+
+
  ARG_CONCURRENCY = Arg(
      ("-c", "--concurrency"),
      type=int,
@@ -380,6 +422,11 @@ ARG_REQUIRED_MAINTENANCE_COMMENT = Arg(
      help="Maintenance comments to report reason. Required if enabling maintenance",
      required=True,
  )
+ ARG_QUEUES_MANAGE = Arg(
+     ("-q", "--queues"),
+     help="Comma delimited list of queues to add or remove.",
+     required=True,
+ )
  ARG_WAIT_MAINT = Arg(
      ("-w", "--wait"),
      default=False,
@@ -516,4 +563,22 @@ EDGE_COMMANDS: list[ActionCommand] = [
          func=remote_worker_request_shutdown,
          args=(ARG_REQUIRED_EDGE_HOSTNAME,),
      ),
+     ActionCommand(
+         name="add-worker-queues",
+         help=add_worker_queues.__doc__,
+         func=add_worker_queues,
+         args=(
+             ARG_REQUIRED_EDGE_HOSTNAME,
+             ARG_QUEUES_MANAGE,
+         ),
+     ),
+     ActionCommand(
+         name="remove-worker-queues",
+         help=remove_worker_queues.__doc__,
+         func=remove_worker_queues,
+         args=(
+             ARG_REQUIRED_EDGE_HOSTNAME,
+             ARG_QUEUES_MANAGE,
+         ),
+     ),
  ]
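
A minimal sketch, not part of the diff above, of what the new ``add-worker-queues`` / ``remove-worker-queues`` commands do after argument parsing. It assumes a configured Airflow metadata database and an already-registered worker host named ``edge-host-1`` (the hostname and queue names are purely illustrative):

# Sketch only: mirrors the call flow of the new CLI commands shown in the diff.
# "edge-host-1", "gpu" and "etl" are hypothetical example values.
from airflow.providers.edge3.models.edge_worker import add_worker_queues, remove_worker_queues

raw = "gpu,etl"                               # value passed via -q/--queues
queues = raw.split(",") if raw else []        # same splitting as the CLI functions above

add_worker_queues("edge-host-1", queues)      # attach both queues to the worker
remove_worker_queues("edge-host-1", ["etl"])  # later, drop one of them again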

{apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/src/airflow/providers/edge3/cli/worker.py

@@ -26,6 +26,7 @@ from pathlib import Path
  from subprocess import Popen
  from time import sleep
  from typing import TYPE_CHECKING
+ from urllib.parse import urlparse

  from lockfile.pidlockfile import remove_existing_pidfile
  from requests import HTTPError
@@ -186,11 +187,13 @@ class EdgeWorker:
              setproctitle(f"airflow edge worker: {workload.ti.key}")

              try:
-                 base_url = conf.get("api", "base_url", fallback="/")
-                 # If it's a relative URL, use localhost:8080 as the default
-                 if base_url.startswith("/"):
-                     base_url = f"http://localhost:8080{base_url}"
-                 default_execution_api_server = f"{base_url.rstrip('/')}/execution/"
+                 api_url = conf.get("edge", "api_url")
+                 execution_api_server_url = conf.get("core", "execution_api_server_url", fallback="")
+                 if not execution_api_server_url:
+                     parsed = urlparse(api_url)
+                     execution_api_server_url = f"{parsed.scheme}://{parsed.netloc}/execution/"
+
+                 logger.info("Worker starting up server=execution_api_server_url=%s", execution_api_server_url)

                  supervise(
                      # This is the "wrong" ti type, but it duck types the same. TODO: Create a protocol for this.
@@ -199,9 +202,7 @@ class EdgeWorker:
                      dag_rel_path=workload.dag_rel_path,
                      bundle_info=workload.bundle_info,
                      token=workload.token,
-                     server=conf.get(
-                         "core", "execution_api_server_url", fallback=default_execution_api_server
-                     ),
+                     server=execution_api_server_url,
                      log_path=workload.log_path,
                  )
                  return 0
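
A standalone sketch, not part of the diff above, of the URL fallback the changed hunk implements: when ``core.execution_api_server_url`` is unset, the base is derived from ``edge.api_url``. The helper name is hypothetical; the example URL is the one used in deployment.rst:

# Sketch of the fallback: keep only scheme and host of [edge] api_url
# and append the /execution/ path.
from urllib.parse import urlparse

def derive_execution_api_server_url(api_url: str, configured: str = "") -> str:
    if configured:  # an explicitly configured [core] execution_api_server_url wins
        return configured
    parsed = urlparse(api_url)
    return f"{parsed.scheme}://{parsed.netloc}/execution/"

assert (
    derive_execution_api_server_url("https://your-hostname-and-port/edge_worker/v1/rpcapi")
    == "https://your-hostname-and-port/execution/"
)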

{apache_airflow_providers_edge3-1.1.1rc1 → apache_airflow_providers_edge3-1.1.2}/src/airflow/providers/edge3/example_dags/integration_test.py

@@ -27,7 +27,11 @@ from datetime import datetime
  from time import sleep

  from airflow.exceptions import AirflowNotFoundException
- from airflow.hooks.base import BaseHook
+
+ try:
+     from airflow.sdk import BaseHook
+ except ImportError:
+     from airflow.hooks.base import BaseHook  # type: ignore[attr-defined,no-redef]
  from airflow.utils.trigger_rule import TriggerRule

  try:
@@ -37,13 +41,13 @@ try:
      from airflow.sdk import DAG, Param, Variable, task, task_group
  except ImportError:
      # Airflow 2.10 compat
-     from airflow.decorators import task, task_group  # type: ignore[no-redef,attr-defined]
-     from airflow.models.dag import DAG  # type: ignore[no-redef,attr-defined,assignment]
-     from airflow.models.param import Param  # type: ignore[no-redef,attr-defined]
-     from airflow.models.variable import Variable  # type: ignore[no-redef,attr-defined]
-     from airflow.operators.bash import BashOperator  # type: ignore[no-redef,attr-defined]
-     from airflow.operators.empty import EmptyOperator  # type: ignore[no-redef,attr-defined]
-     from airflow.operators.python import PythonOperator  # type: ignore[no-redef,attr-defined]
+     from airflow.decorators import task, task_group  # type: ignore[attr-defined,no-redef]
+     from airflow.models.dag import DAG  # type: ignore[assignment]
+     from airflow.models.param import Param  # type: ignore[no-redef]
+     from airflow.models.variable import Variable
+     from airflow.operators.bash import BashOperator  # type: ignore[no-redef]
+     from airflow.operators.empty import EmptyOperator  # type: ignore[no-redef]
+     from airflow.operators.python import PythonOperator  # type: ignore[no-redef]

  with DAG(
      dag_id="integration_test",