apache-airflow-providers-edge3 1.1.1__tar.gz → 1.1.2rc1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (82)
  1. {apache_airflow_providers_edge3-1.1.1 → apache_airflow_providers_edge3-1.1.2rc1}/PKG-INFO +16 -38
  2. {apache_airflow_providers_edge3-1.1.1 → apache_airflow_providers_edge3-1.1.2rc1}/README.rst +11 -31
  3. {apache_airflow_providers_edge3-1.1.1 → apache_airflow_providers_edge3-1.1.2rc1}/docs/architecture.rst +1 -1
  4. {apache_airflow_providers_edge3-1.1.1 → apache_airflow_providers_edge3-1.1.2rc1}/docs/changelog.rst +32 -0
  5. {apache_airflow_providers_edge3-1.1.1 → apache_airflow_providers_edge3-1.1.2rc1}/docs/deployment.rst +8 -1
  6. {apache_airflow_providers_edge3-1.1.1 → apache_airflow_providers_edge3-1.1.2rc1}/docs/index.rst +8 -37
  7. {apache_airflow_providers_edge3-1.1.1 → apache_airflow_providers_edge3-1.1.2rc1}/docs/install_on_windows.rst +8 -3
  8. {apache_airflow_providers_edge3-1.1.1 → apache_airflow_providers_edge3-1.1.2rc1}/provider.yaml +2 -1
  9. {apache_airflow_providers_edge3-1.1.1 → apache_airflow_providers_edge3-1.1.2rc1}/pyproject.toml +5 -8
  10. {apache_airflow_providers_edge3-1.1.1 → apache_airflow_providers_edge3-1.1.2rc1}/src/airflow/providers/edge3/__init__.py +1 -1
  11. {apache_airflow_providers_edge3-1.1.1 → apache_airflow_providers_edge3-1.1.2rc1}/src/airflow/providers/edge3/cli/edge_command.py +1 -1
  12. {apache_airflow_providers_edge3-1.1.1 → apache_airflow_providers_edge3-1.1.2rc1}/src/airflow/providers/edge3/cli/worker.py +9 -8
  13. {apache_airflow_providers_edge3-1.1.1 → apache_airflow_providers_edge3-1.1.2rc1}/src/airflow/providers/edge3/example_dags/integration_test.py +5 -1
  14. {apache_airflow_providers_edge3-1.1.1 → apache_airflow_providers_edge3-1.1.2rc1}/src/airflow/providers/edge3/example_dags/win_test.py +7 -3
  15. {apache_airflow_providers_edge3-1.1.1 → apache_airflow_providers_edge3-1.1.2rc1}/src/airflow/providers/edge3/executors/edge_executor.py +5 -4
  16. apache_airflow_providers_edge3-1.1.2rc1/src/airflow/providers/edge3/plugins/edge_executor_plugin.py +237 -0
  17. {apache_airflow_providers_edge3-1.1.1 → apache_airflow_providers_edge3-1.1.2rc1}/src/airflow/providers/edge3/worker_api/routes/_v2_compat.py +2 -2
  18. {apache_airflow_providers_edge3-1.1.1 → apache_airflow_providers_edge3-1.1.2rc1}/tests/unit/edge3/cli/test_worker.py +46 -0
  19. {apache_airflow_providers_edge3-1.1.1 → apache_airflow_providers_edge3-1.1.2rc1}/tests/unit/edge3/plugins/test_edge_executor_plugin.py +5 -2
  20. apache_airflow_providers_edge3-1.1.1/src/airflow/providers/edge3/plugins/edge_executor_plugin.py +0 -229
  21. {apache_airflow_providers_edge3-1.1.1 → apache_airflow_providers_edge3-1.1.2rc1}/docs/cli-ref.rst +0 -0
  22. {apache_airflow_providers_edge3-1.1.1 → apache_airflow_providers_edge3-1.1.2rc1}/docs/commits.rst +0 -0
  23. {apache_airflow_providers_edge3-1.1.1 → apache_airflow_providers_edge3-1.1.2rc1}/docs/conf.py +0 -0
  24. {apache_airflow_providers_edge3-1.1.1 → apache_airflow_providers_edge3-1.1.2rc1}/docs/configurations-ref.rst +0 -0
  25. {apache_airflow_providers_edge3-1.1.1 → apache_airflow_providers_edge3-1.1.2rc1}/docs/edge_executor.rst +0 -0
  26. {apache_airflow_providers_edge3-1.1.1 → apache_airflow_providers_edge3-1.1.2rc1}/docs/img/distributed_architecture.svg +0 -0
  27. {apache_airflow_providers_edge3-1.1.1 → apache_airflow_providers_edge3-1.1.2rc1}/docs/img/edge_package.svg +0 -0
  28. {apache_airflow_providers_edge3-1.1.1 → apache_airflow_providers_edge3-1.1.2rc1}/docs/img/worker_hosts.png +0 -0
  29. {apache_airflow_providers_edge3-1.1.1 → apache_airflow_providers_edge3-1.1.2rc1}/docs/img/worker_maintenance.png +0 -0
  30. {apache_airflow_providers_edge3-1.1.1 → apache_airflow_providers_edge3-1.1.2rc1}/docs/installing-providers-from-sources.rst +0 -0
  31. {apache_airflow_providers_edge3-1.1.1 → apache_airflow_providers_edge3-1.1.2rc1}/docs/security.rst +0 -0
  32. {apache_airflow_providers_edge3-1.1.1 → apache_airflow_providers_edge3-1.1.2rc1}/docs/ui_plugin.rst +0 -0
  33. {apache_airflow_providers_edge3-1.1.1 → apache_airflow_providers_edge3-1.1.2rc1}/docs/why_edge.rst +0 -0
  34. {apache_airflow_providers_edge3-1.1.1 → apache_airflow_providers_edge3-1.1.2rc1}/src/airflow/__init__.py +0 -0
  35. {apache_airflow_providers_edge3-1.1.1 → apache_airflow_providers_edge3-1.1.2rc1}/src/airflow/providers/__init__.py +0 -0
  36. {apache_airflow_providers_edge3-1.1.1 → apache_airflow_providers_edge3-1.1.2rc1}/src/airflow/providers/edge3/LICENSE +0 -0
  37. {apache_airflow_providers_edge3-1.1.1 → apache_airflow_providers_edge3-1.1.2rc1}/src/airflow/providers/edge3/cli/__init__.py +0 -0
  38. {apache_airflow_providers_edge3-1.1.1 → apache_airflow_providers_edge3-1.1.2rc1}/src/airflow/providers/edge3/cli/api_client.py +0 -0
  39. {apache_airflow_providers_edge3-1.1.1 → apache_airflow_providers_edge3-1.1.2rc1}/src/airflow/providers/edge3/cli/dataclasses.py +0 -0
  40. {apache_airflow_providers_edge3-1.1.1 → apache_airflow_providers_edge3-1.1.2rc1}/src/airflow/providers/edge3/cli/signalling.py +0 -0
  41. {apache_airflow_providers_edge3-1.1.1 → apache_airflow_providers_edge3-1.1.2rc1}/src/airflow/providers/edge3/example_dags/__init__.py +0 -0
  42. {apache_airflow_providers_edge3-1.1.1 → apache_airflow_providers_edge3-1.1.2rc1}/src/airflow/providers/edge3/example_dags/win_notepad.py +0 -0
  43. {apache_airflow_providers_edge3-1.1.1 → apache_airflow_providers_edge3-1.1.2rc1}/src/airflow/providers/edge3/executors/__init__.py +0 -0
  44. {apache_airflow_providers_edge3-1.1.1 → apache_airflow_providers_edge3-1.1.2rc1}/src/airflow/providers/edge3/get_provider_info.py +0 -0
  45. {apache_airflow_providers_edge3-1.1.1 → apache_airflow_providers_edge3-1.1.2rc1}/src/airflow/providers/edge3/models/__init__.py +0 -0
  46. {apache_airflow_providers_edge3-1.1.1 → apache_airflow_providers_edge3-1.1.2rc1}/src/airflow/providers/edge3/models/edge_job.py +0 -0
  47. {apache_airflow_providers_edge3-1.1.1 → apache_airflow_providers_edge3-1.1.2rc1}/src/airflow/providers/edge3/models/edge_logs.py +0 -0
  48. {apache_airflow_providers_edge3-1.1.1 → apache_airflow_providers_edge3-1.1.2rc1}/src/airflow/providers/edge3/models/edge_worker.py +0 -0
  49. {apache_airflow_providers_edge3-1.1.1 → apache_airflow_providers_edge3-1.1.2rc1}/src/airflow/providers/edge3/openapi/__init__.py +0 -0
  50. {apache_airflow_providers_edge3-1.1.1 → apache_airflow_providers_edge3-1.1.2rc1}/src/airflow/providers/edge3/openapi/edge_worker_api_v1.yaml +0 -0
  51. {apache_airflow_providers_edge3-1.1.1 → apache_airflow_providers_edge3-1.1.2rc1}/src/airflow/providers/edge3/plugins/__init__.py +0 -0
  52. {apache_airflow_providers_edge3-1.1.1 → apache_airflow_providers_edge3-1.1.2rc1}/src/airflow/providers/edge3/plugins/templates/edge_worker_hosts.html +0 -0
  53. {apache_airflow_providers_edge3-1.1.1 → apache_airflow_providers_edge3-1.1.2rc1}/src/airflow/providers/edge3/plugins/templates/edge_worker_jobs.html +0 -0
  54. {apache_airflow_providers_edge3-1.1.1 → apache_airflow_providers_edge3-1.1.2rc1}/src/airflow/providers/edge3/version_compat.py +0 -0
  55. {apache_airflow_providers_edge3-1.1.1 → apache_airflow_providers_edge3-1.1.2rc1}/src/airflow/providers/edge3/worker_api/__init__.py +0 -0
  56. {apache_airflow_providers_edge3-1.1.1 → apache_airflow_providers_edge3-1.1.2rc1}/src/airflow/providers/edge3/worker_api/app.py +0 -0
  57. {apache_airflow_providers_edge3-1.1.1 → apache_airflow_providers_edge3-1.1.2rc1}/src/airflow/providers/edge3/worker_api/auth.py +0 -0
  58. {apache_airflow_providers_edge3-1.1.1 → apache_airflow_providers_edge3-1.1.2rc1}/src/airflow/providers/edge3/worker_api/datamodels.py +0 -0
  59. {apache_airflow_providers_edge3-1.1.1 → apache_airflow_providers_edge3-1.1.2rc1}/src/airflow/providers/edge3/worker_api/routes/__init__.py +0 -0
  60. {apache_airflow_providers_edge3-1.1.1 → apache_airflow_providers_edge3-1.1.2rc1}/src/airflow/providers/edge3/worker_api/routes/_v2_routes.py +0 -0
  61. {apache_airflow_providers_edge3-1.1.1 → apache_airflow_providers_edge3-1.1.2rc1}/src/airflow/providers/edge3/worker_api/routes/health.py +0 -0
  62. {apache_airflow_providers_edge3-1.1.1 → apache_airflow_providers_edge3-1.1.2rc1}/src/airflow/providers/edge3/worker_api/routes/jobs.py +0 -0
  63. {apache_airflow_providers_edge3-1.1.1 → apache_airflow_providers_edge3-1.1.2rc1}/src/airflow/providers/edge3/worker_api/routes/logs.py +0 -0
  64. {apache_airflow_providers_edge3-1.1.1 → apache_airflow_providers_edge3-1.1.2rc1}/src/airflow/providers/edge3/worker_api/routes/worker.py +0 -0
  65. {apache_airflow_providers_edge3-1.1.1 → apache_airflow_providers_edge3-1.1.2rc1}/tests/conftest.py +0 -0
  66. {apache_airflow_providers_edge3-1.1.1 → apache_airflow_providers_edge3-1.1.2rc1}/tests/unit/__init__.py +0 -0
  67. {apache_airflow_providers_edge3-1.1.1 → apache_airflow_providers_edge3-1.1.2rc1}/tests/unit/edge3/__init__.py +0 -0
  68. {apache_airflow_providers_edge3-1.1.1 → apache_airflow_providers_edge3-1.1.2rc1}/tests/unit/edge3/cli/__init__.py +0 -0
  69. {apache_airflow_providers_edge3-1.1.1 → apache_airflow_providers_edge3-1.1.2rc1}/tests/unit/edge3/cli/test_api_client.py +0 -0
  70. {apache_airflow_providers_edge3-1.1.1 → apache_airflow_providers_edge3-1.1.2rc1}/tests/unit/edge3/cli/test_dataclasses.py +0 -0
  71. {apache_airflow_providers_edge3-1.1.1 → apache_airflow_providers_edge3-1.1.2rc1}/tests/unit/edge3/cli/test_edge_command.py +0 -0
  72. {apache_airflow_providers_edge3-1.1.1 → apache_airflow_providers_edge3-1.1.2rc1}/tests/unit/edge3/cli/test_signalling.py +0 -0
  73. {apache_airflow_providers_edge3-1.1.1 → apache_airflow_providers_edge3-1.1.2rc1}/tests/unit/edge3/executors/__init__.py +0 -0
  74. {apache_airflow_providers_edge3-1.1.1 → apache_airflow_providers_edge3-1.1.2rc1}/tests/unit/edge3/executors/test_edge_executor.py +0 -0
  75. {apache_airflow_providers_edge3-1.1.1 → apache_airflow_providers_edge3-1.1.2rc1}/tests/unit/edge3/models/__init__.py +0 -0
  76. {apache_airflow_providers_edge3-1.1.1 → apache_airflow_providers_edge3-1.1.2rc1}/tests/unit/edge3/plugins/__init__.py +0 -0
  77. {apache_airflow_providers_edge3-1.1.1 → apache_airflow_providers_edge3-1.1.2rc1}/tests/unit/edge3/worker_api/__init__.py +0 -0
  78. {apache_airflow_providers_edge3-1.1.1 → apache_airflow_providers_edge3-1.1.2rc1}/tests/unit/edge3/worker_api/routes/__init__.py +0 -0
  79. {apache_airflow_providers_edge3-1.1.1 → apache_airflow_providers_edge3-1.1.2rc1}/tests/unit/edge3/worker_api/routes/test_health.py +0 -0
  80. {apache_airflow_providers_edge3-1.1.1 → apache_airflow_providers_edge3-1.1.2rc1}/tests/unit/edge3/worker_api/routes/test_jobs.py +0 -0
  81. {apache_airflow_providers_edge3-1.1.1 → apache_airflow_providers_edge3-1.1.2rc1}/tests/unit/edge3/worker_api/routes/test_logs.py +0 -0
  82. {apache_airflow_providers_edge3-1.1.1 → apache_airflow_providers_edge3-1.1.2rc1}/tests/unit/edge3/worker_api/routes/test_worker.py +0 -0
@@ -1,11 +1,11 @@
  Metadata-Version: 2.4
  Name: apache-airflow-providers-edge3
- Version: 1.1.1
+ Version: 1.1.2rc1
  Summary: Provider package apache-airflow-providers-edge3 for Apache Airflow
  Keywords: airflow-provider,edge3,airflow,integration
  Author-email: Apache Software Foundation <dev@airflow.apache.org>
  Maintainer-email: Apache Software Foundation <dev@airflow.apache.org>
- Requires-Python: ~=3.9
+ Requires-Python: ~=3.10
  Description-Content-Type: text/x-rst
  Classifier: Development Status :: 5 - Production/Stable
  Classifier: Environment :: Console
@@ -15,18 +15,16 @@ Classifier: Intended Audience :: System Administrators
  Classifier: Framework :: Apache Airflow
  Classifier: Framework :: Apache Airflow :: Provider
  Classifier: License :: OSI Approved :: Apache Software License
- Classifier: Programming Language :: Python :: 3.9
  Classifier: Programming Language :: Python :: 3.10
  Classifier: Programming Language :: Python :: 3.11
  Classifier: Programming Language :: Python :: 3.12
  Classifier: Topic :: System :: Monitoring
- Requires-Dist: apache-airflow>=2.10.0
- Requires-Dist: apache-airflow-providers-fab>=1.5.3
+ Requires-Dist: apache-airflow>=2.10.0rc1
  Requires-Dist: pydantic>=2.11.0
  Requires-Dist: retryhttp>=1.2.0,!=1.3.0
  Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
- Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-edge3/1.1.1/changelog.html
- Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-edge3/1.1.1
+ Project-URL: Changelog, https://airflow.staged.apache.org/docs/apache-airflow-providers-edge3/1.1.2/changelog.html
+ Project-URL: Documentation, https://airflow.staged.apache.org/docs/apache-airflow-providers-edge3/1.1.2
  Project-URL: Mastodon, https://fosstodon.org/@airflow
  Project-URL: Slack Chat, https://s.apache.org/airflow-slack
  Project-URL: Source Code, https://github.com/apache/airflow
@@ -57,7 +55,7 @@ Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/

  Package ``apache-airflow-providers-edge3``

- Release: ``1.1.1``
+ Release: ``1.1.2``


  Handle edge workers on remote sites via HTTP(s) connection and orchestrates work over distributed sites.
@@ -82,7 +80,7 @@ This is a provider package for ``edge3`` provider. All classes for this provider
  are in ``airflow.providers.edge3`` python package.

  You can find package information and changelog for the provider
- in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-edge3/1.1.1/>`_.
+ in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-edge3/1.1.2/>`_.

  Installation
  ------------
@@ -91,39 +89,19 @@ You can install this package on top of an existing Airflow 2 installation (see `
  for the minimum Airflow version supported) via
  ``pip install apache-airflow-providers-edge3``

- The package supports the following python versions: 3.9,3.10,3.11,3.12
+ The package supports the following python versions: 3.10,3.11,3.12

  Requirements
  ------------

- ================================ ===================
- PIP package                      Version required
- ================================ ===================
- ``apache-airflow``               ``>=2.10.0``
- ``apache-airflow-providers-fab`` ``>=1.5.3``
- ``pydantic``                     ``>=2.11.0``
- ``retryhttp``                    ``>=1.2.0,!=1.3.0``
- ================================ ===================
-
- Cross provider package dependencies
- -----------------------------------
-
- Those are dependencies that might be needed in order to use all the features of the package.
- You need to install the specified providers in order to use them.
-
- You can install such cross-provider dependencies when installing from PyPI. For example:
-
- .. code-block:: bash
-
-     pip install apache-airflow-providers-edge3[fab]
-
-
- ============================================================================================== =======
- Dependent package                                                                              Extra
- ============================================================================================== =======
- `apache-airflow-providers-fab <https://airflow.apache.org/docs/apache-airflow-providers-fab>`_ ``fab``
- ============================================================================================== =======
+ ================== ===================
+ PIP package        Version required
+ ================== ===================
+ ``apache-airflow`` ``>=2.10.0``
+ ``pydantic``       ``>=2.11.0``
+ ``retryhttp``      ``>=1.2.0,!=1.3.0``
+ ================== ===================

  The changelog for the provider package can be found in the
- `changelog <https://airflow.apache.org/docs/apache-airflow-providers-edge3/1.1.1/changelog.html>`_.
+ `changelog <https://airflow.apache.org/docs/apache-airflow-providers-edge3/1.1.2/changelog.html>`_.
@@ -23,7 +23,7 @@

  Package ``apache-airflow-providers-edge3``

- Release: ``1.1.1``
+ Release: ``1.1.2``


  Handle edge workers on remote sites via HTTP(s) connection and orchestrates work over distributed sites.
@@ -48,7 +48,7 @@ This is a provider package for ``edge3`` provider. All classes for this provider
  are in ``airflow.providers.edge3`` python package.

  You can find package information and changelog for the provider
- in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-edge3/1.1.1/>`_.
+ in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-edge3/1.1.2/>`_.

  Installation
  ------------
@@ -57,38 +57,18 @@ You can install this package on top of an existing Airflow 2 installation (see `
  for the minimum Airflow version supported) via
  ``pip install apache-airflow-providers-edge3``

- The package supports the following python versions: 3.9,3.10,3.11,3.12
+ The package supports the following python versions: 3.10,3.11,3.12

  Requirements
  ------------

- ================================ ===================
- PIP package                      Version required
- ================================ ===================
- ``apache-airflow``               ``>=2.10.0``
- ``apache-airflow-providers-fab`` ``>=1.5.3``
- ``pydantic``                     ``>=2.11.0``
- ``retryhttp``                    ``>=1.2.0,!=1.3.0``
- ================================ ===================
-
- Cross provider package dependencies
- -----------------------------------
-
- Those are dependencies that might be needed in order to use all the features of the package.
- You need to install the specified providers in order to use them.
-
- You can install such cross-provider dependencies when installing from PyPI. For example:
-
- .. code-block:: bash
-
-     pip install apache-airflow-providers-edge3[fab]
-
-
- ============================================================================================== =======
- Dependent package                                                                              Extra
- ============================================================================================== =======
- `apache-airflow-providers-fab <https://airflow.apache.org/docs/apache-airflow-providers-fab>`_ ``fab``
- ============================================================================================== =======
+ ================== ===================
+ PIP package        Version required
+ ================== ===================
+ ``apache-airflow`` ``>=2.10.0``
+ ``pydantic``       ``>=2.11.0``
+ ``retryhttp``      ``>=1.2.0,!=1.3.0``
+ ================== ===================

  The changelog for the provider package can be found in the
- `changelog <https://airflow.apache.org/docs/apache-airflow-providers-edge3/1.1.1/changelog.html>`_.
+ `changelog <https://airflow.apache.org/docs/apache-airflow-providers-edge3/1.1.2/changelog.html>`_.
@@ -141,7 +141,7 @@ The following states are used to track the worker:
      TERMINATING->OFFLINE[label="on clean shutdown if running tasks = 0"];
  }

- See also https://github.com/apache/airflow/blob/main/providers/edge3/src/airflow/providers/edge3/models/edge_worker.py#L45
+ See also :py:class:`airflow.providers.edge3.models.edge_worker.EdgeWorkerState`
  for a documentation of details of all states of the Edge Worker.

  Feature Backlog Edge Provider
@@ -27,6 +27,38 @@
  Changelog
  ---------

+ 1.1.2
+ .....
+
+ Bug Fixes
+ ~~~~~~~~~
+
+ * ``Fix UnboundLocalError for 'edge_job_command_len' (#52328)``
+
+ Misc
+ ~~~~
+
+ * ``Move 'BaseHook' implementation to task SDK (#51873)``
+ * ``Force the definition of 'execution_api_server_url' based on 'api_url' (#52184)``
+ * ``Drop support for Python 3.9 (#52072)``
+ * ``Remove FAB dependency from Edge3 Provider (#51995)``
+
+ Doc-only
+ ~~~~~~~~
+
+ * ``Clean some leftovers of Python 3.9 removal - All the rest (#52432)``
+ * ``Update documentation for forcing core execution_api_server_url (#52447)``
+ * ``Fix spelling in edge provider (#52169)``
+ * ``Add docs for edge execution_api_server_url (#52082)``
+ * ``Include docs for Windows (#52004)``
+ * ``Document EdgeExecutor migration from 'internal_api_secret_key' to 'jwt_secret' (#51905)``
+ * ``Fix Edge State Model Link (#51860)``
+
+ .. Below changes are excluded from the changelog. Move them to
+    appropriate section above if needed. Do not delete the lines(!):
+    * ``Revert "Revert "Remove FAB dependency from Edge3 Provider (#51995)"" (#52000)``
+    * ``Revert "Remove FAB dependency from Edge3 Provider (#51995)" (#51998)``
+
  1.1.1
  .....

@@ -44,12 +44,19 @@ Here are a few imperative requirements for your workers:

  Minimum Airflow configuration settings for the Edge Worker to make it running is:

+ - Section ``[api_auth]``
+
+   - ``jwt_secret``: A matching secret to that on the api-server (starting from version 3.0.0).
+
  - Section ``[core]``

+   - ``execution_api_server_url``: If not set, the base URL from ``edge.api_url`` will be used. For example,
+     when ``edge.api_url`` is set to ``https://your-hostname-and-port/edge_worker/v1/rpcapi``, it will
+     default to ``https://your-hostname-and-port/execution/``.
    - ``executor``: Executor must be set or added to be ``airflow.providers.edge3.executors.EdgeExecutor``
    - ``internal_api_secret_key``: An encryption key must be set on api-server and Edge Worker component as
      shared secret to authenticate traffic. It should be a random string like the fernet key
-     (but preferably not the same).
+     (for versions earlier than 3.0.0).

  - Section ``[edge]``

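Note: the defaulting rule documented in the added lines above can be sketched in a few lines of Python. The helper name below is illustrative only, not the provider's API; the provider performs the equivalent derivation inline in ``worker.py`` (see the worker.py hunk further down).

.. code-block:: python

    from urllib.parse import urlparse

    def default_execution_api_server_url(edge_api_url: str) -> str:
        """Sketch of the documented default: keep scheme and host/port, replace the path with /execution/."""
        parsed = urlparse(edge_api_url)
        return f"{parsed.scheme}://{parsed.netloc}/execution/"

    # Matches the documented example:
    print(default_execution_api_server_url("https://your-hostname-and-port/edge_worker/v1/rpcapi"))
    # -> https://your-hostname-and-port/execution/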
@@ -90,7 +90,7 @@ Additional REST API endpoints are provided to distribute tasks and manage the ed
  are provided by the API server.


- Release: 1.1.1
+ Release: 1.1.2

  Provider package
  ----------------
@@ -110,39 +110,10 @@ Requirements

  The minimum Apache Airflow version supported by this provider distribution is ``2.10.0``.

- ================================ ===================
- PIP package                      Version required
- ================================ ===================
- ``apache-airflow``               ``>=2.10.0``
- ``apache-airflow-providers-fab`` ``>=1.5.3``
- ``pydantic``                     ``>=2.11.0``
- ``retryhttp``                    ``>=1.2.0,!=1.3.0``
- ================================ ===================
-
- Cross provider package dependencies
- -----------------------------------
-
- Those are dependencies that might be needed in order to use all the features of the package.
- You need to install the specified provider distributions in order to use them.
-
- You can install such cross-provider dependencies when installing from PyPI. For example:
-
- .. code-block:: bash
-
-     pip install apache-airflow-providers-edge3[fab]
-
-
- ============================================================================================== =======
- Dependent package                                                                              Extra
- ============================================================================================== =======
- `apache-airflow-providers-fab <https://airflow.apache.org/docs/apache-airflow-providers-fab>`_ ``fab``
- ============================================================================================== =======
-
- Downloading official packages
- -----------------------------
-
- You can download officially released packages and verify their checksums and signatures from the
- `Official Apache Download site <https://downloads.apache.org/airflow/providers/>`_
-
- * `The apache-airflow-providers-edge3 1.1.1 sdist package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_edge3-1.1.1.tar.gz>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_edge3-1.1.1.tar.gz.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_edge3-1.1.1.tar.gz.sha512>`__)
- * `The apache-airflow-providers-edge3 1.1.1 wheel package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_edge3-1.1.1-py3-none-any.whl>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_edge3-1.1.1-py3-none-any.whl.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_edge3-1.1.1-py3-none-any.whl.sha512>`__)
+ ================== ===================
+ PIP package        Version required
+ ================== ===================
+ ``apache-airflow`` ``>=2.10.0``
+ ``pydantic``       ``>=2.11.0``
+ ``retryhttp``      ``>=1.2.0,!=1.3.0``
+ ================== ===================
@@ -29,7 +29,7 @@ Install Edge Worker on Windows
  The setup was tested on Windows 10 with Python 3.12.8, 64-bit. Backend for tests was Airflow 2.10.5.
  To setup a instance of Edge Worker on Windows, you need to follow the steps below:

- 1. Install Python 3.9 or higher.
+ 1. Install Python 3.10 or higher.
  2. Create an empty folder as base to start with. In our example it is ``C:\Airflow``.
  3. Start Shell/Command Line in ``C:\Airflow`` and create a new virtual environment via: ``python -m venv venv``
  4. Activate the virtual environment via: ``venv\Scripts\activate.bat``
@@ -39,8 +39,8 @@ To setup a instance of Edge Worker on Windows, you need to follow the steps belo
     (At least the DAG files which should be executed on the edge alongside the dependencies.)
  7. Collect needed parameters from your running Airflow backend, at least the following:

+    - ``api_auth`` / ``jwt_token``: The shared secret key between the api-server and the Edge Worker
     - ``edge`` / ``api_url``: The HTTP(s) endpoint where the Edge Worker connects to
-    - ``core`` / ``internal_api_secret_key``: The shared secret key between the api-server and the Edge Worker
     - Any proxy details if applicable for your environment.

  8. Create a worker start script to prevent repeated typing. Create a new file ``start_worker.bat`` in
@@ -49,11 +49,16 @@ To setup a instance of Edge Worker on Windows, you need to follow the steps belo
  .. code-block:: bash

     @echo off
+    REM For versions 3.0.0 or later
+    set AIRFLOW__API_AUTH__JWT_SECRET=<matching the api-server...>
+    REM for versions earlier than 3.0.0
+    set AIRFLOW__CORE__INTERNAL_API_SECRET_KEY=<use this as configured centrally in api-server...>
+
+    REM For all versions
     set AIRFLOW__CORE__DAGS_FOLDER=dags
     set AIRFLOW__LOGGING__BASE_LOG_FOLDER=edge_logs
     set AIRFLOW__EDGE__API_URL=https://your-hostname-and-port/edge_worker/v1/rpcapi
     set AIRFLOW__CORE__EXECUTOR=airflow.providers.edge3.executors.edge_executor.EdgeExecutor
-    set AIRFLOW__CORE__INTERNAL_API_SECRET_KEY=<use this as configured centrally in api-server...>
     set AIRFLOW__CORE__LOAD_EXAMPLES=False
     set AIRFLOW_ENABLE_AIP_44=true
     @REM Add if needed: set http_proxy=http://my-company-proxy.com:3128
@@ -33,13 +33,14 @@ description: |
    are provided by the API server.

  state: ready
- source-date-epoch: 1749896427
+ source-date-epoch: 1751473321

  # Note that those versions are maintained by release manager - do not update them manually
  # with the exception of case where other provider in sources has >= new provider version.
  # In such case adding >= NEW_VERSION and bumping to NEW_VERSION in a provider have
  # to be done in the same PR
  versions:
+   - 1.1.2
    - 1.1.1
    - 1.1.0
    - 1.0.0
@@ -25,7 +25,7 @@ build-backend = "flit_core.buildapi"

  [project]
  name = "apache-airflow-providers-edge3"
- version = "1.1.1"
+ version = "1.1.2rc1"
  description = "Provider package apache-airflow-providers-edge3 for Apache Airflow"
  readme = "README.rst"
  authors = [
@@ -44,21 +44,19 @@ classifiers = [
    "Framework :: Apache Airflow",
    "Framework :: Apache Airflow :: Provider",
    "License :: OSI Approved :: Apache Software License",
-   "Programming Language :: Python :: 3.9",
    "Programming Language :: Python :: 3.10",
    "Programming Language :: Python :: 3.11",
    "Programming Language :: Python :: 3.12",
    "Topic :: System :: Monitoring",
  ]
- requires-python = "~=3.9"
+ requires-python = "~=3.10"

  # The dependencies should be modified in place in the generated file.
  # Any change in the dependencies is preserved when the file is regenerated
  # Make sure to run ``breeze static-checks --type update-providers-dependencies --all-files``
  # After you modify the dependencies, and rebuild your Breeze CI image with ``breeze ci-image build``
  dependencies = [
-   "apache-airflow>=2.10.0",
-   "apache-airflow-providers-fab>=1.5.3",
+   "apache-airflow>=2.10.0rc1",
    "pydantic>=2.11.0",
    "retryhttp>=1.2.0,!=1.3.0",
  ]
@@ -68,7 +66,6 @@ dev = [
    "apache-airflow",
    "apache-airflow-task-sdk",
    "apache-airflow-devel-common",
-   "apache-airflow-providers-fab",
    # Additional devel dependencies (do not remove this line and add extra development dependencies)
  ]

@@ -98,8 +95,8 @@ apache-airflow-providers-common-sql = {workspace = true}
  apache-airflow-providers-standard = {workspace = true}

  [project.urls]
- "Documentation" = "https://airflow.apache.org/docs/apache-airflow-providers-edge3/1.1.1"
- "Changelog" = "https://airflow.apache.org/docs/apache-airflow-providers-edge3/1.1.1/changelog.html"
+ "Documentation" = "https://airflow.staged.apache.org/docs/apache-airflow-providers-edge3/1.1.2"
+ "Changelog" = "https://airflow.staged.apache.org/docs/apache-airflow-providers-edge3/1.1.2/changelog.html"
  "Bug Tracker" = "https://github.com/apache/airflow/issues"
  "Source Code" = "https://github.com/apache/airflow"
  "Slack Chat" = "https://s.apache.org/airflow-slack"
@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version

  __all__ = ["__version__"]

- __version__ = "1.1.1"
+ __version__ = "1.1.2"

  if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
      "2.10.0"
@@ -64,7 +64,7 @@ EDGE_WORKER_HEADER = "\n".join(
  @providers_configuration_loaded
  def force_use_internal_api_on_edge_worker():
      """
-     Ensure that the environment is configured for the internal API without needing to declare it outside.
+     Ensure the environment is configured for the internal API without explicit declaration.

      This is only required for an Edge worker and must to be done before the Click CLI wrapper is initiated.
      That is because the CLI wrapper will attempt to establish a DB connection, which will fail before the
@@ -26,6 +26,7 @@ from pathlib import Path
  from subprocess import Popen
  from time import sleep
  from typing import TYPE_CHECKING
+ from urllib.parse import urlparse

  from lockfile.pidlockfile import remove_existing_pidfile
  from requests import HTTPError
@@ -186,11 +187,13 @@ class EdgeWorker:
  setproctitle(f"airflow edge worker: {workload.ti.key}")

  try:
-     base_url = conf.get("api", "base_url", fallback="/")
-     # If it's a relative URL, use localhost:8080 as the default
-     if base_url.startswith("/"):
-         base_url = f"http://localhost:8080{base_url}"
-     default_execution_api_server = f"{base_url.rstrip('/')}/execution/"
+     api_url = conf.get("edge", "api_url")
+     execution_api_server_url = conf.get("core", "execution_api_server_url", fallback=...)
+     if execution_api_server_url is ...:
+         parsed = urlparse(api_url)
+         execution_api_server_url = f"{parsed.scheme}://{parsed.netloc}/execution/"
+
+     logger.info("Worker starting up server=execution_api_server_url=%s", execution_api_server_url)

      supervise(
          # This is the "wrong" ti type, but it duck types the same. TODO: Create a protocol for this.
@@ -199,9 +202,7 @@ class EdgeWorker:
      dag_rel_path=workload.dag_rel_path,
      bundle_info=workload.bundle_info,
      token=workload.token,
-     server=conf.get(
-         "core", "execution_api_server_url", fallback=default_execution_api_server
-     ),
+     server=execution_api_server_url,
      log_path=workload.log_path,
  )
  return 0
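Note: the worker change above uses the Ellipsis object as a fallback sentinel so that an unset ``core.execution_api_server_url`` can be told apart from any real configured value. A minimal, self-contained sketch of that pattern, assuming a dict-based config stand-in (``_CONFIG`` and ``get_option`` are ours for illustration; the real code uses Airflow's ``conf.get``):

.. code-block:: python

    from urllib.parse import urlparse

    # Hypothetical stand-in for the configuration lookup.
    _CONFIG = {
        ("edge", "api_url"): "https://edge.example.com:8443/edge_worker/v1/rpcapi",
        # ("core", "execution_api_server_url") intentionally left unset here.
    }

    def get_option(section: str, key: str, fallback=None):
        return _CONFIG.get((section, key), fallback)

    # Ellipsis (...) cannot collide with a legitimately configured string,
    # unlike "" or None, so it cleanly marks "option not set".
    execution_api_server_url = get_option("core", "execution_api_server_url", fallback=...)
    if execution_api_server_url is ...:
        parsed = urlparse(get_option("edge", "api_url"))
        execution_api_server_url = f"{parsed.scheme}://{parsed.netloc}/execution/"

    print(execution_api_server_url)  # https://edge.example.com:8443/execution/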
@@ -27,7 +27,11 @@ from datetime import datetime
  from time import sleep

  from airflow.exceptions import AirflowNotFoundException
- from airflow.hooks.base import BaseHook
+
+ try:
+     from airflow.sdk import BaseHook
+ except ImportError:
+     from airflow.hooks.base import BaseHook  # type: ignore[attr-defined,no-redef]
  from airflow.utils.trigger_rule import TriggerRule

  try:
@@ -26,19 +26,23 @@ and serves as a PoC test for the Windows worker.
  from __future__ import annotations

  import os
- from collections.abc import Container, Sequence
+ from collections.abc import Callable, Container, Sequence
  from datetime import datetime
  from subprocess import STDOUT, Popen
  from time import sleep
- from typing import TYPE_CHECKING, Any, Callable
+ from typing import TYPE_CHECKING, Any

  from airflow.decorators import task, task_group
  from airflow.exceptions import AirflowException, AirflowNotFoundException, AirflowSkipException
- from airflow.hooks.base import BaseHook
  from airflow.models import BaseOperator
  from airflow.models.dag import DAG
  from airflow.models.variable import Variable
  from airflow.providers.standard.operators.empty import EmptyOperator
+
+ try:
+     from airflow.sdk import BaseHook
+ except ImportError:
+     from airflow.hooks.base import BaseHook  # type: ignore[attr-defined,no-redef]
  from airflow.sdk import Param
  from airflow.sdk.execution_time.context import context_to_airflow_vars
  from airflow.utils.trigger_rule import TriggerRule
@@ -21,7 +21,7 @@ import contextlib
  from collections.abc import Sequence
  from copy import deepcopy
  from datetime import datetime, timedelta
- from typing import TYPE_CHECKING, Any, Optional
+ from typing import TYPE_CHECKING, Any

  from sqlalchemy import delete, inspect, text
  from sqlalchemy.exc import NoSuchTableError
@@ -52,7 +52,7 @@ if TYPE_CHECKING:
      # TODO: Airflow 2 type hints; remove when Airflow 2 support is removed
      CommandType = Sequence[str]
      # Task tuple to send to be executed
-     TaskTuple = tuple[TaskInstanceKey, CommandType, Optional[str], Optional[Any]]
+     TaskTuple = tuple[TaskInstanceKey, CommandType, str | None, Any | None]

  PARALLELISM: int = conf.getint("core", "PARALLELISM")

@@ -72,6 +72,7 @@ class EdgeExecutor(BaseExecutor):
      """
      inspector = inspect(engine)
      edge_job_columns = None
+     edge_job_command_len = None
      with contextlib.suppress(NoSuchTableError):
          edge_job_schema = inspector.get_columns("edge_job")
          edge_job_columns = [column["name"] for column in edge_job_schema]
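Note: the single added line above is the fix for the ``UnboundLocalError`` listed in the 1.1.2 changelog. When ``contextlib.suppress`` swallows an exception before the assignment inside the block runs, the name must already be bound, otherwise reading it afterwards fails. A minimal sketch of the failure mode and the fix, with illustrative names only:

.. code-block:: python

    import contextlib

    def read_command_len(columns: dict) -> int | None:
        # Pre-initialize so the name is always bound, even when the suppressed
        # block raises before the assignment is reached.
        command_len = None
        with contextlib.suppress(KeyError):
            command_len = columns["command"]
        # Without the pre-initialization, a KeyError above would leave the name
        # unbound and this return would raise UnboundLocalError.
        return command_len

    print(read_command_len({}))                  # None
    print(read_command_len({"command": 2000}))   # 2000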
@@ -113,7 +114,7 @@ class EdgeExecutor(BaseExecutor):

  def _process_tasks(self, task_tuples: list[TaskTuple]) -> None:
      """
-     Temponary overwrite of _process_tasks function.
+     Temporary overwrite of _process_tasks function.

      Idea is to not change the interface of the execute_async function in BaseExecutor as it will be changed in Airflow 3.
      Edge worker needs task_instance in execute_async but BaseExecutor deletes this out of the self.queued_tasks.
@@ -132,7 +133,7 @@ class EdgeExecutor(BaseExecutor):
      session: Session = NEW_SESSION,
  ) -> None:
      """Execute asynchronously. Airflow 2.10 entry point to execute a task."""
-     # Use of a temponary trick to get task instance, will be changed with Airflow 3.0.0
+     # Use of a temporary trick to get task instance, will be changed with Airflow 3.0.0
      # code works together with _process_tasks overwrite to get task instance.
      # TaskInstance in fourth element
      task_instance = self.edge_queued_tasks[key][3]  # type: ignore[index]