apache-airflow-providers-edge3 1.1.2rc1__tar.gz → 1.1.3__tar.gz

This diff compares two publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between the versions as they appear in their respective public registries.
Files changed (81)
  1. {apache_airflow_providers_edge3-1.1.2rc1 → apache_airflow_providers_edge3-1.1.3}/PKG-INFO +11 -9
  2. {apache_airflow_providers_edge3-1.1.2rc1 → apache_airflow_providers_edge3-1.1.3}/README.rst +5 -4
  3. {apache_airflow_providers_edge3-1.1.2rc1 → apache_airflow_providers_edge3-1.1.3}/docs/architecture.rst +1 -1
  4. {apache_airflow_providers_edge3-1.1.2rc1 → apache_airflow_providers_edge3-1.1.3}/docs/changelog.rst +30 -0
  5. {apache_airflow_providers_edge3-1.1.2rc1 → apache_airflow_providers_edge3-1.1.3}/docs/deployment.rst +2 -0
  6. {apache_airflow_providers_edge3-1.1.2rc1 → apache_airflow_providers_edge3-1.1.3}/docs/index.rst +3 -1
  7. {apache_airflow_providers_edge3-1.1.2rc1 → apache_airflow_providers_edge3-1.1.3}/provider.yaml +2 -1
  8. {apache_airflow_providers_edge3-1.1.2rc1 → apache_airflow_providers_edge3-1.1.3}/pyproject.toml +6 -5
  9. {apache_airflow_providers_edge3-1.1.2rc1 → apache_airflow_providers_edge3-1.1.3}/src/airflow/__init__.py +1 -1
  10. {apache_airflow_providers_edge3-1.1.2rc1/tests/unit → apache_airflow_providers_edge3-1.1.3/src/airflow/providers}/__init__.py +1 -1
  11. {apache_airflow_providers_edge3-1.1.2rc1 → apache_airflow_providers_edge3-1.1.3}/src/airflow/providers/edge3/__init__.py +1 -1
  12. {apache_airflow_providers_edge3-1.1.2rc1 → apache_airflow_providers_edge3-1.1.3}/src/airflow/providers/edge3/cli/edge_command.py +65 -0
  13. {apache_airflow_providers_edge3-1.1.2rc1 → apache_airflow_providers_edge3-1.1.3}/src/airflow/providers/edge3/cli/worker.py +2 -2
  14. {apache_airflow_providers_edge3-1.1.2rc1 → apache_airflow_providers_edge3-1.1.3}/src/airflow/providers/edge3/example_dags/integration_test.py +7 -7
  15. {apache_airflow_providers_edge3-1.1.2rc1 → apache_airflow_providers_edge3-1.1.3}/src/airflow/providers/edge3/example_dags/win_test.py +9 -5
  16. {apache_airflow_providers_edge3-1.1.2rc1 → apache_airflow_providers_edge3-1.1.3}/src/airflow/providers/edge3/executors/edge_executor.py +53 -13
  17. {apache_airflow_providers_edge3-1.1.2rc1 → apache_airflow_providers_edge3-1.1.3}/src/airflow/providers/edge3/models/edge_worker.py +35 -1
  18. {apache_airflow_providers_edge3-1.1.2rc1 → apache_airflow_providers_edge3-1.1.3}/src/airflow/providers/edge3/plugins/edge_executor_plugin.py +14 -1
  19. {apache_airflow_providers_edge3-1.1.2rc1 → apache_airflow_providers_edge3-1.1.3}/src/airflow/providers/edge3/worker_api/auth.py +1 -1
  20. {apache_airflow_providers_edge3-1.1.2rc1 → apache_airflow_providers_edge3-1.1.3}/src/airflow/providers/edge3/worker_api/routes/_v2_compat.py +1 -1
  21. {apache_airflow_providers_edge3-1.1.2rc1 → apache_airflow_providers_edge3-1.1.3}/src/airflow/providers/edge3/worker_api/routes/_v2_routes.py +1 -1
  22. {apache_airflow_providers_edge3-1.1.2rc1/src/airflow/providers → apache_airflow_providers_edge3-1.1.3/tests/unit}/__init__.py +1 -1
  23. {apache_airflow_providers_edge3-1.1.2rc1 → apache_airflow_providers_edge3-1.1.3}/tests/unit/edge3/cli/test_worker.py +1 -0
  24. {apache_airflow_providers_edge3-1.1.2rc1 → apache_airflow_providers_edge3-1.1.3}/tests/unit/edge3/executors/test_edge_executor.py +100 -3
  25. {apache_airflow_providers_edge3-1.1.2rc1 → apache_airflow_providers_edge3-1.1.3}/tests/unit/edge3/plugins/test_edge_executor_plugin.py +27 -2
  26. {apache_airflow_providers_edge3-1.1.2rc1 → apache_airflow_providers_edge3-1.1.3}/docs/cli-ref.rst +0 -0
  27. {apache_airflow_providers_edge3-1.1.2rc1 → apache_airflow_providers_edge3-1.1.3}/docs/commits.rst +0 -0
  28. {apache_airflow_providers_edge3-1.1.2rc1 → apache_airflow_providers_edge3-1.1.3}/docs/conf.py +0 -0
  29. {apache_airflow_providers_edge3-1.1.2rc1 → apache_airflow_providers_edge3-1.1.3}/docs/configurations-ref.rst +0 -0
  30. {apache_airflow_providers_edge3-1.1.2rc1 → apache_airflow_providers_edge3-1.1.3}/docs/edge_executor.rst +0 -0
  31. {apache_airflow_providers_edge3-1.1.2rc1 → apache_airflow_providers_edge3-1.1.3}/docs/img/distributed_architecture.svg +0 -0
  32. {apache_airflow_providers_edge3-1.1.2rc1 → apache_airflow_providers_edge3-1.1.3}/docs/img/edge_package.svg +0 -0
  33. {apache_airflow_providers_edge3-1.1.2rc1 → apache_airflow_providers_edge3-1.1.3}/docs/img/worker_hosts.png +0 -0
  34. {apache_airflow_providers_edge3-1.1.2rc1 → apache_airflow_providers_edge3-1.1.3}/docs/img/worker_maintenance.png +0 -0
  35. {apache_airflow_providers_edge3-1.1.2rc1 → apache_airflow_providers_edge3-1.1.3}/docs/install_on_windows.rst +0 -0
  36. {apache_airflow_providers_edge3-1.1.2rc1 → apache_airflow_providers_edge3-1.1.3}/docs/installing-providers-from-sources.rst +0 -0
  37. {apache_airflow_providers_edge3-1.1.2rc1 → apache_airflow_providers_edge3-1.1.3}/docs/security.rst +0 -0
  38. {apache_airflow_providers_edge3-1.1.2rc1 → apache_airflow_providers_edge3-1.1.3}/docs/ui_plugin.rst +0 -0
  39. {apache_airflow_providers_edge3-1.1.2rc1 → apache_airflow_providers_edge3-1.1.3}/docs/why_edge.rst +0 -0
  40. {apache_airflow_providers_edge3-1.1.2rc1 → apache_airflow_providers_edge3-1.1.3}/src/airflow/providers/edge3/LICENSE +0 -0
  41. {apache_airflow_providers_edge3-1.1.2rc1 → apache_airflow_providers_edge3-1.1.3}/src/airflow/providers/edge3/cli/__init__.py +0 -0
  42. {apache_airflow_providers_edge3-1.1.2rc1 → apache_airflow_providers_edge3-1.1.3}/src/airflow/providers/edge3/cli/api_client.py +0 -0
  43. {apache_airflow_providers_edge3-1.1.2rc1 → apache_airflow_providers_edge3-1.1.3}/src/airflow/providers/edge3/cli/dataclasses.py +0 -0
  44. {apache_airflow_providers_edge3-1.1.2rc1 → apache_airflow_providers_edge3-1.1.3}/src/airflow/providers/edge3/cli/signalling.py +0 -0
  45. {apache_airflow_providers_edge3-1.1.2rc1 → apache_airflow_providers_edge3-1.1.3}/src/airflow/providers/edge3/example_dags/__init__.py +0 -0
  46. {apache_airflow_providers_edge3-1.1.2rc1 → apache_airflow_providers_edge3-1.1.3}/src/airflow/providers/edge3/example_dags/win_notepad.py +0 -0
  47. {apache_airflow_providers_edge3-1.1.2rc1 → apache_airflow_providers_edge3-1.1.3}/src/airflow/providers/edge3/executors/__init__.py +0 -0
  48. {apache_airflow_providers_edge3-1.1.2rc1 → apache_airflow_providers_edge3-1.1.3}/src/airflow/providers/edge3/get_provider_info.py +0 -0
  49. {apache_airflow_providers_edge3-1.1.2rc1 → apache_airflow_providers_edge3-1.1.3}/src/airflow/providers/edge3/models/__init__.py +0 -0
  50. {apache_airflow_providers_edge3-1.1.2rc1 → apache_airflow_providers_edge3-1.1.3}/src/airflow/providers/edge3/models/edge_job.py +0 -0
  51. {apache_airflow_providers_edge3-1.1.2rc1 → apache_airflow_providers_edge3-1.1.3}/src/airflow/providers/edge3/models/edge_logs.py +0 -0
  52. {apache_airflow_providers_edge3-1.1.2rc1 → apache_airflow_providers_edge3-1.1.3}/src/airflow/providers/edge3/openapi/__init__.py +0 -0
  53. {apache_airflow_providers_edge3-1.1.2rc1 → apache_airflow_providers_edge3-1.1.3}/src/airflow/providers/edge3/openapi/edge_worker_api_v1.yaml +0 -0
  54. {apache_airflow_providers_edge3-1.1.2rc1 → apache_airflow_providers_edge3-1.1.3}/src/airflow/providers/edge3/plugins/__init__.py +0 -0
  55. {apache_airflow_providers_edge3-1.1.2rc1 → apache_airflow_providers_edge3-1.1.3}/src/airflow/providers/edge3/plugins/templates/edge_worker_hosts.html +0 -0
  56. {apache_airflow_providers_edge3-1.1.2rc1 → apache_airflow_providers_edge3-1.1.3}/src/airflow/providers/edge3/plugins/templates/edge_worker_jobs.html +0 -0
  57. {apache_airflow_providers_edge3-1.1.2rc1 → apache_airflow_providers_edge3-1.1.3}/src/airflow/providers/edge3/version_compat.py +0 -0
  58. {apache_airflow_providers_edge3-1.1.2rc1 → apache_airflow_providers_edge3-1.1.3}/src/airflow/providers/edge3/worker_api/__init__.py +0 -0
  59. {apache_airflow_providers_edge3-1.1.2rc1 → apache_airflow_providers_edge3-1.1.3}/src/airflow/providers/edge3/worker_api/app.py +0 -0
  60. {apache_airflow_providers_edge3-1.1.2rc1 → apache_airflow_providers_edge3-1.1.3}/src/airflow/providers/edge3/worker_api/datamodels.py +0 -0
  61. {apache_airflow_providers_edge3-1.1.2rc1 → apache_airflow_providers_edge3-1.1.3}/src/airflow/providers/edge3/worker_api/routes/__init__.py +0 -0
  62. {apache_airflow_providers_edge3-1.1.2rc1 → apache_airflow_providers_edge3-1.1.3}/src/airflow/providers/edge3/worker_api/routes/health.py +0 -0
  63. {apache_airflow_providers_edge3-1.1.2rc1 → apache_airflow_providers_edge3-1.1.3}/src/airflow/providers/edge3/worker_api/routes/jobs.py +0 -0
  64. {apache_airflow_providers_edge3-1.1.2rc1 → apache_airflow_providers_edge3-1.1.3}/src/airflow/providers/edge3/worker_api/routes/logs.py +0 -0
  65. {apache_airflow_providers_edge3-1.1.2rc1 → apache_airflow_providers_edge3-1.1.3}/src/airflow/providers/edge3/worker_api/routes/worker.py +0 -0
  66. {apache_airflow_providers_edge3-1.1.2rc1 → apache_airflow_providers_edge3-1.1.3}/tests/conftest.py +0 -0
  67. {apache_airflow_providers_edge3-1.1.2rc1 → apache_airflow_providers_edge3-1.1.3}/tests/unit/edge3/__init__.py +0 -0
  68. {apache_airflow_providers_edge3-1.1.2rc1 → apache_airflow_providers_edge3-1.1.3}/tests/unit/edge3/cli/__init__.py +0 -0
  69. {apache_airflow_providers_edge3-1.1.2rc1 → apache_airflow_providers_edge3-1.1.3}/tests/unit/edge3/cli/test_api_client.py +0 -0
  70. {apache_airflow_providers_edge3-1.1.2rc1 → apache_airflow_providers_edge3-1.1.3}/tests/unit/edge3/cli/test_dataclasses.py +0 -0
  71. {apache_airflow_providers_edge3-1.1.2rc1 → apache_airflow_providers_edge3-1.1.3}/tests/unit/edge3/cli/test_edge_command.py +0 -0
  72. {apache_airflow_providers_edge3-1.1.2rc1 → apache_airflow_providers_edge3-1.1.3}/tests/unit/edge3/cli/test_signalling.py +0 -0
  73. {apache_airflow_providers_edge3-1.1.2rc1 → apache_airflow_providers_edge3-1.1.3}/tests/unit/edge3/executors/__init__.py +0 -0
  74. {apache_airflow_providers_edge3-1.1.2rc1 → apache_airflow_providers_edge3-1.1.3}/tests/unit/edge3/models/__init__.py +0 -0
  75. {apache_airflow_providers_edge3-1.1.2rc1 → apache_airflow_providers_edge3-1.1.3}/tests/unit/edge3/plugins/__init__.py +0 -0
  76. {apache_airflow_providers_edge3-1.1.2rc1 → apache_airflow_providers_edge3-1.1.3}/tests/unit/edge3/worker_api/__init__.py +0 -0
  77. {apache_airflow_providers_edge3-1.1.2rc1 → apache_airflow_providers_edge3-1.1.3}/tests/unit/edge3/worker_api/routes/__init__.py +0 -0
  78. {apache_airflow_providers_edge3-1.1.2rc1 → apache_airflow_providers_edge3-1.1.3}/tests/unit/edge3/worker_api/routes/test_health.py +0 -0
  79. {apache_airflow_providers_edge3-1.1.2rc1 → apache_airflow_providers_edge3-1.1.3}/tests/unit/edge3/worker_api/routes/test_jobs.py +0 -0
  80. {apache_airflow_providers_edge3-1.1.2rc1 → apache_airflow_providers_edge3-1.1.3}/tests/unit/edge3/worker_api/routes/test_logs.py +0 -0
  81. {apache_airflow_providers_edge3-1.1.2rc1 → apache_airflow_providers_edge3-1.1.3}/tests/unit/edge3/worker_api/routes/test_worker.py +0 -0
PKG-INFO
@@ -1,11 +1,11 @@
  Metadata-Version: 2.4
  Name: apache-airflow-providers-edge3
- Version: 1.1.2rc1
+ Version: 1.1.3
  Summary: Provider package apache-airflow-providers-edge3 for Apache Airflow
  Keywords: airflow-provider,edge3,airflow,integration
  Author-email: Apache Software Foundation <dev@airflow.apache.org>
  Maintainer-email: Apache Software Foundation <dev@airflow.apache.org>
- Requires-Python: ~=3.10
+ Requires-Python: >=3.10
  Description-Content-Type: text/x-rst
  Classifier: Development Status :: 5 - Production/Stable
  Classifier: Environment :: Console
@@ -18,13 +18,14 @@ Classifier: License :: OSI Approved :: Apache Software License
  Classifier: Programming Language :: Python :: 3.10
  Classifier: Programming Language :: Python :: 3.11
  Classifier: Programming Language :: Python :: 3.12
+ Classifier: Programming Language :: Python :: 3.13
  Classifier: Topic :: System :: Monitoring
- Requires-Dist: apache-airflow>=2.10.0rc1
+ Requires-Dist: apache-airflow>=2.10.0
  Requires-Dist: pydantic>=2.11.0
  Requires-Dist: retryhttp>=1.2.0,!=1.3.0
  Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
- Project-URL: Changelog, https://airflow.staged.apache.org/docs/apache-airflow-providers-edge3/1.1.2/changelog.html
- Project-URL: Documentation, https://airflow.staged.apache.org/docs/apache-airflow-providers-edge3/1.1.2
+ Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-edge3/1.1.3/changelog.html
+ Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-edge3/1.1.3
  Project-URL: Mastodon, https://fosstodon.org/@airflow
  Project-URL: Slack Chat, https://s.apache.org/airflow-slack
  Project-URL: Source Code, https://github.com/apache/airflow
@@ -55,8 +56,9 @@ Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/

  Package ``apache-airflow-providers-edge3``

- Release: ``1.1.2``
+ Release: ``1.1.3``

+ Release Date: ``|PypiReleaseDate|``

  Handle edge workers on remote sites via HTTP(s) connection and orchestrates work over distributed sites.

@@ -80,7 +82,7 @@ This is a provider package for ``edge3`` provider. All classes for this provider
  are in ``airflow.providers.edge3`` python package.

  You can find package information and changelog for the provider
- in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-edge3/1.1.2/>`_.
+ in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-edge3/1.1.3/>`_.

  Installation
  ------------
@@ -89,7 +91,7 @@ You can install this package on top of an existing Airflow 2 installation (see `
  for the minimum Airflow version supported) via
  ``pip install apache-airflow-providers-edge3``

- The package supports the following python versions: 3.10,3.11,3.12
+ The package supports the following python versions: 3.10,3.11,3.12,3.13

  Requirements
  ------------
@@ -103,5 +105,5 @@ PIP package Version required
  ================== ===================

  The changelog for the provider package can be found in the
- `changelog <https://airflow.apache.org/docs/apache-airflow-providers-edge3/1.1.2/changelog.html>`_.
+ `changelog <https://airflow.apache.org/docs/apache-airflow-providers-edge3/1.1.3/changelog.html>`_.

README.rst
@@ -23,8 +23,9 @@

  Package ``apache-airflow-providers-edge3``

- Release: ``1.1.2``
+ Release: ``1.1.3``

+ Release Date: ``|PypiReleaseDate|``

  Handle edge workers on remote sites via HTTP(s) connection and orchestrates work over distributed sites.

@@ -48,7 +49,7 @@ This is a provider package for ``edge3`` provider. All classes for this provider
  are in ``airflow.providers.edge3`` python package.

  You can find package information and changelog for the provider
- in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-edge3/1.1.2/>`_.
+ in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-edge3/1.1.3/>`_.

  Installation
  ------------
@@ -57,7 +58,7 @@ You can install this package on top of an existing Airflow 2 installation (see `
  for the minimum Airflow version supported) via
  ``pip install apache-airflow-providers-edge3``

- The package supports the following python versions: 3.10,3.11,3.12
+ The package supports the following python versions: 3.10,3.11,3.12,3.13

  Requirements
  ------------
@@ -71,4 +72,4 @@ PIP package Version required
  ================== ===================

  The changelog for the provider package can be found in the
- `changelog <https://airflow.apache.org/docs/apache-airflow-providers-edge3/1.1.2/changelog.html>`_.
+ `changelog <https://airflow.apache.org/docs/apache-airflow-providers-edge3/1.1.3/changelog.html>`_.
docs/architecture.rst
@@ -171,7 +171,7 @@ The following features are known missing and will be implemented in increments:

  - Tests

-   - System tests in Github, test the deployment of the worker with a Dag execution
+   - System tests in GitHub, test the deployment of the worker with a Dag execution
    - Test/Support on Windows for Edge Worker

  - Scaling test - Check and define boundaries of workers/jobs. Today it is known to
docs/changelog.rst
@@ -27,6 +27,18 @@
  Changelog
  ---------

+ 1.1.3
+ .....
+
+ Bug Fixes
+ ~~~~~~~~~
+
+ * ``Fix: Prevent duplicate edge_job insertions for deferrable tasks in EdgeExecutor (#53610) (#53927)``
+
+ .. Below changes are excluded from the changelog. Move them to
+    appropriate section above if needed. Do not delete the lines(!):
+    * ``Remove parameter from Edge example (#53997)``
+
  1.1.2
  .....

@@ -34,10 +46,23 @@ Bug Fixes
  ~~~~~~~~~

  * ``Fix UnboundLocalError for 'edge_job_command_len' (#52328)``
+ * ``Extend run detection to dev-mode to load plugin (#53576)``
+ * ``Add queue and remove queue cli commands for EdgeExecutor (#53505)``
+ * ``Ensure Edge Plugin for API endpoint is only loaded on API-Server and AF2 Webserver (#52952)``
+ * ``Fix unreachable code mypy warnings in edge3 provider (#53430)``
+ * ``Make edge3 provider compatible with mypy 1.16.1 (#53104)``
+ * ``Fix task configuration defaults for AbstractOperator (#52871)``

  Misc
  ~~~~

+ * ``Remove upper-binding for "python-requires" (#52980)``
+ * ``Temporarily switch to use >=,< pattern instead of '~=' (#52967)``
+ * ``Deprecate decorators from Core (#53629)``
+ * ``Add Python 3.13 support for Airflow. (#46891)``
+ * ``Cleanup type ignores in edge3 provider where possible (#53248)``
+ * ``Remove type ignore across codebase after mypy upgrade (#53243)``
+ * ``Remove deprecation in Edge for DEFAULT_QUEUE (#52954)``
  * ``Move 'BaseHook' implementation to task SDK (#51873)``
  * ``Force the definition of 'execution_api_server_url' based on 'api_url' (#52184)``
  * ``Drop support for Python 3.9 (#52072)``
@@ -58,6 +83,11 @@ Doc-only
     appropriate section above if needed. Do not delete the lines(!):
     * ``Revert "Revert "Remove FAB dependency from Edge3 Provider (#51995)"" (#52000)``
     * ``Revert "Remove FAB dependency from Edge3 Provider (#51995)" (#51998)``
+    * ``Make dag_version_id in TI non-nullable (#50825)``
+    * ``Fix spelling of GitHub brand name (#53735)``
+    * ``Replace mock.patch("utcnow") with time_machine in Edge Executor (#53670)``
+    * ``Prepare release for July 2025 1st provider wave (#52727)``
+

  1.1.1
  .....
docs/deployment.rst
@@ -190,3 +190,5 @@ instance. The commands are:
  - ``airflow edge remote-edge-worker-exit-maintenance``: Request a remote edge worker to exit maintenance mode
  - ``airflow edge shutdown-remote-edge-worker``: Shuts down a remote edge worker gracefully
  - ``airflow edge remove-remote-edge-worker``: Remove a worker instance from the cluster
+ - ``airflow edge add-worker-queues``: Add queues to an edge worker
+ - ``airflow edge remove-worker-queues``: Remove queues from an edge worker
docs/index.rst
@@ -90,7 +90,9 @@ Additional REST API endpoints are provided to distribute tasks and manage the ed
  are provided by the API server.


- Release: 1.1.2
+ Release: 1.1.3
+
+ Release Date: ``|PypiReleaseDate|``

  Provider package
  ----------------
provider.yaml
@@ -33,13 +33,14 @@ description: |
    are provided by the API server.

  state: ready
- source-date-epoch: 1751473321
+ source-date-epoch: 1754503142

  # Note that those versions are maintained by release manager - do not update them manually
  # with the exception of case where other provider in sources has >= new provider version.
  # In such case adding >= NEW_VERSION and bumping to NEW_VERSION in a provider have
  # to be done in the same PR
  versions:
+   - 1.1.3
    - 1.1.2
    - 1.1.1
    - 1.1.0
pyproject.toml
@@ -25,7 +25,7 @@ build-backend = "flit_core.buildapi"

  [project]
  name = "apache-airflow-providers-edge3"
- version = "1.1.2rc1"
+ version = "1.1.3"
  description = "Provider package apache-airflow-providers-edge3 for Apache Airflow"
  readme = "README.rst"
  authors = [
@@ -47,16 +47,17 @@ classifiers = [
      "Programming Language :: Python :: 3.10",
      "Programming Language :: Python :: 3.11",
      "Programming Language :: Python :: 3.12",
+     "Programming Language :: Python :: 3.13",
      "Topic :: System :: Monitoring",
  ]
- requires-python = "~=3.10"
+ requires-python = ">=3.10"

  # The dependencies should be modified in place in the generated file.
  # Any change in the dependencies is preserved when the file is regenerated
  # Make sure to run ``breeze static-checks --type update-providers-dependencies --all-files``
  # After you modify the dependencies, and rebuild your Breeze CI image with ``breeze ci-image build``
  dependencies = [
-     "apache-airflow>=2.10.0rc1",
+     "apache-airflow>=2.10.0",
      "pydantic>=2.11.0",
      "retryhttp>=1.2.0,!=1.3.0",
  ]
@@ -95,8 +96,8 @@ apache-airflow-providers-common-sql = {workspace = true}
  apache-airflow-providers-standard = {workspace = true}

  [project.urls]
- "Documentation" = "https://airflow.staged.apache.org/docs/apache-airflow-providers-edge3/1.1.2"
- "Changelog" = "https://airflow.staged.apache.org/docs/apache-airflow-providers-edge3/1.1.2/changelog.html"
+ "Documentation" = "https://airflow.apache.org/docs/apache-airflow-providers-edge3/1.1.3"
+ "Changelog" = "https://airflow.apache.org/docs/apache-airflow-providers-edge3/1.1.3/changelog.html"
  "Bug Tracker" = "https://github.com/apache/airflow/issues"
  "Source Code" = "https://github.com/apache/airflow"
  "Slack Chat" = "https://s.apache.org/airflow-slack"
src/airflow/__init__.py
@@ -14,4 +14,4 @@
  # KIND, either express or implied. See the License for the
  # specific language governing permissions and limitations
  # under the License.
- __path__ = __import__("pkgutil").extend_path(__path__, __name__)  # type: ignore
+ __path__ = __import__("pkgutil").extend_path(__path__, __name__)
src/airflow/providers/__init__.py
@@ -14,4 +14,4 @@
  # KIND, either express or implied. See the License for the
  # specific language governing permissions and limitations
  # under the License.
- __path__ = __import__("pkgutil").extend_path(__path__, __name__)  # type: ignore
+ __path__ = __import__("pkgutil").extend_path(__path__, __name__)
src/airflow/providers/edge3/__init__.py
@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version

  __all__ = ["__version__"]

- __version__ = "1.1.2"
+ __version__ = "1.1.3"

  if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
      "2.10.0"
src/airflow/providers/edge3/cli/edge_command.py
@@ -351,6 +351,48 @@ def remote_worker_request_shutdown(args) -> None:
      logger.info("Requested shutdown of Edge Worker host %s by %s.", args.edge_hostname, getuser())


+ @cli_utils.action_cli(check_db=False)
+ @providers_configuration_loaded
+ def add_worker_queues(args) -> None:
+     """Add queues to an edge worker."""
+     _check_valid_db_connection()
+     _check_if_registered_edge_host(hostname=args.edge_hostname)
+     from airflow.providers.edge3.models.edge_worker import add_worker_queues
+
+     queues = args.queues.split(",") if args.queues else []
+     if not queues:
+         raise SystemExit("Error: No queues specified to add.")
+
+     try:
+         add_worker_queues(args.edge_hostname, queues)
+         logger.info("Added queues %s to Edge Worker host %s by %s.", queues, args.edge_hostname, getuser())
+     except TypeError as e:
+         logger.error(str(e))
+         raise SystemExit
+
+
+ @cli_utils.action_cli(check_db=False)
+ @providers_configuration_loaded
+ def remove_worker_queues(args) -> None:
+     """Remove queues from an edge worker."""
+     _check_valid_db_connection()
+     _check_if_registered_edge_host(hostname=args.edge_hostname)
+     from airflow.providers.edge3.models.edge_worker import remove_worker_queues
+
+     queues = args.queues.split(",") if args.queues else []
+     if not queues:
+         raise SystemExit("Error: No queues specified to remove.")
+
+     try:
+         remove_worker_queues(args.edge_hostname, queues)
+         logger.info(
+             "Removed queues %s from Edge Worker host %s by %s.", queues, args.edge_hostname, getuser()
+         )
+     except TypeError as e:
+         logger.error(str(e))
+         raise SystemExit
+
+
  ARG_CONCURRENCY = Arg(
      ("-c", "--concurrency"),
      type=int,
@@ -380,6 +422,11 @@ ARG_REQUIRED_MAINTENANCE_COMMENT = Arg(
      help="Maintenance comments to report reason. Required if enabling maintenance",
      required=True,
  )
+ ARG_QUEUES_MANAGE = Arg(
+     ("-q", "--queues"),
+     help="Comma delimited list of queues to add or remove.",
+     required=True,
+ )
  ARG_WAIT_MAINT = Arg(
      ("-w", "--wait"),
      default=False,
@@ -516,4 +563,22 @@ EDGE_COMMANDS: list[ActionCommand] = [
          func=remote_worker_request_shutdown,
          args=(ARG_REQUIRED_EDGE_HOSTNAME,),
      ),
+     ActionCommand(
+         name="add-worker-queues",
+         help=add_worker_queues.__doc__,
+         func=add_worker_queues,
+         args=(
+             ARG_REQUIRED_EDGE_HOSTNAME,
+             ARG_QUEUES_MANAGE,
+         ),
+     ),
+     ActionCommand(
+         name="remove-worker-queues",
+         help=remove_worker_queues.__doc__,
+         func=remove_worker_queues,
+         args=(
+             ARG_REQUIRED_EDGE_HOSTNAME,
+             ARG_QUEUES_MANAGE,
+         ),
+     ),
  ]
src/airflow/providers/edge3/cli/worker.py
@@ -188,8 +188,8 @@ class EdgeWorker:

          try:
              api_url = conf.get("edge", "api_url")
-             execution_api_server_url = conf.get("core", "execution_api_server_url", fallback=...)
-             if execution_api_server_url is ...:
+             execution_api_server_url = conf.get("core", "execution_api_server_url", fallback="")
+             if not execution_api_server_url:
                  parsed = urlparse(api_url)
                  execution_api_server_url = f"{parsed.scheme}://{parsed.netloc}/execution/"

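The sentinel for an unset ``[core] execution_api_server_url`` changes from ``Ellipsis`` to an empty string; the behavior is unchanged, and the worker still derives the URL from the Edge ``api_url``. A standalone sketch of that derivation (the helper name and URLs are illustrative, not the provider's API):

    from urllib.parse import urlparse

    def derive_execution_api_url(api_url: str, configured: str = "") -> str:
        # Mirrors the fallback above: keep scheme and host of the Edge API URL
        # and point at /execution/ when nothing is configured explicitly.
        if configured:
            return configured
        parsed = urlparse(api_url)
        return f"{parsed.scheme}://{parsed.netloc}/execution/"

    # e.g. "https://af.example.com/edge_worker/v1/rpcapi" -> "https://af.example.com/execution/"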
src/airflow/providers/edge3/example_dags/integration_test.py
@@ -41,13 +41,13 @@ try:
      from airflow.sdk import DAG, Param, Variable, task, task_group
  except ImportError:
      # Airflow 2.10 compat
-     from airflow.decorators import task, task_group  # type: ignore[no-redef,attr-defined]
-     from airflow.models.dag import DAG  # type: ignore[no-redef,attr-defined,assignment]
-     from airflow.models.param import Param  # type: ignore[no-redef,attr-defined]
-     from airflow.models.variable import Variable  # type: ignore[no-redef,attr-defined]
-     from airflow.operators.bash import BashOperator  # type: ignore[no-redef,attr-defined]
-     from airflow.operators.empty import EmptyOperator  # type: ignore[no-redef,attr-defined]
-     from airflow.operators.python import PythonOperator  # type: ignore[no-redef,attr-defined]
+     from airflow.decorators import task, task_group  # type: ignore[attr-defined,no-redef]
+     from airflow.models.dag import DAG  # type: ignore[assignment]
+     from airflow.models.param import Param  # type: ignore[no-redef]
+     from airflow.models.variable import Variable
+     from airflow.operators.bash import BashOperator  # type: ignore[no-redef]
+     from airflow.operators.empty import EmptyOperator  # type: ignore[no-redef]
+     from airflow.operators.python import PythonOperator  # type: ignore[no-redef]

  with DAG(
      dag_id="integration_test",
src/airflow/providers/edge3/example_dags/win_test.py
@@ -32,7 +32,11 @@ from subprocess import STDOUT, Popen
  from time import sleep
  from typing import TYPE_CHECKING, Any

- from airflow.decorators import task, task_group
+ try:
+     from airflow.sdk import task, task_group
+ except ImportError:
+     # Airflow 2 path
+     from airflow.decorators import task, task_group  # type: ignore[attr-defined,no-redef]
  from airflow.exceptions import AirflowException, AirflowNotFoundException, AirflowSkipException
  from airflow.models import BaseOperator
  from airflow.models.dag import DAG
@@ -52,11 +56,11 @@ if TYPE_CHECKING:
      try:
          from airflow.sdk.types import RuntimeTaskInstanceProtocol as TaskInstance
      except ImportError:
-         from airflow.models import TaskInstance  # type: ignore[assignment, no-redef]
+         from airflow.models import TaskInstance  # type: ignore[assignment]
      from airflow.utils.context import Context

  try:
-     from airflow.operators.python import PythonOperator  # type: ignore
+     from airflow.operators.python import PythonOperator
  except ImportError:
      from airflow.providers.common.compat.standard.operators import PythonOperator

@@ -277,7 +281,7 @@ with DAG(

      @task.virtualenv(requirements="numpy")
      def virtualenv():
-         import numpy  # type: ignore
+         import numpy

          print(f"Welcome to virtualenv with numpy version {numpy.__version__}.")

@@ -297,7 +301,7 @@ with DAG(
          except AirflowNotFoundException:
              print("Connection 'integration_test' not found... but also OK.")

-     command = CmdOperator(task_id="command", command="echo Parameter is {{params.mapping_count}}")
+     command = CmdOperator(task_id="command", command="echo Hello World")

      def python_call():
          print("Hello world")
src/airflow/providers/edge3/executors/edge_executor.py
@@ -30,7 +30,6 @@ from sqlalchemy.orm import Session
  from airflow.cli.cli_config import GroupCommand
  from airflow.configuration import conf
  from airflow.executors.base_executor import BaseExecutor
- from airflow.models.abstractoperator import DEFAULT_QUEUE
  from airflow.models.taskinstance import TaskInstance, TaskInstanceState
  from airflow.providers.edge3.cli.edge_command import EDGE_COMMANDS
  from airflow.providers.edge3.models.edge_job import EdgeJobModel
@@ -55,6 +54,7 @@ if TYPE_CHECKING:
      TaskTuple = tuple[TaskInstanceKey, CommandType, str | None, Any | None]

  PARALLELISM: int = conf.getint("core", "PARALLELISM")
+ DEFAULT_QUEUE: str = conf.get_mandatory_value("operators", "default_queue")


  class EdgeExecutor(BaseExecutor):
@@ -140,20 +140,40 @@ class EdgeExecutor(BaseExecutor):
          del self.edge_queued_tasks[key]

          self.validate_airflow_tasks_run_command(command)  # type: ignore[attr-defined]
-         session.add(
-             EdgeJobModel(
+
+         # Check if job already exists with same dag_id, task_id, run_id, map_index, try_number
+         existing_job = (
+             session.query(EdgeJobModel)
+             .filter_by(
                  dag_id=key.dag_id,
                  task_id=key.task_id,
                  run_id=key.run_id,
                  map_index=key.map_index,
                  try_number=key.try_number,
-                 state=TaskInstanceState.QUEUED,
-                 queue=queue or DEFAULT_QUEUE,
-                 concurrency_slots=task_instance.pool_slots,
-                 command=str(command),
              )
+             .first()
          )

+         if existing_job:
+             existing_job.state = TaskInstanceState.QUEUED
+             existing_job.queue = queue or DEFAULT_QUEUE
+             existing_job.concurrency_slots = task_instance.pool_slots
+             existing_job.command = str(command)
+         else:
+             session.add(
+                 EdgeJobModel(
+                     dag_id=key.dag_id,
+                     task_id=key.task_id,
+                     run_id=key.run_id,
+                     map_index=key.map_index,
+                     try_number=key.try_number,
+                     state=TaskInstanceState.QUEUED,
+                     queue=queue or DEFAULT_QUEUE,
+                     concurrency_slots=task_instance.pool_slots,
+                     command=str(command),
+                 )
+             )
+
      @provide_session
      def queue_workload(
          self,
@@ -168,20 +188,40 @@ class EdgeExecutor(BaseExecutor):

          task_instance = workload.ti
          key = task_instance.key
-         session.add(
-             EdgeJobModel(
+
+         # Check if job already exists with same dag_id, task_id, run_id, map_index, try_number
+         existing_job = (
+             session.query(EdgeJobModel)
+             .filter_by(
                  dag_id=key.dag_id,
                  task_id=key.task_id,
                  run_id=key.run_id,
                  map_index=key.map_index,
                  try_number=key.try_number,
-                 state=TaskInstanceState.QUEUED,
-                 queue=task_instance.queue,
-                 concurrency_slots=task_instance.pool_slots,
-                 command=workload.model_dump_json(),
              )
+             .first()
          )

+         if existing_job:
+             existing_job.state = TaskInstanceState.QUEUED
+             existing_job.queue = task_instance.queue
+             existing_job.concurrency_slots = task_instance.pool_slots
+             existing_job.command = workload.model_dump_json()
+         else:
+             session.add(
+                 EdgeJobModel(
+                     dag_id=key.dag_id,
+                     task_id=key.task_id,
+                     run_id=key.run_id,
+                     map_index=key.map_index,
+                     try_number=key.try_number,
+                     state=TaskInstanceState.QUEUED,
+                     queue=task_instance.queue,
+                     concurrency_slots=task_instance.pool_slots,
+                     command=workload.model_dump_json(),
+                 )
+             )
+
      def _check_worker_liveness(self, session: Session) -> bool:
          """Reset worker state if heartbeat timed out."""
          changed = False
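Both executor code paths apply the same fix for #53610: a deferrable task that returns from the triggerer is queued again under an unchanged (dag_id, task_id, run_id, map_index, try_number) key, so the previous unconditional INSERT produced duplicate ``edge_job`` rows. Distilled into a standalone sketch (the helper and its parameters are illustrative, not the provider's API):

    from sqlalchemy.orm import Session

    def upsert_edge_job(session: Session, model, key, **values) -> None:
        """Update the row matching the logical task key, or insert a new one."""
        logical_key = dict(
            dag_id=key.dag_id,
            task_id=key.task_id,
            run_id=key.run_id,
            map_index=key.map_index,
            try_number=key.try_number,
        )
        existing = session.query(model).filter_by(**logical_key).first()
        if existing:
            # Same try re-queued: refresh state, queue, slots and command in place.
            for attr, value in values.items():
                setattr(existing, attr, value)
        else:
            session.add(model(**logical_key, **values))

Note that this check-then-write is not atomic on its own; it relies on the executor being the only writer for these rows.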
src/airflow/providers/edge3/models/edge_worker.py
@@ -109,7 +109,7 @@ class EdgeWorkerModel(Base, LoggingMixin):
          super().__init__()

      @property
-     def sysinfo_json(self) -> dict:
+     def sysinfo_json(self) -> dict | None:
          return json.loads(self.sysinfo) if self.sysinfo else None

      @property
@@ -283,3 +283,37 @@ def request_shutdown(worker_name: str, session: Session = NEW_SESSION) -> None:
          EdgeWorkerState.UNKNOWN,
      ):
          worker.state = EdgeWorkerState.SHUTDOWN_REQUEST
+
+
+ @provide_session
+ def add_worker_queues(worker_name: str, queues: list[str], session: Session = NEW_SESSION) -> None:
+     """Add queues to an edge worker."""
+     query = select(EdgeWorkerModel).where(EdgeWorkerModel.worker_name == worker_name)
+     worker: EdgeWorkerModel = session.scalar(query)
+     if worker.state in (
+         EdgeWorkerState.OFFLINE,
+         EdgeWorkerState.OFFLINE_MAINTENANCE,
+         EdgeWorkerState.UNKNOWN,
+     ):
+         error_message = f"Cannot add queues to edge worker {worker_name} as it is in {worker.state} state!"
+         logger.error(error_message)
+         raise TypeError(error_message)
+     worker.add_queues(queues)
+
+
+ @provide_session
+ def remove_worker_queues(worker_name: str, queues: list[str], session: Session = NEW_SESSION) -> None:
+     """Remove queues from an edge worker."""
+     query = select(EdgeWorkerModel).where(EdgeWorkerModel.worker_name == worker_name)
+     worker: EdgeWorkerModel = session.scalar(query)
+     if worker.state in (
+         EdgeWorkerState.OFFLINE,
+         EdgeWorkerState.OFFLINE_MAINTENANCE,
+         EdgeWorkerState.UNKNOWN,
+     ):
+         error_message = (
+             f"Cannot remove queues from edge worker {worker_name} as it is in {worker.state} state!"
+         )
+         logger.error(error_message)
+         raise TypeError(error_message)
+     worker.remove_queues(queues)
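These model-level helpers back the new CLI commands and can also be called directly; ``@provide_session`` supplies a session when none is passed. A short usage sketch (worker and queue names are illustrative):

    from airflow.providers.edge3.models.edge_worker import (
        add_worker_queues,
        remove_worker_queues,
    )

    # Both raise TypeError when the worker is OFFLINE, OFFLINE_MAINTENANCE,
    # or UNKNOWN; the CLI wrappers translate that into a SystemExit.
    add_worker_queues("edge-host-1", ["gpu", "highmem"])
    remove_worker_queues("edge-host-1", ["gpu"])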
src/airflow/providers/edge3/plugins/edge_executor_plugin.py
@@ -17,6 +17,7 @@

  from __future__ import annotations

+ import sys
  from typing import TYPE_CHECKING, Any

  from airflow.configuration import conf
@@ -213,12 +214,24 @@ try:
  except AirflowConfigException:
      EDGE_EXECUTOR_ACTIVE = False

+ # Load the API endpoint only on api-server (Airflow 3.x) or webserver (Airflow 2.x)
+ # todo(jscheffl): Remove this check when the discussion in
+ #                 https://lists.apache.org/thread/w170czq6r7bslkqp1tk6bjjjo0789wgl
+ #                 resulted in a proper API to selective initialize. Maybe backcompat-shim
+ #                 is also needed to support Airflow-versions prior the rework.
+ if AIRFLOW_V_3_0_PLUS:
+     RUNNING_ON_APISERVER = (len(sys.argv) > 1 and sys.argv[1] in ["api-server"]) or (
+         len(sys.argv) > 2 and sys.argv[2] == "airflow-core/src/airflow/api_fastapi/main.py"
+     )
+ else:
+     RUNNING_ON_APISERVER = "gunicorn" in sys.argv[0] and "airflow-webserver" in sys.argv
+


  class EdgeExecutorPlugin(AirflowPlugin):
      """EdgeExecutor Plugin - provides API endpoints for Edge Workers in Webserver."""

      name = "edge_executor"
-     if EDGE_EXECUTOR_ACTIVE:
+     if EDGE_EXECUTOR_ACTIVE and RUNNING_ON_APISERVER:
          if AIRFLOW_V_3_0_PLUS:
              fastapi_apps = [_get_api_endpoint()]
src/airflow/providers/edge3/worker_api/auth.py
@@ -58,7 +58,7 @@ if AIRFLOW_V_3_0_PLUS:

  else:
      # Airflow 2.10 compatibility
-     from airflow.utils.jwt_signer import JWTSigner  # type: ignore
+     from airflow.utils.jwt_signer import JWTSigner

      @cache
      def jwt_signer() -> JWTSigner:
src/airflow/providers/edge3/worker_api/routes/_v2_compat.py
@@ -127,7 +127,7 @@ else:

      # In Airflow 3 with AIP-72 we get workload addressed by ExecuteTask
      # But in Airflow 2.10 it is a command line array
-     ExecuteTask = list[str]  # type: ignore[no-redef,assignment,misc]
+     ExecuteTask = list[str]  # type: ignore[assignment,misc]

      def parse_command(command: str) -> ExecuteTask:
          from ast import literal_eval
src/airflow/providers/edge3/worker_api/routes/_v2_routes.py
@@ -66,7 +66,7 @@ def rpcapi_v2(body: dict[str, Any]) -> APIResponse:
      # Note: Except the method map this _was_ a 100% copy of internal API module
      # airflow.api_internal.endpoints.rpc_api_endpoint.internal_airflow_api()
      # As of rework for FastAPI in Airflow 3.0, this is updated and to be removed in the future.
-     from airflow.api_internal.endpoints.rpc_api_endpoint import (  # type: ignore[attr-defined]
+     from airflow.api_internal.endpoints.rpc_api_endpoint import (
          # Note: This is just for compatibility with Airflow 2.10, not working for Airflow 3 / main as removed
          initialize_method_map,
      )
tests/unit/__init__.py
@@ -14,4 +14,4 @@
  # KIND, either express or implied. See the License for the
  # specific language governing permissions and limitations
  # under the License.
- __path__ = __import__("pkgutil").extend_path(__path__, __name__)  # type: ignore
+ __path__ = __import__("pkgutil").extend_path(__path__, __name__)
tests/unit/edge3/cli/test_worker.py
@@ -63,6 +63,7 @@ MOCK_COMMAND = (
          "dag_id": "mock",
          "run_id": "mock",
          "try_number": 1,
+         "dag_version_id": "01234567-89ab-cdef-0123-456789abcdef",
          "pool_slots": 1,
          "queue": "default",
          "priority_weight": 1,
tests/unit/edge3/executors/test_edge_executor.py
@@ -21,6 +21,7 @@ from datetime import datetime, timedelta
  from unittest.mock import MagicMock, patch

  import pytest
+ import time_machine

  from airflow.configuration import conf
  from airflow.models.taskinstancekey import TaskInstanceKey
@@ -246,6 +247,11 @@ class TestEdgeExecutor:

          # Prepare some data
          with create_session() as session:
+             # Clear existing workers to avoid unique constraint violation
+             session.query(EdgeWorkerModel).delete()
+             session.commit()
+
+             # Add workers with different states
              for worker_name, state, last_heartbeat in [
                  (
                      "inactive_timed_out_worker",
@@ -275,9 +281,7 @@ class TestEdgeExecutor:
              )
              session.commit()

-         with patch(
-             "airflow.utils.timezone.utcnow", return_value=datetime(2023, 1, 1, 1, 0, 0, tzinfo=timezone.utc)
-         ):
+         with time_machine.travel(datetime(2023, 1, 1, 1, 0, 0, tzinfo=timezone.utc), tick=False):
              with conf_vars({("edge", "heartbeat_interval"): "10"}):
                  executor.sync()

@@ -328,13 +332,106 @@ class TestEdgeExecutor:
                  queue="default",
                  priority_weight=1,
                  start_date=timezone.utcnow(),
+                 dag_version_id="4d828a62-a417-4936-a7a6-2b3fabacecab",
+             ),
+             dag_rel_path="mock.py",
+             log_path="mock.log",
+             bundle_info={"name": "n/a", "version": "no matter"},
+         )
+         executor.queue_workload(workload=workload)
+
+         with create_session() as session:
+             jobs = session.query(EdgeJobModel).all()
+             assert len(jobs) == 1
+
+     @pytest.mark.skipif(AIRFLOW_V_3_0_PLUS, reason="API only available in Airflow <3.0")
+     def test_execute_async_updates_existing_job(self):
+         executor, key = self.get_test_executor()
+
+         # First insert a job with the same key
+         with create_session() as session:
+             session.add(
+                 EdgeJobModel(
+                     dag_id=key.dag_id,
+                     run_id=key.run_id,
+                     task_id=key.task_id,
+                     map_index=key.map_index,
+                     try_number=key.try_number,
+                     state=TaskInstanceState.SCHEDULED,
+                     queue="default",
+                     concurrency_slots=1,
+                     command="old-command",
+                     last_update=timezone.utcnow(),
+                 )
+             )
+             session.commit()
+
+         # Trigger execute_async which should update the existing job
+         executor.edge_queued_tasks = deepcopy(executor.queued_tasks)
+         executor.execute_async(key=key, command=["airflow", "tasks", "run", "new", "command"])
+
+         with create_session() as session:
+             jobs = session.query(EdgeJobModel).all()
+             assert len(jobs) == 1
+             job = jobs[0]
+             assert job.state == TaskInstanceState.QUEUED
+             assert job.command != "old-command"
+             assert "new" in job.command
+
+     @pytest.mark.skipif(not AIRFLOW_V_3_0_PLUS, reason="API only available in Airflow 3.0+")
+     def test_queue_workload_updates_existing_job(self):
+         from uuid import uuid4
+
+         from airflow.executors.workloads import ExecuteTask, TaskInstance
+
+         executor = self.get_test_executor()[0]
+
+         key = TaskInstanceKey(dag_id="mock", run_id="mock", task_id="mock", map_index=-1, try_number=1)
+
+         # Insert an existing job
+         with create_session() as session:
+             session.add(
+                 EdgeJobModel(
+                     dag_id=key.dag_id,
+                     task_id=key.task_id,
+                     run_id=key.run_id,
+                     map_index=key.map_index,
+                     try_number=key.try_number,
+                     state=TaskInstanceState.SCHEDULED,
+                     queue="default",
+                     command="old-command",
+                     concurrency_slots=1,
+                     last_update=timezone.utcnow(),
+                 )
+             )
+             session.commit()
+
+         # Queue a workload with same key
+         workload = ExecuteTask(
+             token="mock",
+             ti=TaskInstance(
+                 id=uuid4(),
+                 task_id=key.task_id,
+                 dag_id=key.dag_id,
+                 run_id=key.run_id,
+                 try_number=key.try_number,
+                 map_index=key.map_index,
+                 pool_slots=1,
+                 queue="updated-queue",
+                 priority_weight=1,
+                 start_date=timezone.utcnow(),
+                 dag_version_id=uuid4(),
              ),
              dag_rel_path="mock.py",
              log_path="mock.log",
              bundle_info={"name": "n/a", "version": "no matter"},
          )
+
          executor.queue_workload(workload=workload)

          with create_session() as session:
              jobs = session.query(EdgeJobModel).all()
              assert len(jobs) == 1
+             job = jobs[0]
+             assert job.queue == "updated-queue"
+             assert job.command != "old-command"
tests/unit/edge3/plugins/test_edge_executor_plugin.py
@@ -17,6 +17,7 @@
  from __future__ import annotations

  import importlib
+ from unittest.mock import patch

  import pytest
  import time_machine
@@ -44,17 +45,20 @@ def test_plugin_inactive():


  @pytest.mark.db_test
- def test_plugin_active():
-     with conf_vars({("edge", "api_enabled"): "true"}):
+ def test_plugin_active_apiserver():
+     mock_cli = ["airflow", "api-server"] if AIRFLOW_V_3_0_PLUS else ["gunicorn", "airflow-webserver"]
+     with conf_vars({("edge", "api_enabled"): "true"}), patch("sys.argv", mock_cli):
          importlib.reload(edge_executor_plugin)

      from airflow.providers.edge3.plugins.edge_executor_plugin import (
          EDGE_EXECUTOR_ACTIVE,
+         RUNNING_ON_APISERVER,
          EdgeExecutorPlugin,
      )

      rep = EdgeExecutorPlugin()
      assert EDGE_EXECUTOR_ACTIVE
+     assert RUNNING_ON_APISERVER
      if AIRFLOW_V_3_0_PLUS:
          assert len(rep.appbuilder_views) == 0
          assert len(rep.flask_blueprints) == 0
@@ -64,6 +68,27 @@ def test_plugin_active():
          assert len(rep.flask_blueprints) == 2


+ @patch("sys.argv", ["airflow", "some-other-command"])
+ def test_plugin_active_non_apiserver():
+     with conf_vars({("edge", "api_enabled"): "true"}):
+         importlib.reload(edge_executor_plugin)
+
+     from airflow.providers.edge3.plugins.edge_executor_plugin import (
+         EDGE_EXECUTOR_ACTIVE,
+         RUNNING_ON_APISERVER,
+         EdgeExecutorPlugin,
+     )
+
+     rep = EdgeExecutorPlugin()
+     assert EDGE_EXECUTOR_ACTIVE
+     assert not RUNNING_ON_APISERVER
+     assert len(rep.appbuilder_views) == 0
+     assert len(rep.flask_blueprints) == 0
+     assert len(rep.appbuilder_views) == 0
+     if AIRFLOW_V_3_0_PLUS:
+         assert len(rep.fastapi_apps) == 0
+
+
  @pytest.fixture
  def plugin():
      from airflow.providers.edge3.plugins.edge_executor_plugin import EdgeExecutorPlugin