apache-airflow-providers-cncf-kubernetes 8.4.2rc1__tar.gz → 9.0.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between these package versions as they appear in their respective public registries.

Potentially problematic release.

This version of apache-airflow-providers-cncf-kubernetes might be problematic.

Files changed (59)
  1. {apache_airflow_providers_cncf_kubernetes-8.4.2rc1 → apache_airflow_providers_cncf_kubernetes-9.0.0}/PKG-INFO +9 -10
  2. {apache_airflow_providers_cncf_kubernetes-8.4.2rc1 → apache_airflow_providers_cncf_kubernetes-9.0.0}/README.rst +4 -4
  3. {apache_airflow_providers_cncf_kubernetes-8.4.2rc1 → apache_airflow_providers_cncf_kubernetes-9.0.0}/airflow/providers/cncf/kubernetes/__init__.py +1 -1
  4. {apache_airflow_providers_cncf_kubernetes-8.4.2rc1 → apache_airflow_providers_cncf_kubernetes-9.0.0}/airflow/providers/cncf/kubernetes/executors/kubernetes_executor.py +49 -62
  5. {apache_airflow_providers_cncf_kubernetes-8.4.2rc1 → apache_airflow_providers_cncf_kubernetes-9.0.0}/airflow/providers/cncf/kubernetes/executors/local_kubernetes_executor.py +28 -6
  6. {apache_airflow_providers_cncf_kubernetes-8.4.2rc1 → apache_airflow_providers_cncf_kubernetes-9.0.0}/airflow/providers/cncf/kubernetes/get_provider_info.py +1 -0
  7. {apache_airflow_providers_cncf_kubernetes-8.4.2rc1 → apache_airflow_providers_cncf_kubernetes-9.0.0}/airflow/providers/cncf/kubernetes/hooks/kubernetes.py +2 -0
  8. {apache_airflow_providers_cncf_kubernetes-8.4.2rc1 → apache_airflow_providers_cncf_kubernetes-9.0.0}/airflow/providers/cncf/kubernetes/kubernetes_helper_functions.py +1 -1
  9. {apache_airflow_providers_cncf_kubernetes-8.4.2rc1 → apache_airflow_providers_cncf_kubernetes-9.0.0}/airflow/providers/cncf/kubernetes/operators/pod.py +10 -6
  10. {apache_airflow_providers_cncf_kubernetes-8.4.2rc1 → apache_airflow_providers_cncf_kubernetes-9.0.0}/airflow/providers/cncf/kubernetes/operators/spark_kubernetes.py +10 -5
  11. {apache_airflow_providers_cncf_kubernetes-8.4.2rc1 → apache_airflow_providers_cncf_kubernetes-9.0.0}/airflow/providers/cncf/kubernetes/pod_generator.py +5 -1
  12. {apache_airflow_providers_cncf_kubernetes-8.4.2rc1 → apache_airflow_providers_cncf_kubernetes-9.0.0}/airflow/providers/cncf/kubernetes/utils/pod_manager.py +20 -6
  13. {apache_airflow_providers_cncf_kubernetes-8.4.2rc1 → apache_airflow_providers_cncf_kubernetes-9.0.0}/pyproject.toml +5 -6
  14. {apache_airflow_providers_cncf_kubernetes-8.4.2rc1 → apache_airflow_providers_cncf_kubernetes-9.0.0}/airflow/providers/cncf/kubernetes/LICENSE +0 -0
  15. {apache_airflow_providers_cncf_kubernetes-8.4.2rc1 → apache_airflow_providers_cncf_kubernetes-9.0.0}/airflow/providers/cncf/kubernetes/backcompat/__init__.py +0 -0
  16. {apache_airflow_providers_cncf_kubernetes-8.4.2rc1 → apache_airflow_providers_cncf_kubernetes-9.0.0}/airflow/providers/cncf/kubernetes/backcompat/backwards_compat_converters.py +0 -0
  17. {apache_airflow_providers_cncf_kubernetes-8.4.2rc1 → apache_airflow_providers_cncf_kubernetes-9.0.0}/airflow/providers/cncf/kubernetes/callbacks.py +0 -0
  18. {apache_airflow_providers_cncf_kubernetes-8.4.2rc1 → apache_airflow_providers_cncf_kubernetes-9.0.0}/airflow/providers/cncf/kubernetes/cli/__init__.py +0 -0
  19. {apache_airflow_providers_cncf_kubernetes-8.4.2rc1 → apache_airflow_providers_cncf_kubernetes-9.0.0}/airflow/providers/cncf/kubernetes/cli/kubernetes_command.py +0 -0
  20. {apache_airflow_providers_cncf_kubernetes-8.4.2rc1 → apache_airflow_providers_cncf_kubernetes-9.0.0}/airflow/providers/cncf/kubernetes/decorators/__init__.py +0 -0
  21. {apache_airflow_providers_cncf_kubernetes-8.4.2rc1 → apache_airflow_providers_cncf_kubernetes-9.0.0}/airflow/providers/cncf/kubernetes/decorators/kubernetes.py +0 -0
  22. {apache_airflow_providers_cncf_kubernetes-8.4.2rc1 → apache_airflow_providers_cncf_kubernetes-9.0.0}/airflow/providers/cncf/kubernetes/executors/__init__.py +0 -0
  23. {apache_airflow_providers_cncf_kubernetes-8.4.2rc1 → apache_airflow_providers_cncf_kubernetes-9.0.0}/airflow/providers/cncf/kubernetes/executors/kubernetes_executor_types.py +0 -0
  24. {apache_airflow_providers_cncf_kubernetes-8.4.2rc1 → apache_airflow_providers_cncf_kubernetes-9.0.0}/airflow/providers/cncf/kubernetes/executors/kubernetes_executor_utils.py +0 -0
  25. {apache_airflow_providers_cncf_kubernetes-8.4.2rc1 → apache_airflow_providers_cncf_kubernetes-9.0.0}/airflow/providers/cncf/kubernetes/hooks/__init__.py +0 -0
  26. {apache_airflow_providers_cncf_kubernetes-8.4.2rc1 → apache_airflow_providers_cncf_kubernetes-9.0.0}/airflow/providers/cncf/kubernetes/k8s_model.py +0 -0
  27. {apache_airflow_providers_cncf_kubernetes-8.4.2rc1 → apache_airflow_providers_cncf_kubernetes-9.0.0}/airflow/providers/cncf/kubernetes/kube_client.py +0 -0
  28. {apache_airflow_providers_cncf_kubernetes-8.4.2rc1 → apache_airflow_providers_cncf_kubernetes-9.0.0}/airflow/providers/cncf/kubernetes/kube_config.py +0 -0
  29. {apache_airflow_providers_cncf_kubernetes-8.4.2rc1 → apache_airflow_providers_cncf_kubernetes-9.0.0}/airflow/providers/cncf/kubernetes/kubernetes_executor_templates/__init__.py +0 -0
  30. {apache_airflow_providers_cncf_kubernetes-8.4.2rc1 → apache_airflow_providers_cncf_kubernetes-9.0.0}/airflow/providers/cncf/kubernetes/kubernetes_executor_templates/basic_template.yaml +0 -0
  31. {apache_airflow_providers_cncf_kubernetes-8.4.2rc1 → apache_airflow_providers_cncf_kubernetes-9.0.0}/airflow/providers/cncf/kubernetes/operators/__init__.py +0 -0
  32. {apache_airflow_providers_cncf_kubernetes-8.4.2rc1 → apache_airflow_providers_cncf_kubernetes-9.0.0}/airflow/providers/cncf/kubernetes/operators/custom_object_launcher.py +0 -0
  33. {apache_airflow_providers_cncf_kubernetes-8.4.2rc1 → apache_airflow_providers_cncf_kubernetes-9.0.0}/airflow/providers/cncf/kubernetes/operators/job.py +0 -0
  34. {apache_airflow_providers_cncf_kubernetes-8.4.2rc1 → apache_airflow_providers_cncf_kubernetes-9.0.0}/airflow/providers/cncf/kubernetes/operators/kubernetes_pod.py +0 -0
  35. {apache_airflow_providers_cncf_kubernetes-8.4.2rc1 → apache_airflow_providers_cncf_kubernetes-9.0.0}/airflow/providers/cncf/kubernetes/operators/resource.py +0 -0
  36. {apache_airflow_providers_cncf_kubernetes-8.4.2rc1 → apache_airflow_providers_cncf_kubernetes-9.0.0}/airflow/providers/cncf/kubernetes/pod_generator_deprecated.py +0 -0
  37. {apache_airflow_providers_cncf_kubernetes-8.4.2rc1 → apache_airflow_providers_cncf_kubernetes-9.0.0}/airflow/providers/cncf/kubernetes/pod_launcher_deprecated.py +0 -0
  38. {apache_airflow_providers_cncf_kubernetes-8.4.2rc1 → apache_airflow_providers_cncf_kubernetes-9.0.0}/airflow/providers/cncf/kubernetes/pod_template_file_examples/__init__.py +0 -0
  39. {apache_airflow_providers_cncf_kubernetes-8.4.2rc1 → apache_airflow_providers_cncf_kubernetes-9.0.0}/airflow/providers/cncf/kubernetes/pod_template_file_examples/dags_in_image_template.yaml +0 -0
  40. {apache_airflow_providers_cncf_kubernetes-8.4.2rc1 → apache_airflow_providers_cncf_kubernetes-9.0.0}/airflow/providers/cncf/kubernetes/pod_template_file_examples/dags_in_volume_template.yaml +0 -0
  41. {apache_airflow_providers_cncf_kubernetes-8.4.2rc1 → apache_airflow_providers_cncf_kubernetes-9.0.0}/airflow/providers/cncf/kubernetes/pod_template_file_examples/git_sync_template.yaml +0 -0
  42. {apache_airflow_providers_cncf_kubernetes-8.4.2rc1 → apache_airflow_providers_cncf_kubernetes-9.0.0}/airflow/providers/cncf/kubernetes/python_kubernetes_script.jinja2 +0 -0
  43. {apache_airflow_providers_cncf_kubernetes-8.4.2rc1 → apache_airflow_providers_cncf_kubernetes-9.0.0}/airflow/providers/cncf/kubernetes/python_kubernetes_script.py +0 -0
  44. {apache_airflow_providers_cncf_kubernetes-8.4.2rc1 → apache_airflow_providers_cncf_kubernetes-9.0.0}/airflow/providers/cncf/kubernetes/resource_convert/__init__.py +0 -0
  45. {apache_airflow_providers_cncf_kubernetes-8.4.2rc1 → apache_airflow_providers_cncf_kubernetes-9.0.0}/airflow/providers/cncf/kubernetes/resource_convert/configmap.py +0 -0
  46. {apache_airflow_providers_cncf_kubernetes-8.4.2rc1 → apache_airflow_providers_cncf_kubernetes-9.0.0}/airflow/providers/cncf/kubernetes/resource_convert/env_variable.py +0 -0
  47. {apache_airflow_providers_cncf_kubernetes-8.4.2rc1 → apache_airflow_providers_cncf_kubernetes-9.0.0}/airflow/providers/cncf/kubernetes/resource_convert/secret.py +0 -0
  48. {apache_airflow_providers_cncf_kubernetes-8.4.2rc1 → apache_airflow_providers_cncf_kubernetes-9.0.0}/airflow/providers/cncf/kubernetes/secret.py +0 -0
  49. {apache_airflow_providers_cncf_kubernetes-8.4.2rc1 → apache_airflow_providers_cncf_kubernetes-9.0.0}/airflow/providers/cncf/kubernetes/sensors/__init__.py +0 -0
  50. {apache_airflow_providers_cncf_kubernetes-8.4.2rc1 → apache_airflow_providers_cncf_kubernetes-9.0.0}/airflow/providers/cncf/kubernetes/sensors/spark_kubernetes.py +0 -0
  51. {apache_airflow_providers_cncf_kubernetes-8.4.2rc1 → apache_airflow_providers_cncf_kubernetes-9.0.0}/airflow/providers/cncf/kubernetes/template_rendering.py +0 -0
  52. {apache_airflow_providers_cncf_kubernetes-8.4.2rc1 → apache_airflow_providers_cncf_kubernetes-9.0.0}/airflow/providers/cncf/kubernetes/triggers/__init__.py +0 -0
  53. {apache_airflow_providers_cncf_kubernetes-8.4.2rc1 → apache_airflow_providers_cncf_kubernetes-9.0.0}/airflow/providers/cncf/kubernetes/triggers/job.py +0 -0
  54. {apache_airflow_providers_cncf_kubernetes-8.4.2rc1 → apache_airflow_providers_cncf_kubernetes-9.0.0}/airflow/providers/cncf/kubernetes/triggers/kubernetes_pod.py +0 -0
  55. {apache_airflow_providers_cncf_kubernetes-8.4.2rc1 → apache_airflow_providers_cncf_kubernetes-9.0.0}/airflow/providers/cncf/kubernetes/triggers/pod.py +0 -0
  56. {apache_airflow_providers_cncf_kubernetes-8.4.2rc1 → apache_airflow_providers_cncf_kubernetes-9.0.0}/airflow/providers/cncf/kubernetes/utils/__init__.py +0 -0
  57. {apache_airflow_providers_cncf_kubernetes-8.4.2rc1 → apache_airflow_providers_cncf_kubernetes-9.0.0}/airflow/providers/cncf/kubernetes/utils/delete_from.py +0 -0
  58. {apache_airflow_providers_cncf_kubernetes-8.4.2rc1 → apache_airflow_providers_cncf_kubernetes-9.0.0}/airflow/providers/cncf/kubernetes/utils/k8s_resource_iterator.py +0 -0
  59. {apache_airflow_providers_cncf_kubernetes-8.4.2rc1 → apache_airflow_providers_cncf_kubernetes-9.0.0}/airflow/providers/cncf/kubernetes/utils/xcom_sidecar.py +0 -0

PKG-INFO
@@ -1,11 +1,11 @@
  Metadata-Version: 2.1
  Name: apache-airflow-providers-cncf-kubernetes
- Version: 8.4.2rc1
+ Version: 9.0.0
  Summary: Provider package apache-airflow-providers-cncf-kubernetes for Apache Airflow
  Keywords: airflow-provider,cncf.kubernetes,airflow,integration
  Author-email: Apache Software Foundation <dev@airflow.apache.org>
  Maintainer-email: Apache Software Foundation <dev@airflow.apache.org>
- Requires-Python: ~=3.8
+ Requires-Python: ~=3.9
  Description-Content-Type: text/x-rst
  Classifier: Development Status :: 5 - Production/Stable
  Classifier: Environment :: Console
@@ -15,22 +15,21 @@ Classifier: Intended Audience :: System Administrators
  Classifier: Framework :: Apache Airflow
  Classifier: Framework :: Apache Airflow :: Provider
  Classifier: License :: OSI Approved :: Apache Software License
- Classifier: Programming Language :: Python :: 3.8
  Classifier: Programming Language :: Python :: 3.9
  Classifier: Programming Language :: Python :: 3.10
  Classifier: Programming Language :: Python :: 3.11
  Classifier: Programming Language :: Python :: 3.12
  Classifier: Topic :: System :: Monitoring
  Requires-Dist: aiofiles>=23.2.0
- Requires-Dist: apache-airflow>=2.8.0rc0
+ Requires-Dist: apache-airflow>=2.8.0
  Requires-Dist: asgiref>=3.5.2
  Requires-Dist: cryptography>=41.0.0
  Requires-Dist: google-re2>=1.0
  Requires-Dist: kubernetes>=29.0.0,<=30.1.0
  Requires-Dist: kubernetes_asyncio>=29.0.0,<=30.1.0
  Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
- Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/8.4.2/changelog.html
- Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/8.4.2
+ Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/9.0.0/changelog.html
+ Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/9.0.0
  Project-URL: Slack Chat, https://s.apache.org/airflow-slack
  Project-URL: Source Code, https://github.com/apache/airflow
  Project-URL: Twitter, https://twitter.com/ApacheAirflow
@@ -80,7 +79,7 @@ Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/

  Package ``apache-airflow-providers-cncf-kubernetes``

- Release: ``8.4.2.rc1``
+ Release: ``9.0.0``


  `Kubernetes <https://kubernetes.io/>`__
@@ -93,7 +92,7 @@ This is a provider package for ``cncf.kubernetes`` provider. All classes for thi
  are in ``airflow.providers.cncf.kubernetes`` python package.

  You can find package information and changelog for the provider
- in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/8.4.2/>`_.
+ in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/9.0.0/>`_.

  Installation
  ------------
@@ -102,7 +101,7 @@ You can install this package on top of an existing Airflow 2 installation (see `
  for the minimum Airflow version supported) via
  ``pip install apache-airflow-providers-cncf-kubernetes``

- The package supports the following python versions: 3.8,3.9,3.10,3.11,3.12
+ The package supports the following python versions: 3.9,3.10,3.11,3.12

  Requirements
  ------------
@@ -120,4 +119,4 @@ PIP package Version required
  ====================== =====================

  The changelog for the provider package can be found in the
- `changelog <https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/8.4.2/changelog.html>`_.
+ `changelog <https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/9.0.0/changelog.html>`_.

README.rst
@@ -42,7 +42,7 @@

  Package ``apache-airflow-providers-cncf-kubernetes``

- Release: ``8.4.2.rc1``
+ Release: ``9.0.0``


  `Kubernetes <https://kubernetes.io/>`__
@@ -55,7 +55,7 @@ This is a provider package for ``cncf.kubernetes`` provider. All classes for thi
  are in ``airflow.providers.cncf.kubernetes`` python package.

  You can find package information and changelog for the provider
- in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/8.4.2/>`_.
+ in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/9.0.0/>`_.

  Installation
  ------------
@@ -64,7 +64,7 @@ You can install this package on top of an existing Airflow 2 installation (see `
  for the minimum Airflow version supported) via
  ``pip install apache-airflow-providers-cncf-kubernetes``

- The package supports the following python versions: 3.8,3.9,3.10,3.11,3.12
+ The package supports the following python versions: 3.9,3.10,3.11,3.12

  Requirements
  ------------
@@ -82,4 +82,4 @@ PIP package Version required
  ====================== =====================

  The changelog for the provider package can be found in the
- `changelog <https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/8.4.2/changelog.html>`_.
+ `changelog <https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/9.0.0/changelog.html>`_.

airflow/providers/cncf/kubernetes/__init__.py
@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version

  __all__ = ["__version__"]

- __version__ = "8.4.2"
+ __version__ = "9.0.0"

  if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
  "2.8.0"

airflow/providers/cncf/kubernetes/executors/kubernetes_executor.py
@@ -179,6 +179,36 @@ class KubernetesExecutor(BaseExecutor):
  return pod_generator.datetime_to_label_safe_datestring(input_value)
  return pod_generator.make_safe_label_value(input_value)

+ def get_pod_combined_search_str_to_pod_map(self) -> dict[str, k8s.V1Pod]:
+ """
+ List the worker pods owned by this scheduler and create a map containing pod combined search str -> pod.
+
+ For every pod, it creates two below entries in the map
+ dag_id={dag_id},task_id={task_id},airflow-worker={airflow_worker},<map_index={map_index}>,run_id={run_id}
+ """
+ # airflow worker label selector batch call
+ kwargs = {"label_selector": f"airflow-worker={self._make_safe_label_value(str(self.job_id))}"}
+ if self.kube_config.kube_client_request_args:
+ kwargs.update(self.kube_config.kube_client_request_args)
+ pod_list = self._list_pods(kwargs)
+
+ # create a set against pod query label fields
+ pod_combined_search_str_to_pod_map = {}
+ for pod in pod_list:
+ dag_id = pod.metadata.annotations.get("dag_id", None)
+ task_id = pod.metadata.annotations.get("task_id", None)
+ map_index = pod.metadata.annotations.get("map_index", None)
+ run_id = pod.metadata.annotations.get("run_id", None)
+ if dag_id is None or task_id is None:
+ continue
+ search_base_str = f"dag_id={dag_id},task_id={task_id}"
+ if map_index is not None:
+ search_base_str += f",map_index={map_index}"
+ if run_id is not None:
+ search_str = f"{search_base_str},run_id={run_id}"
+ pod_combined_search_str_to_pod_map[search_str] = pod
+ return pod_combined_search_str_to_pod_map
+
  @provide_session
  def clear_not_launched_queued_tasks(self, session: Session = NEW_SESSION) -> None:
  """
@@ -218,32 +248,7 @@ class KubernetesExecutor(BaseExecutor):
  if not queued_tis:
  return

- # airflow worker label selector batch call
- kwargs = {"label_selector": f"airflow-worker={self._make_safe_label_value(str(self.job_id))}"}
- if self.kube_config.kube_client_request_args:
- kwargs.update(self.kube_config.kube_client_request_args)
- pod_list = self._list_pods(kwargs)
-
- # create a set against pod query label fields
- label_search_set = set()
- for pod in pod_list:
- dag_id = pod.metadata.labels.get("dag_id", None)
- task_id = pod.metadata.labels.get("task_id", None)
- airflow_worker = pod.metadata.labels.get("airflow-worker", None)
- map_index = pod.metadata.labels.get("map_index", None)
- run_id = pod.metadata.labels.get("run_id", None)
- execution_date = pod.metadata.labels.get("execution_date", None)
- if dag_id is None or task_id is None or airflow_worker is None:
- continue
- label_search_base_str = f"dag_id={dag_id},task_id={task_id},airflow-worker={airflow_worker}"
- if map_index is not None:
- label_search_base_str += f",map_index={map_index}"
- if run_id is not None:
- label_search_str = f"{label_search_base_str},run_id={run_id}"
- label_search_set.add(label_search_str)
- if execution_date is not None:
- label_search_str = f"{label_search_base_str},execution_date={execution_date}"
- label_search_set.add(label_search_str)
+ pod_combined_search_str_to_pod_map = self.get_pod_combined_search_str_to_pod_map()

  for ti in queued_tis:
  self.log.debug("Checking task instance %s", ti)
@@ -253,24 +258,13 @@ class KubernetesExecutor(BaseExecutor):
  continue

  # Build the pod selector
- base_label_selector = (
- f"dag_id={self._make_safe_label_value(ti.dag_id)},"
- f"task_id={self._make_safe_label_value(ti.task_id)},"
- f"airflow-worker={self._make_safe_label_value(str(ti.queued_by_job_id))}"
- )
+ base_selector = f"dag_id={ti.dag_id},task_id={ti.task_id}"
  if ti.map_index >= 0:
  # Old tasks _couldn't_ be mapped, so we don't have to worry about compat
- base_label_selector += f",map_index={ti.map_index}"
+ base_selector += f",map_index={ti.map_index}"

- # Try run_id first
- label_search_str = f"{base_label_selector},run_id={self._make_safe_label_value(ti.run_id)}"
- if label_search_str in label_search_set:
- continue
- # Fallback to old style of using execution_date
- label_search_str = (
- f"{base_label_selector},execution_date={self._make_safe_label_value(ti.execution_date)}"
- )
- if label_search_str in label_search_set:
+ search_str = f"{base_selector},run_id={ti.run_id}"
+ if search_str in pod_combined_search_str_to_pod_map:
  continue
  self.log.info("TaskInstance: %s found in queued state but was not launched, rescheduling", ti)
  session.execute(
@@ -603,34 +597,27 @@ class KubernetesExecutor(BaseExecutor):
  :param tis: List of Task Instances to clean up
  :return: List of readable task instances for a warning message
  """
- from airflow.providers.cncf.kubernetes.pod_generator import PodGenerator
-
  if TYPE_CHECKING:
  assert self.kube_client
  assert self.kube_scheduler
- readable_tis = []
+ readable_tis: list[str] = []
+ if not tis:
+ return readable_tis
+ pod_combined_search_str_to_pod_map = self.get_pod_combined_search_str_to_pod_map()
  for ti in tis:
- selector = PodGenerator.build_selector_for_k8s_executor_pod(
- dag_id=ti.dag_id,
- task_id=ti.task_id,
- try_number=ti.try_number,
- map_index=ti.map_index,
- run_id=ti.run_id,
- airflow_worker=ti.queued_by_job_id,
- )
- namespace = self._get_pod_namespace(ti)
- pod_list = self.kube_client.list_namespaced_pod(
- namespace=namespace,
- label_selector=selector,
- ).items
- if not pod_list:
+ # Build the pod selector
+ base_label_selector = f"dag_id={ti.dag_id},task_id={ti.task_id}"
+ if ti.map_index >= 0:
+ # Old tasks _couldn't_ be mapped, so we don't have to worry about compat
+ base_label_selector += f",map_index={ti.map_index}"
+
+ search_str = f"{base_label_selector},run_id={ti.run_id}"
+ pod = pod_combined_search_str_to_pod_map.get(search_str, None)
+ if not pod:
  self.log.warning("Cannot find pod for ti %s", ti)
  continue
- elif len(pod_list) > 1:
- self.log.warning("Found multiple pods for ti %s: %s", ti, pod_list)
- continue
  readable_tis.append(repr(ti))
- self.kube_scheduler.delete_pod(pod_name=pod_list[0].metadata.name, namespace=namespace)
+ self.kube_scheduler.delete_pod(pod_name=pod.metadata.name, namespace=pod.metadata.namespace)
  return readable_tis

  def adopt_launched_task(
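
Note on the executor change above: per-task label-selector pod queries are replaced by a single batched pod listing keyed by annotation-derived search strings (get_pod_combined_search_str_to_pod_map). A minimal standalone sketch (illustrative values only, not provider code) of how those combined search strings are built and matched:

    from __future__ import annotations

    def build_search_str(annotations: dict[str, str]) -> str | None:
        # Mirrors the key format used by get_pod_combined_search_str_to_pod_map.
        dag_id = annotations.get("dag_id")
        task_id = annotations.get("task_id")
        map_index = annotations.get("map_index")
        run_id = annotations.get("run_id")
        if dag_id is None or task_id is None or run_id is None:
            return None
        key = f"dag_id={dag_id},task_id={task_id}"
        if map_index is not None:
            key += f",map_index={map_index}"
        return f"{key},run_id={run_id}"

    # One map entry per worker pod, keyed by its combined search string.
    pod_map = {
        build_search_str(
            {"dag_id": "my_dag", "task_id": "my_task", "run_id": "manual__2024-01-01"}
        ): "<V1Pod>",
    }

    # A queued task instance counts as "launched" when its search string is present.
    ti_key = "dag_id=my_dag,task_id=my_task,run_id=manual__2024-01-01"
    print(ti_key in pod_map)  # True -> a pod exists, so the TI is not rescheduled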

airflow/providers/cncf/kubernetes/executors/local_kubernetes_executor.py
@@ -20,18 +20,22 @@ from __future__ import annotations
  from typing import TYPE_CHECKING, Sequence

  from airflow.configuration import conf
+ from airflow.executors.base_executor import BaseExecutor
  from airflow.providers.cncf.kubernetes.executors.kubernetes_executor import KubernetesExecutor
- from airflow.utils.log.logging_mixin import LoggingMixin

  if TYPE_CHECKING:
  from airflow.callbacks.base_callback_sink import BaseCallbackSink
  from airflow.callbacks.callback_requests import CallbackRequest
- from airflow.executors.base_executor import CommandType, EventBufferValueType, QueuedTaskInstanceType
+ from airflow.executors.base_executor import (
+ CommandType,
+ EventBufferValueType,
+ QueuedTaskInstanceType,
+ )
  from airflow.executors.local_executor import LocalExecutor
  from airflow.models.taskinstance import SimpleTaskInstance, TaskInstance, TaskInstanceKey


- class LocalKubernetesExecutor(LoggingMixin):
+ class LocalKubernetesExecutor(BaseExecutor):
  """
  Chooses between LocalExecutor and KubernetesExecutor based on the queue defined on the task.

@@ -57,11 +61,21 @@ class LocalKubernetesExecutor(LoggingMixin):

  def __init__(self, local_executor: LocalExecutor, kubernetes_executor: KubernetesExecutor):
  super().__init__()
- self._job_id: str | None = None
+ self._job_id: int | str | None = None
  self.local_executor = local_executor
  self.kubernetes_executor = kubernetes_executor
  self.kubernetes_executor.kubernetes_queue = self.KUBERNETES_QUEUE

+ @property
+ def _task_event_logs(self):
+ self.local_executor._task_event_logs += self.kubernetes_executor._task_event_logs
+ self.kubernetes_executor._task_event_logs.clear()
+ return self.local_executor._task_event_logs
+
+ @_task_event_logs.setter
+ def _task_event_logs(self, value):
+ """Not implemented for hybrid executors."""
+
  @property
  def queued_tasks(self) -> dict[TaskInstanceKey, QueuedTaskInstanceType]:
  """Return queued tasks from local and kubernetes executor."""
@@ -70,13 +84,21 @@ class LocalKubernetesExecutor(LoggingMixin):

  return queued_tasks

+ @queued_tasks.setter
+ def queued_tasks(self, value) -> None:
+ """Not implemented for hybrid executors."""
+
  @property
  def running(self) -> set[TaskInstanceKey]:
  """Return running tasks from local and kubernetes executor."""
  return self.local_executor.running.union(self.kubernetes_executor.running)

+ @running.setter
+ def running(self, value) -> None:
+ """Not implemented for hybrid executors."""
+
  @property
- def job_id(self) -> str | None:
+ def job_id(self) -> int | str | None:
  """
  Inherited attribute from BaseExecutor.

@@ -86,7 +108,7 @@ class LocalKubernetesExecutor(LoggingMixin):
  return self._job_id

  @job_id.setter
- def job_id(self, value: str | None) -> None:
+ def job_id(self, value: int | str | None) -> None:
  """Expose job ID for SchedulerJob."""
  self._job_id = value
  self.kubernetes_executor.job_id = value
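
The LocalKubernetesExecutor changes above make the hybrid executor subclass BaseExecutor and add no-op setters so that BaseExecutor's attribute writes do not clash with the merged, effectively read-only views (queued_tasks, running, _task_event_logs). A rough standalone sketch of that pattern (illustrative names, not the provider's API):

    class _Child:
        def __init__(self) -> None:
            self.running: set[str] = set()

    class HybridView:
        """Expose a merged view over two child executors; ignore writes."""

        def __init__(self, a: _Child, b: _Child) -> None:
            self._a, self._b = a, b

        @property
        def running(self) -> set[str]:
            # Union of both children, recomputed on every access.
            return self._a.running | self._b.running

        @running.setter
        def running(self, value) -> None:
            """Not implemented for hybrid executors."""

    local, kube = _Child(), _Child()
    local.running.add("ti-1")
    kube.running.add("ti-2")
    view = HybridView(local, kube)
    print(view.running)   # {'ti-1', 'ti-2'}
    view.running = set()  # accepted but ignored, so base-class writes stay harmless
    print(view.running)   # still {'ti-1', 'ti-2'}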

airflow/providers/cncf/kubernetes/get_provider_info.py
@@ -30,6 +30,7 @@ def get_provider_info():
  "state": "ready",
  "source-date-epoch": 1726860352,
  "versions": [
+ "9.0.0",
  "8.4.2",
  "8.4.1",
  "8.4.0",

airflow/providers/cncf/kubernetes/hooks/kubernetes.py
@@ -250,6 +250,8 @@ class KubernetesHook(BaseHook, PodOperatorHookProtocol):
  if kubeconfig is not None:
  with tempfile.NamedTemporaryFile() as temp_config:
  self.log.debug("loading kube_config from: connection kube_config")
+ if isinstance(kubeconfig, dict):
+ kubeconfig = json.dumps(kubeconfig)
  temp_config.write(kubeconfig.encode())
  temp_config.flush()
  self._is_in_cluster = False
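
The hook change above makes a dict-valued kube_config from the connection usable: the dict is serialized with json.dumps() before being written to the temporary file (JSON is a subset of YAML, so standard kubeconfig loaders can still parse it). A hedged sketch of that behaviour with a stub config, not a real kubeconfig:

    import json
    import tempfile

    kubeconfig = {"apiVersion": "v1", "kind": "Config", "clusters": [], "contexts": [], "users": []}

    # New behaviour: dicts are serialized instead of failing on .encode().
    if isinstance(kubeconfig, dict):
        kubeconfig = json.dumps(kubeconfig)

    with tempfile.NamedTemporaryFile() as temp_config:
        temp_config.write(kubeconfig.encode())
        temp_config.flush()
        print("kubeconfig written to", temp_config.name)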

airflow/providers/cncf/kubernetes/kubernetes_helper_functions.py
@@ -19,6 +19,7 @@ from __future__ import annotations
  import logging
  import secrets
  import string
+ from functools import cache
  from typing import TYPE_CHECKING

  import pendulum
@@ -26,7 +27,6 @@ from deprecated import deprecated
  from kubernetes.client.rest import ApiException
  from slugify import slugify

- from airflow.compat.functools import cache
  from airflow.configuration import conf
  from airflow.exceptions import AirflowProviderDeprecationWarning


airflow/providers/cncf/kubernetes/operators/pod.py
@@ -80,7 +80,6 @@ from airflow.providers.cncf.kubernetes.utils.pod_manager import (
  PodNotFoundException,
  PodOperatorHookProtocol,
  PodPhase,
- check_exception_is_kubernetes_api_unauthorized,
  container_is_succeeded,
  get_container_termination_message,
  )
@@ -113,6 +112,10 @@ class PodReattachFailure(AirflowException):
  """When we expect to be able to find a pod but cannot."""


+ class PodCredentialsExpiredFailure(AirflowException):
+ """When pod fails to refresh credentials."""
+
+
  class KubernetesPodOperator(BaseOperator):
  """
  Execute a task in a Kubernetes Pod.
@@ -652,9 +655,8 @@ class KubernetesPodOperator(BaseOperator):
  return result

  @tenacity.retry(
- stop=tenacity.stop_after_attempt(3),
  wait=tenacity.wait_exponential(max=15),
- retry=tenacity.retry_if_exception(lambda exc: check_exception_is_kubernetes_api_unauthorized(exc)),
+ retry=tenacity.retry_if_exception_type(PodCredentialsExpiredFailure),
  reraise=True,
  )
  def await_pod_completion(self, pod: k8s.V1Pod):
@@ -675,6 +677,10 @@ class KubernetesPodOperator(BaseOperator):
  "Failed to check container status due to permission error. Refreshing credentials and retrying."
  )
  self._refresh_cached_properties()
+ self.pod_manager.read_pod(
+ pod=pod
+ ) # attempt using refreshed credentials, raises if still invalid
+ raise PodCredentialsExpiredFailure("Kubernetes credentials expired, retrying after refresh.")
  raise exc

  def _refresh_cached_properties(self):
@@ -784,9 +790,7 @@ class KubernetesPodOperator(BaseOperator):
  since_time=last_log_time,
  )

- if pod_log_status.running:
- self.log.info("Container still running; deferring again.")
- self.invoke_defer_method(pod_log_status.last_log_time)
+ self.invoke_defer_method(pod_log_status.last_log_time)
  else:
  self.invoke_defer_method()

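
The retry change above replaces the check_exception_is_kubernetes_api_unauthorized predicate with a dedicated PodCredentialsExpiredFailure raised after credentials are refreshed, and drops the three-attempt cap. A simplified, self-contained sketch of that tenacity pattern (check_credentials() is a hypothetical stand-in for re-reading the pod):

    import tenacity

    class PodCredentialsExpiredFailure(Exception):
        """When pod fails to refresh credentials."""

    attempts = {"n": 0}

    def check_credentials() -> str:
        attempts["n"] += 1
        if attempts["n"] < 3:
            raise PodCredentialsExpiredFailure("credentials expired, retrying after refresh")
        return "ok"

    @tenacity.retry(
        wait=tenacity.wait_exponential(max=15),
        retry=tenacity.retry_if_exception_type(PodCredentialsExpiredFailure),
        reraise=True,
    )
    def await_pod_completion() -> str:
        return check_credentials()

    print(await_pod_completion())  # "ok" after two retried failures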

airflow/providers/cncf/kubernetes/operators/spark_kubernetes.py
@@ -17,7 +17,6 @@
  # under the License.
  from __future__ import annotations

- import re
  from functools import cached_property
  from pathlib import Path
  from typing import TYPE_CHECKING, Any
@@ -83,7 +82,7 @@ class SparkKubernetesOperator(KubernetesPodOperator):
  image: str | None = None,
  code_path: str | None = None,
  namespace: str = "default",
- name: str = "default",
+ name: str | None = None,
  application_file: str | None = None,
  template_spec=None,
  get_logs: bool = True,
@@ -103,7 +102,6 @@ class SparkKubernetesOperator(KubernetesPodOperator):
  self.code_path = code_path
  self.application_file = application_file
  self.template_spec = template_spec
- self.name = self.create_job_name()
  self.kubernetes_conn_id = kubernetes_conn_id
  self.startup_timeout_seconds = startup_timeout_seconds
  self.reattach_on_restart = reattach_on_restart
@@ -161,8 +159,13 @@ class SparkKubernetesOperator(KubernetesPodOperator):
  return template_body

  def create_job_name(self):
- initial_name = add_unique_suffix(name=self.task_id, max_len=MAX_LABEL_LEN)
- return re.sub(r"[^a-z0-9-]+", "-", initial_name.lower())
+ name = (
+ self.name or self.template_body.get("spark", {}).get("metadata", {}).get("name") or self.task_id
+ )
+
+ updated_name = add_unique_suffix(name=name, max_len=MAX_LABEL_LEN)
+
+ return self._set_name(updated_name)

  @staticmethod
  def _get_pod_identifying_label_string(labels) -> str:
@@ -282,6 +285,8 @@ class SparkKubernetesOperator(KubernetesPodOperator):
  return CustomObjectsApi()

  def execute(self, context: Context):
+ self.name = self.create_job_name()
+
  self.log.info("Creating sparkApplication.")
  self.launcher = CustomObjectLauncher(
  name=self.name,
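
With the SparkKubernetesOperator changes above, the application name is no longer fixed at construction time: execute() resolves it from the explicit name argument, then the template's spark metadata name, then the task_id, and appends a unique suffix. An illustrative-only sketch of that fallback chain (add_unique_suffix() below is a simplified stand-in for the provider helper, and 63 is assumed here as the usual Kubernetes label length limit):

    import secrets

    MAX_LABEL_LEN = 63

    def add_unique_suffix(name: str, max_len: int = MAX_LABEL_LEN) -> str:
        suffix = "-" + secrets.token_hex(4)
        return name[: max_len - len(suffix)] + suffix

    def resolve_job_name(operator_name, template_body, task_id):
        name = (
            operator_name
            or template_body.get("spark", {}).get("metadata", {}).get("name")
            or task_id
        )
        return add_unique_suffix(name=name, max_len=MAX_LABEL_LEN)

    # No explicit name, so the template's metadata name wins over the task_id.
    print(resolve_job_name(None, {"spark": {"metadata": {"name": "pi-job"}}}, "compute_pi"))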

airflow/providers/cncf/kubernetes/pod_generator.py
@@ -477,6 +477,7 @@ class PodGenerator:
  execution_date=None,
  run_id=None,
  airflow_worker=None,
+ include_version=False,
  ):
  """
  Generate selector for kubernetes executor pod.
@@ -491,6 +492,7 @@ class PodGenerator:
  execution_date=execution_date,
  run_id=run_id,
  airflow_worker=airflow_worker,
+ include_version=include_version,
  )
  label_strings = [f"{label_id}={label}" for label_id, label in sorted(labels.items())]
  selector = ",".join(label_strings)
@@ -509,6 +511,7 @@ class PodGenerator:
  map_index=None,
  execution_date=None,
  run_id=None,
+ include_version=True,
  ):
  """
  Generate labels for kubernetes executor pod.
@@ -520,8 +523,9 @@ class PodGenerator:
  "task_id": make_safe_label_value(task_id),
  "try_number": str(try_number),
  "kubernetes_executor": "True",
- "airflow_version": airflow_version.replace("+", "-"),
  }
+ if include_version:
+ labels["airflow_version"] = airflow_version.replace("+", "-")
  if airflow_worker is not None:
  labels["airflow-worker"] = make_safe_label_value(str(airflow_worker))
  if map_index is not None and map_index >= 0:
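
The new include_version flag above controls whether the airflow_version label is attached: the labels builder keeps it by default (include_version=True), while build_selector_for_k8s_executor_pod now passes include_version=False, so selector matching no longer depends on the Airflow version that created the pod. A minimal sketch of the effect (example values only):

    def build_labels(dag_id: str, task_id: str, airflow_version: str, include_version: bool = True) -> dict:
        labels = {"dag_id": dag_id, "task_id": task_id}
        if include_version:
            labels["airflow_version"] = airflow_version.replace("+", "-")
        return labels

    print(build_labels("my_dag", "my_task", "2.10.2"))
    # {'dag_id': 'my_dag', 'task_id': 'my_task', 'airflow_version': '2.10.2'}
    print(build_labels("my_dag", "my_task", "2.10.2", include_version=False))
    # {'dag_id': 'my_dag', 'task_id': 'my_task'}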

airflow/providers/cncf/kubernetes/utils/pod_manager.py
@@ -19,7 +19,6 @@
  from __future__ import annotations

  import enum
- import itertools
  import json
  import math
  import time
@@ -721,14 +720,29 @@ class PodManager(LoggingMixin):
  except HTTPError as e:
  raise AirflowException(f"There was an error reading the kubernetes API: {e}")

- def await_xcom_sidecar_container_start(self, pod: V1Pod) -> None:
+ def await_xcom_sidecar_container_start(
+ self, pod: V1Pod, timeout: int = 900, log_interval: int = 30
+ ) -> None:
+ """Check if the sidecar container has reached the 'Running' state before performing do_xcom_push."""
  self.log.info("Checking if xcom sidecar container is started.")
- for attempt in itertools.count():
+ start_time = time.time()
+ last_log_time = start_time
+
+ while True:
+ elapsed_time = time.time() - start_time
  if self.container_is_running(pod, PodDefaults.SIDECAR_CONTAINER_NAME):
- self.log.info("The xcom sidecar container is started.")
+ self.log.info("The xcom sidecar container has started.")
  break
- if not attempt:
- self.log.warning("The xcom sidecar container is not yet started.")
+ if (time.time() - last_log_time) >= log_interval:
+ self.log.warning(
+ "Still waiting for the xcom sidecar container to start. Elapsed time: %d seconds.",
+ int(elapsed_time),
+ )
+ last_log_time = time.time()
+ if elapsed_time > timeout:
+ raise AirflowException(
+ f"Xcom sidecar container did not start within {timeout // 60} minutes."
+ )
  time.sleep(1)

  def extract_xcom(self, pod: V1Pod) -> str:
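
The pod_manager change above turns the previously unbounded sidecar wait into a loop with a hard timeout (900 s by default) and a "still waiting" warning every log_interval seconds. A standalone sketch of that loop shape (is_running below is a hypothetical probe, not the provider API):

    import time

    def wait_for_container(is_running, timeout: int = 900, log_interval: int = 30) -> None:
        start_time = time.time()
        last_log_time = start_time
        while True:
            elapsed_time = time.time() - start_time
            if is_running():
                print("The xcom sidecar container has started.")
                break
            if (time.time() - last_log_time) >= log_interval:
                print(f"Still waiting for the xcom sidecar container to start. Elapsed: {int(elapsed_time)}s.")
                last_log_time = time.time()
            if elapsed_time > timeout:
                raise TimeoutError(f"Xcom sidecar container did not start within {timeout // 60} minutes.")
            time.sleep(1)

    # Example: a probe that flips to True after roughly three seconds.
    deadline = time.time() + 3
    wait_for_container(lambda: time.time() >= deadline, timeout=10, log_interval=2)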

pyproject.toml
@@ -28,7 +28,7 @@ build-backend = "flit_core.buildapi"

  [project]
  name = "apache-airflow-providers-cncf-kubernetes"
- version = "8.4.2.rc1"
+ version = "9.0.0"
  description = "Provider package apache-airflow-providers-cncf-kubernetes for Apache Airflow"
  readme = "README.rst"
  authors = [
@@ -47,17 +47,16 @@ classifiers = [
  "Framework :: Apache Airflow",
  "Framework :: Apache Airflow :: Provider",
  "License :: OSI Approved :: Apache Software License",
- "Programming Language :: Python :: 3.8",
  "Programming Language :: Python :: 3.9",
  "Programming Language :: Python :: 3.10",
  "Programming Language :: Python :: 3.11",
  "Programming Language :: Python :: 3.12",
  "Topic :: System :: Monitoring",
  ]
- requires-python = "~=3.8"
+ requires-python = "~=3.9"
  dependencies = [
  "aiofiles>=23.2.0",
- "apache-airflow>=2.8.0rc0",
+ "apache-airflow>=2.8.0",
  "asgiref>=3.5.2",
  "cryptography>=41.0.0",
  "google-re2>=1.0",
@@ -66,8 +65,8 @@ dependencies = [
  ]

  [project.urls]
- "Documentation" = "https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/8.4.2"
- "Changelog" = "https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/8.4.2/changelog.html"
+ "Documentation" = "https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/9.0.0"
+ "Changelog" = "https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/9.0.0/changelog.html"
  "Bug Tracker" = "https://github.com/apache/airflow/issues"
  "Source Code" = "https://github.com/apache/airflow"
  "Slack Chat" = "https://s.apache.org/airflow-slack"