apache-airflow-providers-cncf-kubernetes 10.10.0rc1__py3-none-any.whl → 10.11.0rc2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (16)
  1. airflow/providers/cncf/kubernetes/__init__.py +3 -3
  2. airflow/providers/cncf/kubernetes/exceptions.py +1 -3
  3. airflow/providers/cncf/kubernetes/hooks/kubernetes.py +22 -9
  4. airflow/providers/cncf/kubernetes/kubernetes_helper_functions.py +61 -16
  5. airflow/providers/cncf/kubernetes/operators/job.py +9 -3
  6. airflow/providers/cncf/kubernetes/operators/pod.py +26 -13
  7. airflow/providers/cncf/kubernetes/operators/resource.py +2 -8
  8. airflow/providers/cncf/kubernetes/operators/spark_kubernetes.py +18 -3
  9. airflow/providers/cncf/kubernetes/utils/pod_manager.py +14 -38
  10. airflow/providers/cncf/kubernetes/version_compat.py +5 -1
  11. {apache_airflow_providers_cncf_kubernetes-10.10.0rc1.dist-info → apache_airflow_providers_cncf_kubernetes-10.11.0rc2.dist-info}/METADATA +10 -10
  12. {apache_airflow_providers_cncf_kubernetes-10.10.0rc1.dist-info → apache_airflow_providers_cncf_kubernetes-10.11.0rc2.dist-info}/RECORD +16 -16
  13. {apache_airflow_providers_cncf_kubernetes-10.10.0rc1.dist-info → apache_airflow_providers_cncf_kubernetes-10.11.0rc2.dist-info}/WHEEL +0 -0
  14. {apache_airflow_providers_cncf_kubernetes-10.10.0rc1.dist-info → apache_airflow_providers_cncf_kubernetes-10.11.0rc2.dist-info}/entry_points.txt +0 -0
  15. {apache_airflow_providers_cncf_kubernetes-10.10.0rc1.dist-info → apache_airflow_providers_cncf_kubernetes-10.11.0rc2.dist-info}/licenses/LICENSE +0 -0
  16. {apache_airflow_providers_cncf_kubernetes-10.10.0rc1.dist-info → apache_airflow_providers_cncf_kubernetes-10.11.0rc2.dist-info}/licenses/NOTICE +0 -0
@@ -29,11 +29,11 @@ from airflow import __version__ as airflow_version
 
  __all__ = ["__version__"]
 
- __version__ = "10.10.0"
+ __version__ = "10.11.0"
 
  if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
- "2.10.0"
+ "2.11.0"
  ):
  raise RuntimeError(
- f"The package `apache-airflow-providers-cncf-kubernetes:{__version__}` needs Apache Airflow 2.10.0+"
+ f"The package `apache-airflow-providers-cncf-kubernetes:{__version__}` needs Apache Airflow 2.11.0+"
  )
@@ -16,9 +16,7 @@
  # under the License.
  from __future__ import annotations
 
- from airflow.exceptions import (
- AirflowException,
- )
+ from airflow.exceptions import AirflowException
 
 
  class PodMutationHookException(AirflowException):
@@ -27,7 +27,6 @@ from typing import TYPE_CHECKING, Any, Protocol
 
  import aiofiles
  import requests
- import tenacity
  from asgiref.sync import sync_to_async
  from kubernetes import client, config, utils, watch
  from kubernetes.client.models import V1Deployment
@@ -39,7 +38,7 @@ from airflow.exceptions import AirflowException, AirflowNotFoundException
  from airflow.models import Connection
  from airflow.providers.cncf.kubernetes.exceptions import KubernetesApiError, KubernetesApiPermissionError
  from airflow.providers.cncf.kubernetes.kube_client import _disable_verify_ssl, _enable_tcp_keepalive
- from airflow.providers.cncf.kubernetes.kubernetes_helper_functions import should_retry_creation
+ from airflow.providers.cncf.kubernetes.kubernetes_helper_functions import generic_api_retry
  from airflow.providers.cncf.kubernetes.utils.container import (
  container_is_completed,
  container_is_running,
@@ -390,6 +389,7 @@ class KubernetesHook(BaseHook, PodOperatorHookProtocol):
  self.log.debug("Response: %s", response)
  return response
 
+ @generic_api_retry
  def get_custom_object(
  self, group: str, version: str, plural: str, name: str, namespace: str | None = None
  ):
@@ -412,6 +412,7 @@ class KubernetesHook(BaseHook, PodOperatorHookProtocol):
  )
  return response
 
+ @generic_api_retry
  def delete_custom_object(
  self, group: str, version: str, plural: str, name: str, namespace: str | None = None, **kwargs
  ):
@@ -540,12 +541,7 @@ class KubernetesHook(BaseHook, PodOperatorHookProtocol):
  name=name, namespace=namespace, pretty=True, **kwargs
  )
 
- @tenacity.retry(
- stop=tenacity.stop_after_attempt(3),
- wait=tenacity.wait_random_exponential(),
- reraise=True,
- retry=tenacity.retry_if_exception(should_retry_creation),
- )
+ @generic_api_retry
  def create_job(
  self,
  job: V1Job,
@@ -572,6 +568,7 @@ class KubernetesHook(BaseHook, PodOperatorHookProtocol):
  raise e
  return resp
 
+ @generic_api_retry
  def get_job(self, job_name: str, namespace: str) -> V1Job:
  """
  Get Job of specified name and namespace.
@@ -582,6 +579,7 @@ class KubernetesHook(BaseHook, PodOperatorHookProtocol):
  """
  return self.batch_v1_client.read_namespaced_job(name=job_name, namespace=namespace, pretty=True)
 
+ @generic_api_retry
  def get_job_status(self, job_name: str, namespace: str) -> V1Job:
  """
  Get job with status of specified name and namespace.
@@ -611,6 +609,7 @@ class KubernetesHook(BaseHook, PodOperatorHookProtocol):
  self.log.info("The job '%s' is incomplete. Sleeping for %i sec.", job_name, job_poll_interval)
  sleep(job_poll_interval)
 
+ @generic_api_retry
  def list_jobs_all_namespaces(self) -> V1JobList:
  """
  Get list of Jobs from all namespaces.
@@ -619,6 +618,7 @@ class KubernetesHook(BaseHook, PodOperatorHookProtocol):
  """
  return self.batch_v1_client.list_job_for_all_namespaces(pretty=True)
 
+ @generic_api_retry
  def list_jobs_from_namespace(self, namespace: str) -> V1JobList:
  """
  Get list of Jobs from dedicated namespace.
@@ -674,6 +674,7 @@ class KubernetesHook(BaseHook, PodOperatorHookProtocol):
  return bool(next((c for c in conditions if c.type == "Complete" and c.status), None))
  return False
 
+ @generic_api_retry
  def patch_namespaced_job(self, job_name: str, namespace: str, body: object) -> V1Job:
  """
  Update the specified Job.
@@ -831,6 +832,13 @@ class AsyncKubernetesHook(KubernetesHook):
  "Reading kubernetes configuration file from connection "
  "object and writing temporary config file with its content",
  )
+ if isinstance(kubeconfig, dict):
+ self.log.debug(
+ LOADING_KUBE_CONFIG_FILE_RESOURCE.format(
+ "connection kube_config dictionary (serializing)"
+ )
+ )
+ kubeconfig = json.dumps(kubeconfig)
  await temp_config.write(kubeconfig.encode())
  await temp_config.flush()
  self._is_in_cluster = False
@@ -872,6 +880,7 @@ class AsyncKubernetesHook(KubernetesHook):
  if kube_client is not None:
  await kube_client.close()
 
+ @generic_api_retry
  async def get_pod(self, name: str, namespace: str) -> V1Pod:
  """
  Get pod's object.
@@ -892,6 +901,7 @@ class AsyncKubernetesHook(KubernetesHook):
  raise KubernetesApiPermissionError("Permission denied (403) from Kubernetes API.") from e
  raise KubernetesApiError from e
 
+ @generic_api_retry
  async def delete_pod(self, name: str, namespace: str):
  """
  Delete pod's object.
@@ -910,6 +920,7 @@ class AsyncKubernetesHook(KubernetesHook):
  if str(e.status) != "404":
  raise
 
+ @generic_api_retry
  async def read_logs(
  self, name: str, namespace: str, container_name: str | None = None, since_seconds: int | None = None
  ) -> list[str]:
@@ -932,7 +943,7 @@ class AsyncKubernetesHook(KubernetesHook):
  logs = await v1_api.read_namespaced_pod_log(
  name=name,
  namespace=namespace,
- container_name=container_name,
+ container=container_name,
  follow=False,
  timestamps=True,
  since_seconds=since_seconds,
@@ -942,6 +953,7 @@ class AsyncKubernetesHook(KubernetesHook):
  except HTTPError as e:
  raise KubernetesApiError from e
 
+ @generic_api_retry
  async def get_pod_events(self, name: str, namespace: str) -> CoreV1EventList:
  """Get pod's events."""
  async with self.get_conn() as connection:
@@ -957,6 +969,7 @@ class AsyncKubernetesHook(KubernetesHook):
  raise KubernetesApiPermissionError("Permission denied (403) from Kubernetes API.") from e
  raise KubernetesApiError from e
 
+ @generic_api_retry
  async def get_job_status(self, name: str, namespace: str) -> V1Job:
  """
  Get job's status object.
@@ -23,10 +23,14 @@ from functools import cache
  from typing import TYPE_CHECKING
 
  import pendulum
- from kubernetes.client.rest import ApiException
+ import tenacity
+ from kubernetes.client.rest import ApiException as SyncApiException
+ from kubernetes_asyncio.client.exceptions import ApiException as AsyncApiException
  from slugify import slugify
+ from urllib3.exceptions import HTTPError
 
  from airflow.configuration import conf
+ from airflow.exceptions import AirflowException
  from airflow.providers.cncf.kubernetes.backcompat import get_logical_date_key
 
  if TYPE_CHECKING:
@@ -39,6 +43,62 @@ alphanum_lower = string.ascii_lowercase + string.digits
  POD_NAME_MAX_LENGTH = 63 # Matches Linux kernel's HOST_NAME_MAX default value minus 1.
 
 
+ class PodLaunchFailedException(AirflowException):
+ """When pod launching fails in KubernetesPodOperator."""
+
+
+ class KubernetesApiException(AirflowException):
+ """When communication with kubernetes API fails."""
+
+
+ API_RETRIES = conf.getint("workers", "api_retries", fallback=5)
+ API_RETRY_WAIT_MIN = conf.getfloat("workers", "api_retry_wait_min", fallback=1)
+ API_RETRY_WAIT_MAX = conf.getfloat("workers", "api_retry_wait_max", fallback=15)
+
+ _default_wait = tenacity.wait_exponential(min=API_RETRY_WAIT_MIN, max=API_RETRY_WAIT_MAX)
+
+ TRANSIENT_STATUS_CODES = {409, 429, 500, 502, 503, 504}
+
+
+ def _should_retry_api(exc: BaseException) -> bool:
+ """Retry on selected ApiException status codes, plus plain HTTP/timeout errors."""
+ if isinstance(exc, (SyncApiException, AsyncApiException)):
+ return exc.status in TRANSIENT_STATUS_CODES
+ return isinstance(exc, (HTTPError, KubernetesApiException))
+
+
+ class WaitRetryAfterOrExponential(tenacity.wait.wait_base):
+ """Wait strategy that honors Retry-After header on 429, else falls back to exponential backoff."""
+
+ def __call__(self, retry_state):
+ exc = retry_state.outcome.exception() if retry_state.outcome else None
+ if isinstance(exc, (SyncApiException, AsyncApiException)) and exc.status == 429:
+ retry_after = (exc.headers or {}).get("Retry-After")
+ if retry_after:
+ try:
+ return float(int(retry_after))
+ except ValueError:
+ pass
+ # Inline exponential fallback
+ return _default_wait(retry_state)
+
+
+ def generic_api_retry(func):
+ """
+ Retry to Kubernetes API calls.
+
+ - Retries only transient ApiException status codes.
+ - Honors Retry-After on 429.
+ """
+ return tenacity.retry(
+ stop=tenacity.stop_after_attempt(API_RETRIES),
+ wait=WaitRetryAfterOrExponential(),
+ retry=tenacity.retry_if_exception(_should_retry_api),
+ reraise=True,
+ before_sleep=tenacity.before_sleep_log(log, logging.WARNING),
+ )(func)
+
+
  def rand_str(num):
  """
  Generate random lowercase alphanumeric string of length num.
@@ -148,18 +208,3 @@ def annotations_for_logging_task_metadata(annotation_set):
  else:
  annotations_for_logging = "<omitted>"
  return annotations_for_logging
-
-
- def should_retry_creation(exception: BaseException) -> bool:
- """
- Check if an Exception indicates a transient error and warrants retrying.
-
- This function is needed for preventing 'No agent available' error. The error appears time to time
- when users try to create a Resource or Job. This issue is inside kubernetes and in the current moment
- has no solution. Like a temporary solution we decided to retry Job or Resource creation request each
- time when this error appears.
- More about this issue here: https://github.com/cert-manager/cert-manager/issues/6457
- """
- if isinstance(exception, ApiException):
- return str(exception.status) == "500"
- return False
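Note (illustrative sketch, not part of the released files): the new generic_api_retry helper introduced above replaces the per-call tenacity decorators. Applied to a method, it retries transient Kubernetes API failures up to the [workers] api_retries setting and honors a Retry-After header on 429 responses. The DemoHook class below is made up purely to show the decorator in use.

    from airflow.providers.cncf.kubernetes.kubernetes_helper_functions import generic_api_retry

    class DemoHook:  # hypothetical class, only to illustrate the decorator
        @generic_api_retry
        def read_pod(self, name: str, namespace: str):
            # Transient ApiException statuses (409, 429, 5xx) and urllib3 HTTPError
            # are retried with backoff, then re-raised once the configured number
            # of attempts is exhausted.
            return self.core_v1_client.read_namespaced_pod(name=name, namespace=namespace)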
@@ -35,6 +35,7 @@ from airflow.configuration import conf
  from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
  from airflow.providers.cncf.kubernetes.hooks.kubernetes import KubernetesHook
  from airflow.providers.cncf.kubernetes.kubernetes_helper_functions import (
+ POD_NAME_MAX_LENGTH,
  add_unique_suffix,
  create_unique_id,
  )
@@ -56,6 +57,8 @@ if TYPE_CHECKING:
 
  log = logging.getLogger(__name__)
 
+ JOB_NAME_PREFIX = "job-"
+
 
  class KubernetesJobOperator(KubernetesPodOperator):
  """
@@ -378,15 +381,18 @@ class KubernetesJobOperator(KubernetesPodOperator):
 
  job = self.reconcile_jobs(job_template, job)
 
+ # Account for job name prefix when generating/truncating the name
+ max_base_length = POD_NAME_MAX_LENGTH - len(JOB_NAME_PREFIX)
+
  if not job.metadata.name:
  job.metadata.name = create_unique_id(
- task_id=self.task_id, unique=self.random_name_suffix, max_length=80
+ task_id=self.task_id, unique=self.random_name_suffix, max_length=max_base_length
  )
  elif self.random_name_suffix:
  # user has supplied job name, we're just adding suffix
- job.metadata.name = add_unique_suffix(name=job.metadata.name)
+ job.metadata.name = add_unique_suffix(name=job.metadata.name, max_len=max_base_length)
 
- job.metadata.name = f"job-{job.metadata.name}"
+ job.metadata.name = f"{JOB_NAME_PREFIX}{job.metadata.name}"
 
  if not job.metadata.namespace:
  hook_namespace = self.hook.get_namespace()
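A quick check of the name-length change above (values taken from the hunks in this diff; the arithmetic is only an illustration):

    POD_NAME_MAX_LENGTH = 63
    JOB_NAME_PREFIX = "job-"
    max_base_length = POD_NAME_MAX_LENGTH - len(JOB_NAME_PREFIX)  # 63 - 4 = 59
    # The previous hard-coded max_length=80 could yield names longer than the
    # 63-character limit once the "job-" prefix was prepended.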
@@ -41,11 +41,6 @@ from kubernetes.stream import stream
  from urllib3.exceptions import HTTPError
 
  from airflow.configuration import conf
- from airflow.exceptions import (
- AirflowException,
- AirflowSkipException,
- TaskDeferred,
- )
  from airflow.providers.cncf.kubernetes import pod_generator
  from airflow.providers.cncf.kubernetes.backcompat.backwards_compat_converters import (
  convert_affinity,
@@ -65,6 +60,7 @@ from airflow.providers.cncf.kubernetes.kubernetes_helper_functions import (
  POD_NAME_MAX_LENGTH,
  add_unique_suffix,
  create_unique_id,
+ generic_api_retry,
  )
  from airflow.providers.cncf.kubernetes.pod_generator import PodGenerator
  from airflow.providers.cncf.kubernetes.triggers.pod import KubernetesPodTrigger
@@ -82,12 +78,13 @@ from airflow.providers.cncf.kubernetes.utils.pod_manager import (
  PodPhase,
  )
  from airflow.providers.cncf.kubernetes.version_compat import AIRFLOW_V_3_1_PLUS
- from airflow.providers.common.compat.sdk import XCOM_RETURN_KEY
+ from airflow.providers.common.compat.sdk import XCOM_RETURN_KEY, AirflowSkipException, TaskDeferred
 
  if AIRFLOW_V_3_1_PLUS:
  from airflow.sdk import BaseOperator
  else:
  from airflow.models import BaseOperator
+ from airflow.exceptions import AirflowException
  from airflow.settings import pod_mutation_hook
  from airflow.utils import yaml
  from airflow.utils.helpers import prune_dict, validate_key
@@ -126,6 +123,10 @@ class PodCredentialsExpiredFailure(AirflowException):
  """When pod fails to refresh credentials."""
 
 
+ class FoundMoreThanOnePodFailure(AirflowException):
+ """When during reconnect more than one matching pod was found."""
+
+
  class KubernetesPodOperator(BaseOperator):
  """
  Execute a task in a Kubernetes Pod.
@@ -563,6 +564,7 @@ class KubernetesPodOperator(BaseOperator):
  callback.on_sync_client_creation(client=client, operator=self)
  return client
 
+ @generic_api_retry
  def find_pod(self, namespace: str, context: Context, *, exclude_checked: bool = True) -> k8s.V1Pod | None:
  """Return an already-running pod for this task instance if one exists."""
  label_selector = self._build_find_pod_label_selector(context, exclude_checked=exclude_checked)
@@ -579,7 +581,7 @@ class KubernetesPodOperator(BaseOperator):
  self.log_matching_pod(pod=pod, context=context)
  elif num_pods > 1:
  if self.reattach_on_restart:
- raise AirflowException(f"More than one pod running with labels {label_selector}")
+ raise FoundMoreThanOnePodFailure(f"More than one pod running with labels {label_selector}")
  self.log.warning("Found more than one pod running with labels %s, resolving ...", label_selector)
  pod = self.process_duplicate_label_pods(pod_list)
  self.log_matching_pod(pod=pod, context=context)
@@ -935,6 +937,8 @@ class KubernetesPodOperator(BaseOperator):
  raise
  finally:
  self._clean(event=event, context=context, result=xcom_sidecar_output)
+ if self.do_xcom_push:
+ return xcom_sidecar_output
 
  def _clean(self, event: dict[str, Any], result: dict | None, context: Context) -> None:
  if self.pod is None:
@@ -1198,11 +1202,16 @@ class KubernetesPodOperator(BaseOperator):
  def patch_already_checked(self, pod: k8s.V1Pod, *, reraise=True):
  """Add an "already checked" label to ensure we don't reattach on retries."""
  with _optionally_suppress(reraise=reraise):
- self.client.patch_namespaced_pod(
- name=pod.metadata.name,
- namespace=pod.metadata.namespace,
- body={"metadata": {"labels": {self.POD_CHECKED_KEY: "True"}}},
- )
+
+ @generic_api_retry
+ def _patch_with_retry():
+ self.client.patch_namespaced_pod(
+ name=pod.metadata.name,
+ namespace=pod.metadata.namespace,
+ body={"metadata": {"labels": {self.POD_CHECKED_KEY: "True"}}},
+ )
+
+ _patch_with_retry()
 
  def on_kill(self) -> None:
  self._killed = True
@@ -1215,8 +1224,12 @@ class KubernetesPodOperator(BaseOperator):
  if self.termination_grace_period is not None:
  kwargs.update(grace_period_seconds=self.termination_grace_period)
 
- try:
+ @generic_api_retry
+ def _delete_with_retry():
  self.client.delete_namespaced_pod(**kwargs)
+
+ try:
+ _delete_with_retry()
  except kubernetes.client.exceptions.ApiException:
  self.log.exception("Unable to delete pod %s", self.pod.metadata.name)
 
@@ -23,13 +23,12 @@ from collections.abc import Sequence
  from functools import cached_property
  from typing import TYPE_CHECKING
 
- import tenacity
  import yaml
  from kubernetes.utils import create_from_yaml
 
  from airflow.exceptions import AirflowException
  from airflow.providers.cncf.kubernetes.hooks.kubernetes import KubernetesHook
- from airflow.providers.cncf.kubernetes.kubernetes_helper_functions import should_retry_creation
+ from airflow.providers.cncf.kubernetes.kubernetes_helper_functions import generic_api_retry
  from airflow.providers.cncf.kubernetes.utils.delete_from import delete_from_yaml
  from airflow.providers.cncf.kubernetes.utils.k8s_resource_iterator import k8s_resource_iterator
  from airflow.providers.cncf.kubernetes.version_compat import AIRFLOW_V_3_1_PLUS
@@ -132,12 +131,7 @@ class KubernetesCreateResourceOperator(KubernetesResourceBaseOperator):
  else:
  self.custom_object_client.create_cluster_custom_object(group, version, plural, body)
 
- @tenacity.retry(
- stop=tenacity.stop_after_attempt(3),
- wait=tenacity.wait_random_exponential(),
- reraise=True,
- retry=tenacity.retry_if_exception(should_retry_creation),
- )
+ @generic_api_retry
  def _create_objects(self, objects):
  self.log.info("Starting resource creation")
  if not self.custom_resource_definition:
@@ -286,6 +286,16 @@ class SparkKubernetesOperator(KubernetesPodOperator):
  def custom_obj_api(self) -> CustomObjectsApi:
  return CustomObjectsApi()
 
+ @cached_property
+ def launcher(self) -> CustomObjectLauncher:
+ return CustomObjectLauncher(
+ name=self.name,
+ namespace=self.namespace,
+ kube_client=self.client,
+ custom_obj_api=self.custom_obj_api,
+ template_body=self.template_body,
+ )
+
  def get_or_create_spark_crd(self, launcher: CustomObjectLauncher, context) -> k8s.V1Pod:
  if self.reattach_on_restart:
  driver_pod = self.find_spark_job(context)
@@ -323,6 +333,8 @@ class SparkKubernetesOperator(KubernetesPodOperator):
  )
  self.pod = existing_pod
  self.pod_request_obj = None
+ if self.pod.metadata.name.endswith("-driver"):
+ self.name = self.pod.metadata.name.removesuffix("-driver")
  return
 
  if "spark" not in template_body:
@@ -361,9 +373,12 @@ class SparkKubernetesOperator(KubernetesPodOperator):
  return self.find_spark_job(context, exclude_checked=exclude_checked)
 
  def on_kill(self) -> None:
- if self.launcher:
- self.log.debug("Deleting spark job for task %s", self.task_id)
- self.launcher.delete_spark_job()
+ self.log.debug("Deleting spark job for task %s", self.task_id)
+ job_name = self.name
+ if self.pod and self.pod.metadata and self.pod.metadata.name:
+ if self.pod.metadata.name.endswith("-driver"):
+ job_name = self.pod.metadata.name.removesuffix("-driver")
+ self.launcher.delete_spark_job(spark_job_name=job_name)
 
  def patch_already_checked(self, pod: k8s.V1Pod, *, reraise=True):
  """Add an "already checked" annotation to ensure we don't reattach on retries."""
@@ -30,7 +30,6 @@ from datetime import timedelta
  from typing import TYPE_CHECKING, Literal, cast
 
  import pendulum
- import tenacity
  from kubernetes import client, watch
  from kubernetes.client.rest import ApiException
  from kubernetes.stream import stream as kubernetes_stream
@@ -40,6 +39,11 @@ from urllib3.exceptions import HTTPError, TimeoutError
 
  from airflow.exceptions import AirflowException
  from airflow.providers.cncf.kubernetes.callbacks import ExecutionMode, KubernetesPodOperatorCallback
+ from airflow.providers.cncf.kubernetes.kubernetes_helper_functions import (
+ KubernetesApiException,
+ PodLaunchFailedException,
+ generic_api_retry,
+ )
  from airflow.providers.cncf.kubernetes.utils.container import (
  container_is_completed,
  container_is_running,
@@ -71,17 +75,6 @@ Sentinel for no xcom result.
  """
 
 
- class PodLaunchFailedException(AirflowException):
- """When pod launching fails in KubernetesPodOperator."""
-
-
- def should_retry_start_pod(exception: BaseException) -> bool:
- """Check if an Exception indicates a transient error and warrants retrying."""
- if isinstance(exception, ApiException):
- return str(exception.status) == "409"
- return False
-
-
  class PodPhase:
  """
  Possible pod phases.
@@ -344,6 +337,7 @@ class PodManager(LoggingMixin):
  raise e
  return resp
 
+ @generic_api_retry
  def delete_pod(self, pod: V1Pod) -> None:
  """Delete POD."""
  try:
@@ -355,12 +349,7 @@ class PodManager(LoggingMixin):
  if str(e.status) != "404":
  raise
 
- @tenacity.retry(
- stop=tenacity.stop_after_attempt(3),
- wait=tenacity.wait_random_exponential(),
- reraise=True,
- retry=tenacity.retry_if_exception(should_retry_start_pod),
- )
+ @generic_api_retry
  def create_pod(self, pod: V1Pod) -> V1Pod:
  """Launch the pod asynchronously."""
  return self.run_pod_async(pod)
@@ -718,7 +707,7 @@ class PodManager(LoggingMixin):
  remote_pod = self.read_pod(pod)
  return container_is_terminated(pod=remote_pod, container_name=container_name)
 
- @tenacity.retry(stop=tenacity.stop_after_attempt(6), wait=tenacity.wait_exponential(max=15), reraise=True)
+ @generic_api_retry
  def read_pod_logs(
  self,
  pod: V1Pod,
@@ -761,7 +750,6 @@ class PodManager(LoggingMixin):
  post_termination_timeout=post_termination_timeout,
  )
 
- @tenacity.retry(stop=tenacity.stop_after_attempt(3), wait=tenacity.wait_exponential(), reraise=True)
  def get_init_container_names(self, pod: V1Pod) -> list[str]:
  """
  Return container names from the POD except for the airflow-xcom-sidecar container.
@@ -770,7 +758,6 @@ class PodManager(LoggingMixin):
  """
  return [container_spec.name for container_spec in pod.spec.init_containers]
 
- @tenacity.retry(stop=tenacity.stop_after_attempt(3), wait=tenacity.wait_exponential(), reraise=True)
  def get_container_names(self, pod: V1Pod) -> list[str]:
  """
  Return container names from the POD except for the airflow-xcom-sidecar container.
@@ -784,7 +771,7 @@ class PodManager(LoggingMixin):
  if container_spec.name != PodDefaults.SIDECAR_CONTAINER_NAME
  ]
 
- @tenacity.retry(stop=tenacity.stop_after_attempt(3), wait=tenacity.wait_exponential(), reraise=True)
+ @generic_api_retry
  def read_pod_events(self, pod: V1Pod) -> CoreV1EventList:
  """Read events from the POD."""
  try:
@@ -792,15 +779,15 @@ class PodManager(LoggingMixin):
  namespace=pod.metadata.namespace, field_selector=f"involvedObject.name={pod.metadata.name}"
  )
  except HTTPError as e:
- raise AirflowException(f"There was an error reading the kubernetes API: {e}")
+ raise KubernetesApiException(f"There was an error reading the kubernetes API: {e}")
 
- @tenacity.retry(stop=tenacity.stop_after_attempt(3), wait=tenacity.wait_exponential(), reraise=True)
+ @generic_api_retry
  def read_pod(self, pod: V1Pod) -> V1Pod:
  """Read POD information."""
  try:
  return self._client.read_namespaced_pod(pod.metadata.name, pod.metadata.namespace)
  except HTTPError as e:
- raise AirflowException(f"There was an error reading the kubernetes API: {e}")
+ raise KubernetesApiException(f"There was an error reading the kubernetes API: {e}")
 
  def await_xcom_sidecar_container_start(
  self, pod: V1Pod, timeout: int = 900, log_interval: int = 30
@@ -839,11 +826,7 @@ class PodManager(LoggingMixin):
  finally:
  self.extract_xcom_kill(pod)
 
- @tenacity.retry(
- stop=tenacity.stop_after_attempt(5),
- wait=tenacity.wait_exponential(multiplier=1, min=4, max=10),
- reraise=True,
- )
+ @generic_api_retry
  def extract_xcom_json(self, pod: V1Pod) -> str:
  """Retrieve XCom value and also check if xcom json is valid."""
  command = (
@@ -884,11 +867,7 @@ class PodManager(LoggingMixin):
  raise AirflowException(f"Failed to extract xcom from pod: {pod.metadata.name}")
  return result
 
- @tenacity.retry(
- stop=tenacity.stop_after_attempt(5),
- wait=tenacity.wait_exponential(multiplier=1, min=4, max=10),
- reraise=True,
- )
+ @generic_api_retry
  def extract_xcom_kill(self, pod: V1Pod):
  """Kill xcom sidecar container."""
  with closing(
@@ -992,7 +971,6 @@ class AsyncPodManager(LoggingMixin):
  self._callbacks = callbacks or []
  self.stop_watching_events = False
 
- @tenacity.retry(stop=tenacity.stop_after_attempt(5), wait=tenacity.wait_exponential(), reraise=True)
  async def read_pod(self, pod: V1Pod) -> V1Pod:
  """Read POD information."""
  return await self._hook.get_pod(
@@ -1000,7 +978,6 @@ class AsyncPodManager(LoggingMixin):
  pod.metadata.namespace,
  )
 
- @tenacity.retry(stop=tenacity.stop_after_attempt(5), wait=tenacity.wait_exponential(), reraise=True)
  async def read_pod_events(self, pod: V1Pod) -> CoreV1EventList:
  """Get pod's events."""
  return await self._hook.get_pod_events(
@@ -1034,7 +1011,6 @@ class AsyncPodManager(LoggingMixin):
  check_interval=check_interval,
  )
 
- @tenacity.retry(stop=tenacity.stop_after_attempt(5), wait=tenacity.wait_exponential(), reraise=True)
  async def fetch_container_logs_before_current_sec(
  self, pod: V1Pod, container_name: str, since_time: DateTime | None = None
  ) -> DateTime | None:
@@ -35,4 +35,8 @@ def get_base_airflow_version_tuple() -> tuple[int, int, int]:
  AIRFLOW_V_3_0_PLUS = get_base_airflow_version_tuple() >= (3, 0, 0)
  AIRFLOW_V_3_1_PLUS = get_base_airflow_version_tuple() >= (3, 1, 0)
 
- __all__ = ["AIRFLOW_V_3_0_PLUS", "AIRFLOW_V_3_1_PLUS"]
+
+ __all__ = [
+ "AIRFLOW_V_3_0_PLUS",
+ "AIRFLOW_V_3_1_PLUS",
+ ]
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: apache-airflow-providers-cncf-kubernetes
- Version: 10.10.0rc1
+ Version: 10.11.0rc2
  Summary: Provider package apache-airflow-providers-cncf-kubernetes for Apache Airflow
  Keywords: airflow-provider,cncf.kubernetes,airflow,integration
  Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -23,15 +23,15 @@ Classifier: Topic :: System :: Monitoring
  License-File: LICENSE
  License-File: NOTICE
  Requires-Dist: aiofiles>=23.2.0
- Requires-Dist: apache-airflow>=2.10.0rc1
- Requires-Dist: apache-airflow-providers-common-compat>=1.8.0rc1
+ Requires-Dist: apache-airflow>=2.11.0rc1
+ Requires-Dist: apache-airflow-providers-common-compat>=1.10.0rc1
  Requires-Dist: asgiref>=3.5.2
  Requires-Dist: cryptography>=41.0.0,<46.0.0
  Requires-Dist: kubernetes>=32.0.0,<35.0.0
  Requires-Dist: kubernetes_asyncio>=32.0.0,<35.0.0
  Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
- Project-URL: Changelog, https://airflow.staged.apache.org/docs/apache-airflow-providers-cncf-kubernetes/10.10.0/changelog.html
- Project-URL: Documentation, https://airflow.staged.apache.org/docs/apache-airflow-providers-cncf-kubernetes/10.10.0
+ Project-URL: Changelog, https://airflow.staged.apache.org/docs/apache-airflow-providers-cncf-kubernetes/10.11.0/changelog.html
+ Project-URL: Documentation, https://airflow.staged.apache.org/docs/apache-airflow-providers-cncf-kubernetes/10.11.0
  Project-URL: Mastodon, https://fosstodon.org/@airflow
  Project-URL: Slack Chat, https://s.apache.org/airflow-slack
  Project-URL: Source Code, https://github.com/apache/airflow
@@ -62,7 +62,7 @@ Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/
 
  Package ``apache-airflow-providers-cncf-kubernetes``
 
- Release: ``10.10.0``
+ Release: ``10.11.0``
 
 
  `Kubernetes <https://kubernetes.io/>`__
@@ -75,7 +75,7 @@ This is a provider package for ``cncf.kubernetes`` provider. All classes for thi
  are in ``airflow.providers.cncf.kubernetes`` python package.
 
  You can find package information and changelog for the provider
- in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/10.10.0/>`_.
+ in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/10.11.0/>`_.
 
  Installation
  ------------
@@ -93,8 +93,8 @@ Requirements
  PIP package Version required
  ========================================== ====================
  ``aiofiles`` ``>=23.2.0``
- ``apache-airflow`` ``>=2.10.0``
- ``apache-airflow-providers-common-compat`` ``>=1.8.0``
+ ``apache-airflow`` ``>=2.11.0``
+ ``apache-airflow-providers-common-compat`` ``>=1.10.0``
  ``asgiref`` ``>=3.5.2``
  ``cryptography`` ``>=41.0.0,<46.0.0``
  ``kubernetes`` ``>=32.0.0,<35.0.0``
@@ -121,5 +121,5 @@ Dependent package
  ================================================================================================================== =================
 
  The changelog for the provider package can be found in the
- `changelog <https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/10.10.0/changelog.html>`_.
+ `changelog <https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/10.11.0/changelog.html>`_.
 
@@ -1,17 +1,17 @@
- airflow/providers/cncf/kubernetes/__init__.py,sha256=sNnyDVLE3RiR85dOsTdjZbfX_6-IOzWAwUJxqcoUygY,1506
+ airflow/providers/cncf/kubernetes/__init__.py,sha256=zMf0rIC0OSd2EGyIIrmU9prmt1N4o7PNXEQI5J9DQr8,1506
  airflow/providers/cncf/kubernetes/callbacks.py,sha256=1nCLXFJKtr5FM9ApB8Drw5VAGSC3TDFsPSTMtRnAR3Q,6085
- airflow/providers/cncf/kubernetes/exceptions.py,sha256=EGvP2n8c43t58e_-SAWpMopQxzX6PJmaflo1HP-RTY4,1403
+ airflow/providers/cncf/kubernetes/exceptions.py,sha256=-H_htmfpVE158nLU4V3KRjG3k1PcuiYUVMjZjMRp0GA,1394
  airflow/providers/cncf/kubernetes/get_provider_info.py,sha256=7fYqFWd1K0j8LgVkB_HYambOGPTrBDuaVXenLUPtF8g,16600
  airflow/providers/cncf/kubernetes/k8s_model.py,sha256=xmdFhX29DjegoZ-cq8-KDL9soVYXf4OpU6fAGr3cPTU,2101
  airflow/providers/cncf/kubernetes/kube_client.py,sha256=AaTY2UhhKVa-qrhMvpiQjdUJhrQyndwQ_5PoRmWJy3k,5714
  airflow/providers/cncf/kubernetes/kube_config.py,sha256=UsxzPjsonzy5a6e0P8XjenT-ncmX4R6KB1EqDfWpLnM,6191
- airflow/providers/cncf/kubernetes/kubernetes_helper_functions.py,sha256=IuGf9U-jKlL8h2u_npPsC7WBr9SpAgb-oMOmlcjFxxY,5624
+ airflow/providers/cncf/kubernetes/kubernetes_helper_functions.py,sha256=HZ4ARkQ9NsN3hEDHA0uheiNRsOgJks-DmftpIqVhcic,7185
  airflow/providers/cncf/kubernetes/pod_generator.py,sha256=0VEcAtT2SzAFwSDsQWe2QdrY2mDV8s4hBw0qLcmIMGw,21038
  airflow/providers/cncf/kubernetes/python_kubernetes_script.jinja2,sha256=I0EHRGwLHjSiX85e51HBIoddRDnC8TJPFrDBqQq_NJg,1776
  airflow/providers/cncf/kubernetes/python_kubernetes_script.py,sha256=KnTlZSWCZhwvj89fSc2kgIRTaI4iLNKPquHc2wXnluo,3460
  airflow/providers/cncf/kubernetes/secret.py,sha256=0aHyYJOnveutfQKH7riAfz9IPB5hhDYBDYzYEDuXrmU,5317
  airflow/providers/cncf/kubernetes/template_rendering.py,sha256=WSUBhjGSDhjNtA4IFlbYyX50rvYN6UA4dMk0cPqgOjo,3618
- airflow/providers/cncf/kubernetes/version_compat.py,sha256=DjaeLV-sLSz4WqmMThVH2CPp5eS4Q_nRj62kNSXugdU,1659
+ airflow/providers/cncf/kubernetes/version_compat.py,sha256=MpWxT1g5WGhlmooHPsjyFHtjQsFZ8FEIrOQrtRnu8Pw,1671
  airflow/providers/cncf/kubernetes/backcompat/__init__.py,sha256=KXF76f3v1jIFUBNz8kwxVMvm7i4mNo35LbIG9IijBNc,1299
  airflow/providers/cncf/kubernetes/backcompat/backwards_compat_converters.py,sha256=FkRRtIEucp2hYrecGVYVgyPI6-b7hE7X7L17Z3r459Y,4303
  airflow/providers/cncf/kubernetes/cli/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
@@ -25,16 +25,16 @@ airflow/providers/cncf/kubernetes/executors/kubernetes_executor_types.py,sha256=
  airflow/providers/cncf/kubernetes/executors/kubernetes_executor_utils.py,sha256=wNvHSyGkEWFIPzxzinE5DhM2K4JTYDdIMqJxZCkGWNo,31503
  airflow/providers/cncf/kubernetes/executors/local_kubernetes_executor.py,sha256=CWCN4b6Ircs-3tCxJjBsrjl4Q0ABBJIwqlZr7a5lW6k,12243
  airflow/providers/cncf/kubernetes/hooks/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
- airflow/providers/cncf/kubernetes/hooks/kubernetes.py,sha256=6PerH9keut_mNd1wZJgUHjyDFbNPF6Z9ihIciKD6lCE,40290
+ airflow/providers/cncf/kubernetes/hooks/kubernetes.py,sha256=E1lno2SExEcwdv0BY-8WDSDypviH9iiU1WumgOoPFK0,40688
  airflow/providers/cncf/kubernetes/kubernetes_executor_templates/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
  airflow/providers/cncf/kubernetes/kubernetes_executor_templates/basic_template.yaml,sha256=yzJmXN4ZyB4aDwI_GIugpL9-f1YMVy__X-LQSbeU95A,2567
  airflow/providers/cncf/kubernetes/operators/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
  airflow/providers/cncf/kubernetes/operators/custom_object_launcher.py,sha256=8Pysyo_iScGAD_fW5TDk0qeyoUfNgQCZpr8z47mYm4g,15447
- airflow/providers/cncf/kubernetes/operators/job.py,sha256=B4C3CbcJTnhqJQmMAbvWrvQGAU8_gfyOmYbsJ1NvraA,26896
+ airflow/providers/cncf/kubernetes/operators/job.py,sha256=BJSu86ilhMN3RCspDjO0zZt6Z-4HAVCGmcep9Pk7g2E,27142
  airflow/providers/cncf/kubernetes/operators/kueue.py,sha256=iDyw9hYaMWVLtBwjsmSXLsSoWW-uEEvh8stptgKOFVQ,5543
- airflow/providers/cncf/kubernetes/operators/pod.py,sha256=aTep9YyHQX9F5gTqt3WT1uNu1775RYovqdBA7X6CqYo,64084
- airflow/providers/cncf/kubernetes/operators/resource.py,sha256=hm-ZVhqS08CiF1Csmd06KxAr40oelehuxUOwaSh30D0,7695
- airflow/providers/cncf/kubernetes/operators/spark_kubernetes.py,sha256=I_e1Jj4Y_xjapA5MH_sYa1P5ROF10JA5Xf2V4HYg5pQ,15991
+ airflow/providers/cncf/kubernetes/operators/pod.py,sha256=St9VmM1sJHQy1gEjBh2B0UzPSlPZqUBD1A6WBkNGTbs,64553
+ airflow/providers/cncf/kubernetes/operators/resource.py,sha256=ygID_Qxr0W0-575WXKi0yeKPuxFedBR9kP3qnQEFZz0,7489
+ airflow/providers/cncf/kubernetes/operators/spark_kubernetes.py,sha256=xQadv8XPQEq_TLPmU9O-45XMqH6h9C0MZfF09jtdz5k,16668
  airflow/providers/cncf/kubernetes/pod_template_file_examples/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
  airflow/providers/cncf/kubernetes/pod_template_file_examples/dags_in_image_template.yaml,sha256=7JdppZ-XDBpv2Bnde2SthhcME8w3b8xQdPAK1fJGW60,2256
  airflow/providers/cncf/kubernetes/pod_template_file_examples/dags_in_volume_template.yaml,sha256=-Pk_EwKpyWRYZKOnumUxVrDeAfFJ0nr3WZ7JNnvppzg,2442
@@ -52,11 +52,11 @@ airflow/providers/cncf/kubernetes/utils/__init__.py,sha256=ClZN0VPjWySdVwS_ktH7r
  airflow/providers/cncf/kubernetes/utils/container.py,sha256=tuhWyMZrqCGDUT4kzwjhEgJrr0JvD9lMXbFeuMDoh-4,4813
  airflow/providers/cncf/kubernetes/utils/delete_from.py,sha256=poObZSoEJwQyaYWilEURs8f4CDY2sn_pfwS31Lf579A,5195
  airflow/providers/cncf/kubernetes/utils/k8s_resource_iterator.py,sha256=pl-G-2WhZVbewKkwmL9AxPo1hAQWHHEPK43b-ruF4-w,1937
- airflow/providers/cncf/kubernetes/utils/pod_manager.py,sha256=hgW7GJwic5Jwz07i4ABHgwnu36zwuHogpBkgiMSJVuM,45208
+ airflow/providers/cncf/kubernetes/utils/pod_manager.py,sha256=zNgd5vzo0BziOmtPDrxdRxMB9nRg_cAZo9KDHINFHLE,43775
  airflow/providers/cncf/kubernetes/utils/xcom_sidecar.py,sha256=k6bdmVJ21OrAwGmWwledRrAmaty9ZrmbuM-IbaI4mqo,2519
- apache_airflow_providers_cncf_kubernetes-10.10.0rc1.dist-info/entry_points.txt,sha256=ByD3QJJyP9CfmTYtpNI1953akD38RUDgpGXLaq9vpOw,111
- apache_airflow_providers_cncf_kubernetes-10.10.0rc1.dist-info/licenses/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
- apache_airflow_providers_cncf_kubernetes-10.10.0rc1.dist-info/licenses/NOTICE,sha256=E3-_E02gwwSEFzeeWPKmnIjOoos3hW28CLISV6sYrbQ,168
- apache_airflow_providers_cncf_kubernetes-10.10.0rc1.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
- apache_airflow_providers_cncf_kubernetes-10.10.0rc1.dist-info/METADATA,sha256=OaMSPebQ_k2PKhr_355YJAIpjwLGf8ogsg0UFYadWyw,5753
- apache_airflow_providers_cncf_kubernetes-10.10.0rc1.dist-info/RECORD,,
+ apache_airflow_providers_cncf_kubernetes-10.11.0rc2.dist-info/entry_points.txt,sha256=ByD3QJJyP9CfmTYtpNI1953akD38RUDgpGXLaq9vpOw,111
+ apache_airflow_providers_cncf_kubernetes-10.11.0rc2.dist-info/licenses/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
+ apache_airflow_providers_cncf_kubernetes-10.11.0rc2.dist-info/licenses/NOTICE,sha256=E3-_E02gwwSEFzeeWPKmnIjOoos3hW28CLISV6sYrbQ,168
+ apache_airflow_providers_cncf_kubernetes-10.11.0rc2.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
+ apache_airflow_providers_cncf_kubernetes-10.11.0rc2.dist-info/METADATA,sha256=8Du9XjQaoSNbFYCApNMte_rGFfsrcdtH4P3PlmhpQmo,5755
+ apache_airflow_providers_cncf_kubernetes-10.11.0rc2.dist-info/RECORD,,