apache-airflow-providers-cncf-kubernetes 10.8.2rc1__py3-none-any.whl → 10.9.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version

 __all__ = ["__version__"]

-__version__ = "10.8.2"
+__version__ = "10.9.0"

 if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
     "2.10.0"
@@ -31,7 +31,7 @@ from airflow.providers.cncf.kubernetes.operators.pod import KubernetesPodOperato
 from airflow.providers.cncf.kubernetes.python_kubernetes_script import (
     write_python_script,
 )
-from airflow.providers.cncf.kubernetes.version_compat import (
+from airflow.providers.common.compat.sdk import (
     DecoratedOperator,
     TaskDecorator,
     task_decorator_factory,
@@ -21,7 +21,7 @@ from collections.abc import Callable, Sequence
 from typing import TYPE_CHECKING

 from airflow.providers.cncf.kubernetes.operators.pod import KubernetesPodOperator
-from airflow.providers.cncf.kubernetes.version_compat import (
+from airflow.providers.common.compat.sdk import (
     DecoratedOperator,
     TaskDecorator,
     context_merge,
@@ -23,7 +23,7 @@ import tempfile
 from collections.abc import Generator
 from functools import cached_property
 from time import sleep
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Protocol

 import aiofiles
 import requests
@@ -39,12 +39,11 @@ from airflow.exceptions import AirflowException, AirflowNotFoundException
 from airflow.models import Connection
 from airflow.providers.cncf.kubernetes.kube_client import _disable_verify_ssl, _enable_tcp_keepalive
 from airflow.providers.cncf.kubernetes.kubernetes_helper_functions import should_retry_creation
-from airflow.providers.cncf.kubernetes.utils.pod_manager import (
-    PodOperatorHookProtocol,
+from airflow.providers.cncf.kubernetes.utils.container import (
     container_is_completed,
     container_is_running,
 )
-from airflow.providers.cncf.kubernetes.version_compat import BaseHook
+from airflow.providers.common.compat.sdk import BaseHook
 from airflow.utils import yaml

 if TYPE_CHECKING:
@@ -69,6 +68,36 @@ def _load_body_to_dict(body: str) -> dict:
     return body_dict


+class PodOperatorHookProtocol(Protocol):
+    """
+    Protocol to define methods relied upon by KubernetesPodOperator.
+
+    Subclasses of KubernetesPodOperator, such as GKEStartPodOperator, may use
+    hooks that don't extend KubernetesHook. We use this protocol to document the
+    methods used by KPO and ensure that these methods exist on such other hooks.
+    """
+
+    @property
+    def core_v1_client(self) -> client.CoreV1Api:
+        """Get authenticated client object."""
+
+    @property
+    def is_in_cluster(self) -> bool:
+        """Expose whether the hook is configured with ``load_incluster_config`` or not."""
+
+    def get_pod(self, name: str, namespace: str) -> V1Pod:
+        """Read pod object from kubernetes API."""
+
+    def get_namespace(self) -> str | None:
+        """Return the namespace that defined in the connection."""
+
+    def get_xcom_sidecar_container_image(self) -> str | None:
+        """Return the xcom sidecar image that defined in the connection."""
+
+    def get_xcom_sidecar_container_resources(self) -> str | None:
+        """Return the xcom sidecar resources that defined in the connection."""
+
+
 class KubernetesHook(BaseHook, PodOperatorHookProtocol):
     """
     Creates Kubernetes API connection.
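Because PodOperatorHookProtocol is a typing.Protocol, a hook only needs to match it structurally; it does not have to inherit from KubernetesHook. Below is a minimal sketch of a conforming hook. The class name MyCustomPodHook and its return values are hypothetical placeholders, not part of the provider.

    from __future__ import annotations

    from kubernetes import client
    from kubernetes.client.models.v1_pod import V1Pod


    class MyCustomPodHook:  # hypothetical hook; satisfies PodOperatorHookProtocol by shape alone
        @property
        def core_v1_client(self) -> client.CoreV1Api:
            return client.CoreV1Api()

        @property
        def is_in_cluster(self) -> bool:
            return False

        def get_pod(self, name: str, namespace: str) -> V1Pod:
            # Read the pod through the standard Kubernetes client API.
            return self.core_v1_client.read_namespaced_pod(name=name, namespace=namespace)

        def get_namespace(self) -> str | None:
            return None

        def get_xcom_sidecar_container_image(self) -> str | None:
            return None

        def get_xcom_sidecar_container_resources(self) -> str | None:
            return None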
@@ -692,8 +721,9 @@ class KubernetesHook(BaseHook, PodOperatorHookProtocol):
         try:
             deployment = self.get_deployment_status(name=name, namespace=namespace)
         except Exception as e:
-            self.log.exception("Exception occurred while checking for Deployment status.")
-            raise e
+            msg = "Exception occurred while checking for Deployment status."
+            self.log.exception(msg)
+            raise ValueError(msg) from e

         deployment_status = V1Deployment.to_dict(deployment)["status"]
         replicas = deployment_status["replicas"]
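Note the behavioural change in the hunk above: when fetching the deployment status fails, the hook now raises a ValueError with the original error chained as its cause, instead of re-raising the original exception unchanged. Callers that previously caught the underlying exception type should catch ValueError or inspect __cause__. A minimal, self-contained sketch of the pattern (the failing fetch function is illustrative):

    def flaky_fetch():
        raise TimeoutError("API timeout")  # stand-in for a failing Kubernetes API call

    def check_status(fetch):
        try:
            return fetch()
        except Exception as e:
            msg = "Exception occurred while checking for Deployment status."
            raise ValueError(msg) from e

    try:
        check_status(flaky_fetch)
    except ValueError as err:
        # The original error is still reachable through exception chaining.
        assert isinstance(err.__cause__, TimeoutError)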
@@ -238,7 +238,9 @@ class CustomObjectLauncher(LoggingMixin):

     def get_body(self):
         self.body: dict = SparkJobSpec(**self.template_body["spark"])
-        self.body.metadata = {"name": self.name, "namespace": self.namespace}
+        if not hasattr(self.body, "metadata") or not isinstance(self.body.metadata, dict):
+            self.body.metadata = {}
+        self.body.metadata.update({"name": self.name, "namespace": self.namespace})
         if self.template_body.get("kubernetes"):
             k8s_spec: dict = KubernetesSpec(**self.template_body["kubernetes"])
             self.body.spec["volumes"] = k8s_spec.volumes
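The change above merges into any metadata already present on the Spark job spec instead of overwriting it, so user-supplied keys such as labels or annotations survive and only name and namespace are overlaid. A small stand-alone illustration of that merge semantics, with made-up values:

    # dict.update keeps existing keys and only adds or overrides the ones provided.
    metadata = {"labels": {"team": "data-eng"}, "annotations": {"owner": "analytics"}}
    metadata.update({"name": "my-spark-app", "namespace": "spark-jobs"})
    assert metadata["labels"] == {"team": "data-eng"}   # preserved from the template
    assert metadata["name"] == "my-spark-app"           # set by the launcher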
@@ -69,18 +69,20 @@ from airflow.providers.cncf.kubernetes.kubernetes_helper_functions import (
 from airflow.providers.cncf.kubernetes.pod_generator import PodGenerator
 from airflow.providers.cncf.kubernetes.triggers.pod import KubernetesPodTrigger
 from airflow.providers.cncf.kubernetes.utils import xcom_sidecar
+from airflow.providers.cncf.kubernetes.utils.container import (
+    container_is_succeeded,
+    get_container_termination_message,
+)
 from airflow.providers.cncf.kubernetes.utils.pod_manager import (
     EMPTY_XCOM_RESULT,
     OnFinishAction,
     PodLaunchFailedException,
     PodManager,
     PodNotFoundException,
-    PodOperatorHookProtocol,
     PodPhase,
-    container_is_succeeded,
-    get_container_termination_message,
 )
-from airflow.providers.cncf.kubernetes.version_compat import AIRFLOW_V_3_1_PLUS, XCOM_RETURN_KEY
+from airflow.providers.cncf.kubernetes.version_compat import AIRFLOW_V_3_1_PLUS
+from airflow.providers.common.compat.sdk import XCOM_RETURN_KEY

 if AIRFLOW_V_3_1_PLUS:
     from airflow.sdk import BaseOperator
@@ -95,6 +97,7 @@ if TYPE_CHECKING:
     import jinja2
     from pendulum import DateTime

+    from airflow.providers.cncf.kubernetes.hooks.kubernetes import PodOperatorHookProtocol
     from airflow.providers.cncf.kubernetes.secret import Secret

     try:
@@ -209,8 +212,9 @@ class KubernetesPodOperator(BaseOperator):
     :param priority_class_name: priority class name for the launched Pod
     :param pod_runtime_info_envs: (Optional) A list of environment variables,
         to be set in the container.
-    :param termination_grace_period: Termination grace period if task killed in UI,
-        defaults to kubernetes default
+    :param termination_grace_period: Termination grace period (in seconds) for the pod.
+        This sets the pod's ``terminationGracePeriodSeconds`` and is also used as the grace period
+        when deleting the pod if the task is killed. If not specified, uses the Kubernetes default (30 seconds).
     :param configmaps: (Optional) A list of names of config maps from which it collects ConfigMaps
         to populate the environment variables with. The contents of the target
         ConfigMap's Data field will represent the key-value pairs as environment variables.
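Together with the pod-spec change later in this diff (which passes the value through as terminationGracePeriodSeconds), the clarified parameter above can be set directly on the operator. A hedged usage sketch; task_id, name, image and the grace value are illustrative:

    from airflow.providers.cncf.kubernetes.operators.pod import KubernetesPodOperator

    graceful_task = KubernetesPodOperator(
        task_id="graceful-worker",
        name="graceful-worker",
        image="python:3.12-slim",
        cmds=["python", "-c", "print('hello')"],
        # Applied to the pod spec as terminationGracePeriodSeconds and also used as the
        # deletion grace period if the task is killed (Kubernetes default is 30 seconds).
        termination_grace_period=120,
    )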
@@ -735,20 +739,9 @@ class KubernetesPodOperator(BaseOperator):
             )
         finally:
             pod_to_clean = self.pod or self.pod_request_obj
-            self.cleanup(
-                pod=pod_to_clean,
-                remote_pod=self.remote_pod,
-                xcom_result=result,
-                context=context,
+            self.post_complete_action(
+                pod=pod_to_clean, remote_pod=self.remote_pod, context=context, result=result
             )
-            for callback in self.callbacks:
-                callback.on_pod_cleanup(
-                    pod=pod_to_clean,
-                    client=self.client,
-                    mode=ExecutionMode.SYNC,
-                    context=context,
-                    operator=self,
-                )

         if self.do_xcom_push:
             return result
@@ -819,11 +812,20 @@ class KubernetesPodOperator(BaseOperator):
     def execute_async(self, context: Context) -> None:
         if self.pod_request_obj is None:
             self.pod_request_obj = self.build_pod_request_obj(context)
+        for callback in self.callbacks:
+            callback.on_pod_manifest_created(
+                pod_request=self.pod_request_obj,
+                client=self.client,
+                mode=ExecutionMode.SYNC,
+                context=context,
+                operator=self,
+            )
         if self.pod is None:
             self.pod = self.get_or_create_pod(  # must set `self.pod` for `on_kill`
                 pod_request_obj=self.pod_request_obj,
                 context=context,
             )
+
         if self.callbacks:
             pod = self.find_pod(self.pod.metadata.namespace, context=context)
             for callback in self.callbacks:
@@ -886,6 +888,7 @@ class KubernetesPodOperator(BaseOperator):
         grab the latest logs and defer back to the trigger again.
         """
         self.pod = None
+        xcom_sidecar_output = None
         try:
             pod_name = event["name"]
             pod_namespace = event["namespace"]
@@ -909,20 +912,37 @@ class KubernetesPodOperator(BaseOperator):
             follow = self.logging_interval is None
             last_log_time = event.get("last_log_time")

-            if event["status"] in ("error", "failed", "timeout"):
-                event_message = event.get("message", "No message provided")
-                self.log.error(
-                    "Trigger emitted an %s event, failing the task: %s", event["status"], event_message
-                )
-                # fetch some logs when pod is failed
+            if event["status"] in ("error", "failed", "timeout", "success"):
                 if self.get_logs:
                     self._write_logs(self.pod, follow=follow, since_time=last_log_time)

-                if self.do_xcom_push:
-                    _ = self.extract_xcom(pod=self.pod)
+                for callback in self.callbacks:
+                    callback.on_pod_completion(
+                        pod=self.pod,
+                        client=self.client,
+                        mode=ExecutionMode.SYNC,
+                        context=context,
+                        operator=self,
+                    )
+                for callback in self.callbacks:
+                    callback.on_pod_teardown(
+                        pod=self.pod,
+                        client=self.client,
+                        mode=ExecutionMode.SYNC,
+                        context=context,
+                        operator=self,
+                    )
+
+                xcom_sidecar_output = self.extract_xcom(pod=self.pod) if self.do_xcom_push else None

-                message = event.get("stack_trace", event["message"])
-                raise AirflowException(message)
+                if event["status"] != "success":
+                    self.log.error(
+                        "Trigger emitted an %s event, failing the task: %s", event["status"], event["message"]
+                    )
+                    message = event.get("stack_trace", event["message"])
+                    raise AirflowException(message)
+
+                return xcom_sidecar_output

             if event["status"] == "running":
                 if self.get_logs:
@@ -940,22 +960,12 @@ class KubernetesPodOperator(BaseOperator):
                         self.invoke_defer_method(pod_log_status.last_log_time)
                 else:
                     self.invoke_defer_method()
-
-            elif event["status"] == "success":
-                # fetch some logs when pod is executed successfully
-                if self.get_logs:
-                    self._write_logs(self.pod, follow=follow, since_time=last_log_time)
-
-                if self.do_xcom_push:
-                    xcom_sidecar_output = self.extract_xcom(pod=self.pod)
-                    return xcom_sidecar_output
-                return
         except TaskDeferred:
             raise
         finally:
-            self._clean(event, context)
+            self._clean(event=event, context=context, result=xcom_sidecar_output)

-    def _clean(self, event: dict[str, Any], context: Context) -> None:
+    def _clean(self, event: dict[str, Any], result: dict | None, context: Context) -> None:
         if event["status"] == "running":
             return
         istio_enabled = self.is_istio_enabled(self.pod)
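The two hunks above fold the old separate "success" branch into a single terminal-status path: on resume from the trigger, logs are fetched, the completion and teardown callbacks fire, and the XCom sidecar output is extracted once and either returned or handed to _clean, regardless of outcome. From the DAG author's side nothing changes; the following hedged sketch only shows the deferrable usage this path serves, with illustrative identifiers:

    from airflow.providers.cncf.kubernetes.operators.pod import KubernetesPodOperator

    deferred_pod = KubernetesPodOperator(
        task_id="deferrable-pod",
        name="deferrable-pod",
        image="python:3.12-slim",
        # Write the XCom payload where the sidecar expects it.
        cmds=["bash", "-c", "echo '{\"answer\": 42}' > /airflow/xcom/return.json"],
        deferrable=True,    # resumption goes through trigger_reentry / _clean shown above
        do_xcom_push=True,  # sidecar output is now also extracted before a failure is raised
    )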
@@ -979,6 +989,7 @@ class KubernetesPodOperator(BaseOperator):
             pod=self.pod,
             remote_pod=self.pod,
             context=context,
+            result=result,
         )

     def _write_logs(self, pod: k8s.V1Pod, follow: bool = False, since_time: DateTime | None = None) -> None:
@@ -1008,11 +1019,15 @@ class KubernetesPodOperator(BaseOperator):
                 e if not isinstance(e, ApiException) else e.reason,
             )

-    def post_complete_action(self, *, pod, remote_pod, context: Context, **kwargs) -> None:
+    def post_complete_action(
+        self, *, pod: k8s.V1Pod, remote_pod: k8s.V1Pod, context: Context, result: dict | None, **kwargs
+    ) -> None:
         """Actions that must be done after operator finishes logic of the deferrable_execution."""
         self.cleanup(
             pod=pod,
             remote_pod=remote_pod,
+            xcom_result=result,
+            context=context,
         )
         for callback in self.callbacks:
             callback.on_pod_cleanup(
@@ -1305,6 +1320,7 @@ class KubernetesPodOperator(BaseOperator):
                 priority_class_name=self.priority_class_name,
                 volumes=self.volumes,
                 active_deadline_seconds=self.active_deadline_seconds,
+                termination_grace_period_seconds=self.termination_grace_period,
             ),
         )

@@ -25,12 +25,7 @@ from kubernetes import client

 from airflow.exceptions import AirflowException
 from airflow.providers.cncf.kubernetes.hooks.kubernetes import KubernetesHook
-from airflow.providers.cncf.kubernetes.version_compat import AIRFLOW_V_3_0_PLUS
-
-if AIRFLOW_V_3_0_PLUS:
-    from airflow.sdk import BaseSensorOperator
-else:
-    from airflow.sensors.base import BaseSensorOperator  # type: ignore[no-redef]
+from airflow.providers.common.compat.sdk import BaseSensorOperator

 if TYPE_CHECKING:
     from airflow.utils.context import Context
@@ -0,0 +1,118 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""Helper functions for inspecting and interacting with containers in a Kubernetes Pod."""
+
+from __future__ import annotations
+
+from contextlib import suppress
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from kubernetes.client.models.v1_container_status import V1ContainerStatus
+    from kubernetes.client.models.v1_pod import V1Pod
+
+
+def get_container_status(pod: V1Pod, container_name: str) -> V1ContainerStatus | None:
+    """Retrieve container status."""
+    if pod and pod.status:
+        container_statuses = []
+        if pod.status.container_statuses:
+            container_statuses.extend(pod.status.container_statuses)
+        if pod.status.init_container_statuses:
+            container_statuses.extend(pod.status.init_container_statuses)
+
+    else:
+        container_statuses = None
+
+    if container_statuses:
+        # In general the variable container_statuses can store multiple items matching different containers.
+        # The following generator expression yields all items that have name equal to the container_name.
+        # The function next() here calls the generator to get only the first value. If there's nothing found
+        # then None is returned.
+        return next((x for x in container_statuses if x.name == container_name), None)
+    return None
+
+
+def container_is_running(pod: V1Pod, container_name: str) -> bool:
+    """
+    Examine V1Pod ``pod`` to determine whether ``container_name`` is running.
+
+    If that container is present and running, returns True. Returns False otherwise.
+    """
+    container_status = get_container_status(pod, container_name)
+    if not container_status:
+        return False
+    return container_status.state.running is not None
+
+
+def container_is_completed(pod: V1Pod, container_name: str) -> bool:
+    """
+    Examine V1Pod ``pod`` to determine whether ``container_name`` is completed.
+
+    If that container is present and completed, returns True. Returns False otherwise.
+    """
+    container_status = get_container_status(pod, container_name)
+    if not container_status:
+        return False
+    return container_status.state.terminated is not None
+
+
+def container_is_succeeded(pod: V1Pod, container_name: str) -> bool:
+    """
+    Examine V1Pod ``pod`` to determine whether ``container_name`` is completed and succeeded.
+
+    If that container is present and completed and succeeded, returns True. Returns False otherwise.
+    """
+    container_status = get_container_status(pod, container_name)
+    if not container_status or container_status.state.terminated is None:
+        return False
+    return container_status.state.terminated.exit_code == 0
+
+
+def container_is_wait(pod: V1Pod, container_name: str) -> bool:
+    """
+    Examine V1Pod ``pod`` to determine whether ``container_name`` is waiting.
+
+    If that container is present and waiting, returns True. Returns False otherwise.
+    """
+    container_status = get_container_status(pod, container_name)
+    if not container_status:
+        return False

+    return container_status.state.waiting is not None
+
+
+def container_is_terminated(pod: V1Pod, container_name: str) -> bool:
+    """
+    Examine V1Pod ``pod`` to determine whether ``container_name`` is terminated.
+
+    If that container is present and terminated, returns True. Returns False otherwise.
+    """
+    container_statuses = pod.status.container_statuses if pod and pod.status else None
+    if not container_statuses:
+        return False
+    container_status = next((x for x in container_statuses if x.name == container_name), None)
+    if not container_status:
+        return False
+    return container_status.state.terminated is not None
+
+
+def get_container_termination_message(pod: V1Pod, container_name: str):
+    with suppress(AttributeError, TypeError):
+        container_statuses = pod.status.container_statuses
+        container_status = next((x for x in container_statuses if x.name == container_name), None)
+        return container_status.state.terminated.message if container_status else None
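The new utils/container.py module shown above gathers the container-status helpers that previously lived in pod_manager.py; they are plain functions over a V1Pod, so they can be called directly. A hedged usage sketch follows; the connection id, pod name, namespace, and container name are illustrative:

    from airflow.providers.cncf.kubernetes.hooks.kubernetes import KubernetesHook
    from airflow.providers.cncf.kubernetes.utils.container import (
        container_is_running,
        container_is_succeeded,
        get_container_termination_message,
    )

    hook = KubernetesHook(conn_id="kubernetes_default")
    pod = hook.get_pod(name="my-pod", namespace="default")

    if container_is_running(pod, container_name="base"):
        print("still running")
    elif container_is_succeeded(pod, container_name="base"):
        print("finished OK")
    else:
        print("terminated:", get_container_termination_message(pod, container_name="base"))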
@@ -24,10 +24,10 @@ import json
 import math
 import time
 from collections.abc import Callable, Generator, Iterable
-from contextlib import closing, suppress
+from contextlib import closing
 from dataclasses import dataclass
 from datetime import timedelta
-from typing import TYPE_CHECKING, Literal, Protocol, cast
+from typing import TYPE_CHECKING, Literal, cast

 import pendulum
 import tenacity
@@ -40,6 +40,13 @@ from urllib3.exceptions import HTTPError, TimeoutError

 from airflow.exceptions import AirflowException
 from airflow.providers.cncf.kubernetes.callbacks import ExecutionMode, KubernetesPodOperatorCallback
+from airflow.providers.cncf.kubernetes.utils.container import (
+    container_is_completed,
+    container_is_running,
+    container_is_terminated,
+    container_is_wait,
+    get_container_status,
+)
 from airflow.providers.cncf.kubernetes.utils.xcom_sidecar import PodDefaults
 from airflow.utils.log.logging_mixin import LoggingMixin
 from airflow.utils.timezone import utcnow
@@ -48,7 +55,6 @@ if TYPE_CHECKING:
     from kubernetes.client.models.core_v1_event_list import CoreV1EventList
     from kubernetes.client.models.v1_container_state import V1ContainerState
     from kubernetes.client.models.v1_container_state_waiting import V1ContainerStateWaiting
-    from kubernetes.client.models.v1_container_status import V1ContainerStatus
     from kubernetes.client.models.v1_object_reference import V1ObjectReference
     from kubernetes.client.models.v1_pod import V1Pod
     from kubernetes.client.models.v1_pod_condition import V1PodCondition
@@ -89,131 +95,6 @@ class PodPhase:
     terminal_states = {FAILED, SUCCEEDED}


-class PodOperatorHookProtocol(Protocol):
-    """
-    Protocol to define methods relied upon by KubernetesPodOperator.
-
-    Subclasses of KubernetesPodOperator, such as GKEStartPodOperator, may use
-    hooks that don't extend KubernetesHook. We use this protocol to document the
-    methods used by KPO and ensure that these methods exist on such other hooks.
-    """
-
-    @property
-    def core_v1_client(self) -> client.CoreV1Api:
-        """Get authenticated client object."""
-
-    @property
-    def is_in_cluster(self) -> bool:
-        """Expose whether the hook is configured with ``load_incluster_config`` or not."""
-
-    def get_pod(self, name: str, namespace: str) -> V1Pod:
-        """Read pod object from kubernetes API."""
-
-    def get_namespace(self) -> str | None:
-        """Return the namespace that defined in the connection."""
-
-    def get_xcom_sidecar_container_image(self) -> str | None:
-        """Return the xcom sidecar image that defined in the connection."""
-
-    def get_xcom_sidecar_container_resources(self) -> str | None:
-        """Return the xcom sidecar resources that defined in the connection."""
-
-
-def get_container_status(pod: V1Pod, container_name: str) -> V1ContainerStatus | None:
-    """Retrieve container status."""
-    if pod and pod.status:
-        container_statuses = []
-        if pod.status.container_statuses:
-            container_statuses.extend(pod.status.container_statuses)
-        if pod.status.init_container_statuses:
-            container_statuses.extend(pod.status.init_container_statuses)
-
-    else:
-        container_statuses = None
-
-    if container_statuses:
-        # In general the variable container_statuses can store multiple items matching different containers.
-        # The following generator expression yields all items that have name equal to the container_name.
-        # The function next() here calls the generator to get only the first value. If there's nothing found
-        # then None is returned.
-        return next((x for x in container_statuses if x.name == container_name), None)
-    return None
-
-
-def container_is_running(pod: V1Pod, container_name: str) -> bool:
-    """
-    Examine V1Pod ``pod`` to determine whether ``container_name`` is running.
-
-    If that container is present and running, returns True. Returns False otherwise.
-    """
-    container_status = get_container_status(pod, container_name)
-    if not container_status:
-        return False
-    return container_status.state.running is not None
-
-
-def container_is_completed(pod: V1Pod, container_name: str) -> bool:
-    """
-    Examine V1Pod ``pod`` to determine whether ``container_name`` is completed.
-
-    If that container is present and completed, returns True. Returns False otherwise.
-    """
-    container_status = get_container_status(pod, container_name)
-    if not container_status:
-        return False
-    return container_status.state.terminated is not None
-
-
-def container_is_succeeded(pod: V1Pod, container_name: str) -> bool:
-    """
-    Examine V1Pod ``pod`` to determine whether ``container_name`` is completed and succeeded.
-
-    If that container is present and completed and succeeded, returns True. Returns False otherwise.
-    """
-    if not container_is_completed(pod, container_name):
-        return False
-
-    container_status = get_container_status(pod, container_name)
-    if not container_status:
-        return False
-    return container_status.state.terminated.exit_code == 0
-
-
-def container_is_wait(pod: V1Pod, container_name: str) -> bool:
-    """
-    Examine V1Pod ``pod`` to determine whether ``container_name`` is waiting.
-
-    If that container is present and waiting, returns True. Returns False otherwise.
-    """
-    container_status = get_container_status(pod, container_name)
-    if not container_status:
-        return False
-
-    return container_status.state.waiting is not None
-
-
-def container_is_terminated(pod: V1Pod, container_name: str) -> bool:
-    """
-    Examine V1Pod ``pod`` to determine whether ``container_name`` is terminated.
-
-    If that container is present and terminated, returns True. Returns False otherwise.
-    """
-    container_statuses = pod.status.container_statuses if pod and pod.status else None
-    if not container_statuses:
-        return False
-    container_status = next((x for x in container_statuses if x.name == container_name), None)
-    if not container_status:
-        return False
-    return container_status.state.terminated is not None
-
-
-def get_container_termination_message(pod: V1Pod, container_name: str):
-    with suppress(AttributeError, TypeError):
-        container_statuses = pod.status.container_statuses
-        container_status = next((x for x in container_statuses if x.name == container_name), None)
-        return container_status.state.terminated.message if container_status else None
-
-
 def check_exception_is_kubernetes_api_unauthorized(exc: BaseException):
     return isinstance(exc, ApiException) and exc.status and str(exc.status) == "401"

@@ -35,34 +35,4 @@ def get_base_airflow_version_tuple() -> tuple[int, int, int]:
 AIRFLOW_V_3_0_PLUS = get_base_airflow_version_tuple() >= (3, 0, 0)
 AIRFLOW_V_3_1_PLUS = get_base_airflow_version_tuple() >= (3, 1, 0)

-if AIRFLOW_V_3_1_PLUS:
-    from airflow.models.xcom import XCOM_RETURN_KEY
-    from airflow.sdk import BaseHook
-    from airflow.sdk.definitions.context import context_merge
-else:
-    from airflow.hooks.base import BaseHook  # type: ignore[attr-defined,no-redef]
-    from airflow.utils.context import context_merge  # type: ignore[attr-defined, no-redef]
-    from airflow.utils.xcom import XCOM_RETURN_KEY  # type: ignore[no-redef]
-
-if AIRFLOW_V_3_0_PLUS:
-    from airflow.sdk.bases.decorator import DecoratedOperator, TaskDecorator, task_decorator_factory
-else:
-    from airflow.decorators.base import (  # type: ignore[no-redef]
-        DecoratedOperator,
-        TaskDecorator,
-        task_decorator_factory,
-    )
-
-# BaseOperator and BaseSensorOperator removed from version_compat to avoid circular imports
-# Import them directly in files that need them instead
-
-__all__ = [
-    "AIRFLOW_V_3_0_PLUS",
-    "AIRFLOW_V_3_1_PLUS",
-    "BaseHook",
-    "DecoratedOperator",
-    "TaskDecorator",
-    "task_decorator_factory",
-    "XCOM_RETURN_KEY",
-    "context_merge",
-]
+__all__ = ["AIRFLOW_V_3_0_PLUS", "AIRFLOW_V_3_1_PLUS"]
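After the trim above, version_compat only exports the Airflow version flags; the compatibility symbols it used to re-export now come from the common.compat provider (hence the new apache-airflow-providers-common-compat>=1.8.0 dependency in the metadata below). A migration sketch for code that imported them from the old location, using only the import paths shown elsewhere in this diff:

    # Before (10.8.x): shims re-exported by the provider itself
    # from airflow.providers.cncf.kubernetes.version_compat import BaseHook, XCOM_RETURN_KEY, context_merge

    # After (10.9.0): shims come from apache-airflow-providers-common-compat (>=1.8.0)
    from airflow.providers.common.compat.sdk import (
        BaseHook,
        BaseSensorOperator,
        XCOM_RETURN_KEY,
        context_merge,
    )

    # The version flags are still available from the provider's own module:
    from airflow.providers.cncf.kubernetes.version_compat import AIRFLOW_V_3_0_PLUS, AIRFLOW_V_3_1_PLUS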
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: apache-airflow-providers-cncf-kubernetes
-Version: 10.8.2rc1
+Version: 10.9.0
 Summary: Provider package apache-airflow-providers-cncf-kubernetes for Apache Airflow
 Keywords: airflow-provider,cncf.kubernetes,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -21,14 +21,15 @@ Classifier: Programming Language :: Python :: 3.12
 Classifier: Programming Language :: Python :: 3.13
 Classifier: Topic :: System :: Monitoring
 Requires-Dist: aiofiles>=23.2.0
-Requires-Dist: apache-airflow>=2.10.0rc1
+Requires-Dist: apache-airflow>=2.10.0
+Requires-Dist: apache-airflow-providers-common-compat>=1.8.0
 Requires-Dist: asgiref>=3.5.2
 Requires-Dist: cryptography>=41.0.0
 Requires-Dist: kubernetes>=32.0.0,<34.0.0
 Requires-Dist: kubernetes_asyncio>=32.0.0,<34.0.0
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.staged.apache.org/docs/apache-airflow-providers-cncf-kubernetes/10.8.2/changelog.html
-Project-URL: Documentation, https://airflow.staged.apache.org/docs/apache-airflow-providers-cncf-kubernetes/10.8.2
+Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/10.9.0/changelog.html
+Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/10.9.0
 Project-URL: Mastodon, https://fosstodon.org/@airflow
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
@@ -59,7 +60,7 @@ Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/

 Package ``apache-airflow-providers-cncf-kubernetes``

-Release: ``10.8.2``
+Release: ``10.9.0``


 `Kubernetes <https://kubernetes.io/>`__
@@ -72,7 +73,7 @@ This is a provider package for ``cncf.kubernetes`` provider. All classes for thi
 are in ``airflow.providers.cncf.kubernetes`` python package.

 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/10.8.2/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/10.9.0/>`_.

 Installation
 ------------
@@ -86,17 +87,37 @@ The package supports the following python versions: 3.10,3.11,3.12,3.13

 Requirements
 ------------
-======================  ====================
-PIP package             Version required
-======================  ====================
-``aiofiles``            ``>=23.2.0``
-``apache-airflow``      ``>=2.10.0``
-``asgiref``             ``>=3.5.2``
-``cryptography``        ``>=41.0.0``
-``kubernetes``          ``>=32.0.0,<34.0.0``
-``kubernetes_asyncio``  ``>=32.0.0,<34.0.0``
-======================  ====================
+==========================================  ====================
+PIP package                                 Version required
+==========================================  ====================
+``aiofiles``                                ``>=23.2.0``
+``apache-airflow``                          ``>=2.10.0``
+``apache-airflow-providers-common-compat``  ``>=1.8.0``
+``asgiref``                                 ``>=3.5.2``
+``cryptography``                            ``>=41.0.0``
+``kubernetes``                              ``>=32.0.0,<34.0.0``
+``kubernetes_asyncio``                      ``>=32.0.0,<34.0.0``
+==========================================  ====================
+
+Cross provider package dependencies
+-----------------------------------
+
+Those are dependencies that might be needed in order to use all the features of the package.
+You need to install the specified providers in order to use them.
+
+You can install such cross-provider dependencies when installing from PyPI. For example:
+
+.. code-block:: bash
+
+    pip install apache-airflow-providers-cncf-kubernetes[common.compat]
+
+
+==================================================================================================================  =================
+Dependent package                                                                                                   Extra
+==================================================================================================================  =================
+`apache-airflow-providers-common-compat <https://airflow.apache.org/docs/apache-airflow-providers-common-compat>`_  ``common.compat``
+==================================================================================================================  =================

 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/10.8.2/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/10.9.0/changelog.html>`_.

@@ -1,5 +1,5 @@
 airflow/providers/cncf/kubernetes/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
-airflow/providers/cncf/kubernetes/__init__.py,sha256=5PtcS7Wnp2KxTrjbMZ2hRAAbND1m7QJ7rYnWKYQqNbo,1505
+airflow/providers/cncf/kubernetes/__init__.py,sha256=U5NM-X8nmp4MMSxoVUH9YnQbvF9YX9Pm5iLERLdBedc,1505
 airflow/providers/cncf/kubernetes/callbacks.py,sha256=1nCLXFJKtr5FM9ApB8Drw5VAGSC3TDFsPSTMtRnAR3Q,6085
 airflow/providers/cncf/kubernetes/exceptions.py,sha256=3cNEZTnrltBsqwzHiLfckwYYc_IWY1g4PcRs6zuMWWA,1137
 airflow/providers/cncf/kubernetes/get_provider_info.py,sha256=Git4HycOcHrb4zD9W7ZYsqNDkQSQ4uipSJO_GaPiroE,16041
@@ -12,28 +12,28 @@ airflow/providers/cncf/kubernetes/python_kubernetes_script.jinja2,sha256=I0EHRGw
 airflow/providers/cncf/kubernetes/python_kubernetes_script.py,sha256=KnTlZSWCZhwvj89fSc2kgIRTaI4iLNKPquHc2wXnluo,3460
 airflow/providers/cncf/kubernetes/secret.py,sha256=wj-T9gouqau_X14slAstGmnSxqXJQzdLwUdURzHna0I,5209
 airflow/providers/cncf/kubernetes/template_rendering.py,sha256=WSUBhjGSDhjNtA4IFlbYyX50rvYN6UA4dMk0cPqgOjo,3618
-airflow/providers/cncf/kubernetes/version_compat.py,sha256=1k5uiliHIpi5jqdaGmwxUS_GZBv9N-vyjrfhdlkDbw8,2674
+airflow/providers/cncf/kubernetes/version_compat.py,sha256=DjaeLV-sLSz4WqmMThVH2CPp5eS4Q_nRj62kNSXugdU,1659
 airflow/providers/cncf/kubernetes/backcompat/__init__.py,sha256=KXF76f3v1jIFUBNz8kwxVMvm7i4mNo35LbIG9IijBNc,1299
 airflow/providers/cncf/kubernetes/backcompat/backwards_compat_converters.py,sha256=FkRRtIEucp2hYrecGVYVgyPI6-b7hE7X7L17Z3r459Y,4303
 airflow/providers/cncf/kubernetes/cli/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/cncf/kubernetes/cli/kubernetes_command.py,sha256=S6CBIaBm2wa-XisPKcn1Axy1fErIvCt9RwPn4gawGXc,8297
 airflow/providers/cncf/kubernetes/decorators/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
-airflow/providers/cncf/kubernetes/decorators/kubernetes.py,sha256=d27TR2k-NbpwQSwHd7L265ZZYXiRBlPg7na7RsrH1Ik,6216
-airflow/providers/cncf/kubernetes/decorators/kubernetes_cmd.py,sha256=tkQWnyr5PkldaDwVzsDyP_qYznl01ewtc_kkSpLYKtI,4690
+airflow/providers/cncf/kubernetes/decorators/kubernetes.py,sha256=IHgVEUabKmrP_MdnYN0FKIdpLMaaSqzdqLVE1nSSI_o,6203
+airflow/providers/cncf/kubernetes/decorators/kubernetes_cmd.py,sha256=XZqfsVUhHBP1rPuT20uueoyv-Pr7i08_chmQHDtHHXs,4677
 airflow/providers/cncf/kubernetes/executors/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
 airflow/providers/cncf/kubernetes/executors/kubernetes_executor.py,sha256=sP0mpCL4DHcwy0AkaNXPMmjL1MbhHT7yHVVOtbBRmUo,34323
 airflow/providers/cncf/kubernetes/executors/kubernetes_executor_types.py,sha256=F0IlLbC6qKMVNZwqnbgUPxwFsZdcRhot2kwBhzc9gSM,2698
 airflow/providers/cncf/kubernetes/executors/kubernetes_executor_utils.py,sha256=wNvHSyGkEWFIPzxzinE5DhM2K4JTYDdIMqJxZCkGWNo,31503
 airflow/providers/cncf/kubernetes/executors/local_kubernetes_executor.py,sha256=CWCN4b6Ircs-3tCxJjBsrjl4Q0ABBJIwqlZr7a5lW6k,12243
 airflow/providers/cncf/kubernetes/hooks/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/cncf/kubernetes/hooks/kubernetes.py,sha256=lsqlNxcqNkN_FGrT8aBCuWJlVV9Oo7BFwL9cWyoHZTw,37792
+airflow/providers/cncf/kubernetes/hooks/kubernetes.py,sha256=iZqNKSxObv1BPPggo3aRC3hJ4f5HDSD92Oh636WM-N4,38938
 airflow/providers/cncf/kubernetes/kubernetes_executor_templates/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/cncf/kubernetes/kubernetes_executor_templates/basic_template.yaml,sha256=yzJmXN4ZyB4aDwI_GIugpL9-f1YMVy__X-LQSbeU95A,2567
 airflow/providers/cncf/kubernetes/operators/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
-airflow/providers/cncf/kubernetes/operators/custom_object_launcher.py,sha256=jTVHQt1vp5gELrLNyM-DrZ1ywgmTy3Hh1i6wyl7AGS0,15314
+airflow/providers/cncf/kubernetes/operators/custom_object_launcher.py,sha256=8Pysyo_iScGAD_fW5TDk0qeyoUfNgQCZpr8z47mYm4g,15447
 airflow/providers/cncf/kubernetes/operators/job.py,sha256=B4C3CbcJTnhqJQmMAbvWrvQGAU8_gfyOmYbsJ1NvraA,26896
 airflow/providers/cncf/kubernetes/operators/kueue.py,sha256=iDyw9hYaMWVLtBwjsmSXLsSoWW-uEEvh8stptgKOFVQ,5543
-airflow/providers/cncf/kubernetes/operators/pod.py,sha256=6BSyJNtmkPiLHNMamjDKxWMWSTEYIL0OMqpFtLNr2Do,64090
+airflow/providers/cncf/kubernetes/operators/pod.py,sha256=6_8MJqChyCGlvqm9BlWXML08htChE0GDBOGjcoffO_g,64963
 airflow/providers/cncf/kubernetes/operators/resource.py,sha256=hm-ZVhqS08CiF1Csmd06KxAr40oelehuxUOwaSh30D0,7695
 airflow/providers/cncf/kubernetes/operators/spark_kubernetes.py,sha256=I_e1Jj4Y_xjapA5MH_sYa1P5ROF10JA5Xf2V4HYg5pQ,15991
 airflow/providers/cncf/kubernetes/pod_template_file_examples/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
@@ -45,16 +45,17 @@ airflow/providers/cncf/kubernetes/resource_convert/configmap.py,sha256=gf7DdVeD0
 airflow/providers/cncf/kubernetes/resource_convert/env_variable.py,sha256=vBeR__dLHsG619rxHTmY1SSefSTdUhnD4HRKzzQJutM,1462
 airflow/providers/cncf/kubernetes/resource_convert/secret.py,sha256=ElZCMbTWeTKoPeIJ1fTvlqRXM8nGkWj2MrIlVckX6Ag,1494
 airflow/providers/cncf/kubernetes/sensors/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/cncf/kubernetes/sensors/spark_kubernetes.py,sha256=43fm2s5yL8Jx5r0g4tLMj9UGUTtGQRjzUjkvLsj2A0Y,5543
+airflow/providers/cncf/kubernetes/sensors/spark_kubernetes.py,sha256=ml5DPNzIUGa6VazENjuq-Hj5G6a04GO6YGo8tNH5ubY,5371
 airflow/providers/cncf/kubernetes/triggers/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/cncf/kubernetes/triggers/job.py,sha256=_lLP6ZYRV4kdwb7U0w5QFnlY1E9deZ5wtg-nrlfl6-8,7505
 airflow/providers/cncf/kubernetes/triggers/pod.py,sha256=AVk0-dJN_wjMeZtImMxan4JZ7sSl-allC8ga1o3WhKM,13388
 airflow/providers/cncf/kubernetes/utils/__init__.py,sha256=ClZN0VPjWySdVwS_ktH7rrgL9VLAcs3OSJSB9s3zaYw,863
+airflow/providers/cncf/kubernetes/utils/container.py,sha256=tuhWyMZrqCGDUT4kzwjhEgJrr0JvD9lMXbFeuMDoh-4,4813
 airflow/providers/cncf/kubernetes/utils/delete_from.py,sha256=poObZSoEJwQyaYWilEURs8f4CDY2sn_pfwS31Lf579A,5195
 airflow/providers/cncf/kubernetes/utils/k8s_resource_iterator.py,sha256=pl-G-2WhZVbewKkwmL9AxPo1hAQWHHEPK43b-ruF4-w,1937
-airflow/providers/cncf/kubernetes/utils/pod_manager.py,sha256=p2M-P2x1Vx8L6-V-VjZX79vymSey93AOSEcwWIu-RiY,42768
+airflow/providers/cncf/kubernetes/utils/pod_manager.py,sha256=H6Zpzfzd2bnIp3bOBB9BJGFpYc4wO5FbAG3XfaMdisU,38018
 airflow/providers/cncf/kubernetes/utils/xcom_sidecar.py,sha256=k6bdmVJ21OrAwGmWwledRrAmaty9ZrmbuM-IbaI4mqo,2519
-apache_airflow_providers_cncf_kubernetes-10.8.2rc1.dist-info/entry_points.txt,sha256=ByD3QJJyP9CfmTYtpNI1953akD38RUDgpGXLaq9vpOw,111
-apache_airflow_providers_cncf_kubernetes-10.8.2rc1.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
-apache_airflow_providers_cncf_kubernetes-10.8.2rc1.dist-info/METADATA,sha256=zMpYaADs1C-ptkFeDX-iVo_zfYjcPkLdCTIKskQ1STs,4322
-apache_airflow_providers_cncf_kubernetes-10.8.2rc1.dist-info/RECORD,,
+apache_airflow_providers_cncf_kubernetes-10.9.0.dist-info/entry_points.txt,sha256=ByD3QJJyP9CfmTYtpNI1953akD38RUDgpGXLaq9vpOw,111
+apache_airflow_providers_cncf_kubernetes-10.9.0.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
+apache_airflow_providers_cncf_kubernetes-10.9.0.dist-info/METADATA,sha256=DjK_atHSSIh-unsExH3QmTwxheL8L13p61g-sgbaoOc,5697
+apache_airflow_providers_cncf_kubernetes-10.9.0.dist-info/RECORD,,