apache-airflow-providers-cncf-kubernetes 10.11.0rc2__py3-none-any.whl → 10.12.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (27)
  1. airflow/providers/cncf/kubernetes/__init__.py +1 -1
  2. airflow/providers/cncf/kubernetes/backcompat/backwards_compat_converters.py +1 -1
  3. airflow/providers/cncf/kubernetes/callbacks.py +1 -1
  4. airflow/providers/cncf/kubernetes/decorators/kubernetes.py +8 -3
  5. airflow/providers/cncf/kubernetes/decorators/kubernetes_cmd.py +6 -3
  6. airflow/providers/cncf/kubernetes/exceptions.py +6 -0
  7. airflow/providers/cncf/kubernetes/executors/kubernetes_executor.py +1 -2
  8. airflow/providers/cncf/kubernetes/executors/kubernetes_executor_utils.py +1 -1
  9. airflow/providers/cncf/kubernetes/hooks/kubernetes.py +96 -9
  10. airflow/providers/cncf/kubernetes/kubernetes_helper_functions.py +5 -5
  11. airflow/providers/cncf/kubernetes/operators/custom_object_launcher.py +1 -1
  12. airflow/providers/cncf/kubernetes/operators/job.py +4 -4
  13. airflow/providers/cncf/kubernetes/operators/kueue.py +1 -1
  14. airflow/providers/cncf/kubernetes/operators/pod.py +63 -24
  15. airflow/providers/cncf/kubernetes/operators/resource.py +1 -1
  16. airflow/providers/cncf/kubernetes/operators/spark_kubernetes.py +2 -6
  17. airflow/providers/cncf/kubernetes/resource_convert/env_variable.py +1 -1
  18. airflow/providers/cncf/kubernetes/sensors/spark_kubernetes.py +2 -3
  19. airflow/providers/cncf/kubernetes/template_rendering.py +1 -1
  20. airflow/providers/cncf/kubernetes/triggers/pod.py +23 -8
  21. airflow/providers/cncf/kubernetes/utils/pod_manager.py +84 -48
  22. {apache_airflow_providers_cncf_kubernetes-10.11.0rc2.dist-info → apache_airflow_providers_cncf_kubernetes-10.12.0.dist-info}/METADATA +11 -9
  23. {apache_airflow_providers_cncf_kubernetes-10.11.0rc2.dist-info → apache_airflow_providers_cncf_kubernetes-10.12.0.dist-info}/RECORD +27 -27
  24. {apache_airflow_providers_cncf_kubernetes-10.11.0rc2.dist-info → apache_airflow_providers_cncf_kubernetes-10.12.0.dist-info}/WHEEL +0 -0
  25. {apache_airflow_providers_cncf_kubernetes-10.11.0rc2.dist-info → apache_airflow_providers_cncf_kubernetes-10.12.0.dist-info}/entry_points.txt +0 -0
  26. {apache_airflow_providers_cncf_kubernetes-10.11.0rc2.dist-info → apache_airflow_providers_cncf_kubernetes-10.12.0.dist-info}/licenses/LICENSE +0 -0
  27. {apache_airflow_providers_cncf_kubernetes-10.11.0rc2.dist-info → apache_airflow_providers_cncf_kubernetes-10.12.0.dist-info}/licenses/NOTICE +0 -0
--- a/airflow/providers/cncf/kubernetes/__init__.py
+++ b/airflow/providers/cncf/kubernetes/__init__.py
@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version
 
 __all__ = ["__version__"]
 
-__version__ = "10.11.0"
+__version__ = "10.12.0"
 
 if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
     "2.11.0"
--- a/airflow/providers/cncf/kubernetes/backcompat/backwards_compat_converters.py
+++ b/airflow/providers/cncf/kubernetes/backcompat/backwards_compat_converters.py
@@ -20,7 +20,7 @@ from __future__ import annotations
 
 from kubernetes.client import ApiClient, models as k8s
 
-from airflow.exceptions import AirflowException
+from airflow.providers.common.compat.sdk import AirflowException
 
 
 def _convert_kube_model_object(obj, new_class):
--- a/airflow/providers/cncf/kubernetes/callbacks.py
+++ b/airflow/providers/cncf/kubernetes/callbacks.py
@@ -24,7 +24,7 @@ import kubernetes_asyncio.client as async_k8s
 
 if TYPE_CHECKING:
     from airflow.providers.cncf.kubernetes.operators.pod import KubernetesPodOperator
-    from airflow.utils.context import Context
+    from airflow.sdk import Context
 
 client_type: TypeAlias = k8s.CoreV1Api | async_k8s.CoreV1Api
 
--- a/airflow/providers/cncf/kubernetes/decorators/kubernetes.py
+++ b/airflow/providers/cncf/kubernetes/decorators/kubernetes.py
@@ -38,7 +38,7 @@ from airflow.providers.common.compat.sdk import (
 )
 
 if TYPE_CHECKING:
-    from airflow.utils.context import Context
+    from airflow.sdk import Context
 
 _PYTHON_SCRIPT_ENV = "__PYTHON_SCRIPT"
 _PYTHON_INPUT_ENV = "__PYTHON_INPUT"
@@ -87,7 +87,13 @@ class _KubernetesDecoratedOperator(DecoratedOperator, KubernetesPodOperator):
     def _generate_cmds(self) -> list[str]:
         script_filename = "/tmp/script.py"
         input_filename = "/tmp/script.in"
-        output_filename = "/airflow/xcom/return.json"
+
+        if getattr(self, "do_xcom_push", False):
+            output_filename = "/airflow/xcom/return.json"
+            make_xcom_dir_cmd = "mkdir -p /airflow/xcom"
+        else:
+            output_filename = "/dev/null"
+            make_xcom_dir_cmd = ":"  # shell no-op
 
         write_local_script_file_cmd = (
             f"{_generate_decoded_command(quote(_PYTHON_SCRIPT_ENV), quote(script_filename))}"
@@ -95,7 +101,6 @@ class _KubernetesDecoratedOperator(DecoratedOperator, KubernetesPodOperator):
         write_local_input_file_cmd = (
             f"{_generate_decoded_command(quote(_PYTHON_INPUT_ENV), quote(input_filename))}"
         )
-        make_xcom_dir_cmd = "mkdir -p /airflow/xcom"
         exec_python_cmd = f"python {script_filename} {input_filename} {output_filename}"
         return [
             "bash",
--- a/airflow/providers/cncf/kubernetes/decorators/kubernetes_cmd.py
+++ b/airflow/providers/cncf/kubernetes/decorators/kubernetes_cmd.py
@@ -30,13 +30,14 @@ from airflow.providers.common.compat.sdk import (
 from airflow.utils.operator_helpers import determine_kwargs
 
 if TYPE_CHECKING:
-    from airflow.utils.context import Context
+    from airflow.sdk import Context
 
 
 class _KubernetesCmdDecoratedOperator(DecoratedOperator, KubernetesPodOperator):
     custom_operator_name = "@task.kubernetes_cmd"
 
-    template_fields: Sequence[str] = KubernetesPodOperator.template_fields
+    template_fields: Sequence[str] = tuple({"op_args", "op_kwargs", *KubernetesPodOperator.template_fields})
+
     overwrite_rtif_after_execution: bool = True
 
     def __init__(self, *, python_callable: Callable, args_only: bool = False, **kwargs) -> None:
@@ -69,6 +70,8 @@ class _KubernetesCmdDecoratedOperator(DecoratedOperator, KubernetesPodOperator):
         )
 
     def execute(self, context: Context):
+        self.render_template_fields(context)
+
         generated = self._generate_cmds(context)
         if self.args_only:
             self.cmds = []
@@ -76,7 +79,7 @@ class _KubernetesCmdDecoratedOperator(DecoratedOperator, KubernetesPodOperator):
         else:
             self.cmds = generated
             self.arguments = []
-        context["ti"].render_templates()  # type: ignore[attr-defined]
+        self.render_template_fields(context)
         return super().execute(context)
 
     def _generate_cmds(self, context: Context) -> list[str]:
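Note: with op_args/op_kwargs added to template_fields, arguments to the decorated callable are now templated before the command list is generated. A hedged usage sketch (image, names, and the exact decorator parameters are placeholders; they follow KubernetesPodOperator's kwargs):

    from airflow.sdk import dag, task  # Airflow 3 imports; use airflow.decorators on 2.x

    @dag
    def kubernetes_cmd_example():
        # "{{ ds }}" below is rendered before the command list is built.
        @task.kubernetes_cmd(image="busybox", name="echo-pod")
        def build_cmd(date: str) -> list[str]:
            return ["echo", f"run date: {date}"]

        build_cmd("{{ ds }}")

    kubernetes_cmd_example()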
--- a/airflow/providers/cncf/kubernetes/exceptions.py
+++ b/airflow/providers/cncf/kubernetes/exceptions.py
@@ -18,6 +18,12 @@ from __future__ import annotations
 
 from airflow.exceptions import AirflowException
 
+# Todo: we cannot have a backcompat import for AirflowException yet
+# because PodMutationHookException is redefined in airflow.exception
+# Remove this and either import AirflowException from common.sdk or
+# import it from airflow.sdk.exceptions when PodMutationHookException
+# is removed from airflow.exceptions
+
 
 class PodMutationHookException(AirflowException):
     """Raised when exception happens during Pod Mutation Hook execution."""
--- a/airflow/providers/cncf/kubernetes/executors/kubernetes_executor.py
+++ b/airflow/providers/cncf/kubernetes/executors/kubernetes_executor.py
@@ -30,7 +30,6 @@ import logging
 import multiprocessing
 import time
 from collections import Counter, defaultdict
-from collections.abc import Sequence
 from contextlib import suppress
 from datetime import datetime
 from queue import Empty, Queue
@@ -71,7 +70,7 @@ from airflow.providers.cncf.kubernetes.executors.kubernetes_executor_types impor
 )
 from airflow.providers.cncf.kubernetes.kube_config import KubeConfig
 from airflow.providers.cncf.kubernetes.kubernetes_helper_functions import annotations_to_key
-from airflow.stats import Stats
+from airflow.providers.common.compat.sdk import Stats
 from airflow.utils.log.logging_mixin import remove_escape_codes
 from airflow.utils.session import NEW_SESSION, provide_session
 from airflow.utils.state import TaskInstanceState
--- a/airflow/providers/cncf/kubernetes/executors/kubernetes_executor_utils.py
+++ b/airflow/providers/cncf/kubernetes/executors/kubernetes_executor_utils.py
@@ -27,7 +27,6 @@ from kubernetes import client, watch
 from kubernetes.client.rest import ApiException
 from urllib3.exceptions import ReadTimeoutError
 
-from airflow.exceptions import AirflowException
 from airflow.providers.cncf.kubernetes.backcompat import get_logical_date_key
 from airflow.providers.cncf.kubernetes.executors.kubernetes_executor_types import (
     ADOPTED,
@@ -46,6 +45,7 @@ from airflow.providers.cncf.kubernetes.kubernetes_helper_functions import (
     create_unique_id,
 )
 from airflow.providers.cncf.kubernetes.pod_generator import PodGenerator, workload_to_command_args
+from airflow.providers.common.compat.sdk import AirflowException
 from airflow.utils.log.logging_mixin import LoggingMixin
 from airflow.utils.singleton import Singleton
 from airflow.utils.state import TaskInstanceState
--- a/airflow/providers/cncf/kubernetes/hooks/kubernetes.py
+++ b/airflow/providers/cncf/kubernetes/hooks/kubernetes.py
@@ -20,7 +20,6 @@ import asyncio
 import contextlib
 import json
 import tempfile
-from collections.abc import Generator
 from functools import cached_property
 from time import sleep
 from typing import TYPE_CHECKING, Any, Protocol
@@ -31,10 +30,9 @@ from asgiref.sync import sync_to_async
 from kubernetes import client, config, utils, watch
 from kubernetes.client.models import V1Deployment
 from kubernetes.config import ConfigException
-from kubernetes_asyncio import client as async_client, config as async_config
+from kubernetes_asyncio import client as async_client, config as async_config, watch as async_watch
 from urllib3.exceptions import HTTPError
 
-from airflow.exceptions import AirflowException, AirflowNotFoundException
 from airflow.models import Connection
 from airflow.providers.cncf.kubernetes.exceptions import KubernetesApiError, KubernetesApiPermissionError
 from airflow.providers.cncf.kubernetes.kube_client import _disable_verify_ssl, _enable_tcp_keepalive
@@ -43,12 +41,14 @@ from airflow.providers.cncf.kubernetes.utils.container import (
     container_is_completed,
     container_is_running,
 )
-from airflow.providers.common.compat.sdk import BaseHook
+from airflow.providers.common.compat.sdk import AirflowException, AirflowNotFoundException, BaseHook
 from airflow.utils import yaml
 
 if TYPE_CHECKING:
+    from collections.abc import AsyncGenerator, Generator
+
     from kubernetes.client import V1JobList
-    from kubernetes.client.models import CoreV1EventList, V1Job, V1Pod
+    from kubernetes.client.models import CoreV1Event, CoreV1EventList, V1Job, V1Pod
 
 LOADING_KUBE_CONFIG_FILE_RESOURCE = "Loading Kubernetes configuration file kube_config from {}..."
 
@@ -778,11 +778,14 @@ def _get_bool(val) -> bool | None:
 class AsyncKubernetesHook(KubernetesHook):
     """Hook to use Kubernetes SDK asynchronously."""
 
-    def __init__(self, config_dict: dict | None = None, *args, **kwargs):
+    def __init__(
+        self, config_dict: dict | None = None, connection_extras: dict | None = None, *args, **kwargs
+    ):
         super().__init__(*args, **kwargs)
 
         self.config_dict = config_dict
-        self._extras: dict | None = None
+        self._extras: dict | None = connection_extras
+        self._event_polling_fallback = False
 
     async def _load_config(self):
         """Return Kubernetes API session for use with requests."""
@@ -954,14 +957,24 @@ class AsyncKubernetesHook(KubernetesHook):
            raise KubernetesApiError from e
 
     @generic_api_retry
-    async def get_pod_events(self, name: str, namespace: str) -> CoreV1EventList:
-        """Get pod's events."""
+    async def get_pod_events(
+        self, name: str, namespace: str, resource_version: str | None = None
+    ) -> CoreV1EventList:
+        """
+        Get pod events.
+
+        :param name: Pod name to get events for
+        :param namespace: Kubernetes namespace
+        :param resource_version: Only return events not older than this resource version
+        """
         async with self.get_conn() as connection:
             try:
                 v1_api = async_client.CoreV1Api(connection)
                 events: CoreV1EventList = await v1_api.list_namespaced_event(
                     field_selector=f"involvedObject.name={name}",
                     namespace=namespace,
+                    resource_version=resource_version,
+                    resource_version_match="NotOlderThan" if resource_version else None,
                 )
                 return events
             except HTTPError as e:
@@ -969,6 +982,80 @@
                     raise KubernetesApiPermissionError("Permission denied (403) from Kubernetes API.") from e
                 raise KubernetesApiError from e
 
+    @generic_api_retry
+    async def watch_pod_events(
+        self,
+        name: str,
+        namespace: str,
+        resource_version: str | None = None,
+        timeout_seconds: int = 30,
+    ) -> AsyncGenerator[CoreV1Event]:
+        """
+        Watch pod events using Kubernetes Watch API.
+
+        :param name: Pod name to watch events for
+        :param namespace: Kubernetes namespace
+        :param resource_version: Only return events not older than this resource version
+        :param timeout_seconds: Timeout in seconds for the watch stream
+        """
+        if self._event_polling_fallback:
+            async for event_polled in self.watch_pod_events_polling_fallback(
+                name, namespace, resource_version, timeout_seconds
+            ):
+                yield event_polled
+
+        try:
+            w = async_watch.Watch()
+            async with self.get_conn() as connection:
+                v1_api = async_client.CoreV1Api(connection)
+
+                async for event_watched in w.stream(
+                    v1_api.list_namespaced_event,
+                    namespace=namespace,
+                    field_selector=f"involvedObject.name={name}",
+                    resource_version=resource_version,
+                    timeout_seconds=timeout_seconds,
+                ):
+                    event: CoreV1Event = event_watched.get("object")
+                    yield event
+
+        except async_client.exceptions.ApiException as e:
+            if hasattr(e, "status") and e.status == 403:
+                self.log.warning(
+                    "Triggerer does not have Kubernetes API permission to 'watch' events: %s Falling back to polling.",
+                    str(e),
+                )
+                self._event_polling_fallback = True
+                async for event_polled in self.watch_pod_events_polling_fallback(
+                    name, namespace, resource_version, timeout_seconds
+                ):
+                    yield event_polled
+
+        finally:
+            w.stop()
+
+    async def watch_pod_events_polling_fallback(
+        self,
+        name: str,
+        namespace: str,
+        resource_version: str | None = None,
+        interval: int = 30,
+    ) -> AsyncGenerator[CoreV1Event]:
+        """
+        Fallback method to poll pod event at regular intervals.
+
+        This is required when the Airflow triggerer does not have permission to watch events.
+
+        :param name: Pod name to watch events for
+        :param namespace: Kubernetes namespace
+        :param resource_version: Only return events not older than this resource version
+        :param interval: Polling interval in seconds
+        """
+        events: CoreV1EventList = await self.get_pod_events(name, namespace, resource_version)
+        for event in events.items:
+            yield event
+        await asyncio.sleep(interval)
+
     @generic_api_retry
     async def get_job_status(self, name: str, namespace: str) -> V1Job:
         """
--- a/airflow/providers/cncf/kubernetes/kubernetes_helper_functions.py
+++ b/airflow/providers/cncf/kubernetes/kubernetes_helper_functions.py
@@ -27,11 +27,12 @@ import tenacity
 from kubernetes.client.rest import ApiException as SyncApiException
 from kubernetes_asyncio.client.exceptions import ApiException as AsyncApiException
 from slugify import slugify
+from sqlalchemy import select
 from urllib3.exceptions import HTTPError
 
 from airflow.configuration import conf
-from airflow.exceptions import AirflowException
 from airflow.providers.cncf.kubernetes.backcompat import get_logical_date_key
+from airflow.providers.common.compat.sdk import AirflowException
 
 if TYPE_CHECKING:
     from airflow.models.taskinstancekey import TaskInstanceKey
@@ -175,15 +176,14 @@ def annotations_to_key(annotations: dict[str, str]) -> TaskInstanceKey:
             raise RuntimeError("Session not configured. Call configure_orm() first.")
         session = Session()
 
-        task_instance_run_id = (
-            session.query(TaskInstance.run_id)
+        task_instance_run_id = session.scalar(
+            select(TaskInstance.run_id)
             .join(TaskInstance.dag_run)
-            .filter(
+            .where(
                 TaskInstance.dag_id == dag_id,
                 TaskInstance.task_id == task_id,
                 getattr(DagRun, logical_date_key) == logical_date,
             )
-            .scalar()
         )
     else:
         task_instance_run_id = annotation_run_id
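Note: the run_id lookup moves from the legacy SQLAlchemy Query API to 2.0-style select(). A self-contained sketch of the equivalent pattern with a stand-in model:

    from sqlalchemy import String, create_engine, select
    from sqlalchemy.orm import DeclarativeBase, Mapped, Session, mapped_column

    class Base(DeclarativeBase):
        pass

    class TI(Base):  # stand-in for TaskInstance, illustrative only
        __tablename__ = "ti"
        id: Mapped[int] = mapped_column(primary_key=True)
        task_id: Mapped[str] = mapped_column(String(50))
        run_id: Mapped[str] = mapped_column(String(50))

    engine = create_engine("sqlite://")
    Base.metadata.create_all(engine)
    with Session(engine) as session:
        session.add(TI(task_id="t1", run_id="manual__2024-01-01"))
        session.commit()
        # 2.0 style: session.scalar(select(...)) replaces session.query(...).scalar()
        run_id = session.scalar(select(TI.run_id).where(TI.task_id == "t1"))
        assert run_id == "manual__2024-01-01"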
--- a/airflow/providers/cncf/kubernetes/operators/custom_object_launcher.py
+++ b/airflow/providers/cncf/kubernetes/operators/custom_object_launcher.py
@@ -28,7 +28,6 @@ import tenacity
 from kubernetes.client import CoreV1Api, CustomObjectsApi, models as k8s
 from kubernetes.client.rest import ApiException
 
-from airflow.exceptions import AirflowException
 from airflow.providers.cncf.kubernetes.resource_convert.configmap import (
     convert_configmap,
     convert_configmap_to_volume,
@@ -39,6 +38,7 @@ from airflow.providers.cncf.kubernetes.resource_convert.secret import (
     convert_secret,
 )
 from airflow.providers.cncf.kubernetes.utils.pod_manager import PodManager
+from airflow.providers.common.compat.sdk import AirflowException
 from airflow.utils.log.logging_mixin import LoggingMixin
 
 
--- a/airflow/providers/cncf/kubernetes/operators/job.py
+++ b/airflow/providers/cncf/kubernetes/operators/job.py
@@ -32,7 +32,7 @@ from kubernetes.client.api_client import ApiClient
 from kubernetes.client.rest import ApiException
 
 from airflow.configuration import conf
-from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
+from airflow.exceptions import AirflowProviderDeprecationWarning
 from airflow.providers.cncf.kubernetes.hooks.kubernetes import KubernetesHook
 from airflow.providers.cncf.kubernetes.kubernetes_helper_functions import (
     POD_NAME_MAX_LENGTH,
@@ -44,16 +44,16 @@ from airflow.providers.cncf.kubernetes.pod_generator import PodGenerator, merge_
 from airflow.providers.cncf.kubernetes.triggers.job import KubernetesJobTrigger
 from airflow.providers.cncf.kubernetes.utils.pod_manager import EMPTY_XCOM_RESULT, PodNotFoundException
 from airflow.providers.cncf.kubernetes.version_compat import AIRFLOW_V_3_1_PLUS
+from airflow.providers.common.compat.sdk import AirflowException
+from airflow.utils import yaml
 
 if AIRFLOW_V_3_1_PLUS:
     from airflow.sdk import BaseOperator
 else:
     from airflow.models import BaseOperator
-    from airflow.utils import yaml
-    from airflow.utils.context import Context
 
 if TYPE_CHECKING:
-    from airflow.utils.context import Context
+    from airflow.sdk import Context
 
 log = logging.getLogger(__name__)
 
--- a/airflow/providers/cncf/kubernetes/operators/kueue.py
+++ b/airflow/providers/cncf/kubernetes/operators/kueue.py
@@ -24,10 +24,10 @@ from functools import cached_property
 
 from kubernetes.utils import FailToCreateError
 
-from airflow.exceptions import AirflowException
 from airflow.providers.cncf.kubernetes.hooks.kubernetes import KubernetesHook
 from airflow.providers.cncf.kubernetes.operators.job import KubernetesJobOperator
 from airflow.providers.cncf.kubernetes.version_compat import AIRFLOW_V_3_1_PLUS
+from airflow.providers.common.compat.sdk import AirflowException
 
 if AIRFLOW_V_3_1_PLUS:
     from airflow.sdk import BaseOperator
--- a/airflow/providers/cncf/kubernetes/operators/pod.py
+++ b/airflow/providers/cncf/kubernetes/operators/pod.py
@@ -81,10 +81,12 @@ from airflow.providers.cncf.kubernetes.version_compat import AIRFLOW_V_3_1_PLUS
 from airflow.providers.common.compat.sdk import XCOM_RETURN_KEY, AirflowSkipException, TaskDeferred
 
 if AIRFLOW_V_3_1_PLUS:
-    from airflow.sdk import BaseOperator
+    from airflow.sdk import BaseHook, BaseOperator
 else:
+    from airflow.hooks.base import BaseHook  # type: ignore[attr-defined, no-redef]
     from airflow.models import BaseOperator
-from airflow.exceptions import AirflowException
+
+from airflow.providers.common.compat.sdk import AirflowException, AirflowNotFoundException
 from airflow.settings import pod_mutation_hook
 from airflow.utils import yaml
 from airflow.utils.helpers import prune_dict, validate_key
@@ -96,12 +98,7 @@ if TYPE_CHECKING:
 
     from airflow.providers.cncf.kubernetes.hooks.kubernetes import PodOperatorHookProtocol
     from airflow.providers.cncf.kubernetes.secret import Secret
-
-    try:
-        from airflow.sdk.definitions.context import Context
-    except ImportError:
-        # TODO: Remove once provider drops support for Airflow 2
-        from airflow.utils.context import Context
+    from airflow.sdk import Context
 
 alphanum_lower = string.ascii_lowercase + string.digits
 
@@ -234,7 +231,8 @@ class KubernetesPodOperator(BaseOperator):
     :param log_pod_spec_on_failure: Log the pod's specification if a failure occurs
     :param on_finish_action: What to do when the pod reaches its final state, or the execution is interrupted.
         If "delete_pod", the pod will be deleted regardless its state; if "delete_succeeded_pod",
-        only succeeded pod will be deleted. You can set to "keep_pod" to keep the pod.
+        only succeeded pod will be deleted. You can set to "keep_pod" to keep the pod. "delete_active_pod" deletes
+        pods that are still active (Pending or Running).
     :param termination_message_policy: The termination message policy of the base container.
         Default value is "File"
     :param active_deadline_seconds: The active_deadline_seconds which translates to active_deadline_seconds
@@ -630,14 +628,26 @@
        try:

            async def _await_pod_start():
-                events_task = self.pod_manager.watch_pod_events(pod, self.startup_check_interval_seconds)
-                pod_start_task = self.pod_manager.await_pod_start(
-                    pod=pod,
-                    schedule_timeout=self.schedule_timeout_seconds,
-                    startup_timeout=self.startup_timeout_seconds,
-                    check_interval=self.startup_check_interval_seconds,
+                # Start event stream in background
+                events_task = asyncio.create_task(
+                    self.pod_manager.watch_pod_events(pod, self.startup_check_interval_seconds)
                 )
-                await asyncio.gather(pod_start_task, events_task)
+
+                # Await pod start completion
+                try:
+                    await self.pod_manager.await_pod_start(
+                        pod=pod,
+                        schedule_timeout=self.schedule_timeout_seconds,
+                        startup_timeout=self.startup_timeout_seconds,
+                        check_interval=self.startup_check_interval_seconds,
+                    )
+                finally:
+                    # Stop watching events
+                    events_task.cancel()
+                    try:
+                        await events_task
+                    except asyncio.CancelledError:
+                        pass
 
            asyncio.run(_await_pod_start())
        except PodLaunchFailedException:
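Note: the gather() call is replaced by an explicitly owned background task, so the event watcher can neither outlive the pod-start wait nor mask its failure. The pattern in isolation, runnable as-is:

    import asyncio

    async def watch_events() -> None:
        # Stand-in for pod_manager.watch_pod_events(): runs until cancelled.
        while True:
            await asyncio.sleep(1)

    async def main() -> None:
        events_task = asyncio.create_task(watch_events())
        try:
            await asyncio.sleep(0.1)  # stand-in for pod_manager.await_pod_start()
        finally:
            # Unlike asyncio.gather(), the watcher is cancelled deterministically,
            # and only its CancelledError is swallowed.
            events_task.cancel()
            try:
                await events_task
            except asyncio.CancelledError:
                pass

    asyncio.run(main())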
@@ -856,6 +866,21 @@
     def invoke_defer_method(self, last_log_time: DateTime | None = None) -> None:
         """Redefine triggers which are being used in child classes."""
         self.convert_config_file_to_dict()
+
+        connection_extras = None
+        if self.kubernetes_conn_id:
+            try:
+                conn = BaseHook.get_connection(self.kubernetes_conn_id)
+            except AirflowNotFoundException:
+                self.log.warning(
+                    "Could not resolve connection extras for deferral: connection `%s` not found. "
+                    "Triggerer will try to resolve it from its own environment.",
+                    self.kubernetes_conn_id,
+                )
+            else:
+                connection_extras = conn.extra_dejson
+                self.log.info("Successfully resolved connection extras for deferral.")
+
         trigger_start_time = datetime.datetime.now(tz=datetime.timezone.utc)
         self.defer(
             trigger=KubernetesPodTrigger(
@@ -863,6 +888,7 @@
                 pod_namespace=self.pod.metadata.namespace,  # type: ignore[union-attr]
                 trigger_start_time=trigger_start_time,
                 kubernetes_conn_id=self.kubernetes_conn_id,
+                connection_extras=connection_extras,
                 cluster_context=self.cluster_context,
                 config_dict=self._config_dict,
                 in_cluster=self.in_cluster,
@@ -937,8 +963,9 @@
             raise
         finally:
             self._clean(event=event, context=context, result=xcom_sidecar_output)
-        if self.do_xcom_push:
-            return xcom_sidecar_output
+
+        if self.do_xcom_push and xcom_sidecar_output:
+            context["ti"].xcom_push(XCOM_RETURN_KEY, xcom_sidecar_output)
 
     def _clean(self, event: dict[str, Any], result: dict | None, context: Context) -> None:
         if self.pod is None:
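Note: the sidecar result is now pushed explicitly instead of being returned from the trigger-completion path. A stand-in sketch of the semantics (FakeTI is hypothetical; the key matches Airflow's default XCom return key):

    XCOM_RETURN_KEY = "return_value"  # Airflow's default XCom key

    class FakeTI:  # stand-in for the task instance in context["ti"]
        def __init__(self) -> None:
            self.store: dict[str, object] = {}

        def xcom_push(self, key: str, value: object) -> None:
            self.store[key] = value

    def finish(do_xcom_push: bool, xcom_sidecar_output, ti: FakeTI) -> None:
        # Mirrors the new completion path: push explicitly rather than relying
        # on the method's return value reaching the XCom machinery.
        if do_xcom_push and xcom_sidecar_output:
            ti.xcom_push(XCOM_RETURN_KEY, xcom_sidecar_output)

    ti = FakeTI()
    finish(True, {"answer": 42}, ti)
    assert ti.store[XCOM_RETURN_KEY] == {"answer": 42}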
@@ -1028,7 +1055,11 @@
         pod_phase = remote_pod.status.phase if hasattr(remote_pod, "status") else None
 
         # if the pod fails or success, but we don't want to delete it
-        if pod_phase != PodPhase.SUCCEEDED or self.on_finish_action == OnFinishAction.KEEP_POD:
+        if (
+            pod_phase != PodPhase.SUCCEEDED
+            or self.on_finish_action == OnFinishAction.KEEP_POD
+            or self.on_finish_action == OnFinishAction.DELETE_ACTIVE_POD
+        ):
             self.patch_already_checked(remote_pod, reraise=False)
 
         failed = (pod_phase != PodPhase.SUCCEEDED and not istio_enabled) or (
@@ -1164,13 +1195,21 @@
     def process_pod_deletion(self, pod: k8s.V1Pod, *, reraise=True) -> bool:
         with _optionally_suppress(reraise=reraise):
             if pod is not None:
-                should_delete_pod = (self.on_finish_action == OnFinishAction.DELETE_POD) or (
-                    self.on_finish_action == OnFinishAction.DELETE_SUCCEEDED_POD
-                    and (
-                        pod.status.phase == PodPhase.SUCCEEDED
-                        or container_is_succeeded(pod, self.base_container_name)
+                should_delete_pod = (
+                    (self.on_finish_action == OnFinishAction.DELETE_POD)
+                    or (
+                        self.on_finish_action == OnFinishAction.DELETE_SUCCEEDED_POD
+                        and (
+                            pod.status.phase == PodPhase.SUCCEEDED
+                            or container_is_succeeded(pod, self.base_container_name)
+                        )
+                    )
+                    or (
+                        self.on_finish_action == OnFinishAction.DELETE_ACTIVE_POD
+                        and (pod.status.phase == PodPhase.RUNNING or pod.status.phase == PodPhase.PENDING)
                     )
                 )
+
                 if should_delete_pod:
                     self.log.info("Deleting pod: %s", pod.metadata.name)
                     self.pod_manager.delete_pod(pod)
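Note: a hedged usage sketch of the new on_finish_action value (task, pod, and image names are placeholders):

    from airflow.providers.cncf.kubernetes.operators.pod import KubernetesPodOperator

    long_running = KubernetesPodOperator(
        task_id="long-running",
        name="long-running-pod",
        image="busybox",
        cmds=["sh", "-c", "sleep 3600"],
        # "delete_active_pod": delete the pod only while it is still Pending or
        # Running (e.g. the task was interrupted); finished pods are kept.
        on_finish_action="delete_active_pod",
    )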
--- a/airflow/providers/cncf/kubernetes/operators/resource.py
+++ b/airflow/providers/cncf/kubernetes/operators/resource.py
@@ -26,12 +26,12 @@ from typing import TYPE_CHECKING
 import yaml
 from kubernetes.utils import create_from_yaml
 
-from airflow.exceptions import AirflowException
 from airflow.providers.cncf.kubernetes.hooks.kubernetes import KubernetesHook
 from airflow.providers.cncf.kubernetes.kubernetes_helper_functions import generic_api_retry
 from airflow.providers.cncf.kubernetes.utils.delete_from import delete_from_yaml
 from airflow.providers.cncf.kubernetes.utils.k8s_resource_iterator import k8s_resource_iterator
 from airflow.providers.cncf.kubernetes.version_compat import AIRFLOW_V_3_1_PLUS
+from airflow.providers.common.compat.sdk import AirflowException
 
 if AIRFLOW_V_3_1_PLUS:
     from airflow.sdk import BaseOperator
--- a/airflow/providers/cncf/kubernetes/operators/spark_kubernetes.py
+++ b/airflow/providers/cncf/kubernetes/operators/spark_kubernetes.py
@@ -23,7 +23,6 @@ from typing import TYPE_CHECKING, Any, cast
 
 from kubernetes.client import CoreV1Api, CustomObjectsApi, models as k8s
 
-from airflow.exceptions import AirflowException
 from airflow.providers.cncf.kubernetes import pod_generator
 from airflow.providers.cncf.kubernetes.hooks.kubernetes import KubernetesHook, _load_body_to_dict
 from airflow.providers.cncf.kubernetes.kubernetes_helper_functions import add_unique_suffix
@@ -31,16 +30,13 @@ from airflow.providers.cncf.kubernetes.operators.custom_object_launcher import C
 from airflow.providers.cncf.kubernetes.operators.pod import KubernetesPodOperator
 from airflow.providers.cncf.kubernetes.pod_generator import MAX_LABEL_LEN, PodGenerator
 from airflow.providers.cncf.kubernetes.utils.pod_manager import PodManager
+from airflow.providers.common.compat.sdk import AirflowException
 from airflow.utils.helpers import prune_dict
 
 if TYPE_CHECKING:
     import jinja2
 
-    try:
-        from airflow.sdk.definitions.context import Context
-    except ImportError:
-        # TODO: Remove once provider drops support for Airflow 2
-        from airflow.utils.context import Context
+    from airflow.sdk import Context
 
 
 class SparkKubernetesOperator(KubernetesPodOperator):
--- a/airflow/providers/cncf/kubernetes/resource_convert/env_variable.py
+++ b/airflow/providers/cncf/kubernetes/resource_convert/env_variable.py
@@ -18,7 +18,7 @@ from __future__ import annotations
 
 from kubernetes.client import models as k8s
 
-from airflow.exceptions import AirflowException
+from airflow.providers.common.compat.sdk import AirflowException
 
 
 def convert_env_vars(env_vars) -> list[k8s.V1EnvVar]:
--- a/airflow/providers/cncf/kubernetes/sensors/spark_kubernetes.py
+++ b/airflow/providers/cncf/kubernetes/sensors/spark_kubernetes.py
@@ -23,12 +23,11 @@ from typing import TYPE_CHECKING
 
 from kubernetes import client
 
-from airflow.exceptions import AirflowException
 from airflow.providers.cncf.kubernetes.hooks.kubernetes import KubernetesHook
-from airflow.providers.common.compat.sdk import BaseSensorOperator
+from airflow.providers.common.compat.sdk import AirflowException, BaseSensorOperator
 
 if TYPE_CHECKING:
-    from airflow.utils.context import Context
+    from airflow.sdk import Context
 
 
 class SparkKubernetesSensor(BaseSensorOperator):
--- a/airflow/providers/cncf/kubernetes/template_rendering.py
+++ b/airflow/providers/cncf/kubernetes/template_rendering.py
@@ -22,10 +22,10 @@ from typing import TYPE_CHECKING
 from jinja2 import TemplateAssertionError, UndefinedError
 from kubernetes.client.api_client import ApiClient
 
-from airflow.exceptions import AirflowException
 from airflow.providers.cncf.kubernetes.kube_config import KubeConfig
 from airflow.providers.cncf.kubernetes.kubernetes_helper_functions import create_unique_id
 from airflow.providers.cncf.kubernetes.pod_generator import PodGenerator, generate_pod_command_args
+from airflow.providers.common.compat.sdk import AirflowException
 from airflow.utils.session import NEW_SESSION, provide_session
 
 if TYPE_CHECKING:
--- a/airflow/providers/cncf/kubernetes/triggers/pod.py
+++ b/airflow/providers/cncf/kubernetes/triggers/pod.py
@@ -88,6 +88,7 @@ class KubernetesPodTrigger(BaseTrigger):
         trigger_start_time: datetime.datetime,
         base_container_name: str,
         kubernetes_conn_id: str | None = None,
+        connection_extras: dict | None = None,
         poll_interval: float = 2,
         cluster_context: str | None = None,
         config_dict: dict | None = None,
@@ -107,6 +108,7 @@
         self.trigger_start_time = trigger_start_time
         self.base_container_name = base_container_name
         self.kubernetes_conn_id = kubernetes_conn_id
+        self.connection_extras = connection_extras
         self.poll_interval = poll_interval
         self.cluster_context = cluster_context
         self.config_dict = config_dict
@@ -130,6 +132,7 @@
                 "pod_namespace": self.pod_namespace,
                 "base_container_name": self.base_container_name,
                 "kubernetes_conn_id": self.kubernetes_conn_id,
+                "connection_extras": self.connection_extras,
                 "poll_interval": self.poll_interval,
                 "cluster_context": self.cluster_context,
                 "config_dict": self.config_dict,
@@ -241,14 +244,25 @@
     async def _wait_for_pod_start(self) -> ContainerState:
         """Loops until pod phase leaves ``PENDING`` If timeout is reached, throws error."""
         pod = await self._get_pod()
-        events_task = self.pod_manager.watch_pod_events(pod, self.startup_check_interval)
-        pod_start_task = self.pod_manager.await_pod_start(
-            pod=pod,
-            schedule_timeout=self.schedule_timeout,
-            startup_timeout=self.startup_timeout,
-            check_interval=self.startup_check_interval,
-        )
-        await asyncio.gather(pod_start_task, events_task)
+        # Start event stream in background
+        events_task = asyncio.create_task(self.pod_manager.watch_pod_events(pod, self.startup_check_interval))
+
+        # Await pod start completion
+        try:
+            await self.pod_manager.await_pod_start(
+                pod=pod,
+                schedule_timeout=self.schedule_timeout,
+                startup_timeout=self.startup_timeout,
+                check_interval=self.startup_check_interval,
+            )
+        finally:
+            # Stop watching events
+            events_task.cancel()
+            try:
+                await events_task
+            except asyncio.CancelledError:
+                pass
+
         return self.define_container_state(await self._get_pod())
 
     async def _wait_for_container_completion(self) -> TriggerEvent:
@@ -313,6 +327,7 @@
             in_cluster=self.in_cluster,
             config_dict=self.config_dict,
             cluster_context=self.cluster_context,
+            connection_extras=self.connection_extras,
         )
 
     @cached_property
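Note: the connection extras ride along in serialize() because a deferred trigger is round-tripped through its serialized kwargs onto the triggerer, which may not be able to re-resolve the connection itself. A minimal sketch of that round trip (MiniTrigger is a hypothetical stand-in, not the real KubernetesPodTrigger):

    class MiniTrigger:
        def __init__(self, kubernetes_conn_id=None, connection_extras=None):
            self.kubernetes_conn_id = kubernetes_conn_id
            self.connection_extras = connection_extras

        def serialize(self):
            return (
                "example.MiniTrigger",
                {
                    "kubernetes_conn_id": self.kubernetes_conn_id,
                    "connection_extras": self.connection_extras,
                },
            )

    _, kwargs = MiniTrigger("kubernetes_default", {"in_cluster": False}).serialize()
    assert MiniTrigger(**kwargs).connection_extras == {"in_cluster": False}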
--- a/airflow/providers/cncf/kubernetes/utils/pod_manager.py
+++ b/airflow/providers/cncf/kubernetes/utils/pod_manager.py
@@ -37,7 +37,6 @@ from pendulum import DateTime
 from pendulum.parsing.exceptions import ParserError
 from urllib3.exceptions import HTTPError, TimeoutError
 
-from airflow.exceptions import AirflowException
 from airflow.providers.cncf.kubernetes.callbacks import ExecutionMode, KubernetesPodOperatorCallback
 from airflow.providers.cncf.kubernetes.kubernetes_helper_functions import (
     KubernetesApiException,
@@ -52,10 +51,12 @@ from airflow.providers.cncf.kubernetes.utils.container import (
     get_container_status,
 )
 from airflow.providers.cncf.kubernetes.utils.xcom_sidecar import PodDefaults
+from airflow.providers.common.compat.sdk import AirflowException
 from airflow.utils.log.logging_mixin import LoggingMixin
 from airflow.utils.timezone import utcnow
 
 if TYPE_CHECKING:
+    from kubernetes.client.models.core_v1_event import CoreV1Event
     from kubernetes.client.models.core_v1_event_list import CoreV1EventList
     from kubernetes.client.models.v1_container_state import V1ContainerState
     from kubernetes.client.models.v1_container_state_waiting import V1ContainerStateWaiting
@@ -94,34 +95,21 @@ def check_exception_is_kubernetes_api_unauthorized(exc: BaseException):
     return isinstance(exc, ApiException) and exc.status and str(exc.status) == "401"
 
 
-async def watch_pod_events(
-    pod_manager: PodManager | AsyncPodManager,
-    pod: V1Pod,
-    check_interval: float = 1,
+def log_pod_event(
+    pod_manager: PodManager | AsyncPodManager, event: CoreV1Event, seen_events: set[str]
 ) -> None:
     """
-    Read pod events and write them to the log.
-
-    This function supports both asynchronous and synchronous pod managers.
+    Log a pod event if not already seen.
 
-    :param pod_manager: The pod manager instance (PodManager or AsyncPodManager).
-    :param pod: The pod object to monitor.
-    :param check_interval: Interval (in seconds) between checks.
+    :param pod_manager: The pod manager instance for logging
+    :param event: Kubernetes event
+    :param seen_events: Set of event UIDs already logged to avoid duplicates
     """
-    num_events = 0
-    is_async = isinstance(pod_manager, AsyncPodManager)
-    while not pod_manager.stop_watching_events:
-        if is_async:
-            events = await pod_manager.read_pod_events(pod)
-        else:
-            events = pod_manager.read_pod_events(pod)
-        for new_event in events.items[num_events:]:
-            involved_object: V1ObjectReference = new_event.involved_object
-            pod_manager.log.info(
-                "The Pod has an Event: %s from %s", new_event.message, involved_object.field_path
-            )
-        num_events = len(events.items)
-        await asyncio.sleep(check_interval)
+    event_uid = event.metadata.uid
+    if event_uid not in seen_events:
+        seen_events.add(event_uid)
+        involved_object: V1ObjectReference = event.involved_object
+        pod_manager.log.info("The Pod has an Event: %s from %s", event.message, involved_object.field_path)
 
 
 async def await_pod_start(
  async def await_pod_start(
@@ -170,33 +158,49 @@ async def await_pod_start(
170
158
  pod_manager.log.info("Waiting %ss to get the POD running...", startup_timeout)
171
159
 
172
160
  if time.time() - start_check_time >= startup_timeout:
161
+ pod_manager.stop_watching_events = True
173
162
  pod_manager.log.info("::endgroup::")
174
163
  raise PodLaunchTimeoutException(
175
164
  f"Pod took too long to start. More than {startup_timeout}s. Check the pod events in kubernetes."
176
165
  )
177
166
  else:
178
167
  if time.time() - start_check_time >= schedule_timeout:
168
+ pod_manager.stop_watching_events = True
179
169
  pod_manager.log.info("::endgroup::")
180
170
  raise PodLaunchTimeoutException(
181
171
  f"Pod took too long to be scheduled on the cluster, giving up. More than {schedule_timeout}s. Check the pod events in kubernetes."
182
172
  )
183
173
 
184
- # Check for general problems to terminate early - ErrImagePull
185
- if pod_status.container_statuses:
186
- for container_status in pod_status.container_statuses:
187
- container_state: V1ContainerState = container_status.state
188
- container_waiting: V1ContainerStateWaiting | None = container_state.waiting
189
- if container_waiting:
190
- if container_waiting.reason in ["ErrImagePull", "InvalidImageName"]:
191
- pod_manager.log.info("::endgroup::")
192
- raise PodLaunchFailedException(
193
- f"Pod docker image cannot be pulled, unable to start: {container_waiting.reason}"
194
- f"\n{container_waiting.message}"
195
- )
174
+ # Check for general problems to terminate early
175
+ error_message = detect_pod_terminate_early_issues(remote_pod)
176
+ if error_message:
177
+ pod_manager.log.info("::endgroup::")
178
+ raise PodLaunchFailedException(error_message)
196
179
 
197
180
  await asyncio.sleep(check_interval)
198
181
 
199
182
 
183
+ def detect_pod_terminate_early_issues(pod: V1Pod) -> str | None:
184
+ """
185
+ Identify issues that justify terminating the pod early.
186
+
187
+ :param pod: The pod object to check.
188
+ :return: An error message if an issue is detected; otherwise, None.
189
+ """
190
+ pod_status = pod.status
191
+ if pod_status.container_statuses:
192
+ for container_status in pod_status.container_statuses:
193
+ container_state: V1ContainerState = container_status.state
194
+ container_waiting: V1ContainerStateWaiting | None = container_state.waiting
195
+ if container_waiting:
196
+ if container_waiting.reason in ["ErrImagePull", "ImagePullBackOff", "InvalidImageName"]:
197
+ return (
198
+ f"Pod docker image cannot be pulled, unable to start: {container_waiting.reason}"
199
+ f"\n{container_waiting.message}"
200
+ )
201
+ return None
202
+
203
+
200
204
  class PodLaunchTimeoutException(AirflowException):
201
205
  """When pod does not leave the ``Pending`` phase within specified timeout."""
 
@@ -354,9 +358,16 @@
         """Launch the pod asynchronously."""
         return self.run_pod_async(pod)
 
-    async def watch_pod_events(self, pod: V1Pod, check_interval: int = 1) -> None:
-        """Read pod events and writes into log."""
-        await watch_pod_events(pod_manager=self, pod=pod, check_interval=check_interval)
+    async def watch_pod_events(self, pod: V1Pod, check_interval: float = 10) -> None:
+        """Read pod events and write into log."""
+        resource_version = None
+        seen_events: set[str] = set()
+        while not self.stop_watching_events:
+            events = self.read_pod_events(pod, resource_version)
+            for event in events.items:
+                log_pod_event(self, event, seen_events)
+                resource_version = event.metadata.resource_version
+            await asyncio.sleep(check_interval)
 
     async def await_pod_start(
         self, pod: V1Pod, schedule_timeout: int = 120, startup_timeout: int = 120, check_interval: int = 1
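Note: the loop above threads the last seen resource_version back into read_pod_events(), turning each poll into a delta read. Since "NotOlderThan" is inclusive, the boundary event can repeat, which is exactly what the seen-UIDs set absorbs. A fake-client sketch, runnable as-is:

    from dataclasses import dataclass

    @dataclass
    class Ev:  # stand-in for CoreV1Event
        resource_version: str
        uid: str
        message: str

    EVENTS = [Ev("1", "a", "Scheduled"), Ev("2", "b", "Pulling"), Ev("3", "c", "Started")]

    def list_events(since: str | None) -> list[Ev]:
        # Fake API call honoring "NotOlderThan": inclusive of `since` itself.
        return [e for e in EVENTS if since is None or e.resource_version >= since]

    seen: set[str] = set()
    logged: list[str] = []
    resource_version = None
    for _ in range(2):  # two polling rounds
        for event in list_events(resource_version):
            if event.uid not in seen:  # the log_pod_event() dedup
                seen.add(event.uid)
                logged.append(event.message)
            resource_version = event.resource_version

    assert logged == ["Scheduled", "Pulling", "Started"]  # boundary repeat absorbed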
@@ -693,6 +704,9 @@
                 break
             if istio_enabled and container_is_completed(remote_pod, container_name):
                 break
+            # abort waiting if defined issues are detected
+            if detect_pod_terminate_early_issues(remote_pod):
+                break
             self.log.info("Pod %s has phase %s", pod.metadata.name, remote_pod.status.phase)
             time.sleep(2)
         return remote_pod
@@ -772,11 +786,20 @@
         ]
 
     @generic_api_retry
-    def read_pod_events(self, pod: V1Pod) -> CoreV1EventList:
-        """Read events from the POD."""
+    def read_pod_events(self, pod: V1Pod, resource_version: str | None = None) -> CoreV1EventList:
+        """
+        Read events from the POD with optimization parameters to reduce API load.
+
+        :param pod: The pod to get events for
+        :param resource_version: Only return events newer than this resource version
+        :param limit: Maximum number of events to return
+        """
         try:
             return self._client.list_namespaced_event(
-                namespace=pod.metadata.namespace, field_selector=f"involvedObject.name={pod.metadata.name}"
+                namespace=pod.metadata.namespace,
+                field_selector=f"involvedObject.name={pod.metadata.name}",
+                resource_version=resource_version,
+                resource_version_match="NotOlderThan" if resource_version else None,
             )
         except HTTPError as e:
             raise KubernetesApiException(f"There was an error reading the kubernetes API: {e}")
@@ -926,6 +949,7 @@ class OnFinishAction(str, enum.Enum):
 
     KEEP_POD = "keep_pod"
     DELETE_POD = "delete_pod"
+    DELETE_ACTIVE_POD = "delete_active_pod"
     DELETE_SUCCEEDED_POD = "delete_succeeded_pod"
 
 
@@ -978,16 +1002,28 @@ class AsyncPodManager(LoggingMixin):
             pod.metadata.namespace,
         )
 
-    async def read_pod_events(self, pod: V1Pod) -> CoreV1EventList:
+    async def read_pod_events(self, pod: V1Pod, resource_version: str | None = None) -> CoreV1EventList:
         """Get pod's events."""
         return await self._hook.get_pod_events(
             pod.metadata.name,
             pod.metadata.namespace,
+            resource_version=resource_version,
         )
 
-    async def watch_pod_events(self, pod: V1Pod, check_interval: float = 1) -> None:
-        """Read pod events and writes into log."""
-        await watch_pod_events(pod_manager=self, pod=pod, check_interval=check_interval)
+    async def watch_pod_events(self, pod: V1Pod, startup_check_interval: float = 30) -> None:
+        """Watch pod events and write to log."""
+        seen_events: set[str] = set()
+        resource_version = None
+        while not self.stop_watching_events:
+            async for event in self._hook.watch_pod_events(
+                name=pod.metadata.name,
+                namespace=pod.metadata.namespace,
+                resource_version=resource_version,
+                timeout_seconds=startup_check_interval,
+            ):
+                if event:
+                    log_pod_event(self, event, seen_events)
+                    resource_version = event.metadata.resource_version
 
     async def await_pod_start(
         self, pod: V1Pod, schedule_timeout: int = 120, startup_timeout: int = 120, check_interval: float = 1
@@ -1059,4 +1095,4 @@
             print(message_to_log)
         else:
             self.log.info("[%s] %s", container_name, message_to_log)
-        return now  # Return the current time as the last log time to ensure logs from the current second are read in the next fetch.
+        return now  # Return the current time as the last log time to ensure logs from the current second are read in the next fetch.
--- a/apache_airflow_providers_cncf_kubernetes-10.11.0rc2.dist-info/METADATA
+++ b/apache_airflow_providers_cncf_kubernetes-10.12.0.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: apache-airflow-providers-cncf-kubernetes
-Version: 10.11.0rc2
+Version: 10.12.0
 Summary: Provider package apache-airflow-providers-cncf-kubernetes for Apache Airflow
 Keywords: airflow-provider,cncf.kubernetes,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -23,15 +23,16 @@ Classifier: Topic :: System :: Monitoring
 License-File: LICENSE
 License-File: NOTICE
 Requires-Dist: aiofiles>=23.2.0
-Requires-Dist: apache-airflow>=2.11.0rc1
-Requires-Dist: apache-airflow-providers-common-compat>=1.10.0rc1
+Requires-Dist: apache-airflow>=2.11.0
+Requires-Dist: apache-airflow-providers-common-compat>=1.10.1
 Requires-Dist: asgiref>=3.5.2
 Requires-Dist: cryptography>=41.0.0,<46.0.0
 Requires-Dist: kubernetes>=32.0.0,<35.0.0
+Requires-Dist: urllib3>=2.1.0,!=2.6.0
 Requires-Dist: kubernetes_asyncio>=32.0.0,<35.0.0
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.staged.apache.org/docs/apache-airflow-providers-cncf-kubernetes/10.11.0/changelog.html
-Project-URL: Documentation, https://airflow.staged.apache.org/docs/apache-airflow-providers-cncf-kubernetes/10.11.0
+Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/10.12.0/changelog.html
+Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/10.12.0
 Project-URL: Mastodon, https://fosstodon.org/@airflow
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
@@ -62,7 +63,7 @@ Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/
 
 Package ``apache-airflow-providers-cncf-kubernetes``
 
-Release: ``10.11.0``
+Release: ``10.12.0``
 
 
 `Kubernetes <https://kubernetes.io/>`__
@@ -75,7 +76,7 @@ This is a provider package for ``cncf.kubernetes`` provider. All classes for thi
 are in ``airflow.providers.cncf.kubernetes`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/10.11.0/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/10.12.0/>`_.
 
 Installation
 ------------
@@ -94,10 +95,11 @@ PIP package Version required
 ========================================== ====================
 ``aiofiles``                               ``>=23.2.0``
 ``apache-airflow``                         ``>=2.11.0``
-``apache-airflow-providers-common-compat`` ``>=1.10.0``
+``apache-airflow-providers-common-compat`` ``>=1.10.1``
 ``asgiref``                                ``>=3.5.2``
 ``cryptography``                           ``>=41.0.0,<46.0.0``
 ``kubernetes``                             ``>=32.0.0,<35.0.0``
+``urllib3``                                ``>=2.1.0,!=2.6.0``
 ``kubernetes_asyncio``                     ``>=32.0.0,<35.0.0``
 ========================================== ====================
 
@@ -121,5 +123,5 @@ Dependent package
 ================================================================================================================== =================
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/10.11.0/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/10.12.0/changelog.html>`_.
 
--- a/apache_airflow_providers_cncf_kubernetes-10.11.0rc2.dist-info/RECORD
+++ b/apache_airflow_providers_cncf_kubernetes-10.12.0.dist-info/RECORD
@@ -1,62 +1,62 @@
-airflow/providers/cncf/kubernetes/__init__.py,sha256=zMf0rIC0OSd2EGyIIrmU9prmt1N4o7PNXEQI5J9DQr8,1506
-airflow/providers/cncf/kubernetes/callbacks.py,sha256=1nCLXFJKtr5FM9ApB8Drw5VAGSC3TDFsPSTMtRnAR3Q,6085
-airflow/providers/cncf/kubernetes/exceptions.py,sha256=-H_htmfpVE158nLU4V3KRjG3k1PcuiYUVMjZjMRp0GA,1394
+airflow/providers/cncf/kubernetes/__init__.py,sha256=Rx7dfjv42ZGDeA3EM0NldCpXc9LD-SRghw6MBL_W2EA,1506
+airflow/providers/cncf/kubernetes/callbacks.py,sha256=mHy1d44F4VseALePmgjniN0mrcUf7PCv5jgty43r9Zg,6075
+airflow/providers/cncf/kubernetes/exceptions.py,sha256=iRrXBxaLPqYwUBt9zbadYgRbEDhGTo6I2mhLOa9F3DI,1707
 airflow/providers/cncf/kubernetes/get_provider_info.py,sha256=7fYqFWd1K0j8LgVkB_HYambOGPTrBDuaVXenLUPtF8g,16600
 airflow/providers/cncf/kubernetes/k8s_model.py,sha256=xmdFhX29DjegoZ-cq8-KDL9soVYXf4OpU6fAGr3cPTU,2101
 airflow/providers/cncf/kubernetes/kube_client.py,sha256=AaTY2UhhKVa-qrhMvpiQjdUJhrQyndwQ_5PoRmWJy3k,5714
 airflow/providers/cncf/kubernetes/kube_config.py,sha256=UsxzPjsonzy5a6e0P8XjenT-ncmX4R6KB1EqDfWpLnM,6191
-airflow/providers/cncf/kubernetes/kubernetes_helper_functions.py,sha256=HZ4ARkQ9NsN3hEDHA0uheiNRsOgJks-DmftpIqVhcic,7185
+airflow/providers/cncf/kubernetes/kubernetes_helper_functions.py,sha256=c9MFEVAhhjk8BcPwfIy4QdUioNT9Rqmen26A6MM7yRU,7216
 airflow/providers/cncf/kubernetes/pod_generator.py,sha256=0VEcAtT2SzAFwSDsQWe2QdrY2mDV8s4hBw0qLcmIMGw,21038
 airflow/providers/cncf/kubernetes/python_kubernetes_script.jinja2,sha256=I0EHRGwLHjSiX85e51HBIoddRDnC8TJPFrDBqQq_NJg,1776
 airflow/providers/cncf/kubernetes/python_kubernetes_script.py,sha256=KnTlZSWCZhwvj89fSc2kgIRTaI4iLNKPquHc2wXnluo,3460
 airflow/providers/cncf/kubernetes/secret.py,sha256=0aHyYJOnveutfQKH7riAfz9IPB5hhDYBDYzYEDuXrmU,5317
-airflow/providers/cncf/kubernetes/template_rendering.py,sha256=WSUBhjGSDhjNtA4IFlbYyX50rvYN6UA4dMk0cPqgOjo,3618
+airflow/providers/cncf/kubernetes/template_rendering.py,sha256=6q3k2bSJN06QtnbufGn8Aao3MCdh48xOOxPD6wp7hCE,3635
 airflow/providers/cncf/kubernetes/version_compat.py,sha256=MpWxT1g5WGhlmooHPsjyFHtjQsFZ8FEIrOQrtRnu8Pw,1671
 airflow/providers/cncf/kubernetes/backcompat/__init__.py,sha256=KXF76f3v1jIFUBNz8kwxVMvm7i4mNo35LbIG9IijBNc,1299
-airflow/providers/cncf/kubernetes/backcompat/backwards_compat_converters.py,sha256=FkRRtIEucp2hYrecGVYVgyPI6-b7hE7X7L17Z3r459Y,4303
+airflow/providers/cncf/kubernetes/backcompat/backwards_compat_converters.py,sha256=3YOZHuAbFe-w1LM2r4w9xmbtIaIdp6ObehXwvh-7iTk,4320
 airflow/providers/cncf/kubernetes/cli/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/cncf/kubernetes/cli/kubernetes_command.py,sha256=S6CBIaBm2wa-XisPKcn1Axy1fErIvCt9RwPn4gawGXc,8297
 airflow/providers/cncf/kubernetes/decorators/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
-airflow/providers/cncf/kubernetes/decorators/kubernetes.py,sha256=IHgVEUabKmrP_MdnYN0FKIdpLMaaSqzdqLVE1nSSI_o,6203
-airflow/providers/cncf/kubernetes/decorators/kubernetes_cmd.py,sha256=XZqfsVUhHBP1rPuT20uueoyv-Pr7i08_chmQHDtHHXs,4677
+airflow/providers/cncf/kubernetes/decorators/kubernetes.py,sha256=1qUiSHseMS31xU5jqRc2dJFq1Kor0yEMx1KKEULHWR4,6358
+airflow/providers/cncf/kubernetes/decorators/kubernetes_cmd.py,sha256=pdH2TGCYVywY0qPTosq7EoGE0oKd03q9OKka4qSsDI4,4722
 airflow/providers/cncf/kubernetes/executors/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
-airflow/providers/cncf/kubernetes/executors/kubernetes_executor.py,sha256=Kmn8Go_yJXkS7MPscubDDGe2q_sa9LL7NoYmH2y3OKA,35072
+airflow/providers/cncf/kubernetes/executors/kubernetes_executor.py,sha256=QLSCczfdXSbLp0c8W52UmEqG4Ig2jaDYUS1t5_RAP18,35057
 airflow/providers/cncf/kubernetes/executors/kubernetes_executor_types.py,sha256=F0IlLbC6qKMVNZwqnbgUPxwFsZdcRhot2kwBhzc9gSM,2698
-airflow/providers/cncf/kubernetes/executors/kubernetes_executor_utils.py,sha256=wNvHSyGkEWFIPzxzinE5DhM2K4JTYDdIMqJxZCkGWNo,31503
+airflow/providers/cncf/kubernetes/executors/kubernetes_executor_utils.py,sha256=HTkVoREcZedB2H0JubkXoP6op3GN9EZ8hRb0404ox1M,31520
 airflow/providers/cncf/kubernetes/executors/local_kubernetes_executor.py,sha256=CWCN4b6Ircs-3tCxJjBsrjl4Q0ABBJIwqlZr7a5lW6k,12243
 airflow/providers/cncf/kubernetes/hooks/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/cncf/kubernetes/hooks/kubernetes.py,sha256=E1lno2SExEcwdv0BY-8WDSDypviH9iiU1WumgOoPFK0,40688
+airflow/providers/cncf/kubernetes/hooks/kubernetes.py,sha256=mJgvTDdsYc71Qzdl-0uARHr8TURuyrSveWzRkGI8-uY,44078
 airflow/providers/cncf/kubernetes/kubernetes_executor_templates/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/cncf/kubernetes/kubernetes_executor_templates/basic_template.yaml,sha256=yzJmXN4ZyB4aDwI_GIugpL9-f1YMVy__X-LQSbeU95A,2567
 airflow/providers/cncf/kubernetes/operators/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
-airflow/providers/cncf/kubernetes/operators/custom_object_launcher.py,sha256=8Pysyo_iScGAD_fW5TDk0qeyoUfNgQCZpr8z47mYm4g,15447
-airflow/providers/cncf/kubernetes/operators/job.py,sha256=BJSu86ilhMN3RCspDjO0zZt6Z-4HAVCGmcep9Pk7g2E,27142
-airflow/providers/cncf/kubernetes/operators/kueue.py,sha256=iDyw9hYaMWVLtBwjsmSXLsSoWW-uEEvh8stptgKOFVQ,5543
-airflow/providers/cncf/kubernetes/operators/pod.py,sha256=St9VmM1sJHQy1gEjBh2B0UzPSlPZqUBD1A6WBkNGTbs,64553
-airflow/providers/cncf/kubernetes/operators/resource.py,sha256=ygID_Qxr0W0-575WXKi0yeKPuxFedBR9kP3qnQEFZz0,7489
-airflow/providers/cncf/kubernetes/operators/spark_kubernetes.py,sha256=xQadv8XPQEq_TLPmU9O-45XMqH6h9C0MZfF09jtdz5k,16668
+airflow/providers/cncf/kubernetes/operators/custom_object_launcher.py,sha256=ha3dC4DjAIs2wtmGC504EvViGA-GGce1iOdeS3y1ol0,15464
+airflow/providers/cncf/kubernetes/operators/job.py,sha256=M1XQ7TPEgWvpzdItq7tkikAsiujEObpHb7ARSgPEO6M,27137
+airflow/providers/cncf/kubernetes/operators/kueue.py,sha256=E0ZqMQzH2dtNOAaA2W5bAuaS-zRz_ohfOElQ1N7NSTA,5560
+airflow/providers/cncf/kubernetes/operators/pod.py,sha256=EvOsTyw6VIH3vky1dYA1jYmp-w-GMj0BW3dorawCGsM,66169
+airflow/providers/cncf/kubernetes/operators/resource.py,sha256=NHU8LtC1B8mq9V6SgIwo1GWZREtmC1-plQb1DALpmCc,7506
+airflow/providers/cncf/kubernetes/operators/spark_kubernetes.py,sha256=9DZnzju7KMXN9SG4JgHEKUAaxKXmR-XyImgN-GnIDnU,16513
 airflow/providers/cncf/kubernetes/pod_template_file_examples/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/cncf/kubernetes/pod_template_file_examples/dags_in_image_template.yaml,sha256=7JdppZ-XDBpv2Bnde2SthhcME8w3b8xQdPAK1fJGW60,2256
 airflow/providers/cncf/kubernetes/pod_template_file_examples/dags_in_volume_template.yaml,sha256=-Pk_EwKpyWRYZKOnumUxVrDeAfFJ0nr3WZ7JNnvppzg,2442
 airflow/providers/cncf/kubernetes/pod_template_file_examples/git_sync_template.yaml,sha256=Pxpa1AiBlf4H8aIc7tUTmH2XNOz84cO0ttMQdlfMJ2c,3020
 airflow/providers/cncf/kubernetes/resource_convert/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/cncf/kubernetes/resource_convert/configmap.py,sha256=gf7DdVeD0yKRNCTVCM-SywJDxwEJTYx3ogykAqbxRoU,1873
-airflow/providers/cncf/kubernetes/resource_convert/env_variable.py,sha256=vBeR__dLHsG619rxHTmY1SSefSTdUhnD4HRKzzQJutM,1462
+airflow/providers/cncf/kubernetes/resource_convert/env_variable.py,sha256=ZmYvs2njnF0uHcp_qoLTgdF7HNyGli5pyodrkm1-bXQ,1479
 airflow/providers/cncf/kubernetes/resource_convert/secret.py,sha256=ElZCMbTWeTKoPeIJ1fTvlqRXM8nGkWj2MrIlVckX6Ag,1494
 airflow/providers/cncf/kubernetes/sensors/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/cncf/kubernetes/sensors/spark_kubernetes.py,sha256=ml5DPNzIUGa6VazENjuq-Hj5G6a04GO6YGo8tNH5ubY,5371
+airflow/providers/cncf/kubernetes/sensors/spark_kubernetes.py,sha256=BGB5HzaSU1w1bDN3QnopiyJ_M-Gz2_QEwcCpOPfTS9g,5331
 airflow/providers/cncf/kubernetes/triggers/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/cncf/kubernetes/triggers/job.py,sha256=_lLP6ZYRV4kdwb7U0w5QFnlY1E9deZ5wtg-nrlfl6-8,7505
-airflow/providers/cncf/kubernetes/triggers/pod.py,sha256=uqfMVdhqmGH1Du3Cyo9ltc0Vn7jYb80xrahLuD54pxE,14810
+airflow/providers/cncf/kubernetes/triggers/pod.py,sha256=G5tUAA1AhA7xoCb03ShE0S7zJND03MOQ3cVNBVDHkyY,15294
 airflow/providers/cncf/kubernetes/utils/__init__.py,sha256=ClZN0VPjWySdVwS_ktH7rrgL9VLAcs3OSJSB9s3zaYw,863
 airflow/providers/cncf/kubernetes/utils/container.py,sha256=tuhWyMZrqCGDUT4kzwjhEgJrr0JvD9lMXbFeuMDoh-4,4813
 airflow/providers/cncf/kubernetes/utils/delete_from.py,sha256=poObZSoEJwQyaYWilEURs8f4CDY2sn_pfwS31Lf579A,5195
 airflow/providers/cncf/kubernetes/utils/k8s_resource_iterator.py,sha256=pl-G-2WhZVbewKkwmL9AxPo1hAQWHHEPK43b-ruF4-w,1937
-airflow/providers/cncf/kubernetes/utils/pod_manager.py,sha256=zNgd5vzo0BziOmtPDrxdRxMB9nRg_cAZo9KDHINFHLE,43775
+airflow/providers/cncf/kubernetes/utils/pod_manager.py,sha256=f6PUeHEiuuI_L4b3KZyN_fCLl6yq6ev1HXhtMxWHEHI,45374
 airflow/providers/cncf/kubernetes/utils/xcom_sidecar.py,sha256=k6bdmVJ21OrAwGmWwledRrAmaty9ZrmbuM-IbaI4mqo,2519
-apache_airflow_providers_cncf_kubernetes-10.11.0rc2.dist-info/entry_points.txt,sha256=ByD3QJJyP9CfmTYtpNI1953akD38RUDgpGXLaq9vpOw,111
-apache_airflow_providers_cncf_kubernetes-10.11.0rc2.dist-info/licenses/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
-apache_airflow_providers_cncf_kubernetes-10.11.0rc2.dist-info/licenses/NOTICE,sha256=E3-_E02gwwSEFzeeWPKmnIjOoos3hW28CLISV6sYrbQ,168
-apache_airflow_providers_cncf_kubernetes-10.11.0rc2.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
-apache_airflow_providers_cncf_kubernetes-10.11.0rc2.dist-info/METADATA,sha256=8Du9XjQaoSNbFYCApNMte_rGFfsrcdtH4P3PlmhpQmo,5755
-apache_airflow_providers_cncf_kubernetes-10.11.0rc2.dist-info/RECORD,,
+apache_airflow_providers_cncf_kubernetes-10.12.0.dist-info/entry_points.txt,sha256=ByD3QJJyP9CfmTYtpNI1953akD38RUDgpGXLaq9vpOw,111
+apache_airflow_providers_cncf_kubernetes-10.12.0.dist-info/licenses/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
+apache_airflow_providers_cncf_kubernetes-10.12.0.dist-info/licenses/NOTICE,sha256=E3-_E02gwwSEFzeeWPKmnIjOoos3hW28CLISV6sYrbQ,168
+apache_airflow_providers_cncf_kubernetes-10.12.0.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
+apache_airflow_providers_cncf_kubernetes-10.12.0.dist-info/METADATA,sha256=R2loT_RB1FLCzAP_pEEF0aAvUmwibJNq3e59oXJ0ngY,5834
+apache_airflow_providers_cncf_kubernetes-10.12.0.dist-info/RECORD,,