apache-airflow-providers-cncf-kubernetes 10.5.0rc1__py3-none-any.whl → 10.6.0rc1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of apache-airflow-providers-cncf-kubernetes might be problematic; see the associated advisory for more details.

@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version
29
29
 
30
30
  __all__ = ["__version__"]
31
31
 
32
- __version__ = "10.5.0"
32
+ __version__ = "10.6.0"
33
33
 
34
34
  if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
35
35
  "2.10.0"
@@ -26,16 +26,17 @@ from kubernetes import client
26
26
  from kubernetes.client.api_client import ApiClient
27
27
  from kubernetes.client.rest import ApiException
28
28
 
29
- from airflow.models import DagRun, TaskInstance
29
+ from airflow.models import DagModel, DagRun, TaskInstance
30
30
  from airflow.providers.cncf.kubernetes import pod_generator
31
31
  from airflow.providers.cncf.kubernetes.executors.kubernetes_executor import KubeConfig
32
32
  from airflow.providers.cncf.kubernetes.kube_client import get_kube_client
33
33
  from airflow.providers.cncf.kubernetes.kubernetes_helper_functions import create_unique_id
34
- from airflow.providers.cncf.kubernetes.pod_generator import PodGenerator
34
+ from airflow.providers.cncf.kubernetes.pod_generator import PodGenerator, generate_pod_command_args
35
35
  from airflow.providers.cncf.kubernetes.version_compat import AIRFLOW_V_3_0_PLUS
36
36
  from airflow.utils import cli as cli_utils, yaml
37
37
  from airflow.utils.cli import get_dag
38
38
  from airflow.utils.providers_configuration_loader import providers_configuration_loaded
39
+ from airflow.utils.types import DagRunType
39
40
 
40
41
 
41
42
  @cli_utils.action_cli
@@ -48,14 +49,28 @@ def generate_pod_yaml(args):
48
49
  else:
49
50
  dag = get_dag(subdir=args.subdir, dag_id=args.dag_id)
50
51
  yaml_output_path = args.output_path
52
+
53
+ dm = DagModel(dag_id=dag.dag_id)
54
+
51
55
  if AIRFLOW_V_3_0_PLUS:
52
56
  dr = DagRun(dag.dag_id, logical_date=logical_date)
57
+ dr.run_id = DagRun.generate_run_id(
58
+ run_type=DagRunType.MANUAL, logical_date=logical_date, run_after=logical_date
59
+ )
60
+ dm.bundle_name = args.bundle_name if args.bundle_name else "default"
61
+ dm.relative_fileloc = dag.relative_fileloc
53
62
  else:
54
63
  dr = DagRun(dag.dag_id, execution_date=logical_date)
64
+ dr.run_id = DagRun.generate_run_id(run_type=DagRunType.MANUAL, execution_date=logical_date)
65
+
55
66
  kube_config = KubeConfig()
67
+
56
68
  for task in dag.tasks:
57
- ti = TaskInstance(task, None)
69
+ ti = TaskInstance(task, run_id=dr.run_id)
58
70
  ti.dag_run = dr
71
+ ti.dag_model = dm
72
+
73
+ command_args = generate_pod_command_args(ti)
59
74
  pod = PodGenerator.construct_pod(
60
75
  dag_id=args.dag_id,
61
76
  task_id=ti.task_id,
@@ -63,7 +78,7 @@ def generate_pod_yaml(args):
63
78
  try_number=ti.try_number,
64
79
  kube_image=kube_config.kube_image,
65
80
  date=ti.logical_date if AIRFLOW_V_3_0_PLUS else ti.execution_date,
66
- args=ti.command_as_list(),
81
+ args=command_args,
67
82
  pod_override_object=PodGenerator.from_obj(ti.executor_config),
68
83
  scheduler_job_id="worker-config",
69
84
  namespace=kube_config.executor_namespace,
@@ -76,13 +76,13 @@ from airflow.utils.state import TaskInstanceState
76
76
 
77
77
  if TYPE_CHECKING:
78
78
  import argparse
79
+ from collections.abc import Sequence
79
80
 
80
81
  from kubernetes import client
81
82
  from kubernetes.client import models as k8s
82
83
  from sqlalchemy.orm import Session
83
84
 
84
85
  from airflow.executors import workloads
85
- from airflow.executors.base_executor import CommandType
86
86
  from airflow.models.taskinstance import TaskInstance
87
87
  from airflow.models.taskinstancekey import TaskInstanceKey
88
88
  from airflow.providers.cncf.kubernetes.executors.kubernetes_executor_types import (
@@ -254,7 +254,7 @@ class KubernetesExecutor(BaseExecutor):
254
254
  def execute_async(
255
255
  self,
256
256
  key: TaskInstanceKey,
257
- command: CommandType,
257
+ command: Any,
258
258
  queue: str | None = None,
259
259
  executor_config: Any | None = None,
260
260
  ) -> None:
@@ -292,7 +292,7 @@ class KubernetesExecutor(BaseExecutor):
292
292
  ti = workload.ti
293
293
  self.queued_tasks[ti.key] = workload
294
294
 
295
- def _process_workloads(self, workloads: list[workloads.All]) -> None:
295
+ def _process_workloads(self, workloads: Sequence[workloads.All]) -> None:
296
296
  from airflow.executors.workloads import ExecuteTask
297
297
 
298
298
  # Airflow V3 version
@@ -20,10 +20,14 @@ from typing import TYPE_CHECKING, Any, Optional, Union
20
20
 
21
21
  ADOPTED = "adopted"
22
22
  if TYPE_CHECKING:
23
- from airflow.executors.base_executor import CommandType
23
+ from collections.abc import Sequence
24
+
24
25
  from airflow.models.taskinstance import TaskInstanceKey
25
26
  from airflow.utils.state import TaskInstanceState
26
27
 
28
+ # TODO: Remove after Airflow 2 support is removed
29
+ CommandType = Sequence[str]
30
+
27
31
  # TaskInstance key, command, configuration, pod_template_file
28
32
  KubernetesJobType = tuple[TaskInstanceKey, CommandType, Any, Optional[str]]
29
33
 
@@ -41,7 +41,7 @@ from airflow.providers.cncf.kubernetes.kubernetes_helper_functions import (
41
41
  annotations_to_key,
42
42
  create_unique_id,
43
43
  )
44
- from airflow.providers.cncf.kubernetes.pod_generator import PodGenerator
44
+ from airflow.providers.cncf.kubernetes.pod_generator import PodGenerator, workload_to_command_args
45
45
  from airflow.utils.log.logging_mixin import LoggingMixin
46
46
  from airflow.utils.singleton import Singleton
47
47
  from airflow.utils.state import TaskInstanceState
@@ -387,20 +387,12 @@ class AirflowKubernetesScheduler(LoggingMixin):
387
387
  key, command, kube_executor_config, pod_template_file = next_job
388
388
 
389
389
  dag_id, task_id, run_id, try_number, map_index = key
390
- ser_input = ""
391
390
  if len(command) == 1:
392
391
  from airflow.executors.workloads import ExecuteTask
393
392
 
394
393
  if isinstance(command[0], ExecuteTask):
395
394
  workload = command[0]
396
- ser_input = workload.model_dump_json()
397
- command = [
398
- "python",
399
- "-m",
400
- "airflow.sdk.execution_time.execute_workload",
401
- "--json-path",
402
- "/tmp/execute/input.json",
403
- ]
395
+ command = workload_to_command_args(workload)
404
396
  else:
405
397
  raise ValueError(
406
398
  f"KubernetesExecutor doesn't know how to handle workload of type: {type(command[0])}"
@@ -427,7 +419,6 @@ class AirflowKubernetesScheduler(LoggingMixin):
427
419
  date=None,
428
420
  run_id=run_id,
429
421
  args=list(command),
430
- content_json_for_volume=ser_input,
431
422
  pod_override_object=kube_executor_config,
432
423
  base_worker_pod=base_worker_pod,
433
424
  with_mutation_hook=True,
@@ -18,7 +18,7 @@
18
18
  from __future__ import annotations
19
19
 
20
20
  from collections.abc import Sequence
21
- from typing import TYPE_CHECKING
21
+ from typing import TYPE_CHECKING, Any
22
22
 
23
23
  from deprecated import deprecated
24
24
 
@@ -26,18 +26,17 @@ from airflow.configuration import conf
26
26
  from airflow.exceptions import AirflowProviderDeprecationWarning
27
27
  from airflow.executors.base_executor import BaseExecutor
28
28
  from airflow.providers.cncf.kubernetes.executors.kubernetes_executor import KubernetesExecutor
29
+ from airflow.providers.cncf.kubernetes.version_compat import AIRFLOW_V_3_0_PLUS
29
30
 
30
31
  if TYPE_CHECKING:
31
32
  from airflow.callbacks.base_callback_sink import BaseCallbackSink
32
33
  from airflow.callbacks.callback_requests import CallbackRequest
33
- from airflow.executors.base_executor import (
34
- CommandType,
35
- EventBufferValueType,
36
- QueuedTaskInstanceType,
37
- )
34
+ from airflow.executors.base_executor import EventBufferValueType
38
35
  from airflow.executors.local_executor import LocalExecutor
39
36
  from airflow.models.taskinstance import SimpleTaskInstance, TaskInstance, TaskInstanceKey
40
37
 
38
+ CommandType = Sequence[str]
39
+
41
40
 
42
41
  class LocalKubernetesExecutor(BaseExecutor):
43
42
  """
@@ -81,7 +80,7 @@ class LocalKubernetesExecutor(BaseExecutor):
81
80
  """Not implemented for hybrid executors."""
82
81
 
83
82
  @property
84
- def queued_tasks(self) -> dict[TaskInstanceKey, QueuedTaskInstanceType]:
83
+ def queued_tasks(self) -> dict[TaskInstanceKey, Any]:
85
84
  """Return queued tasks from local and kubernetes executor."""
86
85
  queued_tasks = self.local_executor.queued_tasks.copy()
87
86
  # TODO: fix this, there is misalignment between the types of queued_tasks so it is likely wrong
@@ -121,6 +120,13 @@ class LocalKubernetesExecutor(BaseExecutor):
121
120
 
122
121
  def start(self) -> None:
123
122
  """Start local and kubernetes executor."""
123
+ if AIRFLOW_V_3_0_PLUS:
124
+ raise RuntimeError(
125
+ f"{self.__class__.__name__} does not support Airflow 3.0+. See "
126
+ "https://airflow.apache.org/docs/apache-airflow/stable/core-concepts/executor/index.html#using-multiple-executors-concurrently"
127
+ " how to use multiple executors concurrently."
128
+ )
129
+
124
130
  self.log.info("Starting local and Kubernetes Executor")
125
131
  self.local_executor.start()
126
132
  self.kubernetes_executor.start()
@@ -145,7 +151,7 @@ class LocalKubernetesExecutor(BaseExecutor):
145
151
  """Queues command via local or kubernetes executor."""
146
152
  executor = self._router(task_instance)
147
153
  self.log.debug("Using executor: %s for %s", executor.__class__.__name__, task_instance.key)
148
- executor.queue_command(task_instance, command, priority, queue)
154
+ executor.queue_command(task_instance, command, priority, queue) # type: ignore[union-attr]
149
155
 
150
156
  def queue_task_instance(
151
157
  self,
@@ -171,7 +177,7 @@ class LocalKubernetesExecutor(BaseExecutor):
171
177
  if not hasattr(task_instance, "pickle_id"):
172
178
  del kwargs["pickle_id"]
173
179
 
174
- executor.queue_task_instance(
180
+ executor.queue_task_instance( # type: ignore[union-attr]
175
181
  task_instance=task_instance,
176
182
  mark_success=mark_success,
177
183
  ignore_all_deps=ignore_all_deps,
@@ -169,7 +169,7 @@ def get_provider_info():
169
169
  "version_added": "8.1.0",
170
170
  "type": "string",
171
171
  "example": None,
172
- "default": "CreateContainerConfigError,ErrImagePull,CreateContainerError,ImageInspectError, InvalidImageName",
172
+ "default": "CreateContainerConfigError,ErrImagePull,CreateContainerError,ImageInspectError,InvalidImageName",
173
173
  },
174
174
  "worker_pods_creation_batch_size": {
175
175
  "description": 'Number of Kubernetes Worker Pod creation calls per scheduler loop.\nNote that the current default of "1" will only launch a single pod\nper-heartbeat. It is HIGHLY recommended that users increase this\nnumber to match the tolerance of their kubernetes cluster for\nbetter performance.\n',
@@ -42,9 +42,12 @@ class KubeConfig:
42
42
  )
43
43
  self.worker_pod_pending_fatal_container_state_reasons = []
44
44
  if conf.get(self.kubernetes_section, "worker_pod_pending_fatal_container_state_reasons", fallback=""):
45
- self.worker_pod_pending_fatal_container_state_reasons = conf.get(
46
- self.kubernetes_section, "worker_pod_pending_fatal_container_state_reasons"
47
- ).split(",")
45
+ self.worker_pod_pending_fatal_container_state_reasons = [
46
+ r.strip()
47
+ for r in conf.get(
48
+ self.kubernetes_section, "worker_pod_pending_fatal_container_state_reasons"
49
+ ).split(",")
50
+ ]
48
51
 
49
52
  self.worker_pods_creation_batch_size = conf.getint(
50
53
  self.kubernetes_section, "worker_pods_creation_batch_size"
@@ -18,6 +18,7 @@
18
18
 
19
19
  from __future__ import annotations
20
20
 
21
+ import asyncio
21
22
  import datetime
22
23
  import json
23
24
  import logging
@@ -83,6 +84,7 @@ from airflow.providers.cncf.kubernetes.utils.pod_manager import (
83
84
  from airflow.settings import pod_mutation_hook
84
85
  from airflow.utils import yaml
85
86
  from airflow.utils.helpers import prune_dict, validate_key
87
+ from airflow.utils.xcom import XCOM_RETURN_KEY
86
88
  from airflow.version import version as airflow_version
87
89
 
88
90
  if TYPE_CHECKING:
@@ -233,6 +235,7 @@ class KubernetesPodOperator(BaseOperator):
233
235
  :param logging_interval: max time in seconds that task should be in deferred state before
234
236
  resuming to fetch the latest logs. If ``None``, then the task will remain in deferred state until pod
235
237
  is done, and no logs will be visible until that time.
238
+ :param trigger_kwargs: additional keyword parameters passed to the trigger
236
239
  """
237
240
 
238
241
  # !!! Changes in KubernetesPodOperator's arguments should be also reflected in !!!
@@ -266,6 +269,7 @@ class KubernetesPodOperator(BaseOperator):
266
269
  "node_selector",
267
270
  "kubernetes_conn_id",
268
271
  "base_container_name",
272
+ "trigger_kwargs",
269
273
  )
270
274
  template_fields_renderers = {"env_vars": "py"}
271
275
 
@@ -305,7 +309,7 @@ class KubernetesPodOperator(BaseOperator):
305
309
  node_selector: dict | None = None,
306
310
  image_pull_secrets: list[k8s.V1LocalObjectReference] | None = None,
307
311
  service_account_name: str | None = None,
308
- automount_service_account_token: bool = True,
312
+ automount_service_account_token: bool | None = None,
309
313
  hostnetwork: bool = False,
310
314
  host_aliases: list[k8s.V1HostAlias] | None = None,
311
315
  tolerations: list[k8s.V1Toleration] | None = None,
@@ -339,6 +343,7 @@ class KubernetesPodOperator(BaseOperator):
339
343
  ) = None,
340
344
  progress_callback: Callable[[str], None] | None = None,
341
345
  logging_interval: int | None = None,
346
+ trigger_kwargs: dict | None = None,
342
347
  **kwargs,
343
348
  ) -> None:
344
349
  super().__init__(**kwargs)
@@ -428,6 +433,7 @@ class KubernetesPodOperator(BaseOperator):
428
433
  self.termination_message_policy = termination_message_policy
429
434
  self.active_deadline_seconds = active_deadline_seconds
430
435
  self.logging_interval = logging_interval
436
+ self.trigger_kwargs = trigger_kwargs
431
437
 
432
438
  self._config_dict: dict | None = None # TODO: remove it when removing convert_config_file_to_dict
433
439
  self._progress_callback = progress_callback
@@ -572,19 +578,46 @@ class KubernetesPodOperator(BaseOperator):
572
578
  if self.reattach_on_restart:
573
579
  pod = self.find_pod(pod_request_obj.metadata.namespace, context=context)
574
580
  if pod:
575
- return pod
581
+ # If pod is terminated then delete the pod an create a new as not possible to get xcom
582
+ pod_phase = (
583
+ pod.status.phase if hasattr(pod, "status") and hasattr(pod.status, "phase") else None
584
+ )
585
+ if pod_phase and pod_phase not in (PodPhase.SUCCEEDED, PodPhase.FAILED):
586
+ return pod
587
+
588
+ self.log.info(
589
+ "Found terminated old matching pod %s with labels %s",
590
+ pod.metadata.name,
591
+ pod.metadata.labels,
592
+ )
593
+
594
+ # if not required to delete the pod then keep old logic and not automatically create new pod
595
+ deleted_pod = self.process_pod_deletion(pod)
596
+ if not deleted_pod:
597
+ return pod
598
+
599
+ self.log.info("Deleted pod to handle rerun and create new pod!")
600
+
576
601
  self.log.debug("Starting pod:\n%s", yaml.safe_dump(pod_request_obj.to_dict()))
577
602
  self.pod_manager.create_pod(pod=pod_request_obj)
578
603
  return pod_request_obj
579
604
 
580
605
  def await_pod_start(self, pod: k8s.V1Pod) -> None:
581
606
  try:
582
- self.pod_manager.await_pod_start(
583
- pod=pod,
584
- schedule_timeout=self.schedule_timeout_seconds,
585
- startup_timeout=self.startup_timeout_seconds,
586
- check_interval=self.startup_check_interval_seconds,
607
+ loop = asyncio.get_event_loop()
608
+ events_task = asyncio.ensure_future(
609
+ self.pod_manager.watch_pod_events(pod, self.startup_check_interval_seconds)
587
610
  )
611
+ loop.run_until_complete(
612
+ self.pod_manager.await_pod_start(
613
+ pod=pod,
614
+ schedule_timeout=self.schedule_timeout_seconds,
615
+ startup_timeout=self.startup_timeout_seconds,
616
+ check_interval=self.startup_check_interval_seconds,
617
+ )
618
+ )
619
+ loop.run_until_complete(events_task)
620
+ loop.close()
588
621
  except PodLaunchFailedException:
589
622
  if self.log_events_on_failure:
590
623
  self._read_pod_events(pod, reraise=False)
@@ -688,6 +721,8 @@ class KubernetesPodOperator(BaseOperator):
688
721
  self.cleanup(
689
722
  pod=pod_to_clean,
690
723
  remote_pod=self.remote_pod,
724
+ xcom_result=result,
725
+ context=context,
691
726
  )
692
727
  for callback in self.callbacks:
693
728
  callback.on_pod_cleanup(
@@ -812,6 +847,7 @@ class KubernetesPodOperator(BaseOperator):
812
847
  on_finish_action=self.on_finish_action.value,
813
848
  last_log_time=last_log_time,
814
849
  logging_interval=self.logging_interval,
850
+ trigger_kwargs=self.trigger_kwargs,
815
851
  ),
816
852
  method_name="trigger_reentry",
817
853
  )
@@ -958,7 +994,13 @@ class KubernetesPodOperator(BaseOperator):
958
994
  pod=pod, client=self.client, mode=ExecutionMode.SYNC, operator=self, context=context
959
995
  )
960
996
 
961
- def cleanup(self, pod: k8s.V1Pod, remote_pod: k8s.V1Pod):
997
+ def cleanup(
998
+ self,
999
+ pod: k8s.V1Pod,
1000
+ remote_pod: k8s.V1Pod,
1001
+ xcom_result: dict | None = None,
1002
+ context: Context | None = None,
1003
+ ) -> None:
962
1004
  # Skip cleaning the pod in the following scenarios.
963
1005
  # 1. If a task got marked as failed, "on_kill" method would be called and the pod will be cleaned up
964
1006
  # there. Cleaning it up again will raise an exception (which might cause retry).
@@ -978,6 +1020,10 @@ class KubernetesPodOperator(BaseOperator):
978
1020
  )
979
1021
 
980
1022
  if failed:
1023
+ if self.do_xcom_push and xcom_result and context:
1024
+ # Ensure that existing XCom is pushed even in case of failure
1025
+ context["ti"].xcom_push(XCOM_RETURN_KEY, xcom_result)
1026
+
981
1027
  if self.log_events_on_failure:
982
1028
  self._read_pod_events(pod, reraise=False)
983
1029
 
@@ -1062,7 +1108,7 @@ class KubernetesPodOperator(BaseOperator):
1062
1108
  if self.KILL_ISTIO_PROXY_SUCCESS_MSG not in output_str:
1063
1109
  raise AirflowException("Error while deleting istio-proxy sidecar: %s", output_str)
1064
1110
 
1065
- def process_pod_deletion(self, pod: k8s.V1Pod, *, reraise=True):
1111
+ def process_pod_deletion(self, pod: k8s.V1Pod, *, reraise=True) -> bool:
1066
1112
  with _optionally_suppress(reraise=reraise):
1067
1113
  if pod is not None:
1068
1114
  should_delete_pod = (self.on_finish_action == OnFinishAction.DELETE_POD) or (
@@ -1075,8 +1121,10 @@ class KubernetesPodOperator(BaseOperator):
1075
1121
  if should_delete_pod:
1076
1122
  self.log.info("Deleting pod: %s", pod.metadata.name)
1077
1123
  self.pod_manager.delete_pod(pod)
1078
- else:
1079
- self.log.info("Skipping deleting pod: %s", pod.metadata.name)
1124
+ return True
1125
+ self.log.info("Skipping deleting pod: %s", pod.metadata.name)
1126
+
1127
+ return False
1080
1128
 
1081
1129
  def _build_find_pod_label_selector(self, context: Context | None = None, *, exclude_checked=True) -> str:
1082
1130
  labels = {
@@ -34,7 +34,7 @@ from functools import reduce
34
34
  from typing import TYPE_CHECKING
35
35
 
36
36
  from dateutil import parser
37
- from kubernetes.client import V1EmptyDirVolumeSource, V1Volume, V1VolumeMount, models as k8s
37
+ from kubernetes.client import models as k8s
38
38
  from kubernetes.client.api_client import ApiClient
39
39
 
40
40
  from airflow.exceptions import (
@@ -46,6 +46,7 @@ from airflow.providers.cncf.kubernetes.kubernetes_helper_functions import (
46
46
  POD_NAME_MAX_LENGTH,
47
47
  add_unique_suffix,
48
48
  )
49
+ from airflow.providers.cncf.kubernetes.version_compat import AIRFLOW_V_3_0_PLUS
49
50
  from airflow.utils import yaml
50
51
  from airflow.utils.hashlib_wrapper import md5
51
52
  from airflow.version import version as airflow_version
@@ -53,11 +54,49 @@ from airflow.version import version as airflow_version
53
54
  if TYPE_CHECKING:
54
55
  import datetime
55
56
 
57
+ from airflow.executors import workloads
58
+ from airflow.models.taskinstance import TaskInstance
59
+
56
60
  log = logging.getLogger(__name__)
57
61
 
58
62
  MAX_LABEL_LEN = 63
59
63
 
60
64
 
65
+ def workload_to_command_args(workload: workloads.ExecuteTask) -> list[str]:
66
+ """
67
+ Convert a workload object to Task SDK command arguments.
68
+
69
+ :param workload: The ExecuteTask workload to convert
70
+ :return: List of command arguments for the Task SDK
71
+ """
72
+ ser_input = workload.model_dump_json()
73
+ return [
74
+ "python",
75
+ "-m",
76
+ "airflow.sdk.execution_time.execute_workload",
77
+ "--json-string",
78
+ ser_input,
79
+ ]
80
+
81
+
82
+ def generate_pod_command_args(task_instance: TaskInstance) -> list[str]:
83
+ """
84
+ Generate command arguments for a ``TaskInstance`` to be used in a Kubernetes pod.
85
+
86
+ This function handles backwards compatibility between Airflow 2.x and 3.x:
87
+ - In Airflow 2.x: Uses the existing ``command_as_list()`` method
88
+ - In Airflow 3.x: Uses the Task SDK workload approach with serialized workload
89
+ """
90
+ if AIRFLOW_V_3_0_PLUS:
91
+ # In Airflow 3+, use the Task SDK workload approach
92
+ from airflow.executors import workloads
93
+
94
+ workload = workloads.ExecuteTask.make(task_instance)
95
+ return workload_to_command_args(workload)
96
+ # In Airflow 2.x, use the existing method
97
+ return task_instance.command_as_list()
98
+
99
+
61
100
  def make_safe_label_value(string: str) -> str:
62
101
  """
63
102
  Normalize a provided label to be of valid length and characters.
@@ -287,7 +326,6 @@ class PodGenerator:
287
326
  scheduler_job_id: str,
288
327
  run_id: str | None = None,
289
328
  map_index: int = -1,
290
- content_json_for_volume: str = "",
291
329
  *,
292
330
  with_mutation_hook: bool = False,
293
331
  ) -> k8s.V1Pod:
@@ -355,39 +393,6 @@ class PodGenerator:
355
393
  containers=[main_container],
356
394
  )
357
395
 
358
- if content_json_for_volume:
359
- import shlex
360
-
361
- input_file_path = "/tmp/execute/input.json"
362
- execute_volume = V1Volume(
363
- name="execute-volume",
364
- empty_dir=V1EmptyDirVolumeSource(),
365
- )
366
-
367
- execute_volume_mount = V1VolumeMount(
368
- name="execute-volume",
369
- mount_path="/tmp/execute",
370
- read_only=False,
371
- )
372
-
373
- escaped_json = shlex.quote(content_json_for_volume)
374
- init_container = k8s.V1Container(
375
- name="init-container",
376
- image="busybox",
377
- command=["/bin/sh", "-c", f"echo {escaped_json} > {input_file_path}"],
378
- volume_mounts=[execute_volume_mount],
379
- )
380
-
381
- main_container.volume_mounts = [execute_volume_mount]
382
- main_container.command = args[:-1]
383
- main_container.args = args[-1:]
384
-
385
- podspec = k8s.V1PodSpec(
386
- containers=[main_container],
387
- volumes=[execute_volume],
388
- init_containers=[init_container],
389
- )
390
-
391
396
  dynamic_pod.spec = podspec
392
397
 
393
398
  # Reconcile the pods starting with the first chronologically,
@@ -546,7 +551,7 @@ def merge_objects(base_obj, client_obj):
546
551
 
547
552
  for base_key in base_obj.to_dict():
548
553
  base_val = getattr(base_obj, base_key, None)
549
- if not getattr(client_obj, base_key, None) and base_val:
554
+ if not getattr(client_obj, base_key, None) and base_val is not None:
550
555
  if not isinstance(client_obj_cp, dict):
551
556
  setattr(client_obj_cp, base_key, base_val)
552
557
  else:
@@ -19,11 +19,14 @@ from __future__ import annotations
19
19
 
20
20
  from typing import TYPE_CHECKING
21
21
 
22
+ from jinja2 import TemplateAssertionError, UndefinedError
22
23
  from kubernetes.client.api_client import ApiClient
23
24
 
25
+ from airflow.exceptions import AirflowException
24
26
  from airflow.providers.cncf.kubernetes.kube_config import KubeConfig
25
27
  from airflow.providers.cncf.kubernetes.kubernetes_helper_functions import create_unique_id
26
- from airflow.providers.cncf.kubernetes.pod_generator import PodGenerator
28
+ from airflow.providers.cncf.kubernetes.pod_generator import PodGenerator, generate_pod_command_args
29
+ from airflow.utils.session import NEW_SESSION, provide_session
27
30
 
28
31
  if TYPE_CHECKING:
29
32
  from airflow.models.taskinstance import TaskInstance
@@ -40,6 +43,10 @@ def render_k8s_pod_yaml(task_instance: TaskInstance) -> dict | None:
40
43
  # If no such pod_template_file override was passed, we can simply render
41
44
  # The pod spec using the default template.
42
45
  pod_template_file = kube_config.pod_template_file
46
+
47
+ # Generate command args using shared utility function
48
+ command_args = generate_pod_command_args(task_instance)
49
+
43
50
  pod = PodGenerator.construct_pod(
44
51
  dag_id=task_instance.dag_id,
45
52
  run_id=task_instance.run_id,
@@ -49,7 +56,7 @@ def render_k8s_pod_yaml(task_instance: TaskInstance) -> dict | None:
49
56
  pod_id=create_unique_id(task_instance.dag_id, task_instance.task_id),
50
57
  try_number=task_instance.try_number,
51
58
  kube_image=kube_config.kube_image,
52
- args=task_instance.command_as_list(),
59
+ args=command_args,
53
60
  pod_override_object=PodGenerator.from_obj(task_instance.executor_config),
54
61
  scheduler_job_id="0",
55
62
  namespace=kube_config.executor_namespace,
@@ -58,3 +65,17 @@ def render_k8s_pod_yaml(task_instance: TaskInstance) -> dict | None:
58
65
  )
59
66
  sanitized_pod = ApiClient().sanitize_for_serialization(pod)
60
67
  return sanitized_pod
68
+
69
+
70
+ @provide_session
71
+ def get_rendered_k8s_spec(task_instance: TaskInstance, session=NEW_SESSION) -> dict | None:
72
+ """Fetch rendered template fields from DB."""
73
+ from airflow.models.renderedtifields import RenderedTaskInstanceFields
74
+
75
+ rendered_k8s_spec = RenderedTaskInstanceFields.get_k8s_pod_yaml(task_instance, session=session)
76
+ if not rendered_k8s_spec:
77
+ try:
78
+ rendered_k8s_spec = render_k8s_pod_yaml(task_instance)
79
+ except (TemplateAssertionError, UndefinedError) as e:
80
+ raise AirflowException(f"Unable to render a k8s spec for this taskinstance: {e}") from e
81
+ return rendered_k8s_spec
@@ -75,6 +75,7 @@ class KubernetesPodTrigger(BaseTrigger):
75
75
  :param logging_interval: number of seconds to wait before kicking it back to
76
76
  the operator to print latest logs. If ``None`` will wait until container done.
77
77
  :param last_log_time: where to resume logs from
78
+ :param trigger_kwargs: additional keyword parameters to send in the event
78
79
  """
79
80
 
80
81
  def __init__(
@@ -94,6 +95,7 @@ class KubernetesPodTrigger(BaseTrigger):
94
95
  on_finish_action: str = "delete_pod",
95
96
  last_log_time: DateTime | None = None,
96
97
  logging_interval: int | None = None,
98
+ trigger_kwargs: dict | None = None,
97
99
  ):
98
100
  super().__init__()
99
101
  self.pod_name = pod_name
@@ -111,6 +113,7 @@ class KubernetesPodTrigger(BaseTrigger):
111
113
  self.last_log_time = last_log_time
112
114
  self.logging_interval = logging_interval
113
115
  self.on_finish_action = OnFinishAction(on_finish_action)
116
+ self.trigger_kwargs = trigger_kwargs or {}
114
117
 
115
118
  self._since_time = None
116
119
 
@@ -134,6 +137,7 @@ class KubernetesPodTrigger(BaseTrigger):
134
137
  "on_finish_action": self.on_finish_action.value,
135
138
  "last_log_time": self.last_log_time,
136
139
  "logging_interval": self.logging_interval,
140
+ "trigger_kwargs": self.trigger_kwargs,
137
141
  },
138
142
  )
139
143
 
@@ -149,6 +153,7 @@ class KubernetesPodTrigger(BaseTrigger):
149
153
  "namespace": self.pod_namespace,
150
154
  "name": self.pod_name,
151
155
  "message": "All containers inside pod have started successfully.",
156
+ **self.trigger_kwargs,
152
157
  }
153
158
  )
154
159
  elif state == ContainerState.FAILED:
@@ -158,6 +163,7 @@ class KubernetesPodTrigger(BaseTrigger):
158
163
  "namespace": self.pod_namespace,
159
164
  "name": self.pod_name,
160
165
  "message": "pod failed",
166
+ **self.trigger_kwargs,
161
167
  }
162
168
  )
163
169
  else:
@@ -172,6 +178,7 @@ class KubernetesPodTrigger(BaseTrigger):
172
178
  "namespace": self.pod_namespace,
173
179
  "status": "timeout",
174
180
  "message": message,
181
+ **self.trigger_kwargs,
175
182
  }
176
183
  )
177
184
  return
@@ -183,6 +190,7 @@ class KubernetesPodTrigger(BaseTrigger):
183
190
  "status": "error",
184
191
  "message": str(e),
185
192
  "stack_trace": traceback.format_exc(),
193
+ **self.trigger_kwargs,
186
194
  }
187
195
  )
188
196
  return
@@ -234,6 +242,7 @@ class KubernetesPodTrigger(BaseTrigger):
234
242
  "namespace": self.pod_namespace,
235
243
  "name": self.pod_name,
236
244
  "last_log_time": self.last_log_time,
245
+ **self.trigger_kwargs,
237
246
  }
238
247
  )
239
248
  if container_state == ContainerState.FAILED:
@@ -244,6 +253,7 @@ class KubernetesPodTrigger(BaseTrigger):
244
253
  "name": self.pod_name,
245
254
  "message": "Container state failed",
246
255
  "last_log_time": self.last_log_time,
256
+ **self.trigger_kwargs,
247
257
  }
248
258
  )
249
259
  self.log.debug("Container is not completed and still working.")
@@ -254,6 +264,7 @@ class KubernetesPodTrigger(BaseTrigger):
254
264
  "last_log_time": self.last_log_time,
255
265
  "namespace": self.pod_namespace,
256
266
  "name": self.pod_name,
267
+ **self.trigger_kwargs,
257
268
  }
258
269
  )
259
270
  self.log.debug("Sleeping for %s seconds.", self.poll_interval)
@@ -18,6 +18,7 @@
18
18
 
19
19
  from __future__ import annotations
20
20
 
21
+ import asyncio
21
22
  import enum
22
23
  import json
23
24
  import math
@@ -49,6 +50,7 @@ if TYPE_CHECKING:
49
50
  from kubernetes.client.models.v1_container_state import V1ContainerState
50
51
  from kubernetes.client.models.v1_container_state_waiting import V1ContainerStateWaiting
51
52
  from kubernetes.client.models.v1_container_status import V1ContainerStatus
53
+ from kubernetes.client.models.v1_object_reference import V1ObjectReference
52
54
  from kubernetes.client.models.v1_pod import V1Pod
53
55
  from kubernetes.client.models.v1_pod_condition import V1PodCondition
54
56
  from urllib3.response import HTTPResponse
@@ -377,7 +379,21 @@ class PodManager(LoggingMixin):
377
379
  """Launch the pod asynchronously."""
378
380
  return self.run_pod_async(pod)
379
381
 
380
- def await_pod_start(
382
+ async def watch_pod_events(self, pod: V1Pod, check_interval: int = 1) -> None:
383
+ """Read pod events and writes into log."""
384
+ self.keep_watching_for_events = True
385
+ num_events = 0
386
+ while self.keep_watching_for_events:
387
+ events = self.read_pod_events(pod)
388
+ for new_event in events.items[num_events:]:
389
+ involved_object: V1ObjectReference = new_event.involved_object
390
+ self.log.info(
391
+ "The Pod has an Event: %s from %s", new_event.message, involved_object.field_path
392
+ )
393
+ num_events = len(events.items)
394
+ await asyncio.sleep(check_interval)
395
+
396
+ async def await_pod_start(
381
397
  self, pod: V1Pod, schedule_timeout: int = 120, startup_timeout: int = 120, check_interval: int = 1
382
398
  ) -> None:
383
399
  """
@@ -439,7 +455,7 @@ class PodManager(LoggingMixin):
439
455
  f"\n{container_waiting.message}"
440
456
  )
441
457
 
442
- time.sleep(check_interval)
458
+ await asyncio.sleep(check_interval)
443
459
 
444
460
  def fetch_container_logs(
445
461
  self,
@@ -822,6 +838,10 @@ class PodManager(LoggingMixin):
822
838
  if self.container_is_running(pod, PodDefaults.SIDECAR_CONTAINER_NAME):
823
839
  self.log.info("The xcom sidecar container has started.")
824
840
  break
841
+ if self.container_is_terminated(pod, PodDefaults.SIDECAR_CONTAINER_NAME):
842
+ raise AirflowException(
843
+ "Xcom sidecar container is already terminated! Not possible to read xcom output of task."
844
+ )
825
845
  if (time.time() - last_log_time) >= log_interval:
826
846
  self.log.warning(
827
847
  "Still waiting for the xcom sidecar container to start. Elapsed time: %d seconds.",
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: apache-airflow-providers-cncf-kubernetes
3
- Version: 10.5.0rc1
3
+ Version: 10.6.0rc1
4
4
  Summary: Provider package apache-airflow-providers-cncf-kubernetes for Apache Airflow
5
5
  Keywords: airflow-provider,cncf.kubernetes,airflow,integration
6
6
  Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -24,11 +24,11 @@ Requires-Dist: aiofiles>=23.2.0
24
24
  Requires-Dist: apache-airflow>=2.10.0rc1
25
25
  Requires-Dist: asgiref>=3.5.2
26
26
  Requires-Dist: cryptography>=41.0.0
27
- Requires-Dist: kubernetes>=29.0.0,<=31.0.0
28
- Requires-Dist: kubernetes_asyncio>=29.0.0,<=31.0.0
27
+ Requires-Dist: kubernetes>=32.0.0,<33.0.0
28
+ Requires-Dist: kubernetes_asyncio>=32.0.0,<33.0.0
29
29
  Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
30
- Project-URL: Changelog, https://airflow.staged.apache.org/docs/apache-airflow-providers-cncf-kubernetes/10.5.0/changelog.html
31
- Project-URL: Documentation, https://airflow.staged.apache.org/docs/apache-airflow-providers-cncf-kubernetes/10.5.0
30
+ Project-URL: Changelog, https://airflow.staged.apache.org/docs/apache-airflow-providers-cncf-kubernetes/10.6.0/changelog.html
31
+ Project-URL: Documentation, https://airflow.staged.apache.org/docs/apache-airflow-providers-cncf-kubernetes/10.6.0
32
32
  Project-URL: Mastodon, https://fosstodon.org/@airflow
33
33
  Project-URL: Slack Chat, https://s.apache.org/airflow-slack
34
34
  Project-URL: Source Code, https://github.com/apache/airflow
@@ -59,7 +59,7 @@ Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/
59
59
 
60
60
  Package ``apache-airflow-providers-cncf-kubernetes``
61
61
 
62
- Release: ``10.5.0``
62
+ Release: ``10.6.0``
63
63
 
64
64
 
65
65
  `Kubernetes <https://kubernetes.io/>`__
@@ -72,7 +72,7 @@ This is a provider package for ``cncf.kubernetes`` provider. All classes for thi
72
72
  are in ``airflow.providers.cncf.kubernetes`` python package.
73
73
 
74
74
  You can find package information and changelog for the provider
75
- in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/10.5.0/>`_.
75
+ in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/10.6.0/>`_.
76
76
 
77
77
  Installation
78
78
  ------------
@@ -86,17 +86,17 @@ The package supports the following python versions: 3.9,3.10,3.11,3.12
86
86
  Requirements
87
87
  ------------
88
88
 
89
- ====================== =====================
89
+ ====================== ====================
90
90
  PIP package Version required
91
- ====================== =====================
91
+ ====================== ====================
92
92
  ``aiofiles`` ``>=23.2.0``
93
93
  ``apache-airflow`` ``>=2.10.0``
94
94
  ``asgiref`` ``>=3.5.2``
95
95
  ``cryptography`` ``>=41.0.0``
96
- ``kubernetes`` ``>=29.0.0,<=31.0.0``
97
- ``kubernetes_asyncio`` ``>=29.0.0,<=31.0.0``
98
- ====================== =====================
96
+ ``kubernetes`` ``>=32.0.0,<33.0.0``
97
+ ``kubernetes_asyncio`` ``>=32.0.0,<33.0.0``
98
+ ====================== ====================
99
99
 
100
100
  The changelog for the provider package can be found in the
101
- `changelog <https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/10.5.0/changelog.html>`_.
101
+ `changelog <https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/10.6.0/changelog.html>`_.
102
102
 
@@ -1,30 +1,30 @@
1
1
  airflow/providers/cncf/kubernetes/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
2
- airflow/providers/cncf/kubernetes/__init__.py,sha256=JbKTwSfn5nKWBEgMs8EXthKBZupv6f6sucVlGir2TKI,1505
2
+ airflow/providers/cncf/kubernetes/__init__.py,sha256=3zCSE5o2NsRNwIL9r5ir0zlepR293afTmqi8EUI-9Yo,1505
3
3
  airflow/providers/cncf/kubernetes/callbacks.py,sha256=5zGmQthojdT9iBEV3LIyBq-oKzjv2D4dOYCjYRbb61c,6076
4
4
  airflow/providers/cncf/kubernetes/exceptions.py,sha256=3cNEZTnrltBsqwzHiLfckwYYc_IWY1g4PcRs6zuMWWA,1137
5
- airflow/providers/cncf/kubernetes/get_provider_info.py,sha256=m4eN2HKhaAI2kD-mOvbMcbv5zxU78iP4pea_LltJ2v0,16042
5
+ airflow/providers/cncf/kubernetes/get_provider_info.py,sha256=Git4HycOcHrb4zD9W7ZYsqNDkQSQ4uipSJO_GaPiroE,16041
6
6
  airflow/providers/cncf/kubernetes/k8s_model.py,sha256=xmdFhX29DjegoZ-cq8-KDL9soVYXf4OpU6fAGr3cPTU,2101
7
7
  airflow/providers/cncf/kubernetes/kube_client.py,sha256=yflZxLousXA9d7t67KrEy55qzb1cUhEyy6yCPkEem28,5329
8
- airflow/providers/cncf/kubernetes/kube_config.py,sha256=3qWdCp2z4g8gX_sIOProgwp52UxM5kAIYabkxaX297g,5079
8
+ airflow/providers/cncf/kubernetes/kube_config.py,sha256=RX-ilybMXAcXg4gGyKfh6IYRWEf7llPycp9LqbYEe6s,5154
9
9
  airflow/providers/cncf/kubernetes/kubernetes_helper_functions.py,sha256=opxof6wxHEAHwa_zRB47QJBBrV5St4rIZzAiptA9Rek,5510
10
- airflow/providers/cncf/kubernetes/pod_generator.py,sha256=f24Qdg4QA9d8gaHA0X78jDojtm9swEgLLqcG6yEx2rc,20857
10
+ airflow/providers/cncf/kubernetes/pod_generator.py,sha256=0VEcAtT2SzAFwSDsQWe2QdrY2mDV8s4hBw0qLcmIMGw,21038
11
11
  airflow/providers/cncf/kubernetes/python_kubernetes_script.jinja2,sha256=I0EHRGwLHjSiX85e51HBIoddRDnC8TJPFrDBqQq_NJg,1776
12
12
  airflow/providers/cncf/kubernetes/python_kubernetes_script.py,sha256=KnTlZSWCZhwvj89fSc2kgIRTaI4iLNKPquHc2wXnluo,3460
13
13
  airflow/providers/cncf/kubernetes/secret.py,sha256=wj-T9gouqau_X14slAstGmnSxqXJQzdLwUdURzHna0I,5209
14
- airflow/providers/cncf/kubernetes/template_rendering.py,sha256=NyrAc2rsZ0oyXxtRHiY8qkaH4tftHlexTpr7YE4UQY0,2682
14
+ airflow/providers/cncf/kubernetes/template_rendering.py,sha256=WSUBhjGSDhjNtA4IFlbYyX50rvYN6UA4dMk0cPqgOjo,3618
15
15
  airflow/providers/cncf/kubernetes/version_compat.py,sha256=j5PCtXvZ71aBjixu-EFTNtVDPsngzzs7os0ZQDgFVDk,1536
16
16
  airflow/providers/cncf/kubernetes/backcompat/__init__.py,sha256=KXF76f3v1jIFUBNz8kwxVMvm7i4mNo35LbIG9IijBNc,1299
17
17
  airflow/providers/cncf/kubernetes/backcompat/backwards_compat_converters.py,sha256=FkRRtIEucp2hYrecGVYVgyPI6-b7hE7X7L17Z3r459Y,4303
18
18
  airflow/providers/cncf/kubernetes/cli/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
19
- airflow/providers/cncf/kubernetes/cli/kubernetes_command.py,sha256=FRR8p50FgHaVPS8x1rbXFSd47NtBtidqL4JyTyfxqnQ,7366
19
+ airflow/providers/cncf/kubernetes/cli/kubernetes_command.py,sha256=lvs9RJ-fjrCdisE2rnRJkxUxpn6rdtFX4j7D94ASO9c,7943
20
20
  airflow/providers/cncf/kubernetes/decorators/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
21
21
  airflow/providers/cncf/kubernetes/decorators/kubernetes.py,sha256=_OnebMazgYTJoCnkaMsRxwF6f2slKU_-ucrml8z8cq0,6449
22
22
  airflow/providers/cncf/kubernetes/decorators/kubernetes_cmd.py,sha256=Rsak_cPl_-yNv7SKh0PI7r2UgqZF-Li7zIxwX5cedc0,4951
23
23
  airflow/providers/cncf/kubernetes/executors/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
24
- airflow/providers/cncf/kubernetes/executors/kubernetes_executor.py,sha256=TomXdMCdtnnsKOWZiNR2lrh0ZmHghfbzXjpjeRDJTFA,31976
25
- airflow/providers/cncf/kubernetes/executors/kubernetes_executor_types.py,sha256=L8_8HOHd_4O8WW6xT2tp49-yOj0EMKCYK5YqMOOx_bI,1973
26
- airflow/providers/cncf/kubernetes/executors/kubernetes_executor_utils.py,sha256=2xqLS8DQhnwVwxnv4an8cjv4HPqP_TSiVFjjADPMeHM,24750
27
- airflow/providers/cncf/kubernetes/executors/local_kubernetes_executor.py,sha256=TuFRbs1zqKajJoZmo25kT4AGd-_-iD-UbhfOY30EOck,11591
24
+ airflow/providers/cncf/kubernetes/executors/kubernetes_executor.py,sha256=dpy1d0BU-pID_t9wPmi7h9PULbk9sQBIWqKxy9PNSB8,31953
25
+ airflow/providers/cncf/kubernetes/executors/kubernetes_executor_types.py,sha256=drjK3ZGVJ8pETX0uwT4s6zqWPMegXumMQDgwBipx_xc,2042
26
+ airflow/providers/cncf/kubernetes/executors/kubernetes_executor_utils.py,sha256=1aoIOoL9-ufnvZxHOau-ZJtt14Xl2zxlah2SuSmNnp0,24461
27
+ airflow/providers/cncf/kubernetes/executors/local_kubernetes_executor.py,sha256=icUEyhfHZYI4GvacTrYdS_nSohLk5tLzR5ZNUAYlhQY,12042
28
28
  airflow/providers/cncf/kubernetes/hooks/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
29
29
  airflow/providers/cncf/kubernetes/hooks/kubernetes.py,sha256=KPv5t7Xpvx9Nw9uampf1VCMdnq_ySqRzFbt07NKIjZY,37107
30
30
  airflow/providers/cncf/kubernetes/kubernetes_executor_templates/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
@@ -33,7 +33,7 @@ airflow/providers/cncf/kubernetes/operators/__init__.py,sha256=mlJxuZLkd5x-iq2SB
33
33
  airflow/providers/cncf/kubernetes/operators/custom_object_launcher.py,sha256=jTVHQt1vp5gELrLNyM-DrZ1ywgmTy3Hh1i6wyl7AGS0,15314
34
34
  airflow/providers/cncf/kubernetes/operators/job.py,sha256=aK2MogooZ6K7uVC0hWRYhCGgzwHOERrgPyknWu5mp7c,23771
35
35
  airflow/providers/cncf/kubernetes/operators/kueue.py,sha256=eEfl__06k15-21Y86qHOLAsY2zR1OWM4QgQhDteDBP0,4563
36
- airflow/providers/cncf/kubernetes/operators/pod.py,sha256=kTWxSQQvxHI3rzVDx-Z1jEQa4g4ZN7o_7smkMRyOw8U,58632
36
+ airflow/providers/cncf/kubernetes/operators/pod.py,sha256=hF3M93zB-gEvdKaGV0gMkMzDJF0URnpXSbcoKWR99_g,60607
37
37
  airflow/providers/cncf/kubernetes/operators/resource.py,sha256=Q5WssuDyjtzo1Op1pzUmYG4JZZdzCKTe-vTZEy8HSNA,7579
38
38
  airflow/providers/cncf/kubernetes/operators/spark_kubernetes.py,sha256=0TO86G-gbWAkQvaWBfhCp6ZJwoQzciH-UGR5kgw9fmg,13847
39
39
  airflow/providers/cncf/kubernetes/pod_template_file_examples/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
@@ -48,13 +48,13 @@ airflow/providers/cncf/kubernetes/sensors/__init__.py,sha256=9hdXHABrVpkbpjZgUft
48
48
  airflow/providers/cncf/kubernetes/sensors/spark_kubernetes.py,sha256=sMSuE4bziqPYzBNIZ2y1ab00kGO2tlS3Z7AmePBFA3w,5356
49
49
  airflow/providers/cncf/kubernetes/triggers/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
50
50
  airflow/providers/cncf/kubernetes/triggers/job.py,sha256=DGbC1FZktBF-00Lb0pU9iIKQnmdW8HWklp5Wwq54OEY,6754
51
- airflow/providers/cncf/kubernetes/triggers/pod.py,sha256=IeoMrPECgsr2Sswfvd7Fl6p3uCAvnZLvAco3tO2wu7Q,12865
51
+ airflow/providers/cncf/kubernetes/triggers/pod.py,sha256=YEyYTQONZu66uHGp_38N_A56txZNamA0Kc4UA0I3sJk,13414
52
52
  airflow/providers/cncf/kubernetes/utils/__init__.py,sha256=ClZN0VPjWySdVwS_ktH7rrgL9VLAcs3OSJSB9s3zaYw,863
53
53
  airflow/providers/cncf/kubernetes/utils/delete_from.py,sha256=poObZSoEJwQyaYWilEURs8f4CDY2sn_pfwS31Lf579A,5195
54
54
  airflow/providers/cncf/kubernetes/utils/k8s_resource_iterator.py,sha256=DLypjkD_3YDixRTcsxEjgvHZNbbG9qamlz05eBqaWzU,1955
55
- airflow/providers/cncf/kubernetes/utils/pod_manager.py,sha256=8edX-K9t2YgGHYsEeqUmKodHv4jsEHdPjii89KlE0dw,39436
55
+ airflow/providers/cncf/kubernetes/utils/pod_manager.py,sha256=Vpww3vAHX48QtApI-j1cI1jNiozQ6YMA7wfKFV1ELI0,40471
56
56
  airflow/providers/cncf/kubernetes/utils/xcom_sidecar.py,sha256=k6bdmVJ21OrAwGmWwledRrAmaty9ZrmbuM-IbaI4mqo,2519
57
- apache_airflow_providers_cncf_kubernetes-10.5.0rc1.dist-info/entry_points.txt,sha256=ByD3QJJyP9CfmTYtpNI1953akD38RUDgpGXLaq9vpOw,111
58
- apache_airflow_providers_cncf_kubernetes-10.5.0rc1.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
59
- apache_airflow_providers_cncf_kubernetes-10.5.0rc1.dist-info/METADATA,sha256=N5m9VD9yH_AjVp57oab1u-GkxOae0YlvC5FxJyRItHk,4328
60
- apache_airflow_providers_cncf_kubernetes-10.5.0rc1.dist-info/RECORD,,
57
+ apache_airflow_providers_cncf_kubernetes-10.6.0rc1.dist-info/entry_points.txt,sha256=ByD3QJJyP9CfmTYtpNI1953akD38RUDgpGXLaq9vpOw,111
58
+ apache_airflow_providers_cncf_kubernetes-10.6.0rc1.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
59
+ apache_airflow_providers_cncf_kubernetes-10.6.0rc1.dist-info/METADATA,sha256=ZGRMBZlrTVFhLBekuYd9NxcSjhnXYLNtOOG2E6PT0xU,4321
60
+ apache_airflow_providers_cncf_kubernetes-10.6.0rc1.dist-info/RECORD,,