apache-airflow-providers-cncf-kubernetes 10.4.3rc1__py3-none-any.whl → 10.5.0rc2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -29,11 +29,11 @@ from airflow import __version__ as airflow_version
 
 __all__ = ["__version__"]
 
-__version__ = "10.4.3"
+__version__ = "10.5.0"
 
 if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
-    "2.9.0"
+    "2.10.0"
 ):
     raise RuntimeError(
-        f"The package `apache-airflow-providers-cncf-kubernetes:{__version__}` needs Apache Airflow 2.9.0+"
+        f"The package `apache-airflow-providers-cncf-kubernetes:{__version__}` needs Apache Airflow 2.10.0+"
     )
@@ -0,0 +1,123 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+import warnings
+from collections.abc import Sequence
+from typing import TYPE_CHECKING, Callable
+
+from airflow.providers.cncf.kubernetes.version_compat import AIRFLOW_V_3_0_PLUS
+
+if AIRFLOW_V_3_0_PLUS:
+    from airflow.sdk.bases.decorator import DecoratedOperator, TaskDecorator, task_decorator_factory
+else:
+    from airflow.decorators.base import (  # type: ignore[no-redef]
+        DecoratedOperator,
+        TaskDecorator,
+        task_decorator_factory,
+    )
+from airflow.providers.cncf.kubernetes.operators.pod import KubernetesPodOperator
+from airflow.utils.context import context_merge
+from airflow.utils.operator_helpers import determine_kwargs
+
+if TYPE_CHECKING:
+    from airflow.utils.context import Context
+
+
+class _KubernetesCmdDecoratedOperator(DecoratedOperator, KubernetesPodOperator):
+    custom_operator_name = "@task.kubernetes_cmd"
+
+    template_fields: Sequence[str] = KubernetesPodOperator.template_fields
+    overwrite_rtif_after_execution: bool = True
+
+    def __init__(self, *, python_callable: Callable, args_only: bool = False, **kwargs) -> None:
+        self.args_only = args_only
+
+        cmds = kwargs.pop("cmds", None)
+        arguments = kwargs.pop("arguments", None)
+
+        if cmds is not None or arguments is not None:
+            warnings.warn(
+                f"The `cmds` and `arguments` are unused in {self.custom_operator_name} decorator. "
+                "You should return a list of commands or image entrypoint arguments with "
+                "args_only=True from the python_callable.",
+                UserWarning,
+                stacklevel=3,
+            )
+
+        # If the name was not provided, we generate operator name from the python_callable
+        # we also instruct operator to add a random suffix to avoid collisions by default
+        op_name = kwargs.pop("name", f"k8s-airflow-pod-{python_callable.__name__}")
+        random_name_suffix = kwargs.pop("random_name_suffix", True)
+
+        super().__init__(
+            python_callable=python_callable,
+            name=op_name,
+            random_name_suffix=random_name_suffix,
+            cmds=None,
+            arguments=None,
+            **kwargs,
+        )
+
+    def execute(self, context: Context):
+        generated = self._generate_cmds(context)
+        if self.args_only:
+            self.cmds = []
+            self.arguments = generated
+        else:
+            self.cmds = generated
+            self.arguments = []
+        context["ti"].render_templates()  # type: ignore[attr-defined]
+        return super().execute(context)
+
+    def _generate_cmds(self, context: Context) -> list[str]:
+        context_merge(context, self.op_kwargs)
+        kwargs = determine_kwargs(self.python_callable, self.op_args, context)
+        generated_cmds = self.python_callable(*self.op_args, **kwargs)
+        func_name = self.python_callable.__name__
+        if not isinstance(generated_cmds, list):
+            raise TypeError(
+                f"Expected python_callable to return a list of strings, but got {type(generated_cmds)}"
+            )
+        if not all(isinstance(cmd, str) for cmd in generated_cmds):
+            raise TypeError(f"Expected {func_name} to return a list of strings, but got {generated_cmds}")
+        if not generated_cmds:
+            raise ValueError(f"The {func_name} returned an empty list of commands")
+
+        return generated_cmds
+
+
+def kubernetes_cmd_task(
+    python_callable: Callable | None = None,
+    **kwargs,
+) -> TaskDecorator:
+    """
+    Kubernetes cmd operator decorator.
+
+    This wraps a function which should return command to be executed
+    in K8s using KubernetesPodOperator. The function should return a list of strings.
+    If args_only is set to True, the function should return a list of arguments for
+    container default command. Also accepts any argument that KubernetesPodOperator
+    will via ``kwargs``. Can be reused in a single DAG.
+
+    :param python_callable: Function to decorate
+    """
+    return task_decorator_factory(
+        python_callable=python_callable,
+        decorated_operator_class=_KubernetesCmdDecoratedOperator,
+        **kwargs,
+    )
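For orientation, a minimal usage sketch of the new decorator in a DAG file, assuming it is exposed as ``@task.kubernetes_cmd`` (the name registered in the provider-info hunk further down); the image, namespace and returned command are illustrative only:

    from airflow.decorators import task  # on Airflow 3 the same decorator is also reachable via the task SDK

    @task.kubernetes_cmd(
        image="alpine:3.19",      # illustrative image
        namespace="default",      # illustrative namespace
        args_only=True,           # the returned list becomes the container arguments
    )
    def build_args(ds=None):
        # must return a non-empty list of strings; anything else raises TypeError/ValueError
        return ["echo", f"logical date is {ds}"]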
@@ -398,8 +398,8 @@ class AirflowKubernetesScheduler(LoggingMixin):
                 "python",
                 "-m",
                 "airflow.sdk.execution_time.execute_workload",
-                "--json-path",
-                "/tmp/execute/input.json",
+                "--json-string",
+                ser_input,
             ]
         else:
             raise ValueError(
@@ -427,7 +427,6 @@ class AirflowKubernetesScheduler(LoggingMixin):
             date=None,
             run_id=run_id,
             args=list(command),
-            content_json_for_volume=ser_input,
             pod_override_object=kube_executor_config,
             base_worker_pod=base_worker_pod,
             with_mutation_hook=True,
@@ -85,7 +85,11 @@ def get_provider_info():
             {
                 "class-name": "airflow.providers.cncf.kubernetes.decorators.kubernetes.kubernetes_task",
                 "name": "kubernetes",
-            }
+            },
+            {
+                "class-name": "airflow.providers.cncf.kubernetes.decorators.kubernetes_cmd.kubernetes_cmd_task",
+                "name": "kubernetes_cmd",
+            },
         ],
         "config": {
             "local_kubernetes_executor": {
@@ -275,5 +279,5 @@ def get_provider_info():
                 },
             },
         },
-        "executors": ["airflow.providers.cncf.kubernetes.kubernetes_executor.KubernetesExecutor"],
+        "executors": ["airflow.providers.cncf.kubernetes.executors.kubernetes_executor.KubernetesExecutor"],
     }
@@ -140,7 +140,8 @@ class KubernetesHook(BaseHook, PodOperatorHookProtocol):
 
     def __init__(
         self,
-        conn_id: str | None = default_conn_name,
+        conn_id: str | None = None,
+        kubernetes_conn_id: str | None = default_conn_name,
         client_configuration: client.Configuration | None = None,
         cluster_context: str | None = None,
         config_file: str | None = None,
@@ -149,7 +150,7 @@ class KubernetesHook(BaseHook, PodOperatorHookProtocol):
         disable_tcp_keepalive: bool | None = None,
     ) -> None:
         super().__init__()
-        self.conn_id = conn_id
+        self.conn_id = conn_id or kubernetes_conn_id
         self.client_configuration = client_configuration
         self.cluster_context = cluster_context
         self.config_file = config_file
@@ -706,6 +707,14 @@ class KubernetesHook(BaseHook, PodOperatorHookProtocol):
 
         return list(yaml.safe_load_all(response.text))
 
+    def test_connection(self):
+        try:
+            conn = self.get_conn()
+            version: client.VersionInfo = client.VersionApi(conn).get_code()
+            return True, f"Connection successful. Version Info: {version.to_dict()}"
+        except Exception as e:
+            return False, str(e)
+
 
 
 def _get_bool(val) -> bool | None:
     """Convert val to bool if can be done with certainty; if we cannot infer intention we return None."""
@@ -157,8 +157,9 @@ class KubernetesPodOperator(BaseOperator):
     :param reattach_on_restart: if the worker dies while the pod is running, reattach and monitor
         during the next try. If False, always create a new pod for each try.
     :param labels: labels to apply to the Pod. (templated)
-    :param startup_timeout_seconds: timeout in seconds to startup the pod.
+    :param startup_timeout_seconds: timeout in seconds to startup the pod after pod was scheduled.
     :param startup_check_interval_seconds: interval in seconds to check if the pod has already started
+    :param schedule_timeout_seconds: timeout in seconds to schedule pod in cluster.
     :param get_logs: get the stdout of the base container as logs of the tasks.
     :param init_container_logs: list of init containers whose logs will be published to stdout
         Takes a sequence of containers, a single container name or True. If True,
@@ -180,6 +181,7 @@ class KubernetesPodOperator(BaseOperator):
         If more than one secret is required, provide a
         comma separated list: secret_a,secret_b
     :param service_account_name: Name of the service account
+    :param automount_service_account_token: indicates whether pods running as this service account should have an API token automatically mounted
     :param hostnetwork: If True enable host networking on the pod.
     :param host_aliases: A list of host aliases to apply to the containers in the pod.
     :param tolerations: A list of kubernetes tolerations.
@@ -289,6 +291,7 @@ class KubernetesPodOperator(BaseOperator):
         reattach_on_restart: bool = True,
         startup_timeout_seconds: int = 120,
         startup_check_interval_seconds: int = 5,
+        schedule_timeout_seconds: int | None = None,
         get_logs: bool = True,
         base_container_name: str | None = None,
         base_container_status_polling_interval: float = 1,
@@ -302,6 +305,7 @@ class KubernetesPodOperator(BaseOperator):
         node_selector: dict | None = None,
         image_pull_secrets: list[k8s.V1LocalObjectReference] | None = None,
         service_account_name: str | None = None,
+        automount_service_account_token: bool | None = None,
         hostnetwork: bool = False,
         host_aliases: list[k8s.V1HostAlias] | None = None,
         tolerations: list[k8s.V1Toleration] | None = None,
@@ -347,6 +351,8 @@ class KubernetesPodOperator(BaseOperator):
         self.labels = labels or {}
         self.startup_timeout_seconds = startup_timeout_seconds
         self.startup_check_interval_seconds = startup_check_interval_seconds
+        # New parameter startup_timeout_seconds adds breaking change, to handle this as smooth as possible just reuse startup time
+        self.schedule_timeout_seconds = schedule_timeout_seconds or startup_timeout_seconds
         env_vars = convert_env_vars(env_vars) if env_vars else []
         self.env_vars = env_vars
         pod_runtime_info_envs = (
@@ -380,6 +386,7 @@ class KubernetesPodOperator(BaseOperator):
         self.config_file = config_file
         self.image_pull_secrets = convert_image_pull_secrets(image_pull_secrets) if image_pull_secrets else []
         self.service_account_name = service_account_name
+        self.automount_service_account_token = automount_service_account_token
         self.hostnetwork = hostnetwork
         self.host_aliases = host_aliases
         self.tolerations = (
@@ -574,8 +581,9 @@ class KubernetesPodOperator(BaseOperator):
         try:
             self.pod_manager.await_pod_start(
                 pod=pod,
+                schedule_timeout=self.schedule_timeout_seconds,
                 startup_timeout=self.startup_timeout_seconds,
-                startup_check_interval=self.startup_check_interval_seconds,
+                check_interval=self.startup_check_interval_seconds,
             )
         except PodLaunchFailedException:
             if self.log_events_on_failure:
@@ -1175,6 +1183,7 @@ class KubernetesPodOperator(BaseOperator):
                 ],
                 image_pull_secrets=self.image_pull_secrets,
                 service_account_name=self.service_account_name,
+                automount_service_account_token=self.automount_service_account_token,
                 host_network=self.hostnetwork,
                 hostname=self.hostname,
                 subdomain=self.subdomain,
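Taken together, the operator hunks add two user-facing knobs. A minimal sketch of how they might be set; the task_id, image, command and timeout values are placeholders:

    from airflow.providers.cncf.kubernetes.operators.pod import KubernetesPodOperator

    run_in_pod = KubernetesPodOperator(
        task_id="run_in_pod",
        image="python:3.12-slim",
        cmds=["python", "-c", "print('hello')"],
        schedule_timeout_seconds=300,           # how long the pod may sit unscheduled
        startup_timeout_seconds=120,            # how long it may stay Pending once scheduled
        automount_service_account_token=False,  # forwarded to the generated V1PodSpec
    )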
@@ -34,7 +34,7 @@ from functools import reduce
 from typing import TYPE_CHECKING
 
 from dateutil import parser
-from kubernetes.client import V1EmptyDirVolumeSource, V1Volume, V1VolumeMount, models as k8s
+from kubernetes.client import models as k8s
 from kubernetes.client.api_client import ApiClient
 
 from airflow.exceptions import (
@@ -287,7 +287,6 @@ class PodGenerator:
         scheduler_job_id: str,
         run_id: str | None = None,
         map_index: int = -1,
-        content_json_for_volume: str = "",
         *,
         with_mutation_hook: bool = False,
     ) -> k8s.V1Pod:
@@ -355,39 +354,6 @@ class PodGenerator:
             containers=[main_container],
         )
 
-        if content_json_for_volume:
-            import shlex
-
-            input_file_path = "/tmp/execute/input.json"
-            execute_volume = V1Volume(
-                name="execute-volume",
-                empty_dir=V1EmptyDirVolumeSource(),
-            )
-
-            execute_volume_mount = V1VolumeMount(
-                name="execute-volume",
-                mount_path="/tmp/execute",
-                read_only=False,
-            )
-
-            escaped_json = shlex.quote(content_json_for_volume)
-            init_container = k8s.V1Container(
-                name="init-container",
-                image="busybox",
-                command=["/bin/sh", "-c", f"echo {escaped_json} > {input_file_path}"],
-                volume_mounts=[execute_volume_mount],
-            )
-
-            main_container.volume_mounts = [execute_volume_mount]
-            main_container.command = args[:-1]
-            main_container.args = args[-1:]
-
-            podspec = k8s.V1PodSpec(
-                containers=[main_container],
-                volumes=[execute_volume],
-                init_containers=[init_container],
-            )
-
         dynamic_pod.spec = podspec
 
         # Reconcile the pods starting with the first chronologically,
@@ -546,7 +512,7 @@ def merge_objects(base_obj, client_obj):
 
     for base_key in base_obj.to_dict():
         base_val = getattr(base_obj, base_key, None)
-        if not getattr(client_obj, base_key, None) and base_val:
+        if not getattr(client_obj, base_key, None) and base_val is not None:
            if not isinstance(client_obj_cp, dict):
                 setattr(client_obj_cp, base_key, base_val)
             else:
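The small change to ``merge_objects`` is easy to miss: explicitly set but falsy values on the base object (``False``, ``0``, ``""``) are now copied onto the client object instead of being skipped. A rough sketch of the difference, using a hypothetical pair of pod specs:

    from kubernetes.client import models as k8s
    from airflow.providers.cncf.kubernetes.pod_generator import merge_objects

    base = k8s.V1PodSpec(containers=[], automount_service_account_token=False)
    override = k8s.V1PodSpec(containers=[])

    merged = merge_objects(base, override)
    # Previously (`and base_val`): merged.automount_service_account_token stayed None.
    # Now (`and base_val is not None`): the explicit False from `base` is preserved.
    print(merged.automount_service_account_token)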
@@ -19,11 +19,14 @@ from __future__ import annotations
 
 from typing import TYPE_CHECKING
 
+from jinja2 import TemplateAssertionError, UndefinedError
 from kubernetes.client.api_client import ApiClient
 
+from airflow.exceptions import AirflowException
 from airflow.providers.cncf.kubernetes.kube_config import KubeConfig
 from airflow.providers.cncf.kubernetes.kubernetes_helper_functions import create_unique_id
 from airflow.providers.cncf.kubernetes.pod_generator import PodGenerator
+from airflow.utils.session import NEW_SESSION, provide_session
 
 if TYPE_CHECKING:
     from airflow.models.taskinstance import TaskInstance
@@ -58,3 +61,17 @@ def render_k8s_pod_yaml(task_instance: TaskInstance) -> dict | None:
     )
     sanitized_pod = ApiClient().sanitize_for_serialization(pod)
     return sanitized_pod
+
+
+@provide_session
+def get_rendered_k8s_spec(task_instance: TaskInstance, session=NEW_SESSION) -> dict | None:
+    """Fetch rendered template fields from DB."""
+    from airflow.models.renderedtifields import RenderedTaskInstanceFields
+
+    rendered_k8s_spec = RenderedTaskInstanceFields.get_k8s_pod_yaml(task_instance, session=session)
+    if not rendered_k8s_spec:
+        try:
+            rendered_k8s_spec = render_k8s_pod_yaml(task_instance)
+        except (TemplateAssertionError, UndefinedError) as e:
+            raise AirflowException(f"Unable to render a k8s spec for this taskinstance: {e}") from e
+    return rendered_k8s_spec
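A brief, hedged usage sketch of the new helper, assuming a ``TaskInstance`` object ``ti`` is already in hand (for example in code that previously called ``render_k8s_pod_yaml`` directly):

    from airflow.providers.cncf.kubernetes.template_rendering import get_rendered_k8s_spec

    pod_spec = get_rendered_k8s_spec(ti)  # reads RenderedTaskInstanceFields, falls back to live rendering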
@@ -46,8 +46,11 @@ from airflow.utils.timezone import utcnow
 
 if TYPE_CHECKING:
     from kubernetes.client.models.core_v1_event_list import CoreV1EventList
+    from kubernetes.client.models.v1_container_state import V1ContainerState
+    from kubernetes.client.models.v1_container_state_waiting import V1ContainerStateWaiting
     from kubernetes.client.models.v1_container_status import V1ContainerStatus
     from kubernetes.client.models.v1_pod import V1Pod
+    from kubernetes.client.models.v1_pod_condition import V1PodCondition
     from urllib3.response import HTTPResponse
 
 
@@ -375,30 +378,68 @@ class PodManager(LoggingMixin):
         return self.run_pod_async(pod)
 
     def await_pod_start(
-        self, pod: V1Pod, startup_timeout: int = 120, startup_check_interval: int = 1
+        self, pod: V1Pod, schedule_timeout: int = 120, startup_timeout: int = 120, check_interval: int = 1
     ) -> None:
         """
         Wait for the pod to reach phase other than ``Pending``.
 
         :param pod:
+        :param schedule_timeout: Timeout (in seconds) for pod stay in schedule state
+            (if pod is taking to long in schedule state, fails task)
         :param startup_timeout: Timeout (in seconds) for startup of the pod
-            (if pod is pending for too long, fails task)
-        :param startup_check_interval: Interval (in seconds) between checks
+            (if pod is pending for too long after being scheduled, fails task)
+        :param check_interval: Interval (in seconds) between checks
         :return:
         """
-        curr_time = time.time()
+        self.log.info("::group::Waiting until %ss to get the POD scheduled...", schedule_timeout)
+        pod_was_scheduled = False
+        start_check_time = time.time()
         while True:
             remote_pod = self.read_pod(pod)
-            if remote_pod.status.phase != PodPhase.PENDING:
+            pod_status = remote_pod.status
+            if pod_status.phase != PodPhase.PENDING:
+                self.keep_watching_for_events = False
+                self.log.info("::endgroup::")
                 break
-            self.log.warning("Pod not yet started: %s", pod.metadata.name)
-            if time.time() - curr_time >= startup_timeout:
-                msg = (
-                    f"Pod took longer than {startup_timeout} seconds to start. "
-                    "Check the pod events in kubernetes to determine why."
-                )
-                raise PodLaunchFailedException(msg)
-            time.sleep(startup_check_interval)
+
+            # Check for timeout
+            pod_conditions: list[V1PodCondition] = pod_status.conditions
+            if pod_conditions and any(
+                (condition.type == "PodScheduled" and condition.status == "True")
+                for condition in pod_conditions
+            ):
+                if not pod_was_scheduled:
+                    # POD was initially scheduled update timeout for getting POD launched
+                    pod_was_scheduled = True
+                    start_check_time = time.time()
+                    self.log.info("Waiting %ss to get the POD running...", startup_timeout)
+
+                if time.time() - start_check_time >= startup_timeout:
+                    self.log.info("::endgroup::")
+                    raise PodLaunchFailedException(
+                        f"Pod took too long to start. More than {startup_timeout}s. Check the pod events in kubernetes."
+                    )
+            else:
+                if time.time() - start_check_time >= schedule_timeout:
+                    self.log.info("::endgroup::")
+                    raise PodLaunchFailedException(
+                        f"Pod took too long to be scheduled on the cluster, giving up. More than {schedule_timeout}s. Check the pod events in kubernetes."
+                    )
+
+            # Check for general problems to terminate early - ErrImagePull
+            if pod_status.container_statuses:
+                for container_status in pod_status.container_statuses:
+                    container_state: V1ContainerState = container_status.state
+                    container_waiting: V1ContainerStateWaiting | None = container_state.waiting
+                    if container_waiting:
+                        if container_waiting.reason in ["ErrImagePull", "InvalidImageName"]:
+                            self.log.info("::endgroup::")
+                            raise PodLaunchFailedException(
+                                f"Pod docker image cannot be pulled, unable to start: {container_waiting.reason}"
+                                f"\n{container_waiting.message}"
+                            )
+
+            time.sleep(check_interval)
 
     def fetch_container_logs(
         self,
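To make the new two-phase wait concrete, a small hedged sketch of calling the rewritten method; the ``pod_manager`` and ``pod`` objects and the values are assumed:

    pod_manager.await_pod_start(
        pod=pod,
        schedule_timeout=60,   # max seconds for the cluster to schedule the pod onto a node
        startup_timeout=120,   # max seconds to stay Pending once the PodScheduled condition is True
        check_interval=5,      # polling interval between read_pod() calls
    )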
@@ -32,5 +32,4 @@ def get_base_airflow_version_tuple() -> tuple[int, int, int]:
     return airflow_version.major, airflow_version.minor, airflow_version.micro
 
 
-AIRFLOW_V_2_10_PLUS = get_base_airflow_version_tuple() >= (2, 10, 0)
 AIRFLOW_V_3_0_PLUS = get_base_airflow_version_tuple() >= (3, 0, 0)
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: apache-airflow-providers-cncf-kubernetes
-Version: 10.4.3rc1
+Version: 10.5.0rc2
 Summary: Provider package apache-airflow-providers-cncf-kubernetes for Apache Airflow
 Keywords: airflow-provider,cncf.kubernetes,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -21,14 +21,14 @@ Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Topic :: System :: Monitoring
 Requires-Dist: aiofiles>=23.2.0
-Requires-Dist: apache-airflow>=2.9.0rc0
+Requires-Dist: apache-airflow>=2.10.0rc1
 Requires-Dist: asgiref>=3.5.2
 Requires-Dist: cryptography>=41.0.0
 Requires-Dist: kubernetes>=29.0.0,<=31.0.0
 Requires-Dist: kubernetes_asyncio>=29.0.0,<=31.0.0
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/10.4.3/changelog.html
-Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/10.4.3
+Project-URL: Changelog, https://airflow.staged.apache.org/docs/apache-airflow-providers-cncf-kubernetes/10.5.0/changelog.html
+Project-URL: Documentation, https://airflow.staged.apache.org/docs/apache-airflow-providers-cncf-kubernetes/10.5.0
 Project-URL: Mastodon, https://fosstodon.org/@airflow
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
@@ -59,7 +59,7 @@ Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/
 
 Package ``apache-airflow-providers-cncf-kubernetes``
 
-Release: ``10.4.3``
+Release: ``10.5.0``
 
 
 `Kubernetes <https://kubernetes.io/>`__
@@ -72,7 +72,7 @@ This is a provider package for ``cncf.kubernetes`` provider. All classes for thi
 are in ``airflow.providers.cncf.kubernetes`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/10.4.3/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/10.5.0/>`_.
 
 Installation
 ------------
@@ -90,7 +90,7 @@ Requirements
 PIP package            Version required
 ====================== =====================
 ``aiofiles``           ``>=23.2.0``
-``apache-airflow``     ``>=2.9.0``
+``apache-airflow``     ``>=2.10.0``
 ``asgiref``            ``>=3.5.2``
 ``cryptography``       ``>=41.0.0``
 ``kubernetes``         ``>=29.0.0,<=31.0.0``
@@ -98,5 +98,5 @@ PIP package Version required
 ====================== =====================
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/10.4.3/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/10.5.0/changelog.html>`_.
 
@@ -1,38 +1,39 @@
 airflow/providers/cncf/kubernetes/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
-airflow/providers/cncf/kubernetes/__init__.py,sha256=m60_HPmd-QQwvdqA6pCrgJ34eZtwouli_zvBUQo9G-Q,1503
+airflow/providers/cncf/kubernetes/__init__.py,sha256=JbKTwSfn5nKWBEgMs8EXthKBZupv6f6sucVlGir2TKI,1505
 airflow/providers/cncf/kubernetes/callbacks.py,sha256=5zGmQthojdT9iBEV3LIyBq-oKzjv2D4dOYCjYRbb61c,6076
 airflow/providers/cncf/kubernetes/exceptions.py,sha256=3cNEZTnrltBsqwzHiLfckwYYc_IWY1g4PcRs6zuMWWA,1137
-airflow/providers/cncf/kubernetes/get_provider_info.py,sha256=PqSjW28xplbuZqAX7AMYa1CHNk1w7naQfduN0rQJ8qI,15847
+airflow/providers/cncf/kubernetes/get_provider_info.py,sha256=m4eN2HKhaAI2kD-mOvbMcbv5zxU78iP4pea_LltJ2v0,16042
 airflow/providers/cncf/kubernetes/k8s_model.py,sha256=xmdFhX29DjegoZ-cq8-KDL9soVYXf4OpU6fAGr3cPTU,2101
 airflow/providers/cncf/kubernetes/kube_client.py,sha256=yflZxLousXA9d7t67KrEy55qzb1cUhEyy6yCPkEem28,5329
 airflow/providers/cncf/kubernetes/kube_config.py,sha256=3qWdCp2z4g8gX_sIOProgwp52UxM5kAIYabkxaX297g,5079
 airflow/providers/cncf/kubernetes/kubernetes_helper_functions.py,sha256=opxof6wxHEAHwa_zRB47QJBBrV5St4rIZzAiptA9Rek,5510
-airflow/providers/cncf/kubernetes/pod_generator.py,sha256=f24Qdg4QA9d8gaHA0X78jDojtm9swEgLLqcG6yEx2rc,20857
+airflow/providers/cncf/kubernetes/pod_generator.py,sha256=TFlNkn3PCytjxnhQduOxNAVdFjwne9-PupV6Gfun5A4,19649
 airflow/providers/cncf/kubernetes/python_kubernetes_script.jinja2,sha256=I0EHRGwLHjSiX85e51HBIoddRDnC8TJPFrDBqQq_NJg,1776
 airflow/providers/cncf/kubernetes/python_kubernetes_script.py,sha256=KnTlZSWCZhwvj89fSc2kgIRTaI4iLNKPquHc2wXnluo,3460
 airflow/providers/cncf/kubernetes/secret.py,sha256=wj-T9gouqau_X14slAstGmnSxqXJQzdLwUdURzHna0I,5209
-airflow/providers/cncf/kubernetes/template_rendering.py,sha256=NyrAc2rsZ0oyXxtRHiY8qkaH4tftHlexTpr7YE4UQY0,2682
-airflow/providers/cncf/kubernetes/version_compat.py,sha256=aHg90_DtgoSnQvILFICexMyNlHlALBdaeWqkX3dFDug,1605
+airflow/providers/cncf/kubernetes/template_rendering.py,sha256=pV6lX8DW3dLNB945mxwM8E0Vynis2-chMCwHlnHeIVY,3490
+airflow/providers/cncf/kubernetes/version_compat.py,sha256=j5PCtXvZ71aBjixu-EFTNtVDPsngzzs7os0ZQDgFVDk,1536
 airflow/providers/cncf/kubernetes/backcompat/__init__.py,sha256=KXF76f3v1jIFUBNz8kwxVMvm7i4mNo35LbIG9IijBNc,1299
 airflow/providers/cncf/kubernetes/backcompat/backwards_compat_converters.py,sha256=FkRRtIEucp2hYrecGVYVgyPI6-b7hE7X7L17Z3r459Y,4303
 airflow/providers/cncf/kubernetes/cli/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/cncf/kubernetes/cli/kubernetes_command.py,sha256=FRR8p50FgHaVPS8x1rbXFSd47NtBtidqL4JyTyfxqnQ,7366
 airflow/providers/cncf/kubernetes/decorators/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
 airflow/providers/cncf/kubernetes/decorators/kubernetes.py,sha256=_OnebMazgYTJoCnkaMsRxwF6f2slKU_-ucrml8z8cq0,6449
+airflow/providers/cncf/kubernetes/decorators/kubernetes_cmd.py,sha256=Rsak_cPl_-yNv7SKh0PI7r2UgqZF-Li7zIxwX5cedc0,4951
 airflow/providers/cncf/kubernetes/executors/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
 airflow/providers/cncf/kubernetes/executors/kubernetes_executor.py,sha256=TomXdMCdtnnsKOWZiNR2lrh0ZmHghfbzXjpjeRDJTFA,31976
 airflow/providers/cncf/kubernetes/executors/kubernetes_executor_types.py,sha256=L8_8HOHd_4O8WW6xT2tp49-yOj0EMKCYK5YqMOOx_bI,1973
-airflow/providers/cncf/kubernetes/executors/kubernetes_executor_utils.py,sha256=2xqLS8DQhnwVwxnv4an8cjv4HPqP_TSiVFjjADPMeHM,24750
+airflow/providers/cncf/kubernetes/executors/kubernetes_executor_utils.py,sha256=U2mCkPy-MnPLLdPHil5_rLOsb-K_Xnn5YwNUVzD96i4,24689
 airflow/providers/cncf/kubernetes/executors/local_kubernetes_executor.py,sha256=TuFRbs1zqKajJoZmo25kT4AGd-_-iD-UbhfOY30EOck,11591
 airflow/providers/cncf/kubernetes/hooks/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/cncf/kubernetes/hooks/kubernetes.py,sha256=x-kwKxqX3PUAMf_0wALQzCGVdKRdbyAlScc4hBMuxY0,36732
+airflow/providers/cncf/kubernetes/hooks/kubernetes.py,sha256=KPv5t7Xpvx9Nw9uampf1VCMdnq_ySqRzFbt07NKIjZY,37107
 airflow/providers/cncf/kubernetes/kubernetes_executor_templates/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/cncf/kubernetes/kubernetes_executor_templates/basic_template.yaml,sha256=yzJmXN4ZyB4aDwI_GIugpL9-f1YMVy__X-LQSbeU95A,2567
 airflow/providers/cncf/kubernetes/operators/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
 airflow/providers/cncf/kubernetes/operators/custom_object_launcher.py,sha256=jTVHQt1vp5gELrLNyM-DrZ1ywgmTy3Hh1i6wyl7AGS0,15314
 airflow/providers/cncf/kubernetes/operators/job.py,sha256=aK2MogooZ6K7uVC0hWRYhCGgzwHOERrgPyknWu5mp7c,23771
 airflow/providers/cncf/kubernetes/operators/kueue.py,sha256=eEfl__06k15-21Y86qHOLAsY2zR1OWM4QgQhDteDBP0,4563
-airflow/providers/cncf/kubernetes/operators/pod.py,sha256=1OnqGLDb9wlqDWs7sNofuCJaqoLaTIUr74oMOHPQhhw,57827
+airflow/providers/cncf/kubernetes/operators/pod.py,sha256=yO4gz_9cHYAn-7mjJKEu3KUf_7nxtUvaEAaL7NQ92nc,58639
 airflow/providers/cncf/kubernetes/operators/resource.py,sha256=Q5WssuDyjtzo1Op1pzUmYG4JZZdzCKTe-vTZEy8HSNA,7579
 airflow/providers/cncf/kubernetes/operators/spark_kubernetes.py,sha256=0TO86G-gbWAkQvaWBfhCp6ZJwoQzciH-UGR5kgw9fmg,13847
 airflow/providers/cncf/kubernetes/pod_template_file_examples/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
@@ -51,9 +52,9 @@ airflow/providers/cncf/kubernetes/triggers/pod.py,sha256=IeoMrPECgsr2Sswfvd7Fl6p
 airflow/providers/cncf/kubernetes/utils/__init__.py,sha256=ClZN0VPjWySdVwS_ktH7rrgL9VLAcs3OSJSB9s3zaYw,863
 airflow/providers/cncf/kubernetes/utils/delete_from.py,sha256=poObZSoEJwQyaYWilEURs8f4CDY2sn_pfwS31Lf579A,5195
 airflow/providers/cncf/kubernetes/utils/k8s_resource_iterator.py,sha256=DLypjkD_3YDixRTcsxEjgvHZNbbG9qamlz05eBqaWzU,1955
-airflow/providers/cncf/kubernetes/utils/pod_manager.py,sha256=ARhKuTx6lahupd8w06mVc1s7UqqEeGHq4-5uOt9rqhY,36997
+airflow/providers/cncf/kubernetes/utils/pod_manager.py,sha256=8edX-K9t2YgGHYsEeqUmKodHv4jsEHdPjii89KlE0dw,39436
 airflow/providers/cncf/kubernetes/utils/xcom_sidecar.py,sha256=k6bdmVJ21OrAwGmWwledRrAmaty9ZrmbuM-IbaI4mqo,2519
-apache_airflow_providers_cncf_kubernetes-10.4.3rc1.dist-info/entry_points.txt,sha256=ByD3QJJyP9CfmTYtpNI1953akD38RUDgpGXLaq9vpOw,111
-apache_airflow_providers_cncf_kubernetes-10.4.3rc1.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
-apache_airflow_providers_cncf_kubernetes-10.4.3rc1.dist-info/METADATA,sha256=tdmtn_9o_dyNb2YZal89KuQpi4Gu4WSYQz0YrxOAhXc,4312
-apache_airflow_providers_cncf_kubernetes-10.4.3rc1.dist-info/RECORD,,
+apache_airflow_providers_cncf_kubernetes-10.5.0rc2.dist-info/entry_points.txt,sha256=ByD3QJJyP9CfmTYtpNI1953akD38RUDgpGXLaq9vpOw,111
+apache_airflow_providers_cncf_kubernetes-10.5.0rc2.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
+apache_airflow_providers_cncf_kubernetes-10.5.0rc2.dist-info/METADATA,sha256=8_LmnQyGyRNNoRJrOpvgi8vL4UGBr6ta5KTkZW-SEBk,4328
+apache_airflow_providers_cncf_kubernetes-10.5.0rc2.dist-info/RECORD,,