apache-airflow-providers-cncf-kubernetes 10.4.2__py3-none-any.whl → 10.4.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of apache-airflow-providers-cncf-kubernetes might be problematic. See the registry's advisory page for more details.

Files changed (23)
  1. airflow/providers/cncf/kubernetes/__init__.py +1 -1
  2. airflow/providers/cncf/kubernetes/backcompat/backwards_compat_converters.py +5 -8
  3. airflow/providers/cncf/kubernetes/cli/kubernetes_command.py +4 -1
  4. airflow/providers/cncf/kubernetes/executors/kubernetes_executor.py +12 -3
  5. airflow/providers/cncf/kubernetes/executors/kubernetes_executor_utils.py +3 -6
  6. airflow/providers/cncf/kubernetes/hooks/kubernetes.py +5 -7
  7. airflow/providers/cncf/kubernetes/kubernetes_helper_functions.py +1 -2
  8. airflow/providers/cncf/kubernetes/operators/custom_object_launcher.py +2 -3
  9. airflow/providers/cncf/kubernetes/operators/job.py +1 -1
  10. airflow/providers/cncf/kubernetes/operators/kueue.py +1 -1
  11. airflow/providers/cncf/kubernetes/operators/pod.py +2 -2
  12. airflow/providers/cncf/kubernetes/operators/resource.py +1 -2
  13. airflow/providers/cncf/kubernetes/operators/spark_kubernetes.py +1 -1
  14. airflow/providers/cncf/kubernetes/pod_generator.py +5 -6
  15. airflow/providers/cncf/kubernetes/resource_convert/env_variable.py +1 -1
  16. airflow/providers/cncf/kubernetes/sensors/spark_kubernetes.py +3 -4
  17. airflow/providers/cncf/kubernetes/template_rendering.py +0 -17
  18. airflow/providers/cncf/kubernetes/triggers/pod.py +2 -3
  19. airflow/providers/cncf/kubernetes/utils/pod_manager.py +8 -9
  20. {apache_airflow_providers_cncf_kubernetes-10.4.2.dist-info → apache_airflow_providers_cncf_kubernetes-10.4.3.dist-info}/METADATA +6 -6
  21. {apache_airflow_providers_cncf_kubernetes-10.4.2.dist-info → apache_airflow_providers_cncf_kubernetes-10.4.3.dist-info}/RECORD +23 -23
  22. {apache_airflow_providers_cncf_kubernetes-10.4.2.dist-info → apache_airflow_providers_cncf_kubernetes-10.4.3.dist-info}/WHEEL +0 -0
  23. {apache_airflow_providers_cncf_kubernetes-10.4.2.dist-info → apache_airflow_providers_cncf_kubernetes-10.4.3.dist-info}/entry_points.txt +0 -0
@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version
29
29
 
30
30
  __all__ = ["__version__"]
31
31
 
32
- __version__ = "10.4.2"
32
+ __version__ = "10.4.3"
33
33
 
34
34
  if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
35
35
  "2.9.0"
@@ -27,20 +27,18 @@ def _convert_kube_model_object(obj, new_class):
27
27
  convert_op = getattr(obj, "to_k8s_client_obj", None)
28
28
  if callable(convert_op):
29
29
  return obj.to_k8s_client_obj()
30
- elif isinstance(obj, new_class):
30
+ if isinstance(obj, new_class):
31
31
  return obj
32
- else:
33
- raise AirflowException(f"Expected {new_class}, got {type(obj)}")
32
+ raise AirflowException(f"Expected {new_class}, got {type(obj)}")
34
33
 
35
34
 
36
35
  def _convert_from_dict(obj, new_class):
37
36
  if isinstance(obj, new_class):
38
37
  return obj
39
- elif isinstance(obj, dict):
38
+ if isinstance(obj, dict):
40
39
  api_client = ApiClient()
41
40
  return api_client._ApiClient__deserialize_model(obj, new_class)
42
- else:
43
- raise AirflowException(f"Expected dict or {new_class}, got {type(obj)}")
41
+ raise AirflowException(f"Expected dict or {new_class}, got {type(obj)}")
44
42
 
45
43
 
46
44
  def convert_volume(volume) -> k8s.V1Volume:
@@ -111,8 +109,7 @@ def convert_image_pull_secrets(image_pull_secrets) -> list[k8s.V1LocalObjectRefe
111
109
  if isinstance(image_pull_secrets, str):
112
110
  secrets = image_pull_secrets.split(",")
113
111
  return [k8s.V1LocalObjectReference(name=secret) for secret in secrets]
114
- else:
115
- return image_pull_secrets
112
+ return image_pull_secrets
116
113
 
117
114
 
118
115
  def convert_configmap(configmaps) -> k8s.V1EnvFromSource:
@@ -43,7 +43,10 @@ from airflow.utils.providers_configuration_loader import providers_configuration
43
43
  def generate_pod_yaml(args):
44
44
  """Generate yaml files for each task in the DAG. Used for testing output of KubernetesExecutor."""
45
45
  logical_date = args.logical_date if AIRFLOW_V_3_0_PLUS else args.execution_date
46
- dag = get_dag(subdir=args.subdir, dag_id=args.dag_id)
46
+ if AIRFLOW_V_3_0_PLUS:
47
+ dag = get_dag(bundle_names=args.bundle_name, dag_id=args.dag_id)
48
+ else:
49
+ dag = get_dag(subdir=args.subdir, dag_id=args.dag_id)
47
50
  yaml_output_path = args.output_path
48
51
  if AIRFLOW_V_3_0_PLUS:
49
52
  dr = DagRun(dag.dag_id, logical_date=logical_date)
@@ -52,7 +52,6 @@ except ImportError: # 2.x compatibility.
52
52
  from airflow.cli.cli_config import (
53
53
  ARG_DAG_ID,
54
54
  ARG_OUTPUT_PATH,
55
- ARG_SUBDIR,
56
55
  ARG_VERBOSE,
57
56
  ActionCommand,
58
57
  Arg,
@@ -94,6 +93,16 @@ if TYPE_CHECKING:
94
93
  AirflowKubernetesScheduler,
95
94
  )
96
95
 
96
+
97
+ if AIRFLOW_V_3_0_PLUS:
98
+ from airflow.cli.cli_config import ARG_BUNDLE_NAME
99
+
100
+ ARG_COMPAT = ARG_BUNDLE_NAME
101
+ else:
102
+ from airflow.cli.cli_config import ARG_SUBDIR # type: ignore[attr-defined]
103
+
104
+ ARG_COMPAT = ARG_SUBDIR
105
+
97
106
  # CLI Args
98
107
  ARG_NAMESPACE = Arg(
99
108
  ("--namespace",),
@@ -128,7 +137,7 @@ KUBERNETES_COMMANDS = (
128
137
  help="Generate YAML files for all tasks in DAG. Useful for debugging tasks without "
129
138
  "launching into a cluster",
130
139
  func=lazy_load_command("airflow.providers.cncf.kubernetes.cli.kubernetes_command.generate_pod_yaml"),
131
- args=(ARG_DAG_ID, ARG_LOGICAL_DATE, ARG_SUBDIR, ARG_OUTPUT_PATH, ARG_VERBOSE),
140
+ args=(ARG_DAG_ID, ARG_LOGICAL_DATE, ARG_COMPAT, ARG_OUTPUT_PATH, ARG_VERBOSE),
132
141
  ),
133
142
  )
134
143
 
@@ -482,7 +491,7 @@ class KubernetesExecutor(BaseExecutor):
482
491
  ).items
483
492
  if not pod_list:
484
493
  raise RuntimeError("Cannot find pod for ti %s", ti)
485
- elif len(pod_list) > 1:
494
+ if len(pod_list) > 1:
486
495
  raise RuntimeError("Found multiple pods for ti %s: %s", ti, pod_list)
487
496
  res = client.read_namespaced_pod_log(
488
497
  name=pod_list[0].metadata.name,
@@ -110,8 +110,7 @@ class KubernetesJobWatcher(multiprocessing.Process, LoggingMixin):
110
110
  try:
111
111
  if self.namespace == ALL_NAMESPACES:
112
112
  return watcher.stream(kube_client.list_pod_for_all_namespaces, **query_kwargs)
113
- else:
114
- return watcher.stream(kube_client.list_namespaced_pod, self.namespace, **query_kwargs)
113
+ return watcher.stream(kube_client.list_namespaced_pod, self.namespace, **query_kwargs)
115
114
  except ApiException as e:
116
115
  if str(e.status) == "410": # Resource version is too old
117
116
  if self.namespace == ALL_NAMESPACES:
@@ -121,8 +120,7 @@ class KubernetesJobWatcher(multiprocessing.Process, LoggingMixin):
121
120
  resource_version = pods.metadata.resource_version
122
121
  query_kwargs["resource_version"] = resource_version
123
122
  return self._pod_events(kube_client=kube_client, query_kwargs=query_kwargs)
124
- else:
125
- raise
123
+ raise
126
124
 
127
125
  def _run(
128
126
  self,
@@ -564,5 +562,4 @@ def get_base_pod_from_template(pod_template_file: str | None, kube_config: Any)
564
562
  """
565
563
  if pod_template_file:
566
564
  return PodGenerator.deserialize_model_file(pod_template_file)
567
- else:
568
- return PodGenerator.deserialize_model_file(kube_config.pod_template_file)
565
+ return PodGenerator.deserialize_model_file(kube_config.pod_template_file)
@@ -177,8 +177,7 @@ class KubernetesHook(BaseHook, PodOperatorHookProtocol):
177
177
  except AirflowNotFoundException:
178
178
  if conn_id == cls.default_conn_name:
179
179
  return Connection(conn_id=cls.default_conn_name)
180
- else:
181
- raise
180
+ raise
182
181
 
183
182
  @cached_property
184
183
  def conn_extras(self):
@@ -691,9 +690,8 @@ class KubernetesHook(BaseHook, PodOperatorHookProtocol):
691
690
  and replicas == ready_replicas
692
691
  ):
693
692
  return
694
- else:
695
- self.log.info("Waiting until Deployment will be ready...")
696
- sleep(polling_period_seconds)
693
+ self.log.info("Waiting until Deployment will be ready...")
694
+ sleep(polling_period_seconds)
697
695
 
698
696
  _timeout -= polling_period_seconds
699
697
 
@@ -713,10 +711,10 @@ def _get_bool(val) -> bool | None:
713
711
  """Convert val to bool if can be done with certainty; if we cannot infer intention we return None."""
714
712
  if isinstance(val, bool):
715
713
  return val
716
- elif isinstance(val, str):
714
+ if isinstance(val, str):
717
715
  if val.strip().lower() == "true":
718
716
  return True
719
- elif val.strip().lower() == "false":
717
+ if val.strip().lower() == "false":
720
718
  return False
721
719
  return None
722
720
 
@@ -89,8 +89,7 @@ def create_unique_id(
89
89
  base_name = slugify(name, lowercase=True)[:max_length].strip(".-")
90
90
  if unique:
91
91
  return add_unique_suffix(name=base_name, rand_len=8, max_len=max_length)
92
- else:
93
- return base_name
92
+ return base_name
94
93
 
95
94
 
96
95
  def annotations_to_key(annotations: dict[str, str]) -> TaskInstanceKey:
@@ -18,6 +18,7 @@
18
18
 
19
19
  from __future__ import annotations
20
20
 
21
+ import contextlib
21
22
  import time
22
23
  from copy import deepcopy
23
24
  from datetime import datetime as dt
@@ -326,12 +327,10 @@ class CustomObjectLauncher(LoggingMixin):
326
327
  driver_state = spark_job_info.get("status", {}).get("applicationState", {}).get("state", "SUBMITTED")
327
328
  if driver_state == CustomObjectStatus.FAILED:
328
329
  err = spark_job_info.get("status", {}).get("applicationState", {}).get("errorMessage", "N/A")
329
- try:
330
+ with contextlib.suppress(Exception):
330
331
  self.pod_manager.fetch_container_logs(
331
332
  pod=self.pod_spec, container_name="spark-kubernetes-driver"
332
333
  )
333
- except Exception:
334
- pass
335
334
  raise AirflowException(f"Spark Job Failed. Error stack: {err}")
336
335
  return driver_state == CustomObjectStatus.SUBMITTED
337
336
 
@@ -389,7 +389,7 @@ class KubernetesJobOperator(KubernetesPodOperator):
389
389
  return base_spec
390
390
  if not base_spec and client_spec:
391
391
  return client_spec
392
- elif client_spec and base_spec:
392
+ if client_spec and base_spec:
393
393
  client_spec.template.spec = PodGenerator.reconcile_specs(
394
394
  base_spec.template.spec, client_spec.template.spec
395
395
  )
@@ -101,7 +101,7 @@ class KubernetesStartKueueJobOperator(KubernetesJobOperator):
101
101
  "The `suspend` parameter can't be False. If you want to use Kueue for running Job"
102
102
  " in a Kubernetes cluster, set the `suspend` parameter to True.",
103
103
  )
104
- elif self.suspend is None:
104
+ if self.suspend is None:
105
105
  self.log.info(
106
106
  "You have not set parameter `suspend` in class %s. "
107
107
  "For running a Job in Kueue the `suspend` parameter has been set to True.",
@@ -857,7 +857,7 @@ class KubernetesPodOperator(BaseOperator):
857
857
  message = event.get("stack_trace", event["message"])
858
858
  raise AirflowException(message)
859
859
 
860
- elif event["status"] == "running":
860
+ if event["status"] == "running":
861
861
  if self.get_logs:
862
862
  self.log.info("Resuming logs read from time %r", last_log_time)
863
863
 
@@ -1297,7 +1297,7 @@ class _optionally_suppress(AbstractContextManager):
1297
1297
  matching_error = error and issubclass(exctype, self._exceptions)
1298
1298
  if (error and not matching_error) or (matching_error and self.reraise):
1299
1299
  return False
1300
- elif matching_error:
1300
+ if matching_error:
1301
1301
  self.exception = excinst
1302
1302
  logger = logging.getLogger(__name__)
1303
1303
  logger.exception(excinst)
@@ -99,8 +99,7 @@ class KubernetesResourceBaseOperator(BaseOperator):
99
99
  def get_namespace(self) -> str:
100
100
  if self._namespace:
101
101
  return self._namespace
102
- else:
103
- return self.hook.get_namespace() or "default"
102
+ return self.hook.get_namespace() or "default"
104
103
 
105
104
  def get_crd_fields(self, body: dict) -> tuple[str, str, str, str]:
106
105
  api_version = body["apiVersion"]
@@ -245,7 +245,7 @@ class SparkKubernetesOperator(KubernetesPodOperator):
245
245
  pod = None
246
246
  if len(pod_list) > 1: # and self.reattach_on_restart:
247
247
  raise AirflowException(f"More than one pod running with labels: {label_selector}")
248
- elif len(pod_list) == 1:
248
+ if len(pod_list) == 1:
249
249
  pod = pod_list[0]
250
250
  self.log.info(
251
251
  "Found matching driver pod %s with labels %s", pod.metadata.name, pod.metadata.labels
@@ -162,10 +162,9 @@ class PodGenerator:
162
162
 
163
163
  if isinstance(k8s_object, k8s.V1Pod):
164
164
  return k8s_object
165
- else:
166
- raise TypeError(
167
- "Cannot convert a non-kubernetes.client.models.V1Pod object into a KubernetesExecutorConfig"
168
- )
165
+ raise TypeError(
166
+ "Cannot convert a non-kubernetes.client.models.V1Pod object into a KubernetesExecutorConfig"
167
+ )
169
168
 
170
169
  @staticmethod
171
170
  def reconcile_pods(base_pod: k8s.V1Pod, client_pod: k8s.V1Pod | None) -> k8s.V1Pod:
@@ -203,7 +202,7 @@ class PodGenerator:
203
202
  return base_meta
204
203
  if not base_meta and client_meta:
205
204
  return client_meta
206
- elif client_meta and base_meta:
205
+ if client_meta and base_meta:
207
206
  client_meta.labels = merge_objects(base_meta.labels, client_meta.labels)
208
207
  client_meta.annotations = merge_objects(base_meta.annotations, client_meta.annotations)
209
208
  extend_object_field(base_meta, client_meta, "managed_fields")
@@ -229,7 +228,7 @@ class PodGenerator:
229
228
  return base_spec
230
229
  if not base_spec and client_spec:
231
230
  return client_spec
232
- elif client_spec and base_spec:
231
+ if client_spec and base_spec:
233
232
  client_spec.containers = PodGenerator.reconcile_containers(
234
233
  base_spec.containers, client_spec.containers
235
234
  )
@@ -33,7 +33,7 @@ def convert_env_vars(env_vars) -> list[k8s.V1EnvVar]:
33
33
  for k, v in env_vars.items():
34
34
  res.append(k8s.V1EnvVar(name=k, value=v))
35
35
  return res
36
- elif isinstance(env_vars, list):
36
+ if isinstance(env_vars, list):
37
37
  if all([isinstance(e, k8s.V1EnvVar) for e in env_vars]):
38
38
  return env_vars
39
39
  raise AirflowException(f"Expected dict or list of V1EnvVar, got {type(env_vars)}")
@@ -128,9 +128,8 @@ class SparkKubernetesSensor(BaseSensorOperator):
128
128
  if application_state in self.FAILURE_STATES:
129
129
  message = f"Spark application failed with state: {application_state}"
130
130
  raise AirflowException(message)
131
- elif application_state in self.SUCCESS_STATES:
131
+ if application_state in self.SUCCESS_STATES:
132
132
  self.log.info("Spark application ended successfully")
133
133
  return True
134
- else:
135
- self.log.info("Spark application is still in state: %s", application_state)
136
- return False
134
+ self.log.info("Spark application is still in state: %s", application_state)
135
+ return False
@@ -19,14 +19,11 @@ from __future__ import annotations
19
19
 
20
20
  from typing import TYPE_CHECKING
21
21
 
22
- from jinja2 import TemplateAssertionError, UndefinedError
23
22
  from kubernetes.client.api_client import ApiClient
24
23
 
25
- from airflow.exceptions import AirflowException
26
24
  from airflow.providers.cncf.kubernetes.kube_config import KubeConfig
27
25
  from airflow.providers.cncf.kubernetes.kubernetes_helper_functions import create_unique_id
28
26
  from airflow.providers.cncf.kubernetes.pod_generator import PodGenerator
29
- from airflow.utils.session import NEW_SESSION, provide_session
30
27
 
31
28
  if TYPE_CHECKING:
32
29
  from airflow.models.taskinstance import TaskInstance
@@ -61,17 +58,3 @@ def render_k8s_pod_yaml(task_instance: TaskInstance) -> dict | None:
61
58
  )
62
59
  sanitized_pod = ApiClient().sanitize_for_serialization(pod)
63
60
  return sanitized_pod
64
-
65
-
66
- @provide_session
67
- def get_rendered_k8s_spec(task_instance: TaskInstance, session=NEW_SESSION) -> dict | None:
68
- """Fetch rendered template fields from DB."""
69
- from airflow.models.renderedtifields import RenderedTaskInstanceFields
70
-
71
- rendered_k8s_spec = RenderedTaskInstanceFields.get_k8s_pod_yaml(task_instance, session=session)
72
- if not rendered_k8s_spec:
73
- try:
74
- rendered_k8s_spec = render_k8s_pod_yaml(task_instance)
75
- except (TemplateAssertionError, UndefinedError) as e:
76
- raise AirflowException(f"Unable to render a k8s spec for this taskinstance: {e}") from e
77
- return rendered_k8s_spec
@@ -236,7 +236,7 @@ class KubernetesPodTrigger(BaseTrigger):
236
236
  "last_log_time": self.last_log_time,
237
237
  }
238
238
  )
239
- elif container_state == ContainerState.FAILED:
239
+ if container_state == ContainerState.FAILED:
240
240
  return TriggerEvent(
241
241
  {
242
242
  "status": "failed",
@@ -289,8 +289,7 @@ class KubernetesPodTrigger(BaseTrigger):
289
289
  if state_obj is not None:
290
290
  if state != ContainerState.TERMINATED:
291
291
  return state
292
- else:
293
- return ContainerState.TERMINATED if state_obj.exit_code == 0 else ContainerState.FAILED
292
+ return ContainerState.TERMINATED if state_obj.exit_code == 0 else ContainerState.FAILED
294
293
  return ContainerState.UNDEFINED
295
294
 
296
295
  @staticmethod
@@ -520,15 +520,14 @@ class PodManager(LoggingMixin):
520
520
  return PodLoggingStatus(running=False, last_log_time=last_log_time)
521
521
  if not follow:
522
522
  return PodLoggingStatus(running=True, last_log_time=last_log_time)
523
- else:
524
- # a timeout is a normal thing and we ignore it and resume following logs
525
- if not isinstance(exc, TimeoutError):
526
- self.log.warning(
527
- "Pod %s log read interrupted but container %s still running. Logs generated in the last one second might get duplicated.",
528
- pod.metadata.name,
529
- container_name,
530
- )
531
- time.sleep(1)
523
+ # a timeout is a normal thing and we ignore it and resume following logs
524
+ if not isinstance(exc, TimeoutError):
525
+ self.log.warning(
526
+ "Pod %s log read interrupted but container %s still running. Logs generated in the last one second might get duplicated.",
527
+ pod.metadata.name,
528
+ container_name,
529
+ )
530
+ time.sleep(1)
532
531
 
533
532
  def _reconcile_requested_log_containers(
534
533
  self, requested: Iterable[str] | str | bool | None, actual: list[str], pod_name
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: apache-airflow-providers-cncf-kubernetes
3
- Version: 10.4.2
3
+ Version: 10.4.3
4
4
  Summary: Provider package apache-airflow-providers-cncf-kubernetes for Apache Airflow
5
5
  Keywords: airflow-provider,cncf.kubernetes,airflow,integration
6
6
  Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -27,8 +27,8 @@ Requires-Dist: cryptography>=41.0.0
27
27
  Requires-Dist: kubernetes>=29.0.0,<=31.0.0
28
28
  Requires-Dist: kubernetes_asyncio>=29.0.0,<=31.0.0
29
29
  Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
30
- Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/10.4.2/changelog.html
31
- Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/10.4.2
30
+ Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/10.4.3/changelog.html
31
+ Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/10.4.3
32
32
  Project-URL: Mastodon, https://fosstodon.org/@airflow
33
33
  Project-URL: Slack Chat, https://s.apache.org/airflow-slack
34
34
  Project-URL: Source Code, https://github.com/apache/airflow
@@ -59,7 +59,7 @@ Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/
59
59
 
60
60
  Package ``apache-airflow-providers-cncf-kubernetes``
61
61
 
62
- Release: ``10.4.2``
62
+ Release: ``10.4.3``
63
63
 
64
64
 
65
65
  `Kubernetes <https://kubernetes.io/>`__
@@ -72,7 +72,7 @@ This is a provider package for ``cncf.kubernetes`` provider. All classes for thi
72
72
  are in ``airflow.providers.cncf.kubernetes`` python package.
73
73
 
74
74
  You can find package information and changelog for the provider
75
- in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/10.4.2/>`_.
75
+ in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/10.4.3/>`_.
76
76
 
77
77
  Installation
78
78
  ------------
@@ -98,5 +98,5 @@ PIP package Version required
98
98
  ====================== =====================
99
99
 
100
100
  The changelog for the provider package can be found in the
101
- `changelog <https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/10.4.2/changelog.html>`_.
101
+ `changelog <https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/10.4.3/changelog.html>`_.
102
102
 
@@ -1,59 +1,59 @@
1
1
  airflow/providers/cncf/kubernetes/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
2
- airflow/providers/cncf/kubernetes/__init__.py,sha256=52kiWI08cAAtNNwjefMhvC0VPaskM3oCiII9ELhmYLk,1503
2
+ airflow/providers/cncf/kubernetes/__init__.py,sha256=m60_HPmd-QQwvdqA6pCrgJ34eZtwouli_zvBUQo9G-Q,1503
3
3
  airflow/providers/cncf/kubernetes/callbacks.py,sha256=5zGmQthojdT9iBEV3LIyBq-oKzjv2D4dOYCjYRbb61c,6076
4
4
  airflow/providers/cncf/kubernetes/exceptions.py,sha256=3cNEZTnrltBsqwzHiLfckwYYc_IWY1g4PcRs6zuMWWA,1137
5
5
  airflow/providers/cncf/kubernetes/get_provider_info.py,sha256=PqSjW28xplbuZqAX7AMYa1CHNk1w7naQfduN0rQJ8qI,15847
6
6
  airflow/providers/cncf/kubernetes/k8s_model.py,sha256=xmdFhX29DjegoZ-cq8-KDL9soVYXf4OpU6fAGr3cPTU,2101
7
7
  airflow/providers/cncf/kubernetes/kube_client.py,sha256=yflZxLousXA9d7t67KrEy55qzb1cUhEyy6yCPkEem28,5329
8
8
  airflow/providers/cncf/kubernetes/kube_config.py,sha256=3qWdCp2z4g8gX_sIOProgwp52UxM5kAIYabkxaX297g,5079
9
- airflow/providers/cncf/kubernetes/kubernetes_helper_functions.py,sha256=Zf7RyNt9BkUHY7Sjm_6CimTmb2H1bv5lrO3T0pni_r0,5524
10
- airflow/providers/cncf/kubernetes/pod_generator.py,sha256=KuzcWs35gS4avuHF1xBPkvG4VPA47jqU4MF1cYHWhwo,20887
9
+ airflow/providers/cncf/kubernetes/kubernetes_helper_functions.py,sha256=opxof6wxHEAHwa_zRB47QJBBrV5St4rIZzAiptA9Rek,5510
10
+ airflow/providers/cncf/kubernetes/pod_generator.py,sha256=f24Qdg4QA9d8gaHA0X78jDojtm9swEgLLqcG6yEx2rc,20857
11
11
  airflow/providers/cncf/kubernetes/python_kubernetes_script.jinja2,sha256=I0EHRGwLHjSiX85e51HBIoddRDnC8TJPFrDBqQq_NJg,1776
12
12
  airflow/providers/cncf/kubernetes/python_kubernetes_script.py,sha256=KnTlZSWCZhwvj89fSc2kgIRTaI4iLNKPquHc2wXnluo,3460
13
13
  airflow/providers/cncf/kubernetes/secret.py,sha256=wj-T9gouqau_X14slAstGmnSxqXJQzdLwUdURzHna0I,5209
14
- airflow/providers/cncf/kubernetes/template_rendering.py,sha256=pV6lX8DW3dLNB945mxwM8E0Vynis2-chMCwHlnHeIVY,3490
14
+ airflow/providers/cncf/kubernetes/template_rendering.py,sha256=NyrAc2rsZ0oyXxtRHiY8qkaH4tftHlexTpr7YE4UQY0,2682
15
15
  airflow/providers/cncf/kubernetes/version_compat.py,sha256=aHg90_DtgoSnQvILFICexMyNlHlALBdaeWqkX3dFDug,1605
16
16
  airflow/providers/cncf/kubernetes/backcompat/__init__.py,sha256=KXF76f3v1jIFUBNz8kwxVMvm7i4mNo35LbIG9IijBNc,1299
17
- airflow/providers/cncf/kubernetes/backcompat/backwards_compat_converters.py,sha256=pwkCHlUoP7boRNC27xYk8lSRMSggtsRnlUcoat9PbVw,4349
17
+ airflow/providers/cncf/kubernetes/backcompat/backwards_compat_converters.py,sha256=FkRRtIEucp2hYrecGVYVgyPI6-b7hE7X7L17Z3r459Y,4303
18
18
  airflow/providers/cncf/kubernetes/cli/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
19
- airflow/providers/cncf/kubernetes/cli/kubernetes_command.py,sha256=0n1Tj7w6jWfeRtf_VTn1dA5XN1MlOziXFDBA51nHquM,7252
19
+ airflow/providers/cncf/kubernetes/cli/kubernetes_command.py,sha256=FRR8p50FgHaVPS8x1rbXFSd47NtBtidqL4JyTyfxqnQ,7366
20
20
  airflow/providers/cncf/kubernetes/decorators/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
21
21
  airflow/providers/cncf/kubernetes/decorators/kubernetes.py,sha256=_OnebMazgYTJoCnkaMsRxwF6f2slKU_-ucrml8z8cq0,6449
22
22
  airflow/providers/cncf/kubernetes/executors/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
23
- airflow/providers/cncf/kubernetes/executors/kubernetes_executor.py,sha256=t6nOCpR9VZRcPG5tel_lgQ1AJaEDoJch2jDBn2acu18,31765
23
+ airflow/providers/cncf/kubernetes/executors/kubernetes_executor.py,sha256=TomXdMCdtnnsKOWZiNR2lrh0ZmHghfbzXjpjeRDJTFA,31976
24
24
  airflow/providers/cncf/kubernetes/executors/kubernetes_executor_types.py,sha256=L8_8HOHd_4O8WW6xT2tp49-yOj0EMKCYK5YqMOOx_bI,1973
25
- airflow/providers/cncf/kubernetes/executors/kubernetes_executor_utils.py,sha256=C-2EQcVKHqsELfzCHxqs4zK46hGSc1bPTD5Uw_K3ikk,24808
25
+ airflow/providers/cncf/kubernetes/executors/kubernetes_executor_utils.py,sha256=2xqLS8DQhnwVwxnv4an8cjv4HPqP_TSiVFjjADPMeHM,24750
26
26
  airflow/providers/cncf/kubernetes/executors/local_kubernetes_executor.py,sha256=TuFRbs1zqKajJoZmo25kT4AGd-_-iD-UbhfOY30EOck,11591
27
27
  airflow/providers/cncf/kubernetes/hooks/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
28
- airflow/providers/cncf/kubernetes/hooks/kubernetes.py,sha256=Iyn_kXNYj3sVpjCBqxTnu8Ob7SNTLsQva80IZpKpYWg,36784
28
+ airflow/providers/cncf/kubernetes/hooks/kubernetes.py,sha256=x-kwKxqX3PUAMf_0wALQzCGVdKRdbyAlScc4hBMuxY0,36732
29
29
  airflow/providers/cncf/kubernetes/kubernetes_executor_templates/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
30
30
  airflow/providers/cncf/kubernetes/kubernetes_executor_templates/basic_template.yaml,sha256=yzJmXN4ZyB4aDwI_GIugpL9-f1YMVy__X-LQSbeU95A,2567
31
31
  airflow/providers/cncf/kubernetes/operators/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
32
- airflow/providers/cncf/kubernetes/operators/custom_object_launcher.py,sha256=CS8VNDq0ZBvq25AkLn90Au1L75PrWM6sdz81PNIYG0U,15315
33
- airflow/providers/cncf/kubernetes/operators/job.py,sha256=GXtTHoK2y7mG7rvGdHt0KIp_0V8d0wKzZ2rUV1zg56g,23773
34
- airflow/providers/cncf/kubernetes/operators/kueue.py,sha256=kBKw3R2IDMNOaAQ3oMNaerPBxWhCW_xJT4WxVVDhBGo,4565
35
- airflow/providers/cncf/kubernetes/operators/pod.py,sha256=Gw2D9rc7k2pTyUNUSu3SVUWysrjdAPTlDdRdIYf9gU8,57831
36
- airflow/providers/cncf/kubernetes/operators/resource.py,sha256=4nS-eiVEGlotp-gCkHlwRuenj3pnKhZ4khh9s2cjZms,7597
37
- airflow/providers/cncf/kubernetes/operators/spark_kubernetes.py,sha256=QrK7sEAM5Wo5dq2YCmoX3he7RTr5K1cKNj7XafzTzaU,13849
32
+ airflow/providers/cncf/kubernetes/operators/custom_object_launcher.py,sha256=jTVHQt1vp5gELrLNyM-DrZ1ywgmTy3Hh1i6wyl7AGS0,15314
33
+ airflow/providers/cncf/kubernetes/operators/job.py,sha256=aK2MogooZ6K7uVC0hWRYhCGgzwHOERrgPyknWu5mp7c,23771
34
+ airflow/providers/cncf/kubernetes/operators/kueue.py,sha256=eEfl__06k15-21Y86qHOLAsY2zR1OWM4QgQhDteDBP0,4563
35
+ airflow/providers/cncf/kubernetes/operators/pod.py,sha256=1OnqGLDb9wlqDWs7sNofuCJaqoLaTIUr74oMOHPQhhw,57827
36
+ airflow/providers/cncf/kubernetes/operators/resource.py,sha256=Q5WssuDyjtzo1Op1pzUmYG4JZZdzCKTe-vTZEy8HSNA,7579
37
+ airflow/providers/cncf/kubernetes/operators/spark_kubernetes.py,sha256=0TO86G-gbWAkQvaWBfhCp6ZJwoQzciH-UGR5kgw9fmg,13847
38
38
  airflow/providers/cncf/kubernetes/pod_template_file_examples/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
39
39
  airflow/providers/cncf/kubernetes/pod_template_file_examples/dags_in_image_template.yaml,sha256=7JdppZ-XDBpv2Bnde2SthhcME8w3b8xQdPAK1fJGW60,2256
40
40
  airflow/providers/cncf/kubernetes/pod_template_file_examples/dags_in_volume_template.yaml,sha256=-Pk_EwKpyWRYZKOnumUxVrDeAfFJ0nr3WZ7JNnvppzg,2442
41
41
  airflow/providers/cncf/kubernetes/pod_template_file_examples/git_sync_template.yaml,sha256=Pxpa1AiBlf4H8aIc7tUTmH2XNOz84cO0ttMQdlfMJ2c,3020
42
42
  airflow/providers/cncf/kubernetes/resource_convert/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
43
43
  airflow/providers/cncf/kubernetes/resource_convert/configmap.py,sha256=gf7DdVeD0yKRNCTVCM-SywJDxwEJTYx3ogykAqbxRoU,1873
44
- airflow/providers/cncf/kubernetes/resource_convert/env_variable.py,sha256=CsVgLPXjI5pSDCBwcN3WCyyWPbiNRZc_OJCPK_C_298,1464
44
+ airflow/providers/cncf/kubernetes/resource_convert/env_variable.py,sha256=vBeR__dLHsG619rxHTmY1SSefSTdUhnD4HRKzzQJutM,1462
45
45
  airflow/providers/cncf/kubernetes/resource_convert/secret.py,sha256=ElZCMbTWeTKoPeIJ1fTvlqRXM8nGkWj2MrIlVckX6Ag,1494
46
46
  airflow/providers/cncf/kubernetes/sensors/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
47
- airflow/providers/cncf/kubernetes/sensors/spark_kubernetes.py,sha256=q5VZPf037pbsunFUjUrtz7M5oJW7waZ1q8snNAugCIk,5380
47
+ airflow/providers/cncf/kubernetes/sensors/spark_kubernetes.py,sha256=sMSuE4bziqPYzBNIZ2y1ab00kGO2tlS3Z7AmePBFA3w,5356
48
48
  airflow/providers/cncf/kubernetes/triggers/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
49
49
  airflow/providers/cncf/kubernetes/triggers/job.py,sha256=DGbC1FZktBF-00Lb0pU9iIKQnmdW8HWklp5Wwq54OEY,6754
50
- airflow/providers/cncf/kubernetes/triggers/pod.py,sha256=swYWgK86spCaAkQoqd1ti3sDakhti13nDiwl79FUAYI,12893
50
+ airflow/providers/cncf/kubernetes/triggers/pod.py,sha256=IeoMrPECgsr2Sswfvd7Fl6p3uCAvnZLvAco3tO2wu7Q,12865
51
51
  airflow/providers/cncf/kubernetes/utils/__init__.py,sha256=ClZN0VPjWySdVwS_ktH7rrgL9VLAcs3OSJSB9s3zaYw,863
52
52
  airflow/providers/cncf/kubernetes/utils/delete_from.py,sha256=poObZSoEJwQyaYWilEURs8f4CDY2sn_pfwS31Lf579A,5195
53
53
  airflow/providers/cncf/kubernetes/utils/k8s_resource_iterator.py,sha256=DLypjkD_3YDixRTcsxEjgvHZNbbG9qamlz05eBqaWzU,1955
54
- airflow/providers/cncf/kubernetes/utils/pod_manager.py,sha256=fFyO5X9aK-Qh2T0FO73UI1E7PBG0_Nz2n4-2Til0f40,37047
54
+ airflow/providers/cncf/kubernetes/utils/pod_manager.py,sha256=ARhKuTx6lahupd8w06mVc1s7UqqEeGHq4-5uOt9rqhY,36997
55
55
  airflow/providers/cncf/kubernetes/utils/xcom_sidecar.py,sha256=k6bdmVJ21OrAwGmWwledRrAmaty9ZrmbuM-IbaI4mqo,2519
56
- apache_airflow_providers_cncf_kubernetes-10.4.2.dist-info/entry_points.txt,sha256=ByD3QJJyP9CfmTYtpNI1953akD38RUDgpGXLaq9vpOw,111
57
- apache_airflow_providers_cncf_kubernetes-10.4.2.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
58
- apache_airflow_providers_cncf_kubernetes-10.4.2.dist-info/METADATA,sha256=AU_3AyvbJpgO2wuQCtrMlYsBhSA72k03bTeCfcB8fL0,4306
59
- apache_airflow_providers_cncf_kubernetes-10.4.2.dist-info/RECORD,,
56
+ apache_airflow_providers_cncf_kubernetes-10.4.3.dist-info/entry_points.txt,sha256=ByD3QJJyP9CfmTYtpNI1953akD38RUDgpGXLaq9vpOw,111
57
+ apache_airflow_providers_cncf_kubernetes-10.4.3.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
58
+ apache_airflow_providers_cncf_kubernetes-10.4.3.dist-info/METADATA,sha256=prc3_9KsPeDUBpedhnW25kih9jjS1IRYbA_ftkqI0xI,4306
59
+ apache_airflow_providers_cncf_kubernetes-10.4.3.dist-info/RECORD,,