apache-airflow-providers-cncf-kubernetes 8.2.0__tar.gz → 8.3.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of apache-airflow-providers-cncf-kubernetes might be problematic; consult the package registry's advisory page for details.

Files changed (59)
  1. {apache_airflow_providers_cncf_kubernetes-8.2.0 → apache_airflow_providers_cncf_kubernetes-8.3.0}/PKG-INFO +6 -6
  2. {apache_airflow_providers_cncf_kubernetes-8.2.0 → apache_airflow_providers_cncf_kubernetes-8.3.0}/README.rst +3 -3
  3. {apache_airflow_providers_cncf_kubernetes-8.2.0 → apache_airflow_providers_cncf_kubernetes-8.3.0}/airflow/providers/cncf/kubernetes/__init__.py +3 -6
  4. apache_airflow_providers_cncf_kubernetes-8.3.0/airflow/providers/cncf/kubernetes/cli/kubernetes_command.py +164 -0
  5. {apache_airflow_providers_cncf_kubernetes-8.2.0 → apache_airflow_providers_cncf_kubernetes-8.3.0}/airflow/providers/cncf/kubernetes/executors/kubernetes_executor.py +16 -38
  6. {apache_airflow_providers_cncf_kubernetes-8.2.0 → apache_airflow_providers_cncf_kubernetes-8.3.0}/airflow/providers/cncf/kubernetes/executors/kubernetes_executor_utils.py +14 -23
  7. {apache_airflow_providers_cncf_kubernetes-8.2.0 → apache_airflow_providers_cncf_kubernetes-8.3.0}/airflow/providers/cncf/kubernetes/get_provider_info.py +2 -1
  8. {apache_airflow_providers_cncf_kubernetes-8.2.0 → apache_airflow_providers_cncf_kubernetes-8.3.0}/airflow/providers/cncf/kubernetes/hooks/kubernetes.py +8 -0
  9. {apache_airflow_providers_cncf_kubernetes-8.2.0 → apache_airflow_providers_cncf_kubernetes-8.3.0}/airflow/providers/cncf/kubernetes/kubernetes_helper_functions.py +27 -29
  10. {apache_airflow_providers_cncf_kubernetes-8.2.0 → apache_airflow_providers_cncf_kubernetes-8.3.0}/airflow/providers/cncf/kubernetes/operators/pod.py +51 -42
  11. {apache_airflow_providers_cncf_kubernetes-8.2.0 → apache_airflow_providers_cncf_kubernetes-8.3.0}/airflow/providers/cncf/kubernetes/operators/resource.py +10 -0
  12. {apache_airflow_providers_cncf_kubernetes-8.2.0 → apache_airflow_providers_cncf_kubernetes-8.3.0}/airflow/providers/cncf/kubernetes/pod_generator.py +8 -12
  13. {apache_airflow_providers_cncf_kubernetes-8.2.0 → apache_airflow_providers_cncf_kubernetes-8.3.0}/airflow/providers/cncf/kubernetes/template_rendering.py +2 -4
  14. apache_airflow_providers_cncf_kubernetes-8.3.0/airflow/providers/cncf/kubernetes/triggers/__init__.py +16 -0
  15. {apache_airflow_providers_cncf_kubernetes-8.2.0 → apache_airflow_providers_cncf_kubernetes-8.3.0}/airflow/providers/cncf/kubernetes/utils/pod_manager.py +1 -1
  16. {apache_airflow_providers_cncf_kubernetes-8.2.0 → apache_airflow_providers_cncf_kubernetes-8.3.0}/pyproject.toml +3 -3
  17. {apache_airflow_providers_cncf_kubernetes-8.2.0 → apache_airflow_providers_cncf_kubernetes-8.3.0}/airflow/providers/cncf/kubernetes/LICENSE +0 -0
  18. {apache_airflow_providers_cncf_kubernetes-8.2.0 → apache_airflow_providers_cncf_kubernetes-8.3.0}/airflow/providers/cncf/kubernetes/backcompat/__init__.py +0 -0
  19. {apache_airflow_providers_cncf_kubernetes-8.2.0 → apache_airflow_providers_cncf_kubernetes-8.3.0}/airflow/providers/cncf/kubernetes/backcompat/backwards_compat_converters.py +0 -0
  20. {apache_airflow_providers_cncf_kubernetes-8.2.0 → apache_airflow_providers_cncf_kubernetes-8.3.0}/airflow/providers/cncf/kubernetes/callbacks.py +0 -0
  21. {apache_airflow_providers_cncf_kubernetes-8.2.0/airflow/providers/cncf/kubernetes/hooks → apache_airflow_providers_cncf_kubernetes-8.3.0/airflow/providers/cncf/kubernetes/cli}/__init__.py +0 -0
  22. {apache_airflow_providers_cncf_kubernetes-8.2.0 → apache_airflow_providers_cncf_kubernetes-8.3.0}/airflow/providers/cncf/kubernetes/decorators/__init__.py +0 -0
  23. {apache_airflow_providers_cncf_kubernetes-8.2.0 → apache_airflow_providers_cncf_kubernetes-8.3.0}/airflow/providers/cncf/kubernetes/decorators/kubernetes.py +0 -0
  24. {apache_airflow_providers_cncf_kubernetes-8.2.0 → apache_airflow_providers_cncf_kubernetes-8.3.0}/airflow/providers/cncf/kubernetes/executors/__init__.py +0 -0
  25. {apache_airflow_providers_cncf_kubernetes-8.2.0 → apache_airflow_providers_cncf_kubernetes-8.3.0}/airflow/providers/cncf/kubernetes/executors/kubernetes_executor_types.py +0 -0
  26. {apache_airflow_providers_cncf_kubernetes-8.2.0 → apache_airflow_providers_cncf_kubernetes-8.3.0}/airflow/providers/cncf/kubernetes/executors/local_kubernetes_executor.py +0 -0
  27. {apache_airflow_providers_cncf_kubernetes-8.2.0/airflow/providers/cncf/kubernetes/kubernetes_executor_templates → apache_airflow_providers_cncf_kubernetes-8.3.0/airflow/providers/cncf/kubernetes/hooks}/__init__.py +0 -0
  28. {apache_airflow_providers_cncf_kubernetes-8.2.0 → apache_airflow_providers_cncf_kubernetes-8.3.0}/airflow/providers/cncf/kubernetes/k8s_model.py +0 -0
  29. {apache_airflow_providers_cncf_kubernetes-8.2.0 → apache_airflow_providers_cncf_kubernetes-8.3.0}/airflow/providers/cncf/kubernetes/kube_client.py +0 -0
  30. {apache_airflow_providers_cncf_kubernetes-8.2.0 → apache_airflow_providers_cncf_kubernetes-8.3.0}/airflow/providers/cncf/kubernetes/kube_config.py +0 -0
  31. {apache_airflow_providers_cncf_kubernetes-8.2.0/airflow/providers/cncf/kubernetes/pod_template_file_examples → apache_airflow_providers_cncf_kubernetes-8.3.0/airflow/providers/cncf/kubernetes/kubernetes_executor_templates}/__init__.py +0 -0
  32. {apache_airflow_providers_cncf_kubernetes-8.2.0 → apache_airflow_providers_cncf_kubernetes-8.3.0}/airflow/providers/cncf/kubernetes/kubernetes_executor_templates/basic_template.yaml +0 -0
  33. {apache_airflow_providers_cncf_kubernetes-8.2.0 → apache_airflow_providers_cncf_kubernetes-8.3.0}/airflow/providers/cncf/kubernetes/operators/__init__.py +0 -0
  34. {apache_airflow_providers_cncf_kubernetes-8.2.0 → apache_airflow_providers_cncf_kubernetes-8.3.0}/airflow/providers/cncf/kubernetes/operators/custom_object_launcher.py +0 -0
  35. {apache_airflow_providers_cncf_kubernetes-8.2.0 → apache_airflow_providers_cncf_kubernetes-8.3.0}/airflow/providers/cncf/kubernetes/operators/job.py +0 -0
  36. {apache_airflow_providers_cncf_kubernetes-8.2.0 → apache_airflow_providers_cncf_kubernetes-8.3.0}/airflow/providers/cncf/kubernetes/operators/kubernetes_pod.py +0 -0
  37. {apache_airflow_providers_cncf_kubernetes-8.2.0 → apache_airflow_providers_cncf_kubernetes-8.3.0}/airflow/providers/cncf/kubernetes/operators/spark_kubernetes.py +0 -0
  38. {apache_airflow_providers_cncf_kubernetes-8.2.0 → apache_airflow_providers_cncf_kubernetes-8.3.0}/airflow/providers/cncf/kubernetes/pod_generator_deprecated.py +0 -0
  39. {apache_airflow_providers_cncf_kubernetes-8.2.0 → apache_airflow_providers_cncf_kubernetes-8.3.0}/airflow/providers/cncf/kubernetes/pod_launcher_deprecated.py +0 -0
  40. {apache_airflow_providers_cncf_kubernetes-8.2.0/airflow/providers/cncf/kubernetes/resource_convert → apache_airflow_providers_cncf_kubernetes-8.3.0/airflow/providers/cncf/kubernetes/pod_template_file_examples}/__init__.py +0 -0
  41. {apache_airflow_providers_cncf_kubernetes-8.2.0 → apache_airflow_providers_cncf_kubernetes-8.3.0}/airflow/providers/cncf/kubernetes/pod_template_file_examples/dags_in_image_template.yaml +0 -0
  42. {apache_airflow_providers_cncf_kubernetes-8.2.0 → apache_airflow_providers_cncf_kubernetes-8.3.0}/airflow/providers/cncf/kubernetes/pod_template_file_examples/dags_in_volume_template.yaml +0 -0
  43. {apache_airflow_providers_cncf_kubernetes-8.2.0 → apache_airflow_providers_cncf_kubernetes-8.3.0}/airflow/providers/cncf/kubernetes/pod_template_file_examples/git_sync_template.yaml +0 -0
  44. {apache_airflow_providers_cncf_kubernetes-8.2.0 → apache_airflow_providers_cncf_kubernetes-8.3.0}/airflow/providers/cncf/kubernetes/python_kubernetes_script.jinja2 +0 -0
  45. {apache_airflow_providers_cncf_kubernetes-8.2.0 → apache_airflow_providers_cncf_kubernetes-8.3.0}/airflow/providers/cncf/kubernetes/python_kubernetes_script.py +0 -0
  46. {apache_airflow_providers_cncf_kubernetes-8.2.0/airflow/providers/cncf/kubernetes/sensors → apache_airflow_providers_cncf_kubernetes-8.3.0/airflow/providers/cncf/kubernetes/resource_convert}/__init__.py +0 -0
  47. {apache_airflow_providers_cncf_kubernetes-8.2.0 → apache_airflow_providers_cncf_kubernetes-8.3.0}/airflow/providers/cncf/kubernetes/resource_convert/configmap.py +0 -0
  48. {apache_airflow_providers_cncf_kubernetes-8.2.0 → apache_airflow_providers_cncf_kubernetes-8.3.0}/airflow/providers/cncf/kubernetes/resource_convert/env_variable.py +0 -0
  49. {apache_airflow_providers_cncf_kubernetes-8.2.0 → apache_airflow_providers_cncf_kubernetes-8.3.0}/airflow/providers/cncf/kubernetes/resource_convert/secret.py +0 -0
  50. {apache_airflow_providers_cncf_kubernetes-8.2.0 → apache_airflow_providers_cncf_kubernetes-8.3.0}/airflow/providers/cncf/kubernetes/secret.py +0 -0
  51. {apache_airflow_providers_cncf_kubernetes-8.2.0/airflow/providers/cncf/kubernetes/triggers → apache_airflow_providers_cncf_kubernetes-8.3.0/airflow/providers/cncf/kubernetes/sensors}/__init__.py +0 -0
  52. {apache_airflow_providers_cncf_kubernetes-8.2.0 → apache_airflow_providers_cncf_kubernetes-8.3.0}/airflow/providers/cncf/kubernetes/sensors/spark_kubernetes.py +0 -0
  53. {apache_airflow_providers_cncf_kubernetes-8.2.0 → apache_airflow_providers_cncf_kubernetes-8.3.0}/airflow/providers/cncf/kubernetes/triggers/job.py +0 -0
  54. {apache_airflow_providers_cncf_kubernetes-8.2.0 → apache_airflow_providers_cncf_kubernetes-8.3.0}/airflow/providers/cncf/kubernetes/triggers/kubernetes_pod.py +0 -0
  55. {apache_airflow_providers_cncf_kubernetes-8.2.0 → apache_airflow_providers_cncf_kubernetes-8.3.0}/airflow/providers/cncf/kubernetes/triggers/pod.py +0 -0
  56. {apache_airflow_providers_cncf_kubernetes-8.2.0 → apache_airflow_providers_cncf_kubernetes-8.3.0}/airflow/providers/cncf/kubernetes/utils/__init__.py +0 -0
  57. {apache_airflow_providers_cncf_kubernetes-8.2.0 → apache_airflow_providers_cncf_kubernetes-8.3.0}/airflow/providers/cncf/kubernetes/utils/delete_from.py +0 -0
  58. {apache_airflow_providers_cncf_kubernetes-8.2.0 → apache_airflow_providers_cncf_kubernetes-8.3.0}/airflow/providers/cncf/kubernetes/utils/k8s_resource_iterator.py +0 -0
  59. {apache_airflow_providers_cncf_kubernetes-8.2.0 → apache_airflow_providers_cncf_kubernetes-8.3.0}/airflow/providers/cncf/kubernetes/utils/xcom_sidecar.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: apache-airflow-providers-cncf-kubernetes
3
- Version: 8.2.0
3
+ Version: 8.3.0
4
4
  Summary: Provider package apache-airflow-providers-cncf-kubernetes for Apache Airflow
5
5
  Keywords: airflow-provider,cncf.kubernetes,airflow,integration
6
6
  Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -29,8 +29,8 @@ Requires-Dist: google-re2>=1.0
29
29
  Requires-Dist: kubernetes>=28.1.0,<=29.0.0
30
30
  Requires-Dist: kubernetes_asyncio>=28.1.0,<=29.0.0
31
31
  Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
32
- Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/8.2.0/changelog.html
33
- Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/8.2.0
32
+ Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/8.3.0/changelog.html
33
+ Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/8.3.0
34
34
  Project-URL: Slack Chat, https://s.apache.org/airflow-slack
35
35
  Project-URL: Source Code, https://github.com/apache/airflow
36
36
  Project-URL: Twitter, https://twitter.com/ApacheAirflow
@@ -80,7 +80,7 @@ Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/
80
80
 
81
81
  Package ``apache-airflow-providers-cncf-kubernetes``
82
82
 
83
- Release: ``8.2.0``
83
+ Release: ``8.3.0``
84
84
 
85
85
 
86
86
  `Kubernetes <https://kubernetes.io/>`__
@@ -93,7 +93,7 @@ This is a provider package for ``cncf.kubernetes`` provider. All classes for thi
93
93
  are in ``airflow.providers.cncf.kubernetes`` python package.
94
94
 
95
95
  You can find package information and changelog for the provider
96
- in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/8.2.0/>`_.
96
+ in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/8.3.0/>`_.
97
97
 
98
98
  Installation
99
99
  ------------
@@ -120,4 +120,4 @@ PIP package Version required
120
120
  ====================== =====================
121
121
 
122
122
  The changelog for the provider package can be found in the
123
- `changelog <https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/8.2.0/changelog.html>`_.
123
+ `changelog <https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/8.3.0/changelog.html>`_.
@@ -42,7 +42,7 @@
42
42
 
43
43
  Package ``apache-airflow-providers-cncf-kubernetes``
44
44
 
45
- Release: ``8.2.0``
45
+ Release: ``8.3.0``
46
46
 
47
47
 
48
48
  `Kubernetes <https://kubernetes.io/>`__
@@ -55,7 +55,7 @@ This is a provider package for ``cncf.kubernetes`` provider. All classes for thi
55
55
  are in ``airflow.providers.cncf.kubernetes`` python package.
56
56
 
57
57
  You can find package information and changelog for the provider
58
- in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/8.2.0/>`_.
58
+ in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/8.3.0/>`_.
59
59
 
60
60
  Installation
61
61
  ------------
@@ -82,4 +82,4 @@ PIP package Version required
82
82
  ====================== =====================
83
83
 
84
84
  The changelog for the provider package can be found in the
85
- `changelog <https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/8.2.0/changelog.html>`_.
85
+ `changelog <https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/8.3.0/changelog.html>`_.
@@ -25,14 +25,11 @@ from __future__ import annotations
25
25
 
26
26
  import packaging.version
27
27
 
28
- __all__ = ["__version__"]
28
+ from airflow import __version__ as airflow_version
29
29
 
30
- __version__ = "8.2.0"
30
+ __all__ = ["__version__"]
31
31
 
32
- try:
33
- from airflow import __version__ as airflow_version
34
- except ImportError:
35
- from airflow.version import version as airflow_version
32
+ __version__ = "8.3.0"
36
33
 
37
34
  if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
38
35
  "2.7.0"
@@ -0,0 +1,164 @@
1
+ # Licensed to the Apache Software Foundation (ASF) under one
2
+ # or more contributor license agreements. See the NOTICE file
3
+ # distributed with this work for additional information
4
+ # regarding copyright ownership. The ASF licenses this file
5
+ # to you under the Apache License, Version 2.0 (the
6
+ # "License"); you may not use this file except in compliance
7
+ # with the License. You may obtain a copy of the License at
8
+ #
9
+ # http://www.apache.org/licenses/LICENSE-2.0
10
+ #
11
+ # Unless required by applicable law or agreed to in writing,
12
+ # software distributed under the License is distributed on an
13
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14
+ # KIND, either express or implied. See the License for the
15
+ # specific language governing permissions and limitations
16
+ # under the License.
17
+ """Kubernetes sub-commands."""
18
+
19
+ from __future__ import annotations
20
+
21
+ import os
22
+ import sys
23
+ from datetime import datetime, timedelta
24
+
25
+ from kubernetes import client
26
+ from kubernetes.client.api_client import ApiClient
27
+ from kubernetes.client.rest import ApiException
28
+
29
+ from airflow.models import DagRun, TaskInstance
30
+ from airflow.providers.cncf.kubernetes import pod_generator
31
+ from airflow.providers.cncf.kubernetes.executors.kubernetes_executor import KubeConfig
32
+ from airflow.providers.cncf.kubernetes.kube_client import get_kube_client
33
+ from airflow.providers.cncf.kubernetes.kubernetes_helper_functions import create_pod_id
34
+ from airflow.providers.cncf.kubernetes.pod_generator import PodGenerator
35
+ from airflow.utils import cli as cli_utils, yaml
36
+ from airflow.utils.cli import get_dag
37
+ from airflow.utils.providers_configuration_loader import providers_configuration_loaded
38
+
39
+
40
+ @cli_utils.action_cli
41
+ @providers_configuration_loaded
42
+ def generate_pod_yaml(args):
43
+ """Generate yaml files for each task in the DAG. Used for testing output of KubernetesExecutor."""
44
+ execution_date = args.execution_date
45
+ dag = get_dag(subdir=args.subdir, dag_id=args.dag_id)
46
+ yaml_output_path = args.output_path
47
+ dr = DagRun(dag.dag_id, execution_date=execution_date)
48
+ kube_config = KubeConfig()
49
+ for task in dag.tasks:
50
+ ti = TaskInstance(task, None)
51
+ ti.dag_run = dr
52
+ pod = PodGenerator.construct_pod(
53
+ dag_id=args.dag_id,
54
+ task_id=ti.task_id,
55
+ pod_id=create_pod_id(args.dag_id, ti.task_id),
56
+ try_number=ti.try_number,
57
+ kube_image=kube_config.kube_image,
58
+ date=ti.execution_date,
59
+ args=ti.command_as_list(),
60
+ pod_override_object=PodGenerator.from_obj(ti.executor_config),
61
+ scheduler_job_id="worker-config",
62
+ namespace=kube_config.executor_namespace,
63
+ base_worker_pod=PodGenerator.deserialize_model_file(kube_config.pod_template_file),
64
+ with_mutation_hook=True,
65
+ )
66
+ api_client = ApiClient()
67
+ date_string = pod_generator.datetime_to_label_safe_datestring(execution_date)
68
+ yaml_file_name = f"{args.dag_id}_{ti.task_id}_{date_string}.yml"
69
+ os.makedirs(os.path.dirname(yaml_output_path + "/airflow_yaml_output/"), exist_ok=True)
70
+ with open(yaml_output_path + "/airflow_yaml_output/" + yaml_file_name, "w") as output:
71
+ sanitized_pod = api_client.sanitize_for_serialization(pod)
72
+ output.write(yaml.dump(sanitized_pod))
73
+ print(f"YAML output can be found at {yaml_output_path}/airflow_yaml_output/")
74
+
75
+
76
+ @cli_utils.action_cli
77
+ @providers_configuration_loaded
78
+ def cleanup_pods(args):
79
+ """Clean up k8s pods in evicted/failed/succeeded/pending states."""
80
+ namespace = args.namespace
81
+
82
+ min_pending_minutes = args.min_pending_minutes
83
+ # protect newly created pods from deletion
84
+ if min_pending_minutes < 5:
85
+ min_pending_minutes = 5
86
+
87
+ # https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle/
88
+ # All Containers in the Pod have terminated in success, and will not be restarted.
89
+ pod_succeeded = "succeeded"
90
+
91
+ # The Pod has been accepted by the Kubernetes cluster,
92
+ # but one or more of the containers has not been set up and made ready to run.
93
+ pod_pending = "pending"
94
+
95
+ # All Containers in the Pod have terminated, and at least one Container has terminated in failure.
96
+ # That is, the Container either exited with non-zero status or was terminated by the system.
97
+ pod_failed = "failed"
98
+
99
+ # https://kubernetes.io/docs/tasks/administer-cluster/out-of-resource/
100
+ pod_reason_evicted = "evicted"
101
+ # If pod is failed and restartPolicy is:
102
+ # * Always: Restart Container; Pod phase stays Running.
103
+ # * OnFailure: Restart Container; Pod phase stays Running.
104
+ # * Never: Pod phase becomes Failed.
105
+ pod_restart_policy_never = "never"
106
+
107
+ print("Loading Kubernetes configuration")
108
+ kube_client = get_kube_client()
109
+ print(f"Listing pods in namespace {namespace}")
110
+ airflow_pod_labels = [
111
+ "dag_id",
112
+ "task_id",
113
+ "try_number",
114
+ "airflow_version",
115
+ ]
116
+ list_kwargs = {"namespace": namespace, "limit": 500, "label_selector": ",".join(airflow_pod_labels)}
117
+
118
+ while True:
119
+ pod_list = kube_client.list_namespaced_pod(**list_kwargs)
120
+ for pod in pod_list.items:
121
+ pod_name = pod.metadata.name
122
+ print(f"Inspecting pod {pod_name}")
123
+ pod_phase = pod.status.phase.lower()
124
+ pod_reason = pod.status.reason.lower() if pod.status.reason else ""
125
+ pod_restart_policy = pod.spec.restart_policy.lower()
126
+ current_time = datetime.now(pod.metadata.creation_timestamp.tzinfo)
127
+
128
+ if (
129
+ pod_phase == pod_succeeded
130
+ or (pod_phase == pod_failed and pod_restart_policy == pod_restart_policy_never)
131
+ or (pod_reason == pod_reason_evicted)
132
+ or (
133
+ pod_phase == pod_pending
134
+ and current_time - pod.metadata.creation_timestamp
135
+ > timedelta(minutes=min_pending_minutes)
136
+ )
137
+ ):
138
+ print(
139
+ f'Deleting pod "{pod_name}" phase "{pod_phase}" and reason "{pod_reason}", '
140
+ f'restart policy "{pod_restart_policy}"'
141
+ )
142
+ try:
143
+ _delete_pod(pod.metadata.name, namespace)
144
+ except ApiException as e:
145
+ print(f"Can't remove POD: {e}", file=sys.stderr)
146
+ else:
147
+ print(f"No action taken on pod {pod_name}")
148
+ continue_token = pod_list.metadata._continue
149
+ if not continue_token:
150
+ break
151
+ list_kwargs["_continue"] = continue_token
152
+
153
+
154
+ def _delete_pod(name, namespace):
155
+ """
156
+ Delete a namespaced pod.
157
+
158
+ Helper Function for cleanup_pods.
159
+ """
160
+ kube_client = get_kube_client()
161
+ delete_options = client.V1DeleteOptions()
162
+ print(f'Deleting POD "{name}" from "{namespace}" namespace')
163
+ api_response = kube_client.delete_namespaced_pod(name=name, namespace=namespace, body=delete_options)
164
+ print(api_response)
@@ -38,42 +38,18 @@ from typing import TYPE_CHECKING, Any, Sequence
38
38
  from kubernetes.dynamic import DynamicClient
39
39
  from sqlalchemy import select, update
40
40
 
41
- from airflow.providers.cncf.kubernetes.pod_generator import PodMutationHookException, PodReconciliationError
42
- from airflow.stats import Stats
43
-
44
- try:
45
- from airflow.cli.cli_config import (
46
- ARG_DAG_ID,
47
- ARG_EXECUTION_DATE,
48
- ARG_OUTPUT_PATH,
49
- ARG_SUBDIR,
50
- ARG_VERBOSE,
51
- ActionCommand,
52
- Arg,
53
- GroupCommand,
54
- lazy_load_command,
55
- positive_int,
56
- )
57
- except ImportError:
58
- try:
59
- from airflow import __version__ as airflow_version
60
- except ImportError:
61
- from airflow.version import version as airflow_version
62
-
63
- import packaging.version
64
-
65
- from airflow.exceptions import AirflowOptionalProviderFeatureException
66
-
67
- base_version = packaging.version.parse(airflow_version).base_version
68
-
69
- if packaging.version.parse(base_version) < packaging.version.parse("2.7.0"):
70
- raise AirflowOptionalProviderFeatureException(
71
- "Kubernetes Executor from CNCF Provider should only be used with Airflow 2.7.0+.\n"
72
- f"This is Airflow {airflow_version} and Kubernetes and CeleryKubernetesExecutor are "
73
- f"available in the 'airflow.executors' package. You should not use "
74
- f"the provider's executors in this version of Airflow."
75
- )
76
- raise
41
+ from airflow.cli.cli_config import (
42
+ ARG_DAG_ID,
43
+ ARG_EXECUTION_DATE,
44
+ ARG_OUTPUT_PATH,
45
+ ARG_SUBDIR,
46
+ ARG_VERBOSE,
47
+ ActionCommand,
48
+ Arg,
49
+ GroupCommand,
50
+ lazy_load_command,
51
+ positive_int,
52
+ )
77
53
  from airflow.configuration import conf
78
54
  from airflow.executors.base_executor import BaseExecutor
79
55
  from airflow.providers.cncf.kubernetes.executors.kubernetes_executor_types import (
@@ -82,6 +58,8 @@ from airflow.providers.cncf.kubernetes.executors.kubernetes_executor_types impor
82
58
  )
83
59
  from airflow.providers.cncf.kubernetes.kube_config import KubeConfig
84
60
  from airflow.providers.cncf.kubernetes.kubernetes_helper_functions import annotations_to_key
61
+ from airflow.providers.cncf.kubernetes.pod_generator import PodMutationHookException, PodReconciliationError
62
+ from airflow.stats import Stats
85
63
  from airflow.utils.event_scheduler import EventScheduler
86
64
  from airflow.utils.log.logging_mixin import remove_escape_codes
87
65
  from airflow.utils.session import NEW_SESSION, provide_session
@@ -132,14 +110,14 @@ KUBERNETES_COMMANDS = (
132
110
  "(created by KubernetesExecutor/KubernetesPodOperator) "
133
111
  "in evicted/failed/succeeded/pending states"
134
112
  ),
135
- func=lazy_load_command("airflow.cli.commands.kubernetes_command.cleanup_pods"),
113
+ func=lazy_load_command("airflow.providers.cncf.kubernetes.cli.kubernetes_command.cleanup_pods"),
136
114
  args=(ARG_NAMESPACE, ARG_MIN_PENDING_MINUTES, ARG_VERBOSE),
137
115
  ),
138
116
  ActionCommand(
139
117
  name="generate-dag-yaml",
140
118
  help="Generate YAML files for all tasks in DAG. Useful for debugging tasks without "
141
119
  "launching into a cluster",
142
- func=lazy_load_command("airflow.cli.commands.kubernetes_command.generate_pod_yaml"),
120
+ func=lazy_load_command("airflow.providers.cncf.kubernetes.cli.kubernetes_command.generate_pod_yaml"),
143
121
  args=(ARG_DAG_ID, ARG_EXECUTION_DATE, ARG_SUBDIR, ARG_OUTPUT_PATH, ARG_VERBOSE),
144
122
  ),
145
123
  )
@@ -21,7 +21,7 @@ import json
21
21
  import multiprocessing
22
22
  import time
23
23
  from queue import Empty, Queue
24
- from typing import TYPE_CHECKING, Any, Generic, TypeVar
24
+ from typing import TYPE_CHECKING, Any
25
25
 
26
26
  from kubernetes import client, watch
27
27
  from kubernetes.client.rest import ApiException
@@ -32,10 +32,11 @@ from airflow.providers.cncf.kubernetes.kube_client import get_kube_client
32
32
  from airflow.providers.cncf.kubernetes.kubernetes_helper_functions import (
33
33
  annotations_for_logging_task_metadata,
34
34
  annotations_to_key,
35
- create_pod_id,
35
+ create_unique_id,
36
36
  )
37
37
  from airflow.providers.cncf.kubernetes.pod_generator import PodGenerator
38
38
  from airflow.utils.log.logging_mixin import LoggingMixin
39
+ from airflow.utils.singleton import Singleton
39
40
  from airflow.utils.state import TaskInstanceState
40
41
 
41
42
  try:
@@ -60,22 +61,6 @@ if TYPE_CHECKING:
60
61
  KubernetesWatchType,
61
62
  )
62
63
 
63
- # Singleton here is duplicated version of airflow.utils.singleton.Singleton until
64
- # min-airflow version is 2.7.0 for the provider. then it can be imported from airflow.utils.singleton.
65
-
66
- T = TypeVar("T")
67
-
68
-
69
- class Singleton(type, Generic[T]):
70
- """Metaclass that allows to implement singleton pattern."""
71
-
72
- _instances: dict[Singleton[T], T] = {}
73
-
74
- def __call__(cls: Singleton[T], *args, **kwargs) -> T:
75
- if cls not in cls._instances:
76
- cls._instances[cls] = super().__call__(*args, **kwargs)
77
- return cls._instances[cls]
78
-
79
64
 
80
65
  class ResourceVersion(metaclass=Singleton):
81
66
  """Singleton for tracking resourceVersion from Kubernetes."""
@@ -113,9 +98,7 @@ class KubernetesJobWatcher(multiprocessing.Process, LoggingMixin):
113
98
  kube_client, self.resource_version, self.scheduler_job_id, self.kube_config
114
99
  )
115
100
  except ReadTimeoutError:
116
- self.log.warning(
117
- "There was a timeout error accessing the Kube API. Retrying request.", exc_info=True
118
- )
101
+ self.log.info("Kubernetes watch timed out waiting for events. Restarting watch.")
119
102
  time.sleep(1)
120
103
  except Exception:
121
104
  self.log.exception("Unknown error in KubernetesJobWatcher. Failing")
@@ -156,7 +139,7 @@ class KubernetesJobWatcher(multiprocessing.Process, LoggingMixin):
156
139
  ) -> str | None:
157
140
  self.log.info("Event: and now my watch begins starting at resource_version: %s", resource_version)
158
141
 
159
- kwargs = {"label_selector": f"airflow-worker={scheduler_job_id}"}
142
+ kwargs: dict[str, Any] = {"label_selector": f"airflow-worker={scheduler_job_id}"}
160
143
  if resource_version:
161
144
  kwargs["resource_version"] = resource_version
162
145
  if kube_config.kube_client_request_args:
@@ -165,6 +148,14 @@ class KubernetesJobWatcher(multiprocessing.Process, LoggingMixin):
165
148
 
166
149
  last_resource_version: str | None = None
167
150
 
151
+ # For info about k8s timeout settings see
152
+ # https://github.com/kubernetes-client/python/blob/v29.0.0/examples/watch/timeout-settings.md
153
+ # and https://github.com/kubernetes-client/python/blob/v29.0.0/kubernetes/client/api_client.py#L336-L339
154
+ client_timeout = 30
155
+ server_conn_timeout = 3600
156
+ kwargs["_request_timeout"] = client_timeout
157
+ kwargs["timeout_seconds"] = server_conn_timeout
158
+
168
159
  for event in self._pod_events(kube_client=kube_client, query_kwargs=kwargs):
169
160
  task = event["object"]
170
161
  self.log.debug("Event: %s had an event of type %s", task.metadata.name, event["type"])
@@ -413,7 +404,7 @@ class AirflowKubernetesScheduler(LoggingMixin):
413
404
  pod = PodGenerator.construct_pod(
414
405
  namespace=self.namespace,
415
406
  scheduler_job_id=self.scheduler_job_id,
416
- pod_id=create_pod_id(dag_id, task_id),
407
+ pod_id=create_unique_id(dag_id, task_id),
417
408
  dag_id=dag_id,
418
409
  task_id=task_id,
419
410
  kube_image=self.kube_config.kube_image,
@@ -28,8 +28,9 @@ def get_provider_info():
28
28
  "name": "Kubernetes",
29
29
  "description": "`Kubernetes <https://kubernetes.io/>`__\n",
30
30
  "state": "ready",
31
- "source-date-epoch": 1714476004,
31
+ "source-date-epoch": 1716287042,
32
32
  "versions": [
33
+ "8.3.0",
33
34
  "8.2.0",
34
35
  "8.1.1",
35
36
  "8.1.0",
@@ -25,6 +25,7 @@ from time import sleep
25
25
  from typing import TYPE_CHECKING, Any, Generator
26
26
 
27
27
  import aiofiles
28
+ import tenacity
28
29
  from asgiref.sync import sync_to_async
29
30
  from kubernetes import client, config, watch
30
31
  from kubernetes.config import ConfigException
@@ -35,6 +36,7 @@ from airflow.exceptions import AirflowException, AirflowNotFoundException
35
36
  from airflow.hooks.base import BaseHook
36
37
  from airflow.models import Connection
37
38
  from airflow.providers.cncf.kubernetes.kube_client import _disable_verify_ssl, _enable_tcp_keepalive
39
+ from airflow.providers.cncf.kubernetes.kubernetes_helper_functions import should_retry_creation
38
40
  from airflow.providers.cncf.kubernetes.utils.pod_manager import PodOperatorHookProtocol
39
41
  from airflow.utils import yaml
40
42
 
@@ -486,6 +488,12 @@ class KubernetesHook(BaseHook, PodOperatorHookProtocol):
486
488
  except Exception as exc:
487
489
  raise exc
488
490
 
491
+ @tenacity.retry(
492
+ stop=tenacity.stop_after_attempt(3),
493
+ wait=tenacity.wait_random_exponential(),
494
+ reraise=True,
495
+ retry=tenacity.retry_if_exception(should_retry_creation),
496
+ )
489
497
  def create_job(
490
498
  self,
491
499
  job: V1Job,
@@ -19,10 +19,11 @@ from __future__ import annotations
19
19
  import logging
20
20
  import secrets
21
21
  import string
22
- import warnings
23
22
  from typing import TYPE_CHECKING
24
23
 
25
24
  import pendulum
25
+ from deprecated import deprecated
26
+ from kubernetes.client.rest import ApiException
26
27
  from slugify import slugify
27
28
 
28
29
  from airflow.compat.functools import cache
@@ -59,6 +60,10 @@ def add_unique_suffix(*, name: str, rand_len: int = 8, max_len: int = POD_NAME_M
59
60
  return name[: max_len - len(suffix)].strip("-.") + suffix
60
61
 
61
62
 
63
+ @deprecated(
64
+ reason="This function is deprecated. Please use `add_unique_suffix`",
65
+ category=AirflowProviderDeprecationWarning,
66
+ )
62
67
  def add_pod_suffix(*, pod_name: str, rand_len: int = 8, max_len: int = POD_NAME_MAX_LENGTH) -> str:
63
68
  """Add random string to pod name while staying under max length.
64
69
 
@@ -67,14 +72,7 @@ def add_pod_suffix(*, pod_name: str, rand_len: int = 8, max_len: int = POD_NAME_
67
72
  :param max_len: maximum length of the pod name
68
73
  :meta private:
69
74
  """
70
- warnings.warn(
71
- "This function is deprecated. Please use `add_unique_suffix`.",
72
- AirflowProviderDeprecationWarning,
73
- stacklevel=2,
74
- )
75
-
76
- suffix = "-" + rand_str(rand_len)
77
- return pod_name[: max_len - len(suffix)].strip("-.") + suffix
75
+ return add_unique_suffix(name=pod_name, rand_len=rand_len, max_len=max_len)
78
76
 
79
77
 
80
78
  def create_unique_id(
@@ -109,6 +107,10 @@ def create_unique_id(
109
107
  return base_name
110
108
 
111
109
 
110
+ @deprecated(
111
+ reason="This function is deprecated. Please use `create_unique_id`.",
112
+ category=AirflowProviderDeprecationWarning,
113
+ )
112
114
  def create_pod_id(
113
115
  dag_id: str | None = None,
114
116
  task_id: str | None = None,
@@ -125,26 +127,7 @@ def create_pod_id(
125
127
  :param unique: whether a random string suffix should be added
126
128
  :return: A valid identifier for a kubernetes pod name
127
129
  """
128
- warnings.warn(
129
- "This function is deprecated. Please use `create_unique_id`.",
130
- AirflowProviderDeprecationWarning,
131
- stacklevel=2,
132
- )
133
-
134
- if not (dag_id or task_id):
135
- raise ValueError("Must supply either dag_id or task_id.")
136
- name = ""
137
- if dag_id:
138
- name += dag_id
139
- if task_id:
140
- if name:
141
- name += "-"
142
- name += task_id
143
- base_name = slugify(name, lowercase=True)[:max_length].strip(".-")
144
- if unique:
145
- return add_pod_suffix(pod_name=base_name, rand_len=8, max_len=max_length)
146
- else:
147
- return base_name
130
+ return create_unique_id(dag_id=dag_id, task_id=task_id, max_length=max_length, unique=unique)
148
131
 
149
132
 
150
133
  def annotations_to_key(annotations: dict[str, str]) -> TaskInstanceKey:
@@ -199,3 +182,18 @@ def annotations_for_logging_task_metadata(annotation_set):
199
182
  else:
200
183
  annotations_for_logging = "<omitted>"
201
184
  return annotations_for_logging
185
+
186
+
187
+ def should_retry_creation(exception: BaseException) -> bool:
188
+ """
189
+ Check if an Exception indicates a transient error and warrants retrying.
190
+
191
+ This function is needed for preventing 'No agent available' error. The error appears time to time
192
+ when users try to create a Resource or Job. This issue is inside kubernetes and in the current moment
193
+ has no solution. Like a temporary solution we decided to retry Job or Resource creation request each
194
+ time when this error appears.
195
+ More about this issue here: https://github.com/cert-manager/cert-manager/issues/6457
196
+ """
197
+ if isinstance(exception, ApiException):
198
+ return str(exception.status) == "500"
199
+ return False
@@ -36,6 +36,7 @@ import kubernetes
36
36
  import tenacity
37
37
  from deprecated import deprecated
38
38
  from kubernetes.client import CoreV1Api, V1Pod, models as k8s
39
+ from kubernetes.client.exceptions import ApiException
39
40
  from kubernetes.stream import stream
40
41
  from urllib3.exceptions import HTTPError
41
42
 
@@ -553,7 +554,7 @@ class KubernetesPodOperator(BaseOperator):
553
554
 
554
555
  return pod_request_obj
555
556
 
556
- def await_pod_start(self, pod: k8s.V1Pod):
557
+ def await_pod_start(self, pod: k8s.V1Pod) -> None:
557
558
  try:
558
559
  self.pod_manager.await_pod_start(
559
560
  pod=pod,
@@ -565,23 +566,23 @@ class KubernetesPodOperator(BaseOperator):
565
566
  self._read_pod_events(pod, reraise=False)
566
567
  raise
567
568
 
568
- def extract_xcom(self, pod: k8s.V1Pod):
569
+ def extract_xcom(self, pod: k8s.V1Pod) -> dict[Any, Any] | None:
569
570
  """Retrieve xcom value and kill xcom sidecar container."""
570
571
  result = self.pod_manager.extract_xcom(pod)
571
572
  if isinstance(result, str) and result.rstrip() == EMPTY_XCOM_RESULT:
572
573
  self.log.info("xcom result file is empty.")
573
574
  return None
574
- else:
575
- self.log.info("xcom result: \n%s", result)
576
- return json.loads(result)
575
+
576
+ self.log.info("xcom result: \n%s", result)
577
+ return json.loads(result)
577
578
 
578
579
  def execute(self, context: Context):
579
580
  """Based on the deferrable parameter runs the pod asynchronously or synchronously."""
580
- if self.deferrable:
581
- self.execute_async(context)
582
- else:
581
+ if not self.deferrable:
583
582
  return self.execute_sync(context)
584
583
 
584
+ self.execute_async(context)
585
+
585
586
  def execute_sync(self, context: Context):
586
587
  result = None
587
588
  try:
@@ -669,7 +670,7 @@ class KubernetesPodOperator(BaseOperator):
669
670
  del self.client
670
671
  del self.pod_manager
671
672
 
672
- def execute_async(self, context: Context):
673
+ def execute_async(self, context: Context) -> None:
673
674
  self.pod_request_obj = self.build_pod_request_obj(context)
674
675
  self.pod = self.get_or_create_pod( # must set `self.pod` for `on_kill`
675
676
  pod_request_obj=self.pod_request_obj,
@@ -687,7 +688,7 @@ class KubernetesPodOperator(BaseOperator):
687
688
 
688
689
  self.invoke_defer_method()
689
690
 
690
- def invoke_defer_method(self, last_log_time: DateTime | None = None):
691
+ def invoke_defer_method(self, last_log_time: DateTime | None = None) -> None:
691
692
  """Redefine triggers which are being used in child classes."""
692
693
  trigger_start_time = datetime.datetime.now(tz=datetime.timezone.utc)
693
694
  self.defer(
@@ -742,7 +743,7 @@ class KubernetesPodOperator(BaseOperator):
742
743
  if event["status"] in ("error", "failed", "timeout"):
743
744
  # fetch some logs when pod is failed
744
745
  if self.get_logs:
745
- self.write_logs(self.pod, follow=follow, since_time=last_log_time)
746
+ self._write_logs(self.pod, follow=follow, since_time=last_log_time)
746
747
 
747
748
  if self.do_xcom_push:
748
749
  _ = self.extract_xcom(pod=self.pod)
@@ -770,7 +771,7 @@ class KubernetesPodOperator(BaseOperator):
770
771
  elif event["status"] == "success":
771
772
  # fetch some logs when pod is executed successfully
772
773
  if self.get_logs:
773
- self.write_logs(self.pod, follow=follow, since_time=last_log_time)
774
+ self._write_logs(self.pod, follow=follow, since_time=last_log_time)
774
775
 
775
776
  if self.do_xcom_push:
776
777
  xcom_sidecar_output = self.extract_xcom(pod=self.pod)
@@ -781,51 +782,59 @@ class KubernetesPodOperator(BaseOperator):
781
782
  finally:
782
783
  self._clean(event)
783
784
 
784
- def _clean(self, event: dict[str, Any]):
785
+ def _clean(self, event: dict[str, Any]) -> None:
785
786
  if event["status"] == "running":
786
787
  return
787
788
  istio_enabled = self.is_istio_enabled(self.pod)
788
789
  # Skip await_pod_completion when the event is 'timeout' due to the pod can hang
789
790
  # on the ErrImagePull or ContainerCreating step and it will never complete
790
791
  if event["status"] != "timeout":
791
- self.pod = self.pod_manager.await_pod_completion(
792
- self.pod, istio_enabled, self.base_container_name
793
- )
792
+ try:
793
+ self.pod = self.pod_manager.await_pod_completion(
794
+ self.pod, istio_enabled, self.base_container_name
795
+ )
796
+ except ApiException as e:
797
+ if e.status == 404:
798
+ self.pod = None
799
+ self.log.warning(
800
+ "Pod not found while waiting for completion. The last status was %r", event["status"]
801
+ )
802
+ else:
803
+ raise e
794
804
  if self.pod is not None:
795
805
  self.post_complete_action(
796
806
  pod=self.pod,
797
807
  remote_pod=self.pod,
798
808
  )
799
809
 
800
- @deprecated(reason="use `trigger_reentry` instead.", category=AirflowProviderDeprecationWarning)
801
- def execute_complete(self, context: Context, event: dict, **kwargs):
802
- return self.trigger_reentry(context=context, event=event)
803
-
804
- def write_logs(self, pod: k8s.V1Pod, follow: bool = False, since_time: DateTime | None = None):
810
+ def _write_logs(self, pod: k8s.V1Pod, follow: bool = False, since_time: DateTime | None = None) -> None:
805
811
  try:
806
812
  since_seconds = (
807
813
  math.ceil((datetime.datetime.now(tz=datetime.timezone.utc) - since_time).total_seconds())
808
814
  if since_time
809
815
  else None
810
816
  )
811
- logs = self.pod_manager.read_pod_logs(
812
- pod=pod,
813
- container_name=self.base_container_name,
817
+ logs = self.client.read_namespaced_pod_log(
818
+ name=pod.metadata.name,
819
+ namespace=pod.metadata.namespace,
820
+ container=self.base_container_name,
814
821
  follow=follow,
822
+ timestamps=False,
815
823
  since_seconds=since_seconds,
824
+ _preload_content=False,
816
825
  )
817
826
  for raw_line in logs:
818
827
  line = raw_line.decode("utf-8", errors="backslashreplace").rstrip("\n")
819
828
  if line:
820
- self.log.info("Container logs: %s", line)
821
- except HTTPError as e:
829
+ self.log.info("[%s] logs: %s", self.base_container_name, line)
830
+ except (HTTPError, ApiException) as e:
822
831
  self.log.warning(
823
832
  "Reading of logs interrupted with error %r; will retry. "
824
833
  "Set log level to DEBUG for traceback.",
825
- e,
834
+ e if not isinstance(e, ApiException) else e.reason,
826
835
  )
827
836
 
828
- def post_complete_action(self, *, pod, remote_pod, **kwargs):
837
+ def post_complete_action(self, *, pod, remote_pod, **kwargs) -> None:
829
838
  """Actions that must be done after operator finishes logic of the deferrable_execution."""
830
839
  self.cleanup(
831
840
  pod=pod,
@@ -893,7 +902,7 @@ class KubernetesPodOperator(BaseOperator):
893
902
  )
894
903
  )
895
904
 
896
- def _read_pod_events(self, pod, *, reraise=True):
905
+ def _read_pod_events(self, pod, *, reraise=True) -> None:
897
906
  """Will fetch and emit events from pod."""
898
907
  with _optionally_suppress(reraise=reraise):
899
908
  for event in self.pod_manager.read_pod_events(pod).items:
@@ -941,15 +950,11 @@ class KubernetesPodOperator(BaseOperator):
941
950
  def process_pod_deletion(self, pod: k8s.V1Pod, *, reraise=True):
942
951
  with _optionally_suppress(reraise=reraise):
943
952
  if pod is not None:
944
- should_delete_pod = (
945
- (self.on_finish_action == OnFinishAction.DELETE_POD)
946
- or (
947
- self.on_finish_action == OnFinishAction.DELETE_SUCCEEDED_POD
948
- and pod.status.phase == PodPhase.SUCCEEDED
949
- )
950
- or (
951
- self.on_finish_action == OnFinishAction.DELETE_SUCCEEDED_POD
952
- and container_is_succeeded(pod, self.base_container_name)
953
+ should_delete_pod = (self.on_finish_action == OnFinishAction.DELETE_POD) or (
954
+ self.on_finish_action == OnFinishAction.DELETE_SUCCEEDED_POD
955
+ and (
956
+ pod.status.phase == PodPhase.SUCCEEDED
957
+ or container_is_succeeded(pod, self.base_container_name)
953
958
  )
954
959
  )
955
960
  if should_delete_pod:
@@ -966,8 +971,8 @@ class KubernetesPodOperator(BaseOperator):
966
971
  label_strings = [f"{label_id}={label}" for label_id, label in sorted(labels.items())]
967
972
  labels_value = ",".join(label_strings)
968
973
  if exclude_checked:
969
- labels_value += f",{self.POD_CHECKED_KEY}!=True"
970
- labels_value += ",!airflow-worker"
974
+ labels_value = f"{labels_value},{self.POD_CHECKED_KEY}!=True"
975
+ labels_value = f"{labels_value},!airflow-worker"
971
976
  return labels_value
972
977
 
973
978
  @staticmethod
@@ -1129,6 +1134,10 @@ class KubernetesPodOperator(BaseOperator):
1129
1134
  pod = self.build_pod_request_obj()
1130
1135
  print(yaml.dump(prune_dict(pod.to_dict(), mode="strict")))
1131
1136
 
1137
+ @deprecated(reason="use `trigger_reentry` instead.", category=AirflowProviderDeprecationWarning)
1138
+ def execute_complete(self, context: Context, event: dict, **kwargs):
1139
+ return self.trigger_reentry(context=context, event=event)
1140
+
1132
1141
 
1133
1142
  class _optionally_suppress(AbstractContextManager):
1134
1143
  """
@@ -1142,7 +1151,7 @@ class _optionally_suppress(AbstractContextManager):
1142
1151
  :meta private:
1143
1152
  """
1144
1153
 
1145
- def __init__(self, *exceptions, reraise=False):
1154
+ def __init__(self, *exceptions, reraise: bool = False) -> None:
1146
1155
  self._exceptions = exceptions or (Exception,)
1147
1156
  self.reraise = reraise
1148
1157
  self.exception = None
@@ -1150,7 +1159,7 @@ class _optionally_suppress(AbstractContextManager):
1150
1159
  def __enter__(self):
1151
1160
  return self
1152
1161
 
1153
- def __exit__(self, exctype, excinst, exctb):
1162
+ def __exit__(self, exctype, excinst, exctb) -> bool:
1154
1163
  error = exctype is not None
1155
1164
  matching_error = error and issubclass(exctype, self._exceptions)
1156
1165
  if (error and not matching_error) or (matching_error and self.reraise):
@@ -22,12 +22,14 @@ import os
22
22
  from functools import cached_property
23
23
  from typing import TYPE_CHECKING, Sequence
24
24
 
25
+ import tenacity
25
26
  import yaml
26
27
  from kubernetes.utils import create_from_yaml
27
28
 
28
29
  from airflow.exceptions import AirflowException
29
30
  from airflow.models import BaseOperator
30
31
  from airflow.providers.cncf.kubernetes.hooks.kubernetes import KubernetesHook
32
+ from airflow.providers.cncf.kubernetes.kubernetes_helper_functions import should_retry_creation
31
33
  from airflow.providers.cncf.kubernetes.utils.delete_from import delete_from_yaml
32
34
  from airflow.providers.cncf.kubernetes.utils.k8s_resource_iterator import k8s_resource_iterator
33
35
 
@@ -126,7 +128,14 @@ class KubernetesCreateResourceOperator(KubernetesResourceBaseOperator):
126
128
  else:
127
129
  self.custom_object_client.create_cluster_custom_object(group, version, plural, body)
128
130
 
131
+ @tenacity.retry(
132
+ stop=tenacity.stop_after_attempt(3),
133
+ wait=tenacity.wait_random_exponential(),
134
+ reraise=True,
135
+ retry=tenacity.retry_if_exception(should_retry_creation),
136
+ )
129
137
  def _create_objects(self, objects):
138
+ self.log.info("Starting resource creation")
130
139
  if not self.custom_resource_definition:
131
140
  create_from_yaml(
132
141
  k8s_client=self.client,
@@ -144,6 +153,7 @@ class KubernetesCreateResourceOperator(KubernetesResourceBaseOperator):
144
153
  self._create_objects(yaml.safe_load_all(stream))
145
154
  else:
146
155
  raise AirflowException("File %s not found", self.yaml_conf_file)
156
+ self.log.info("Resource was created")
147
157
 
148
158
 
149
159
  class KubernetesDeleteResourceOperator(KubernetesResourceBaseOperator):
@@ -45,7 +45,6 @@ from airflow.exceptions import (
45
45
  )
46
46
  from airflow.providers.cncf.kubernetes.kubernetes_helper_functions import (
47
47
  POD_NAME_MAX_LENGTH,
48
- add_pod_suffix,
49
48
  add_unique_suffix,
50
49
  rand_str,
51
50
  )
@@ -156,12 +155,15 @@ class PodGenerator:
156
155
  # Attach sidecar
157
156
  self.extract_xcom = extract_xcom
158
157
 
159
- @deprecated(reason="This function is deprecated.", category=AirflowProviderDeprecationWarning)
158
+ @deprecated(
159
+ reason="This method is deprecated and will be removed in the future releases",
160
+ category=AirflowProviderDeprecationWarning,
161
+ )
160
162
  def gen_pod(self) -> k8s.V1Pod:
161
163
  """Generate pod."""
162
164
  result = self.ud_pod
163
165
 
164
- result.metadata.name = add_pod_suffix(pod_name=result.metadata.name)
166
+ result.metadata.name = add_unique_suffix(name=result.metadata.name)
165
167
 
166
168
  if self.extract_xcom:
167
169
  result = self.add_xcom_sidecar(result)
@@ -210,8 +212,8 @@ class PodGenerator:
210
212
  return k8s_object
211
213
  elif isinstance(k8s_legacy_object, dict):
212
214
  warnings.warn(
213
- "Using a dictionary for the executor_config is deprecated and will soon be removed."
214
- 'please use a `kubernetes.client.models.V1Pod` class with a "pod_override" key'
215
+ "Using a dictionary for the executor_config is deprecated and will soon be removed. "
216
+ 'Please use a `kubernetes.client.models.V1Pod` class with a "pod_override" key'
215
217
  " instead. ",
216
218
  category=AirflowProviderDeprecationWarning,
217
219
  stacklevel=2,
@@ -575,7 +577,7 @@ class PodGenerator:
575
577
 
576
578
  @staticmethod
577
579
  @deprecated(
578
- reason="This function is deprecated. Use `add_pod_suffix` in `kubernetes_helper_functions`.",
580
+ reason="This method is deprecated. Use `add_pod_suffix` in `kubernetes_helper_functions`.",
579
581
  category=AirflowProviderDeprecationWarning,
580
582
  )
581
583
  def make_unique_pod_id(pod_id: str) -> str | None:
@@ -595,12 +597,6 @@ class PodGenerator:
595
597
  :param pod_id: requested pod name
596
598
  :return: ``str`` valid Pod name of appropriate length
597
599
  """
598
- warnings.warn(
599
- "This function is deprecated. Use `add_pod_suffix` in `kubernetes_helper_functions`.",
600
- AirflowProviderDeprecationWarning,
601
- stacklevel=2,
602
- )
603
-
604
600
  if not pod_id:
605
601
  return None
606
602
 
@@ -24,9 +24,7 @@ from kubernetes.client.api_client import ApiClient
24
24
 
25
25
  from airflow.exceptions import AirflowException
26
26
  from airflow.providers.cncf.kubernetes.kube_config import KubeConfig
27
- from airflow.providers.cncf.kubernetes.kubernetes_helper_functions import (
28
- create_pod_id,
29
- )
27
+ from airflow.providers.cncf.kubernetes.kubernetes_helper_functions import create_unique_id
30
28
  from airflow.providers.cncf.kubernetes.pod_generator import PodGenerator
31
29
  from airflow.utils.session import NEW_SESSION, provide_session
32
30
 
@@ -43,7 +41,7 @@ def render_k8s_pod_yaml(task_instance: TaskInstance) -> dict | None:
43
41
  task_id=task_instance.task_id,
44
42
  map_index=task_instance.map_index,
45
43
  date=None,
46
- pod_id=create_pod_id(task_instance.dag_id, task_instance.task_id),
44
+ pod_id=create_unique_id(task_instance.dag_id, task_instance.task_id),
47
45
  try_number=task_instance.try_number,
48
46
  kube_image=kube_config.kube_image,
49
47
  args=task_instance.command_as_list(),
@@ -0,0 +1,16 @@
1
+ # Licensed to the Apache Software Foundation (ASF) under one
2
+ # or more contributor license agreements. See the NOTICE file
3
+ # distributed with this work for additional information
4
+ # regarding copyright ownership. The ASF licenses this file
5
+ # to you under the Apache License, Version 2.0 (the
6
+ # "License"); you may not use this file except in compliance
7
+ # with the License. You may obtain a copy of the License at
8
+ #
9
+ # http://www.apache.org/licenses/LICENSE-2.0
10
+ #
11
+ # Unless required by applicable law or agreed to in writing,
12
+ # software distributed under the License is distributed on an
13
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14
+ # KIND, either express or implied. See the License for the
15
+ # specific language governing permissions and limitations
16
+ # under the License.
@@ -516,7 +516,7 @@ class PodManager(LoggingMixin):
516
516
  # a timeout is a normal thing and we ignore it and resume following logs
517
517
  if not isinstance(exc, TimeoutError):
518
518
  self.log.warning(
519
- "Pod %s log read interrupted but container %s still running",
519
+ "Pod %s log read interrupted but container %s still running. Logs generated in the last one second might get duplicated.",
520
520
  pod.metadata.name,
521
521
  container_name,
522
522
  )
@@ -28,7 +28,7 @@ build-backend = "flit_core.buildapi"
28
28
 
29
29
  [project]
30
30
  name = "apache-airflow-providers-cncf-kubernetes"
31
- version = "8.2.0"
31
+ version = "8.3.0"
32
32
  description = "Provider package apache-airflow-providers-cncf-kubernetes for Apache Airflow"
33
33
  readme = "README.rst"
34
34
  authors = [
@@ -66,8 +66,8 @@ dependencies = [
66
66
  ]
67
67
 
68
68
  [project.urls]
69
- "Documentation" = "https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/8.2.0"
70
- "Changelog" = "https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/8.2.0/changelog.html"
69
+ "Documentation" = "https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/8.3.0"
70
+ "Changelog" = "https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/8.3.0/changelog.html"
71
71
  "Bug Tracker" = "https://github.com/apache/airflow/issues"
72
72
  "Source Code" = "https://github.com/apache/airflow"
73
73
  "Slack Chat" = "https://s.apache.org/airflow-slack"