apache-airflow-providers-cncf-kubernetes 7.9.0rc1__py3-none-any.whl → 7.10.0__py3-none-any.whl

This diff compares the contents of two publicly released versions of this package, as published to a supported registry. It is provided for informational purposes only and reflects the changes between the package versions exactly as they appear in their public registries.

Potentially problematic release: this version of apache-airflow-providers-cncf-kubernetes might be problematic.

Files changed (21)
  1. {apache_airflow_providers_cncf_kubernetes-7.9.0rc1.dist-info → airflow/providers/cncf/kubernetes}/LICENSE +52 -0
  2. airflow/providers/cncf/kubernetes/__init__.py +4 -5
  3. airflow/providers/cncf/kubernetes/executors/kubernetes_executor.py +109 -106
  4. airflow/providers/cncf/kubernetes/get_provider_info.py +4 -2
  5. airflow/providers/cncf/kubernetes/kubernetes_executor_templates/basic_template.yaml +79 -0
  6. airflow/providers/cncf/kubernetes/operators/pod.py +2 -1
  7. airflow/providers/cncf/kubernetes/operators/resource.py +51 -16
  8. airflow/providers/cncf/kubernetes/pod_generator.py +2 -2
  9. airflow/providers/cncf/kubernetes/pod_template_file_examples/dags_in_image_template.yaml +68 -0
  10. airflow/providers/cncf/kubernetes/pod_template_file_examples/dags_in_volume_template.yaml +74 -0
  11. airflow/providers/cncf/kubernetes/pod_template_file_examples/git_sync_template.yaml +95 -0
  12. airflow/providers/cncf/kubernetes/python_kubernetes_script.py +6 -1
  13. airflow/providers/cncf/kubernetes/utils/k8s_resource_iterator.py +46 -0
  14. airflow/providers/cncf/kubernetes/utils/pod_manager.py +27 -25
  15. {apache_airflow_providers_cncf_kubernetes-7.9.0rc1.dist-info → apache_airflow_providers_cncf_kubernetes-7.10.0.dist-info}/METADATA +32 -29
  16. {apache_airflow_providers_cncf_kubernetes-7.9.0rc1.dist-info → apache_airflow_providers_cncf_kubernetes-7.10.0.dist-info}/RECORD +18 -15
  17. {apache_airflow_providers_cncf_kubernetes-7.9.0rc1.dist-info → apache_airflow_providers_cncf_kubernetes-7.10.0.dist-info}/WHEEL +1 -2
  18. apache_airflow_providers_cncf_kubernetes-7.10.0.dist-info/entry_points.txt +3 -0
  19. apache_airflow_providers_cncf_kubernetes-7.9.0rc1.dist-info/NOTICE +0 -6
  20. apache_airflow_providers_cncf_kubernetes-7.9.0rc1.dist-info/entry_points.txt +0 -2
  21. apache_airflow_providers_cncf_kubernetes-7.9.0rc1.dist-info/top_level.txt +0 -1
{apache_airflow_providers_cncf_kubernetes-7.9.0rc1.dist-info → airflow/providers/cncf/kubernetes}/LICENSE
@@ -199,3 +199,55 @@ distributed under the License is distributed on an "AS IS" BASIS,
  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  See the License for the specific language governing permissions and
  limitations under the License.
+
+ ============================================================================
+ APACHE AIRFLOW SUBCOMPONENTS:
+
+ The Apache Airflow project contains subcomponents with separate copyright
+ notices and license terms. Your use of the source code for the these
+ subcomponents is subject to the terms and conditions of the following
+ licenses.
+
+
+ ========================================================================
+ Third party Apache 2.0 licenses
+ ========================================================================
+
+ The following components are provided under the Apache 2.0 License.
+ See project link for details. The text of each license is also included
+ at licenses/LICENSE-[project].txt.
+
+ (ALv2 License) hue v4.3.0 (https://github.com/cloudera/hue/)
+ (ALv2 License) jqclock v2.3.0 (https://github.com/JohnRDOrazio/jQuery-Clock-Plugin)
+ (ALv2 License) bootstrap3-typeahead v4.0.2 (https://github.com/bassjobsen/Bootstrap-3-Typeahead)
+ (ALv2 License) connexion v2.7.0 (https://github.com/zalando/connexion)
+
+ ========================================================================
+ MIT licenses
+ ========================================================================
+
+ The following components are provided under the MIT License. See project link for details.
+ The text of each license is also included at licenses/LICENSE-[project].txt.
+
+ (MIT License) jquery v3.5.1 (https://jquery.org/license/)
+ (MIT License) dagre-d3 v0.6.4 (https://github.com/cpettitt/dagre-d3)
+ (MIT License) bootstrap v3.4.1 (https://github.com/twbs/bootstrap/)
+ (MIT License) d3-tip v0.9.1 (https://github.com/Caged/d3-tip)
+ (MIT License) dataTables v1.10.25 (https://datatables.net)
+ (MIT License) normalize.css v3.0.2 (http://necolas.github.io/normalize.css/)
+ (MIT License) ElasticMock v1.3.2 (https://github.com/vrcmarcos/elasticmock)
+ (MIT License) MomentJS v2.24.0 (http://momentjs.com/)
+ (MIT License) eonasdan-bootstrap-datetimepicker v4.17.49 (https://github.com/eonasdan/bootstrap-datetimepicker/)
+
+ ========================================================================
+ BSD 3-Clause licenses
+ ========================================================================
+ The following components are provided under the BSD 3-Clause license. See project links for details.
+ The text of each license is also included at licenses/LICENSE-[project].txt.
+
+ (BSD 3 License) d3 v5.16.0 (https://d3js.org)
+ (BSD 3 License) d3-shape v2.1.0 (https://github.com/d3/d3-shape)
+ (BSD 3 License) cgroupspy 0.2.1 (https://github.com/cloudsigma/cgroupspy)
+
+ ========================================================================
+ See licenses/LICENSES-ui.txt for packages used in `/airflow/www`

airflow/providers/cncf/kubernetes/__init__.py
@@ -1,4 +1,3 @@
- #
  # Licensed to the Apache Software Foundation (ASF) under one
  # or more contributor license agreements. See the NOTICE file
  # distributed with this work for additional information
@@ -19,8 +18,8 @@
  # NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE
  # OVERWRITTEN WHEN PREPARING DOCUMENTATION FOR THE PACKAGES.
  #
- # IF YOU WANT TO MODIFY IT, YOU SHOULD MODIFY THE TEMPLATE
- # `PROVIDER__INIT__PY_TEMPLATE.py.jinja2` IN the `dev/provider_packages` DIRECTORY
+ # IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE
+ # `PROVIDER__INIT__PY_TEMPLATE.py.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY
  #
  from __future__ import annotations

@@ -28,7 +27,7 @@ import packaging.version

  __all__ = ["__version__"]

- __version__ = "7.9.0"
+ __version__ = "7.10.0"

  try:
  from airflow import __version__ as airflow_version
@@ -39,5 +38,5 @@ if packaging.version.parse(packaging.version.parse(airflow_version).base_version
  "2.5.0"
  ):
  raise RuntimeError(
- f"The package `apache-airflow-providers-cncf-kubernetes:{__version__}` requires Apache Airflow 2.5.0+"
+ f"The package `apache-airflow-providers-cncf-kubernetes:{__version__}` needs Apache Airflow 2.5.0+"
  )

airflow/providers/cncf/kubernetes/executors/kubernetes_executor.py
@@ -37,6 +37,7 @@ from typing import TYPE_CHECKING, Any, Sequence
  from sqlalchemy import select, update

  from airflow.providers.cncf.kubernetes.pod_generator import PodMutationHookException, PodReconciliationError
+ from airflow.stats import Stats

  try:
  from airflow.cli.cli_config import (
@@ -209,89 +210,90 @@ class KubernetesExecutor(BaseExecutor):
  assert self.kube_client
  from airflow.models.taskinstance import TaskInstance

- self.log.debug("Clearing tasks that have not been launched")
- query = select(TaskInstance).where(
- TaskInstance.state == TaskInstanceState.QUEUED, TaskInstance.queued_by_job_id == self.job_id
- )
- if self.kubernetes_queue:
- query = query.where(TaskInstance.queue == self.kubernetes_queue)
- queued_tis: list[TaskInstance] = session.scalars(query).all()
- self.log.info("Found %s queued task instances", len(queued_tis))
-
- # Go through the "last seen" dictionary and clean out old entries
- allowed_age = self.kube_config.worker_pods_queued_check_interval * 3
- for key, timestamp in list(self.last_handled.items()):
- if time.time() - timestamp > allowed_age:
- del self.last_handled[key]
-
- if not queued_tis:
- return
-
- # airflow worker label selector batch call
- kwargs = {"label_selector": f"airflow-worker={self._make_safe_label_value(str(self.job_id))}"}
- if self.kube_config.kube_client_request_args:
- kwargs.update(self.kube_config.kube_client_request_args)
- pod_list = self._list_pods(kwargs)
-
- # create a set against pod query label fields
- label_search_set = set()
- for pod in pod_list:
- dag_id = pod.metadata.labels.get("dag_id", None)
- task_id = pod.metadata.labels.get("task_id", None)
- airflow_worker = pod.metadata.labels.get("airflow-worker", None)
- map_index = pod.metadata.labels.get("map_index", None)
- run_id = pod.metadata.labels.get("run_id", None)
- execution_date = pod.metadata.labels.get("execution_date", None)
- if dag_id is None or task_id is None or airflow_worker is None:
- continue
- label_search_base_str = f"dag_id={dag_id},task_id={task_id},airflow-worker={airflow_worker}"
- if map_index is not None:
- label_search_base_str += f",map_index={map_index}"
- if run_id is not None:
- label_search_str = f"{label_search_base_str},run_id={run_id}"
- label_search_set.add(label_search_str)
- if execution_date is not None:
- label_search_str = f"{label_search_base_str},execution_date={execution_date}"
- label_search_set.add(label_search_str)
-
- for ti in queued_tis:
- self.log.debug("Checking task instance %s", ti)
-
- # Check to see if we've handled it ourselves recently
- if ti.key in self.last_handled:
- continue
-
- # Build the pod selector
- base_label_selector = (
- f"dag_id={self._make_safe_label_value(ti.dag_id)},"
- f"task_id={self._make_safe_label_value(ti.task_id)},"
- f"airflow-worker={self._make_safe_label_value(str(ti.queued_by_job_id))}"
- )
- if ti.map_index >= 0:
- # Old tasks _couldn't_ be mapped, so we don't have to worry about compat
- base_label_selector += f",map_index={ti.map_index}"
-
- # Try run_id first
- label_search_str = f"{base_label_selector},run_id={self._make_safe_label_value(ti.run_id)}"
- if label_search_str in label_search_set:
- continue
- # Fallback to old style of using execution_date
- label_search_str = (
- f"{base_label_selector},execution_date={self._make_safe_label_value(ti.execution_date)}"
+ with Stats.timer("kubernetes_executor.clear_not_launched_queued_tasks.duration"):
+ self.log.debug("Clearing tasks that have not been launched")
+ query = select(TaskInstance).where(
+ TaskInstance.state == TaskInstanceState.QUEUED, TaskInstance.queued_by_job_id == self.job_id
  )
- if label_search_str in label_search_set:
- continue
- self.log.info("TaskInstance: %s found in queued state but was not launched, rescheduling", ti)
- session.execute(
- update(TaskInstance)
- .where(
- TaskInstance.dag_id == ti.dag_id,
- TaskInstance.task_id == ti.task_id,
- TaskInstance.run_id == ti.run_id,
- TaskInstance.map_index == ti.map_index,
+ if self.kubernetes_queue:
+ query = query.where(TaskInstance.queue == self.kubernetes_queue)
+ queued_tis: list[TaskInstance] = session.scalars(query).all()
+ self.log.info("Found %s queued task instances", len(queued_tis))
+
+ # Go through the "last seen" dictionary and clean out old entries
+ allowed_age = self.kube_config.worker_pods_queued_check_interval * 3
+ for key, timestamp in list(self.last_handled.items()):
+ if time.time() - timestamp > allowed_age:
+ del self.last_handled[key]
+
+ if not queued_tis:
+ return
+
+ # airflow worker label selector batch call
+ kwargs = {"label_selector": f"airflow-worker={self._make_safe_label_value(str(self.job_id))}"}
+ if self.kube_config.kube_client_request_args:
+ kwargs.update(self.kube_config.kube_client_request_args)
+ pod_list = self._list_pods(kwargs)
+
+ # create a set against pod query label fields
+ label_search_set = set()
+ for pod in pod_list:
+ dag_id = pod.metadata.labels.get("dag_id", None)
+ task_id = pod.metadata.labels.get("task_id", None)
+ airflow_worker = pod.metadata.labels.get("airflow-worker", None)
+ map_index = pod.metadata.labels.get("map_index", None)
+ run_id = pod.metadata.labels.get("run_id", None)
+ execution_date = pod.metadata.labels.get("execution_date", None)
+ if dag_id is None or task_id is None or airflow_worker is None:
+ continue
+ label_search_base_str = f"dag_id={dag_id},task_id={task_id},airflow-worker={airflow_worker}"
+ if map_index is not None:
+ label_search_base_str += f",map_index={map_index}"
+ if run_id is not None:
+ label_search_str = f"{label_search_base_str},run_id={run_id}"
+ label_search_set.add(label_search_str)
+ if execution_date is not None:
+ label_search_str = f"{label_search_base_str},execution_date={execution_date}"
+ label_search_set.add(label_search_str)
+
+ for ti in queued_tis:
+ self.log.debug("Checking task instance %s", ti)
+
+ # Check to see if we've handled it ourselves recently
+ if ti.key in self.last_handled:
+ continue
+
+ # Build the pod selector
+ base_label_selector = (
+ f"dag_id={self._make_safe_label_value(ti.dag_id)},"
+ f"task_id={self._make_safe_label_value(ti.task_id)},"
+ f"airflow-worker={self._make_safe_label_value(str(ti.queued_by_job_id))}"
+ )
+ if ti.map_index >= 0:
+ # Old tasks _couldn't_ be mapped, so we don't have to worry about compat
+ base_label_selector += f",map_index={ti.map_index}"
+
+ # Try run_id first
+ label_search_str = f"{base_label_selector},run_id={self._make_safe_label_value(ti.run_id)}"
+ if label_search_str in label_search_set:
+ continue
+ # Fallback to old style of using execution_date
+ label_search_str = (
+ f"{base_label_selector},execution_date={self._make_safe_label_value(ti.execution_date)}"
+ )
+ if label_search_str in label_search_set:
+ continue
+ self.log.info("TaskInstance: %s found in queued state but was not launched, rescheduling", ti)
+ session.execute(
+ update(TaskInstance)
+ .where(
+ TaskInstance.dag_id == ti.dag_id,
+ TaskInstance.task_id == ti.task_id,
+ TaskInstance.run_id == ti.run_id,
+ TaskInstance.map_index == ti.map_index,
+ )
+ .values(state=TaskInstanceState.SCHEDULED)
  )
- .values(state=TaskInstanceState.SCHEDULED)
- )

  def start(self) -> None:
  """Start the executor."""
@@ -534,31 +536,32 @@ class KubernetesExecutor(BaseExecutor):
  return messages, ["\n".join(log)]

  def try_adopt_task_instances(self, tis: Sequence[TaskInstance]) -> Sequence[TaskInstance]:
- # Always flush TIs without queued_by_job_id
- tis_to_flush = [ti for ti in tis if not ti.queued_by_job_id]
- scheduler_job_ids = {ti.queued_by_job_id for ti in tis}
- tis_to_flush_by_key = {ti.key: ti for ti in tis if ti.queued_by_job_id}
- kube_client: client.CoreV1Api = self.kube_client
- for scheduler_job_id in scheduler_job_ids:
- scheduler_job_id = self._make_safe_label_value(str(scheduler_job_id))
- # We will look for any pods owned by the no-longer-running scheduler,
- # but will exclude only successful pods, as those TIs will have a terminal state
- # and not be up for adoption!
- # Those workers that failed, however, are okay to adopt here as their TI will
- # still be in queued.
- query_kwargs = {
- "field_selector": "status.phase!=Succeeded",
- "label_selector": (
- "kubernetes_executor=True,"
- f"airflow-worker={scheduler_job_id},{POD_EXECUTOR_DONE_KEY}!=True"
- ),
- }
- pod_list = self._list_pods(query_kwargs)
- for pod in pod_list:
- self.adopt_launched_task(kube_client, pod, tis_to_flush_by_key)
- self._adopt_completed_pods(kube_client)
- tis_to_flush.extend(tis_to_flush_by_key.values())
- return tis_to_flush
+ with Stats.timer("kubernetes_executor.adopt_task_instances.duration"):
+ # Always flush TIs without queued_by_job_id
+ tis_to_flush = [ti for ti in tis if not ti.queued_by_job_id]
+ scheduler_job_ids = {ti.queued_by_job_id for ti in tis}
+ tis_to_flush_by_key = {ti.key: ti for ti in tis if ti.queued_by_job_id}
+ kube_client: client.CoreV1Api = self.kube_client
+ for scheduler_job_id in scheduler_job_ids:
+ scheduler_job_id = self._make_safe_label_value(str(scheduler_job_id))
+ # We will look for any pods owned by the no-longer-running scheduler,
+ # but will exclude only successful pods, as those TIs will have a terminal state
+ # and not be up for adoption!
+ # Those workers that failed, however, are okay to adopt here as their TI will
+ # still be in queued.
+ query_kwargs = {
+ "field_selector": "status.phase!=Succeeded",
+ "label_selector": (
+ "kubernetes_executor=True,"
+ f"airflow-worker={scheduler_job_id},{POD_EXECUTOR_DONE_KEY}!=True"
+ ),
+ }
+ pod_list = self._list_pods(query_kwargs)
+ for pod in pod_list:
+ self.adopt_launched_task(kube_client, pod, tis_to_flush_by_key)
+ self._adopt_completed_pods(kube_client)
+ tis_to_flush.extend(tis_to_flush_by_key.values())
+ return tis_to_flush

  def cleanup_stuck_queued_tasks(self, tis: list[TaskInstance]) -> list[str]:
  """

airflow/providers/cncf/kubernetes/get_provider_info.py
@@ -18,8 +18,8 @@
  # NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE
  # OVERWRITTEN WHEN PREPARING PACKAGES.
  #
- # IF YOU WANT TO MODIFY IT, YOU SHOULD MODIFY THE TEMPLATE
- # `get_provider_info_TEMPLATE.py.jinja2` IN the `provider_packages` DIRECTORY
+ # IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE
+ # `get_provider_info_TEMPLATE.py.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY


  def get_provider_info():
@@ -28,7 +28,9 @@ def get_provider_info():
  "name": "Kubernetes",
  "description": "`Kubernetes <https://kubernetes.io/>`__\n",
  "suspended": False,
+ "source-date-epoch": 1700827450,
  "versions": [
+ "7.10.0",
  "7.9.0",
  "7.8.0",
  "7.7.0",

airflow/providers/cncf/kubernetes/kubernetes_executor_templates/basic_template.yaml
@@ -0,0 +1,79 @@
+ # Licensed to the Apache Software Foundation (ASF) under one
+ # or more contributor license agreements. See the NOTICE file
+ # distributed with this work for additional information
+ # regarding copyright ownership. The ASF licenses this file
+ # to you under the Apache License, Version 2.0 (the
+ # "License"); you may not use this file except in compliance
+ # with the License. You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing,
+ # software distributed under the License is distributed on an
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ # KIND, either express or implied. See the License for the
+ # specific language governing permissions and limitations
+ # under the License.
+ ---
+ kind: Pod
+ apiVersion: v1
+ metadata:
+ name: placeholder-name-dont-delete
+ namespace: placeholder-name-dont-delete
+ labels:
+ mylabel: foo
+ spec:
+ containers:
+ - name: base
+ image: placeholder-name-dont-delete
+ env:
+ - name: AIRFLOW__CORE__EXECUTOR
+ value: LocalExecutor
+ - name: AIRFLOW_HOME
+ value: /opt/airflow
+ - name: AIRFLOW__CORE__DAGS_FOLDER
+ value: /opt/airflow/dags
+ - name: AIRFLOW__CORE__FERNET_KEY
+ valueFrom:
+ secretKeyRef:
+ name: airflow-fernet-key
+ key: fernet-key
+ - name: AIRFLOW__DATABASE__SQL_ALCHEMY_CONN
+ valueFrom:
+ secretKeyRef:
+ name: airflow-airflow-metadata
+ key: connection
+ - name: foo
+ value: bar
+ resources: {}
+ volumeMounts:
+ - name: airflow-logs
+ mountPath: /opt/airflow/logs
+ - name: airflow-config
+ readOnly: true
+ mountPath: /opt/airflow/airflow.cfg
+ subPath: airflow.cfg
+ - name: airflow-config
+ readOnly: true
+ mountPath: /opt/airflow/config/airflow_local_settings.py
+ subPath: airflow_local_settings.py
+ terminationMessagePath: /dev/termination-log
+ terminationMessagePolicy: File
+ imagePullPolicy: IfNotPresent
+ volumes:
+ - name: airflow-logs
+ emptyDir: {}
+ - name: airflow-config
+ configMap:
+ name: airflow-airflow-config
+ defaultMode: 420
+ restartPolicy: Never
+ terminationGracePeriodSeconds: 30
+ serviceAccountName: airflow-worker
+ serviceAccount: airflow-worker
+ securityContext:
+ runAsUser: 50000
+ fsGroup: 50000
+ imagePullSecrets:
+ - name: airflow-registry
+ schedulerName: default-scheduler

airflow/providers/cncf/kubernetes/operators/pod.py
@@ -191,7 +191,7 @@ class KubernetesPodOperator(BaseOperator):
  :param image_pull_policy: Specify a policy to cache or always pull an image.
  :param annotations: non-identifying metadata you can attach to the Pod.
  Can be a large range of data, and can include characters
- that are not permitted by labels.
+ that are not permitted by labels. (templated)
  :param container_resources: resources for the launched pod. (templated)
  :param affinity: affinity scheduling rules for the launched pod.
  :param config_file: The path to the Kubernetes config file. (templated)
@@ -261,6 +261,7 @@
  template_fields: Sequence[str] = (
  "image",
  "cmds",
+ "annotations",
  "arguments",
  "env_vars",
  "labels",

airflow/providers/cncf/kubernetes/operators/resource.py
@@ -27,9 +27,10 @@ from kubernetes.utils import create_from_yaml
  from airflow.models import BaseOperator
  from airflow.providers.cncf.kubernetes.hooks.kubernetes import KubernetesHook
  from airflow.providers.cncf.kubernetes.utils.delete_from import delete_from_yaml
+ from airflow.providers.cncf.kubernetes.utils.k8s_resource_iterator import k8s_resource_iterator

  if TYPE_CHECKING:
- from kubernetes.client import ApiClient
+ from kubernetes.client import ApiClient, CustomObjectsApi

  __all__ = ["KubernetesCreateResourceOperator", "KubernetesDeleteResourceOperator"]

@@ -45,11 +46,6 @@ class KubernetesResourceBaseOperator(BaseOperator):
  this parameter has no effect.
  :param kubernetes_conn_id: The :ref:`kubernetes connection id <howto/connection:kubernetes>`
  for the Kubernetes cluster.
- :param in_cluster: run kubernetes client with in_cluster configuration.
- :param cluster_context: context that points to kubernetes cluster.
- Ignored when in_cluster is True. If None, current-context is used.
- :param config_file: The path to the Kubernetes config file. (templated)
- If not specified, default value is ``~/.kube/config``
  """

  template_fields = ("yaml_conf",)
@@ -61,17 +57,23 @@
  yaml_conf: str,
  namespace: str | None = None,
  kubernetes_conn_id: str | None = KubernetesHook.default_conn_name,
+ custom_resource_definition: bool = False,
  **kwargs,
  ) -> None:
  super().__init__(**kwargs)
  self._namespace = namespace
  self.kubernetes_conn_id = kubernetes_conn_id
  self.yaml_conf = yaml_conf
+ self.custom_resource_definition = custom_resource_definition

  @cached_property
  def client(self) -> ApiClient:
  return self.hook.api_client

+ @cached_property
+ def custom_object_client(self) -> CustomObjectsApi:
+ return self.hook.custom_object_client
+
  @cached_property
  def hook(self) -> KubernetesHook:
  hook = KubernetesHook(conn_id=self.kubernetes_conn_id)
@@ -83,24 +85,57 @@ class KubernetesResourceBaseOperator(BaseOperator):
  else:
  return self.hook.get_namespace() or "default"

+ def get_crd_fields(self, body: dict) -> tuple[str, str, str, str]:
+ api_version = body["apiVersion"]
+ group = api_version[0 : api_version.find("/")]
+ version = api_version[api_version.find("/") + 1 :]
+
+ namespace = None
+ if body.get("metadata"):
+ metadata: dict = body.get("metadata", None)
+ namespace = metadata.get("namespace", None)
+ if namespace is None:
+ namespace = self.get_namespace()
+
+ plural = body["kind"].lower() + "s"
+
+ return group, version, namespace, plural
+

  class KubernetesCreateResourceOperator(KubernetesResourceBaseOperator):
  """Create a resource in a kubernetes."""

+ def create_custom_from_yaml_object(self, body: dict):
+ group, version, namespace, plural = self.get_crd_fields(body)
+ self.custom_object_client.create_namespaced_custom_object(group, version, namespace, plural, body)
+
  def execute(self, context) -> None:
- create_from_yaml(
- k8s_client=self.client,
- yaml_objects=yaml.safe_load_all(self.yaml_conf),
- namespace=self.get_namespace(),
- )
+ resources = yaml.safe_load_all(self.yaml_conf)
+ if not self.custom_resource_definition:
+ create_from_yaml(
+ k8s_client=self.client,
+ yaml_objects=resources,
+ namespace=self.get_namespace(),
+ )
+ else:
+ k8s_resource_iterator(self.create_custom_from_yaml_object, resources)


  class KubernetesDeleteResourceOperator(KubernetesResourceBaseOperator):
  """Delete a resource in a kubernetes."""

+ def delete_custom_from_yaml_object(self, body: dict):
+ name = body["metadata"]["name"]
+ group, version, namespace, plural = self.get_crd_fields(body)
+ self.custom_object_client.delete_namespaced_custom_object(group, version, namespace, plural, name)
+
  def execute(self, context) -> None:
- delete_from_yaml(
- k8s_client=self.client,
- yaml_objects=yaml.safe_load_all(self.yaml_conf),
- namespace=self.get_namespace(),
- )
+ resources = yaml.safe_load_all(self.yaml_conf)
+ if not self.custom_resource_definition:
+ delete_from_yaml(
+ k8s_client=self.client,
+ yaml_objects=resources,
+ namespace=self.get_namespace(),
+ )
+ else:
+ k8s_resource_iterator(self.delete_custom_from_yaml_object, resources)
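
For orientation, a hedged sketch of the new custom_resource_definition flag; the SparkApplication manifest is illustrative, and any custom-resource YAML works the same way. When the flag is set, the operator derives group/version/namespace/plural via get_crd_fields and routes through the CustomObjectsApi instead of create_from_yaml:

    from airflow.providers.cncf.kubernetes.operators.resource import (
        KubernetesCreateResourceOperator,
    )

    spark_app_yaml = """
    apiVersion: sparkoperator.k8s.io/v1beta2
    kind: SparkApplication
    metadata:
      name: spark-pi
      namespace: default
    spec: {}
    """

    create_crd = KubernetesCreateResourceOperator(
        task_id="create_spark_app",
        yaml_conf=spark_app_yaml,
        # Calls CustomObjectsApi.create_namespaced_custom_object with
        # ("sparkoperator.k8s.io", "v1beta2", "default", "sparkapplications", body).
        custom_resource_definition=True,
    )

Note that the plural is derived naively as kind.lower() + "s", so kinds with irregular plurals would not resolve to the right CRD endpoint.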

airflow/providers/cncf/kubernetes/pod_generator.py
@@ -434,8 +434,8 @@ class PodGenerator:
  )

  # Reconcile the pods starting with the first chronologically,
- # Pod from the pod_template_File -> Pod from executor_config arg -> Pod from the K8s executor
- pod_list = [base_worker_pod, pod_override_object, dynamic_pod]
+ # Pod from the pod_template_File -> Pod from the K8s executor -> Pod from executor_config arg
+ pod_list = [base_worker_pod, dynamic_pod, pod_override_object]

  try:
  pod = reduce(PodGenerator.reconcile_pods, pod_list)
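
Since reduce applies PodGenerator.reconcile_pods left to right and the second argument wins on conflicting fields, this reordering makes the executor_config pod override take precedence over executor-generated values. A sketch of the equivalence (pod names taken from the diff above):

    from functools import reduce

    # Equivalent to reconcile_pods(reconcile_pods(base_worker_pod, dynamic_pod), pod_override_object):
    # fields set in pod_override_object now override those from the dynamic executor pod.
    pod = reduce(PodGenerator.reconcile_pods, [base_worker_pod, dynamic_pod, pod_override_object])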

airflow/providers/cncf/kubernetes/pod_template_file_examples/dags_in_image_template.yaml
@@ -0,0 +1,68 @@
+ # Licensed to the Apache Software Foundation (ASF) under one
+ # or more contributor license agreements. See the NOTICE file
+ # distributed with this work for additional information
+ # regarding copyright ownership. The ASF licenses this file
+ # to you under the Apache License, Version 2.0 (the
+ # "License"); you may not use this file except in compliance
+ # with the License. You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing,
+ # software distributed under the License is distributed on an
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ # KIND, either express or implied. See the License for the
+ # specific language governing permissions and limitations
+ # under the License.
+ #
+ # This file is used for documentation purposes. Example can be found at docs/executor/kubernetes.rst
+ #
+ # [START template_with_dags_in_image]
+ ---
+ apiVersion: v1
+ kind: Pod
+ metadata:
+ name: placeholder-name
+ spec:
+ containers:
+ - env:
+ - name: AIRFLOW__CORE__EXECUTOR
+ value: LocalExecutor
+ # Hard Coded Airflow Envs
+ - name: AIRFLOW__CORE__FERNET_KEY
+ valueFrom:
+ secretKeyRef:
+ name: RELEASE-NAME-fernet-key
+ key: fernet-key
+ - name: AIRFLOW__DATABASE__SQL_ALCHEMY_CONN
+ valueFrom:
+ secretKeyRef:
+ name: RELEASE-NAME-airflow-metadata
+ key: connection
+ - name: AIRFLOW_CONN_AIRFLOW_DB
+ valueFrom:
+ secretKeyRef:
+ name: RELEASE-NAME-airflow-metadata
+ key: connection
+ image: dummy_image
+ imagePullPolicy: IfNotPresent
+ name: base
+ volumeMounts:
+ - mountPath: "/opt/airflow/logs"
+ name: airflow-logs
+ - mountPath: /opt/airflow/airflow.cfg
+ name: airflow-config
+ readOnly: true
+ subPath: airflow.cfg
+ restartPolicy: Never
+ securityContext:
+ runAsUser: 50000
+ fsGroup: 50000
+ serviceAccountName: "RELEASE-NAME-worker-serviceaccount"
+ volumes:
+ - emptyDir: {}
+ name: airflow-logs
+ - configMap:
+ name: RELEASE-NAME-airflow-config
+ name: airflow-config
+ # [END template_with_dags_in_image]

airflow/providers/cncf/kubernetes/pod_template_file_examples/dags_in_volume_template.yaml
@@ -0,0 +1,74 @@
+ # Licensed to the Apache Software Foundation (ASF) under one
+ # or more contributor license agreements. See the NOTICE file
+ # distributed with this work for additional information
+ # regarding copyright ownership. The ASF licenses this file
+ # to you under the Apache License, Version 2.0 (the
+ # "License"); you may not use this file except in compliance
+ # with the License. You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing,
+ # software distributed under the License is distributed on an
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ # KIND, either express or implied. See the License for the
+ # specific language governing permissions and limitations
+ # under the License.
+ #
+ # This file is used for documentation purposes. Example can be found at docs/executor/kubernetes.rst
+ #
+ # [START template_with_dags_in_volume]
+ ---
+ apiVersion: v1
+ kind: Pod
+ metadata:
+ name: placeholder-name
+ spec:
+ containers:
+ - env:
+ - name: AIRFLOW__CORE__EXECUTOR
+ value: LocalExecutor
+ # Hard Coded Airflow Envs
+ - name: AIRFLOW__CORE__FERNET_KEY
+ valueFrom:
+ secretKeyRef:
+ name: RELEASE-NAME-fernet-key
+ key: fernet-key
+ - name: AIRFLOW__DATABASE__SQL_ALCHEMY_CONN
+ valueFrom:
+ secretKeyRef:
+ name: RELEASE-NAME-airflow-metadata
+ key: connection
+ - name: AIRFLOW_CONN_AIRFLOW_DB
+ valueFrom:
+ secretKeyRef:
+ name: RELEASE-NAME-airflow-metadata
+ key: connection
+ image: dummy_image
+ imagePullPolicy: IfNotPresent
+ name: base
+ volumeMounts:
+ - mountPath: "/opt/airflow/logs"
+ name: airflow-logs
+ - mountPath: /opt/airflow/dags
+ name: airflow-dags
+ readOnly: true
+ - mountPath: /opt/airflow/airflow.cfg
+ name: airflow-config
+ readOnly: true
+ subPath: airflow.cfg
+ restartPolicy: Never
+ securityContext:
+ runAsUser: 50000
+ fsGroup: 50000
+ serviceAccountName: "RELEASE-NAME-worker-serviceaccount"
+ volumes:
+ - name: airflow-dags
+ persistentVolumeClaim:
+ claimName: RELEASE-NAME-dags
+ - emptyDir: {}
+ name: airflow-logs
+ - configMap:
+ name: RELEASE-NAME-airflow-config
+ name: airflow-config
+ # [END template_with_dags_in_volume]

airflow/providers/cncf/kubernetes/pod_template_file_examples/git_sync_template.yaml
@@ -0,0 +1,95 @@
+ # Licensed to the Apache Software Foundation (ASF) under one
+ # or more contributor license agreements. See the NOTICE file
+ # distributed with this work for additional information
+ # regarding copyright ownership. The ASF licenses this file
+ # to you under the Apache License, Version 2.0 (the
+ # "License"); you may not use this file except in compliance
+ # with the License. You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing,
+ # software distributed under the License is distributed on an
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ # KIND, either express or implied. See the License for the
+ # specific language governing permissions and limitations
+ # under the License.
+ #
+ # This file is used for documentation purposes. Example can be found at docs/executor/kubernetes.rst
+ #
+ # [START git_sync_template]
+ ---
+ apiVersion: v1
+ kind: Pod
+ metadata:
+ name: dummy-name
+ spec:
+ initContainers:
+ - name: git-sync
+ image: "registry.k8s.io/git-sync/git-sync:v3.6.3"
+ env:
+ - name: GIT_SYNC_BRANCH
+ value: "v2-2-stable"
+ - name: GIT_SYNC_REPO
+ value: "https://github.com/apache/airflow.git"
+ - name: GIT_SYNC_DEPTH
+ value: "1"
+ - name: GIT_SYNC_ROOT
+ value: "/git"
+ - name: GIT_SYNC_DEST
+ value: "repo"
+ - name: GIT_SYNC_ADD_USER
+ value: "true"
+ - name: GIT_SYNC_ONE_TIME
+ value: "true"
+ volumeMounts:
+ - name: airflow-dags
+ mountPath: /git
+ containers:
+ - env:
+ - name: AIRFLOW__CORE__EXECUTOR
+ value: LocalExecutor
+ # Hard Coded Airflow Envs
+ - name: AIRFLOW__CORE__FERNET_KEY
+ valueFrom:
+ secretKeyRef:
+ name: RELEASE-NAME-fernet-key
+ key: fernet-key
+ - name: AIRFLOW__DATABASE__SQL_ALCHEMY_CONN
+ valueFrom:
+ secretKeyRef:
+ name: RELEASE-NAME-airflow-metadata
+ key: connection
+ - name: AIRFLOW_CONN_AIRFLOW_DB
+ valueFrom:
+ secretKeyRef:
+ name: RELEASE-NAME-airflow-metadata
+ key: connection
+ image: dummy_image
+ imagePullPolicy: IfNotPresent
+ name: base
+ volumeMounts:
+ - mountPath: "/opt/airflow/logs"
+ name: airflow-logs
+ - mountPath: /opt/airflow/dags
+ name: airflow-dags
+ subPath: repo/airflow/example_dags
+ readOnly: false
+ - mountPath: /opt/airflow/airflow.cfg
+ name: airflow-config
+ readOnly: true
+ subPath: airflow.cfg
+ restartPolicy: Never
+ securityContext:
+ runAsUser: 50000
+ fsGroup: 50000
+ serviceAccountName: "RELEASE-NAME-worker-serviceaccount"
+ volumes:
+ - name: airflow-dags
+ emptyDir: {}
+ - name: airflow-logs
+ emptyDir: {}
+ - configMap:
+ name: RELEASE-NAME-airflow-config
+ name: airflow-config
+ # [END git_sync_template]

airflow/providers/cncf/kubernetes/python_kubernetes_script.py
@@ -22,6 +22,7 @@ import os
  from collections import deque

  import jinja2
+ from jinja2 import select_autoescape


  def _balance_parens(after_decorator):
@@ -83,6 +84,10 @@ def write_python_script(
  loader=template_loader, undefined=jinja2.StrictUndefined
  )
  else:
- template_env = jinja2.Environment(loader=template_loader, undefined=jinja2.StrictUndefined)
+ template_env = jinja2.Environment(
+ loader=template_loader,
+ undefined=jinja2.StrictUndefined,
+ autoescape=select_autoescape(["html", "xml"]),
+ )
  template = template_env.get_template("python_kubernetes_script.jinja2")
  template.stream(**jinja_context).dump(filename)
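
A note on the new autoescape argument: select_autoescape(["html", "xml"]) returns a guard function that enables escaping only for templates whose names end in .html or .xml, so rendering of python_kubernetes_script.jinja2 itself should be unchanged; the change hardens the environment against accidental HTML rendering. A tiny sketch (template names are hypothetical):

    from jinja2 import Environment, select_autoescape

    env = Environment(autoescape=select_autoescape(["html", "xml"]))
    # The guard is per-template-name: HTML/XML templates get escaped, others do not.
    assert env.autoescape("page.html") is True
    assert env.autoescape("script.jinja2") is False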

airflow/providers/cncf/kubernetes/utils/k8s_resource_iterator.py
@@ -0,0 +1,46 @@
+ # Licensed to the Apache Software Foundation (ASF) under one
+ # or more contributor license agreements. See the NOTICE file
+ # distributed with this work for additional information
+ # regarding copyright ownership. The ASF licenses this file
+ # to you under the Apache License, Version 2.0 (the
+ # "License"); you may not use this file except in compliance
+ # with the License. You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing,
+ # software distributed under the License is distributed on an
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ # KIND, either express or implied. See the License for the
+ # specific language governing permissions and limitations
+ # under the License.
+ from __future__ import annotations
+
+ from typing import Callable, Iterator
+
+ from kubernetes.utils import FailToCreateError
+
+ from airflow.providers.cncf.kubernetes.utils.delete_from import FailToDeleteError
+
+
+ def k8s_resource_iterator(callback: Callable[[dict], None], resources: Iterator) -> None:
+ failures: list = []
+ for data in resources:
+ if data is not None:
+ if "List" in data["kind"]:
+ kind = data["kind"].replace("List", "")
+ for yml_doc in data["items"]:
+ if kind != "":
+ yml_doc["apiVersion"] = data["apiVersion"]
+ yml_doc["kind"] = kind
+ try:
+ callback(yml_doc)
+ except (FailToCreateError, FailToDeleteError) as failure:
+ failures.extend(failure.api_exceptions)
+ else:
+ try:
+ callback(data)
+ except (FailToCreateError, FailToDeleteError) as failure:
+ failures.extend(failure.api_exceptions)
+ if failures:
+ raise FailToCreateError(failures)
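
A hedged usage sketch of the new helper with a multi-item *List manifest (the ConfigMapList below is illustrative): it unrolls the items, stamps each with the list's apiVersion and the singular kind, and aggregates create/delete failures into one exception.

    import yaml

    from airflow.providers.cncf.kubernetes.utils.k8s_resource_iterator import (
        k8s_resource_iterator,
    )

    manifest = """
    apiVersion: v1
    kind: ConfigMapList
    items:
      - metadata:
          name: cm-a
      - metadata:
          name: cm-b
    """

    def apply(body: dict) -> None:
        # Stand-in callback; the resource operators pass their create/delete methods here.
        print("would apply", body["kind"], body["metadata"]["name"])

    k8s_resource_iterator(apply, yaml.safe_load_all(manifest))
    # -> would apply ConfigMap cm-a
    # -> would apply ConfigMap cm-b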

airflow/providers/cncf/kubernetes/utils/pod_manager.py
@@ -25,8 +25,9 @@ import time
  import warnings
  from collections.abc import Iterable
  from contextlib import closing, suppress
+ from dataclasses import dataclass
  from datetime import timedelta
- from typing import TYPE_CHECKING, Callable, Generator, Literal, Protocol, cast
+ from typing import TYPE_CHECKING, Callable, Generator, Protocol, cast

  import pendulum
  import tenacity
@@ -35,6 +36,7 @@ from kubernetes.client.rest import ApiException
  from kubernetes.stream import stream as kubernetes_stream
  from pendulum import DateTime
  from pendulum.parsing.exceptions import ParserError
+ from typing_extensions import Literal
  from urllib3.exceptions import HTTPError as BaseHTTPError

  from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
@@ -271,6 +273,14 @@ class PodLogsConsumer:
  return self.read_pod_cache


+ @dataclass
+ class PodLoggingStatus:
+ """Return the status of the pod and last log time when exiting from `fetch_container_logs`."""
+
+ running: bool
+ last_log_time: DateTime | None
+
+
  class PodManager(LoggingMixin):
  """Create, monitor, and otherwise interact with Kubernetes pods for use with the KubernetesPodOperator."""

@@ -355,7 +365,7 @@
  raise PodLaunchFailedException(msg)
  time.sleep(startup_check_interval)

- def follow_container_logs(self, pod: V1Pod, container_name: str) -> None:
+ def follow_container_logs(self, pod: V1Pod, container_name: str) -> PodLoggingStatus:
  warnings.warn(
  "Method `follow_container_logs` is deprecated. Use `fetch_container_logs` instead"
  "with option `follow=True`.",
@@ -372,7 +382,7 @@
  follow=False,
  since_time: DateTime | None = None,
  post_termination_timeout: int = 120,
- ) -> None:
+ ) -> PodLoggingStatus:
  """
  Follow the logs of container and stream to airflow logging.

@@ -385,13 +395,7 @@
  :meta private:
  """

- def consume_logs(
- *,
- since_time: DateTime | None = None,
- follow: bool = True,
- termination_timeout: int = 120,
- logs: PodLogsConsumer | None,
- ) -> tuple[DateTime | None, PodLogsConsumer | None]:
+ def consume_logs(*, since_time: DateTime | None = None) -> DateTime | None:
  """
  Try to follow container logs until container completes.

@@ -448,30 +452,25 @@
  "Reading of logs interrupted for container %r; will retry.",
  container_name,
  )
- return last_captured_timestamp or since_time, logs
+ return last_captured_timestamp or since_time

  # note: `read_pod_logs` follows the logs, so we shouldn't necessarily *need* to
  # loop as we do here. But in a long-running process we might temporarily lose connectivity.
  # So the looping logic is there to let us resume following the logs.
- logs = None
  last_log_time = since_time
  while True:
- last_log_time, logs = consume_logs(
- since_time=last_log_time,
- follow=follow,
- termination_timeout=post_termination_timeout,
- logs=logs,
- )
+ last_log_time = consume_logs(since_time=last_log_time)
+ if not self.container_is_running(pod, container_name=container_name):
+ return PodLoggingStatus(running=False, last_log_time=last_log_time)
  if not follow:
- return
+ return PodLoggingStatus(running=True, last_log_time=last_log_time)
- if self.container_is_running(pod, container_name=container_name):
+ else:
  self.log.warning(
- "Follow requested but pod log read interrupted and container %s still running",
+ "Pod %s log read interrupted but container %s still running",
+ pod.metadata.name,
  container_name,
  )
  time.sleep(1)
- else: # follow requested, but container is done
- break

  def _reconcile_requested_log_containers(
  self, requested: Iterable[str] | str | bool, actual: list[str], pod_name
@@ -519,7 +518,7 @@ class PodManager(LoggingMixin):

  def fetch_requested_container_logs(
  self, pod: V1Pod, containers: Iterable[str] | str | Literal[True], follow_logs=False
- ) -> None:
+ ) -> list[PodLoggingStatus]:
  """
  Follow the logs of containers in the specified pod and publish it to airflow logging.

@@ -527,6 +526,7 @@

  :meta private:
  """
+ pod_logging_statuses = []
  all_containers = self.get_container_names(pod)
  containers_to_log = self._reconcile_requested_log_containers(
  requested=containers,
@@ -534,7 +534,9 @@
  pod_name=pod.metadata.name,
  )
  for c in containers_to_log:
- self.fetch_container_logs(pod=pod, container_name=c, follow=follow_logs)
+ status = self.fetch_container_logs(pod=pod, container_name=c, follow=follow_logs)
+ pod_logging_statuses.append(status)
+ return pod_logging_statuses

  def await_container_completion(self, pod: V1Pod, container_name: str) -> None:
  """

{apache_airflow_providers_cncf_kubernetes-7.9.0rc1.dist-info → apache_airflow_providers_cncf_kubernetes-7.10.0.dist-info}/METADATA
@@ -1,19 +1,12 @@
  Metadata-Version: 2.1
  Name: apache-airflow-providers-cncf-kubernetes
- Version: 7.9.0rc1
- Summary: Provider for Apache Airflow. Implements apache-airflow-providers-cncf-kubernetes package
- Home-page: https://airflow.apache.org/
- Download-URL: https://archive.apache.org/dist/airflow/providers
- Author: Apache Software Foundation
- Author-email: dev@airflow.apache.org
- License: Apache License 2.0
- Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/7.9.0/
- Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/7.9.0/changelog.html
- Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
- Project-URL: Source Code, https://github.com/apache/airflow
- Project-URL: Slack Chat, https://s.apache.org/airflow-slack
- Project-URL: Twitter, https://twitter.com/ApacheAirflow
- Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/
+ Version: 7.10.0
+ Summary: Provider package apache-airflow-providers-cncf-kubernetes for Apache Airflow
+ Keywords: airflow-provider,cncf.kubernetes,airflow,integration
+ Author-email: Apache Software Foundation <dev@airflow.apache.org>
+ Maintainer-email: Apache Software Foundation <dev@airflow.apache.org>
+ Requires-Python: ~=3.8
+ Description-Content-Type: text/x-rst
  Classifier: Development Status :: 5 - Production/Stable
  Classifier: Environment :: Console
  Classifier: Environment :: Web Environment
@@ -27,17 +20,20 @@ Classifier: Programming Language :: Python :: 3.9
  Classifier: Programming Language :: Python :: 3.10
  Classifier: Programming Language :: Python :: 3.11
  Classifier: Topic :: System :: Monitoring
- Requires-Python: ~=3.8
- Description-Content-Type: text/x-rst
- License-File: LICENSE
- License-File: NOTICE
- Requires-Dist: aiofiles >=23.2.0
- Requires-Dist: apache-airflow >=2.5.0.dev0
- Requires-Dist: asgiref >=3.5.2
- Requires-Dist: cryptography >=2.0.0
- Requires-Dist: google-re2 >=1.0
- Requires-Dist: kubernetes <24,>=21.7.0
- Requires-Dist: kubernetes-asyncio <25,>=18.20.1
+ Requires-Dist: aiofiles>=23.2.0
+ Requires-Dist: apache-airflow>=2.5.0
+ Requires-Dist: asgiref>=3.5.2
+ Requires-Dist: cryptography>=2.0.0
+ Requires-Dist: google-re2>=1.0
+ Requires-Dist: kubernetes>=21.7.0,<24
+ Requires-Dist: kubernetes_asyncio>=18.20.1,<25
+ Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
+ Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/7.10.0/changelog.html
+ Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/7.10.0
+ Project-URL: Slack Chat, https://s.apache.org/airflow-slack
+ Project-URL: Source Code, https://github.com/apache/airflow
+ Project-URL: Twitter, https://twitter.com/ApacheAirflow
+ Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/


  .. Licensed to the Apache Software Foundation (ASF) under one
@@ -56,7 +52,8 @@ Requires-Dist: kubernetes-asyncio <25,>=18.20.1
  KIND, either express or implied. See the License for the
  specific language governing permissions and limitations
  under the License.
- .. Licensed to the Apache Software Foundation (ASF) under one
+
+ .. Licensed to the Apache Software Foundation (ASF) under one
  or more contributor license agreements. See the NOTICE file
  distributed with this work for additional information
  regarding copyright ownership. The ASF licenses this file
@@ -73,10 +70,16 @@ Requires-Dist: kubernetes-asyncio <25,>=18.20.1
  specific language governing permissions and limitations
  under the License.

+ .. NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE
+ OVERWRITTEN WHEN PREPARING PACKAGES.
+
+ .. IF YOU WANT TO MODIFY TEMPLATE FOR THIS FILE, YOU SHOULD MODIFY THE TEMPLATE
+ `PROVIDER_README_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY
+

  Package ``apache-airflow-providers-cncf-kubernetes``

- Release: ``7.9.0rc1``
+ Release: ``7.10.0``


  `Kubernetes <https://kubernetes.io/>`__
@@ -89,7 +92,7 @@ This is a provider package for ``cncf.kubernetes`` provider. All classes for thi
  are in ``airflow.providers.cncf.kubernetes`` python package.

  You can find package information and changelog for the provider
- in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/7.9.0/>`_.
+ in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/7.10.0/>`_.

  Installation
  ------------
@@ -116,4 +119,4 @@ PIP package Version required
  ====================== ==================

  The changelog for the provider package can be found in the
- `changelog <https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/7.9.0/changelog.html>`_.
+ `changelog <https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/7.10.0/changelog.html>`_.

{apache_airflow_providers_cncf_kubernetes-7.9.0rc1.dist-info → apache_airflow_providers_cncf_kubernetes-7.10.0.dist-info}/RECORD
@@ -1,14 +1,15 @@
- airflow/providers/cncf/kubernetes/__init__.py,sha256=7QxSSCvzKNa5ExtZT7g4sc6BJ-l72rv8JdCkSQ6thc4,1570
- airflow/providers/cncf/kubernetes/get_provider_info.py,sha256=F2RPcfOvau-_vGFxcYcIORiUXPyj-sgqq4F3TJ7ep2U,16113
+ airflow/providers/cncf/kubernetes/LICENSE,sha256=ywUBpKZc7Jb96rVt5I3IDbg7dIJAbUSHkuoDcF3jbH4,13569
+ airflow/providers/cncf/kubernetes/__init__.py,sha256=Dz8xRkqVdfc6O0llrEMtUzUtUhnJFqusQ70CGJPnCfw,1591
+ airflow/providers/cncf/kubernetes/get_provider_info.py,sha256=Mb0cvjjCrIJY2UwkLQ9EDHYX7pxmlJrFOrHBkbGSa5E,16205
  airflow/providers/cncf/kubernetes/k8s_model.py,sha256=JzpSjHdCBpajT5HohKhvYp4vZ9emf7A6AVmte8tI4T0,2100
  airflow/providers/cncf/kubernetes/kube_client.py,sha256=nL9daGLElvX4f72rWvONRN-VUbrOPzjsElix6xfkcXU,5328
  airflow/providers/cncf/kubernetes/kube_config.py,sha256=SZhMYmCJACkzxEFe0vcCW0m1XmoFpmDaIYf_Rl_uycA,4851
  airflow/providers/cncf/kubernetes/kubernetes_helper_functions.py,sha256=vZGvWDBLogDFF4Lv1_x3Xd5KbgpqB2jFYq_x0Ect30w,4740
- airflow/providers/cncf/kubernetes/pod_generator.py,sha256=5iBsguS1sCbg65dN1OrnKTRy2rdyn5cKGtdCmW9voOU,24050
+ airflow/providers/cncf/kubernetes/pod_generator.py,sha256=1luGs87070GLWVk7iS3s5xdUCkJesqh1WpiJ8VELlIg,24050
  airflow/providers/cncf/kubernetes/pod_generator_deprecated.py,sha256=ar89bJQtZbvOEAx6TE7iJ49fmvrQW368KROYG4d4X28,12126
  airflow/providers/cncf/kubernetes/pod_launcher_deprecated.py,sha256=3XQCOR4-KwVDV6tWVlsm7FksdT7Z9H54dZglPlSjNKQ,11944
  airflow/providers/cncf/kubernetes/python_kubernetes_script.jinja2,sha256=gUGBhBTFWIXjnjxTAjawWIacDwk2EDxoGEzVQYnlUT8,1741
- airflow/providers/cncf/kubernetes/python_kubernetes_script.py,sha256=d7Rw-R9o9WgCxv59Z1kTIRRbmdGtIFhAZHtHD3iJ3hA,3321
+ airflow/providers/cncf/kubernetes/python_kubernetes_script.py,sha256=yLIWsB1HvGlmU74G_QDXRwRJS02gH7RlgxecJtLxYug,3452
  airflow/providers/cncf/kubernetes/secret.py,sha256=U06v6pSp_9GAsokC_gu5JGN298Ud9DN9YwpQZqmQ9fI,5208
  airflow/providers/cncf/kubernetes/template_rendering.py,sha256=ld9iFK2VdzoCTqTnc5tSp7Tp0UkveaepFQavRn2ISt0,2968
  airflow/providers/cncf/kubernetes/backcompat/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
@@ -16,19 +17,23 @@ airflow/providers/cncf/kubernetes/backcompat/backwards_compat_converters.py,sha2
  airflow/providers/cncf/kubernetes/decorators/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
  airflow/providers/cncf/kubernetes/decorators/kubernetes.py,sha256=CwzOZFPnxt66GgfsMBY-lePLErDidovxB2SOjkX47wI,6140
  airflow/providers/cncf/kubernetes/executors/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
- airflow/providers/cncf/kubernetes/executors/kubernetes_executor.py,sha256=vzkXFwVlPYmIMudc_GLmHDX9Upf5Vl1lke7ShIw3ACc,31744
+ airflow/providers/cncf/kubernetes/executors/kubernetes_executor.py,sha256=_IA3hHAPdLRTtl32I_Zt2u35KIULyc4vm9s6Wr7-HSk,32345
  airflow/providers/cncf/kubernetes/executors/kubernetes_executor_types.py,sha256=bPLwDIY5OpL7BtoLHQMwnKCjrkQuG1IgDWet8U1A6f0,1622
  airflow/providers/cncf/kubernetes/executors/kubernetes_executor_utils.py,sha256=xOV3dlF2MkZiJB3dBuhkI22yjbrs1VvVEhFdpyjhJ2Q,21255
  airflow/providers/cncf/kubernetes/executors/local_kubernetes_executor.py,sha256=r5cvhj9NUTreQScn3Y8NlPFoYIa_NaAVQ27WqtOrJvw,10014
  airflow/providers/cncf/kubernetes/hooks/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
  airflow/providers/cncf/kubernetes/hooks/kubernetes.py,sha256=PyTgNGydKEIcrZ2urXzE-v4LdbIgGqb_kFK7uaVg5bI,23994
  airflow/providers/cncf/kubernetes/kubernetes_executor_templates/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
+ airflow/providers/cncf/kubernetes/kubernetes_executor_templates/basic_template.yaml,sha256=yzJmXN4ZyB4aDwI_GIugpL9-f1YMVy__X-LQSbeU95A,2567
  airflow/providers/cncf/kubernetes/operators/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
  airflow/providers/cncf/kubernetes/operators/kubernetes_pod.py,sha256=XvJgehU-4ZubJZ2vsekHX4DlCLlzBttXuZQlpVZZ2Ro,1262
- airflow/providers/cncf/kubernetes/operators/pod.py,sha256=4nPfKh70KqL0AuN9du7FHm4r5oTR10ChKhOSVw78Ho8,45342
- airflow/providers/cncf/kubernetes/operators/resource.py,sha256=f74SdXJOhtMMAQ-icJoaxm03jPRhht5fb54KXbROv_c,3883
+ airflow/providers/cncf/kubernetes/operators/pod.py,sha256=S4cFGEErkoGVfrvkvtAiG7TLWGZOkbS4l8RLHlMXAX0,45377
+ airflow/providers/cncf/kubernetes/operators/resource.py,sha256=ie0QspONlLBah-3hCUGNX85jWr7DvUf4wo77d8wMkOA,5348
  airflow/providers/cncf/kubernetes/operators/spark_kubernetes.py,sha256=GZdDaxyuUX40il1kP2zyBu8F203iyIuMXUks9WyXhmA,7490
  airflow/providers/cncf/kubernetes/pod_template_file_examples/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
+ airflow/providers/cncf/kubernetes/pod_template_file_examples/dags_in_image_template.yaml,sha256=7JdppZ-XDBpv2Bnde2SthhcME8w3b8xQdPAK1fJGW60,2256
+ airflow/providers/cncf/kubernetes/pod_template_file_examples/dags_in_volume_template.yaml,sha256=-Pk_EwKpyWRYZKOnumUxVrDeAfFJ0nr3WZ7JNnvppzg,2442
+ airflow/providers/cncf/kubernetes/pod_template_file_examples/git_sync_template.yaml,sha256=Pxpa1AiBlf4H8aIc7tUTmH2XNOz84cO0ttMQdlfMJ2c,3020
  airflow/providers/cncf/kubernetes/sensors/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
  airflow/providers/cncf/kubernetes/sensors/spark_kubernetes.py,sha256=AFml2CgXTV13G2VGrmNAIh-778FnRZdyLfverw92Uo0,5552
  airflow/providers/cncf/kubernetes/triggers/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
@@ -37,12 +42,10 @@ airflow/providers/cncf/kubernetes/triggers/pod.py,sha256=qYk2GJtspgsbeTBt3tkYwoO
  airflow/providers/cncf/kubernetes/utils/__init__.py,sha256=ClZN0VPjWySdVwS_ktH7rrgL9VLAcs3OSJSB9s3zaYw,863
  airflow/providers/cncf/kubernetes/utils/delete_from.py,sha256=poObZSoEJwQyaYWilEURs8f4CDY2sn_pfwS31Lf579A,5195
  airflow/providers/cncf/kubernetes/utils/k8s_hashlib_wrapper.py,sha256=N8caY0otTuxdXuMpY8rvXAlE_huxDTiwTDhwxtJZClI,1545
- airflow/providers/cncf/kubernetes/utils/pod_manager.py,sha256=QxsqCmM7PbqBuW7_G8GpCZwRrF3WiBUovVvBzmLEPyE,30788
+ airflow/providers/cncf/kubernetes/utils/k8s_resource_iterator.py,sha256=-Pgc5i2WEDl7ZBvtJZ4eWDqqlSj8WdULqwUyOWmsRp8,1928
+ airflow/providers/cncf/kubernetes/utils/pod_manager.py,sha256=ltJ8zdD6hOTtnEuw7ytENCa6VGq4K8_sCTkM8RwXf9w,30946
  airflow/providers/cncf/kubernetes/utils/xcom_sidecar.py,sha256=dCLPE-KyI3nVfawcuKMjhxuBuK9TgVZocc4eC82hAM4,2518
- apache_airflow_providers_cncf_kubernetes-7.9.0rc1.dist-info/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
- apache_airflow_providers_cncf_kubernetes-7.9.0rc1.dist-info/METADATA,sha256=I2xxS8WHMJPkTew61deMyC8xFCIp_WtPtAsPSgoYpRU,4967
- apache_airflow_providers_cncf_kubernetes-7.9.0rc1.dist-info/NOTICE,sha256=m-6s2XynUxVSUIxO4rVablAZCvFq-wmLrqV91DotRBw,240
- apache_airflow_providers_cncf_kubernetes-7.9.0rc1.dist-info/WHEEL,sha256=Xo9-1PvkuimrydujYJAjF7pCkriuXBpUPEjma1nZyJ0,92
- apache_airflow_providers_cncf_kubernetes-7.9.0rc1.dist-info/entry_points.txt,sha256=GZl6SYJuUg-3koITGRd9PU1lBmqhecrKTeCQ6-wyHpM,112
- apache_airflow_providers_cncf_kubernetes-7.9.0rc1.dist-info/top_level.txt,sha256=OeMVH5md7fr2QQWpnZoOWWxWO-0WH1IP70lpTVwopPg,8
- apache_airflow_providers_cncf_kubernetes-7.9.0rc1.dist-info/RECORD,,
+ apache_airflow_providers_cncf_kubernetes-7.10.0.dist-info/entry_points.txt,sha256=ByD3QJJyP9CfmTYtpNI1953akD38RUDgpGXLaq9vpOw,111
+ apache_airflow_providers_cncf_kubernetes-7.10.0.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81
+ apache_airflow_providers_cncf_kubernetes-7.10.0.dist-info/METADATA,sha256=V-zIoDRtw1hwt6CTgPOzXDOREsue66p013JELoAqnmg,5181
+ apache_airflow_providers_cncf_kubernetes-7.10.0.dist-info/RECORD,,

{apache_airflow_providers_cncf_kubernetes-7.9.0rc1.dist-info → apache_airflow_providers_cncf_kubernetes-7.10.0.dist-info}/WHEEL
@@ -1,5 +1,4 @@
  Wheel-Version: 1.0
- Generator: bdist_wheel (0.41.3)
+ Generator: flit 3.9.0
  Root-Is-Purelib: true
  Tag: py3-none-any
-

apache_airflow_providers_cncf_kubernetes-7.10.0.dist-info/entry_points.txt
@@ -0,0 +1,3 @@
+ [apache_airflow_provider]
+ provider_info=airflow.providers.cncf.kubernetes.get_provider_info:get_provider_info
+

apache_airflow_providers_cncf_kubernetes-7.9.0rc1.dist-info/NOTICE
@@ -1,6 +0,0 @@
- Apache Airflow
- Copyright 2016-2021 The Apache Software Foundation
-
- This product includes software developed at The Apache Software
- Foundation (http://www.apache.org/).
- =======================================================================

apache_airflow_providers_cncf_kubernetes-7.9.0rc1.dist-info/entry_points.txt
@@ -1,2 +0,0 @@
- [apache_airflow_provider]
- provider_info = airflow.providers.cncf.kubernetes.get_provider_info:get_provider_info