apache-airflow-providers-cncf-kubernetes 3.1.0__py3-none-any.whl → 10.10.0rc1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (72) hide show
  1. airflow/providers/cncf/kubernetes/__init__.py +18 -23
  2. airflow/providers/cncf/kubernetes/backcompat/__init__.py +17 -0
  3. airflow/providers/cncf/kubernetes/backcompat/backwards_compat_converters.py +31 -49
  4. airflow/providers/cncf/kubernetes/callbacks.py +200 -0
  5. airflow/providers/cncf/kubernetes/cli/__init__.py +16 -0
  6. airflow/providers/cncf/kubernetes/cli/kubernetes_command.py +195 -0
  7. airflow/providers/cncf/kubernetes/decorators/kubernetes.py +163 -0
  8. airflow/providers/cncf/kubernetes/decorators/kubernetes_cmd.py +118 -0
  9. airflow/providers/cncf/kubernetes/exceptions.py +37 -0
  10. airflow/providers/cncf/kubernetes/executors/__init__.py +17 -0
  11. airflow/providers/cncf/kubernetes/executors/kubernetes_executor.py +831 -0
  12. airflow/providers/cncf/kubernetes/executors/kubernetes_executor_types.py +91 -0
  13. airflow/providers/cncf/kubernetes/executors/kubernetes_executor_utils.py +736 -0
  14. airflow/providers/cncf/kubernetes/executors/local_kubernetes_executor.py +306 -0
  15. airflow/providers/cncf/kubernetes/get_provider_info.py +249 -50
  16. airflow/providers/cncf/kubernetes/hooks/kubernetes.py +846 -112
  17. airflow/providers/cncf/kubernetes/k8s_model.py +62 -0
  18. airflow/providers/cncf/kubernetes/kube_client.py +156 -0
  19. airflow/providers/cncf/kubernetes/kube_config.py +125 -0
  20. airflow/providers/cncf/kubernetes/kubernetes_executor_templates/__init__.py +16 -0
  21. airflow/providers/cncf/kubernetes/kubernetes_executor_templates/basic_template.yaml +79 -0
  22. airflow/providers/cncf/kubernetes/kubernetes_helper_functions.py +165 -0
  23. airflow/providers/cncf/kubernetes/operators/custom_object_launcher.py +368 -0
  24. airflow/providers/cncf/kubernetes/operators/job.py +646 -0
  25. airflow/providers/cncf/kubernetes/operators/kueue.py +132 -0
  26. airflow/providers/cncf/kubernetes/operators/pod.py +1417 -0
  27. airflow/providers/cncf/kubernetes/operators/resource.py +191 -0
  28. airflow/providers/cncf/kubernetes/operators/spark_kubernetes.py +336 -35
  29. airflow/providers/cncf/kubernetes/pod_generator.py +592 -0
  30. airflow/providers/cncf/kubernetes/pod_template_file_examples/__init__.py +16 -0
  31. airflow/providers/cncf/kubernetes/pod_template_file_examples/dags_in_image_template.yaml +68 -0
  32. airflow/providers/cncf/kubernetes/pod_template_file_examples/dags_in_volume_template.yaml +74 -0
  33. airflow/providers/cncf/kubernetes/pod_template_file_examples/git_sync_template.yaml +95 -0
  34. airflow/providers/cncf/kubernetes/python_kubernetes_script.jinja2 +51 -0
  35. airflow/providers/cncf/kubernetes/python_kubernetes_script.py +92 -0
  36. airflow/providers/cncf/kubernetes/resource_convert/__init__.py +16 -0
  37. airflow/providers/cncf/kubernetes/resource_convert/configmap.py +52 -0
  38. airflow/providers/cncf/kubernetes/resource_convert/env_variable.py +39 -0
  39. airflow/providers/cncf/kubernetes/resource_convert/secret.py +40 -0
  40. airflow/providers/cncf/kubernetes/secret.py +128 -0
  41. airflow/providers/cncf/kubernetes/sensors/spark_kubernetes.py +30 -14
  42. airflow/providers/cncf/kubernetes/template_rendering.py +81 -0
  43. airflow/providers/cncf/kubernetes/triggers/__init__.py +16 -0
  44. airflow/providers/cncf/kubernetes/triggers/job.py +176 -0
  45. airflow/providers/cncf/kubernetes/triggers/pod.py +344 -0
  46. airflow/providers/cncf/kubernetes/utils/__init__.py +3 -0
  47. airflow/providers/cncf/kubernetes/utils/container.py +118 -0
  48. airflow/providers/cncf/kubernetes/utils/delete_from.py +154 -0
  49. airflow/providers/cncf/kubernetes/utils/k8s_resource_iterator.py +46 -0
  50. airflow/providers/cncf/kubernetes/utils/pod_manager.py +887 -152
  51. airflow/providers/cncf/kubernetes/utils/xcom_sidecar.py +25 -16
  52. airflow/providers/cncf/kubernetes/version_compat.py +38 -0
  53. apache_airflow_providers_cncf_kubernetes-10.10.0rc1.dist-info/METADATA +125 -0
  54. apache_airflow_providers_cncf_kubernetes-10.10.0rc1.dist-info/RECORD +62 -0
  55. {apache_airflow_providers_cncf_kubernetes-3.1.0.dist-info → apache_airflow_providers_cncf_kubernetes-10.10.0rc1.dist-info}/WHEEL +1 -2
  56. apache_airflow_providers_cncf_kubernetes-10.10.0rc1.dist-info/entry_points.txt +3 -0
  57. apache_airflow_providers_cncf_kubernetes-10.10.0rc1.dist-info/licenses/NOTICE +5 -0
  58. airflow/providers/cncf/kubernetes/backcompat/pod.py +0 -119
  59. airflow/providers/cncf/kubernetes/backcompat/pod_runtime_info_env.py +0 -56
  60. airflow/providers/cncf/kubernetes/backcompat/volume.py +0 -62
  61. airflow/providers/cncf/kubernetes/backcompat/volume_mount.py +0 -58
  62. airflow/providers/cncf/kubernetes/example_dags/example_kubernetes.py +0 -163
  63. airflow/providers/cncf/kubernetes/example_dags/example_spark_kubernetes.py +0 -66
  64. airflow/providers/cncf/kubernetes/example_dags/example_spark_kubernetes_spark_pi.yaml +0 -57
  65. airflow/providers/cncf/kubernetes/operators/kubernetes_pod.py +0 -622
  66. apache_airflow_providers_cncf_kubernetes-3.1.0.dist-info/METADATA +0 -452
  67. apache_airflow_providers_cncf_kubernetes-3.1.0.dist-info/NOTICE +0 -6
  68. apache_airflow_providers_cncf_kubernetes-3.1.0.dist-info/RECORD +0 -29
  69. apache_airflow_providers_cncf_kubernetes-3.1.0.dist-info/entry_points.txt +0 -3
  70. apache_airflow_providers_cncf_kubernetes-3.1.0.dist-info/top_level.txt +0 -1
  71. /airflow/providers/cncf/kubernetes/{example_dags → decorators}/__init__.py +0 -0
  72. {apache_airflow_providers_cncf_kubernetes-3.1.0.dist-info → apache_airflow_providers_cncf_kubernetes-10.10.0rc1.dist-info/licenses}/LICENSE +0 -0
@@ -0,0 +1,592 @@
1
+ # Licensed to the Apache Software Foundation (ASF) under one
2
+ # or more contributor license agreements. See the NOTICE file
3
+ # distributed with this work for additional information
4
+ # regarding copyright ownership. The ASF licenses this file
5
+ # to you under the Apache License, Version 2.0 (the
6
+ # "License"); you may not use this file except in compliance
7
+ # with the License. You may obtain a copy of the License at
8
+ #
9
+ # http://www.apache.org/licenses/LICENSE-2.0
10
+ #
11
+ # Unless required by applicable law or agreed to in writing,
12
+ # software distributed under the License is distributed on an
13
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14
+ # KIND, either express or implied. See the License for the
15
+ # specific language governing permissions and limitations
16
+ # under the License.
17
+ """
18
+ Pod generator.
19
+
20
+ This module provides an interface between the previous Pod
21
+ API and outputs a kubernetes.client.models.V1Pod.
22
+ The advantage being that the full Kubernetes API
23
+ is supported and no serialization need be written.
24
+ """
25
+
26
+ from __future__ import annotations
27
+
28
+ import copy
29
+ import logging
30
+ import os
31
+ import re
32
+ import warnings
33
+ from functools import reduce
34
+ from typing import TYPE_CHECKING
35
+
36
+ from dateutil import parser
37
+ from kubernetes.client import models as k8s
38
+ from kubernetes.client.api_client import ApiClient
39
+
40
+ from airflow.exceptions import (
41
+ AirflowConfigException,
42
+ )
43
+ from airflow.providers.cncf.kubernetes.backcompat import get_logical_date_key
44
+ from airflow.providers.cncf.kubernetes.exceptions import PodMutationHookException, PodReconciliationError
45
+ from airflow.providers.cncf.kubernetes.kubernetes_helper_functions import (
46
+ POD_NAME_MAX_LENGTH,
47
+ add_unique_suffix,
48
+ )
49
+ from airflow.providers.cncf.kubernetes.version_compat import AIRFLOW_V_3_0_PLUS
50
+ from airflow.utils import yaml
51
+ from airflow.utils.hashlib_wrapper import md5
52
+ from airflow.version import version as airflow_version
53
+
54
+ if TYPE_CHECKING:
55
+ import datetime
56
+
57
+ from airflow.executors import workloads
58
+ from airflow.models.taskinstance import TaskInstance
59
+
60
+ log = logging.getLogger(__name__)
61
+
62
+ MAX_LABEL_LEN = 63
63
+
64
+
65
def workload_to_command_args(workload: workloads.ExecuteTask) -> list[str]:
    """
    Build the Task SDK command-line arguments for an ``ExecuteTask`` workload.

    The workload is serialized to JSON and handed to the
    ``airflow.sdk.execution_time.execute_workload`` entrypoint via
    ``--json-string``.

    :param workload: The ExecuteTask workload to convert
    :return: List of command arguments for the Task SDK
    """
    serialized = workload.model_dump_json()
    return [
        "python",
        "-m",
        "airflow.sdk.execution_time.execute_workload",
        "--json-string",
        serialized,
    ]
80
+
81
+
82
def generate_pod_command_args(task_instance: TaskInstance) -> list[str]:
    """
    Generate command arguments for a ``TaskInstance`` to be used in a Kubernetes pod.

    This function handles backwards compatibility between Airflow 2.x and 3.x:
    - In Airflow 2.x: Uses the existing ``command_as_list()`` method
    - In Airflow 3.x: Uses the Task SDK workload approach with serialized workload
    """
    if not AIRFLOW_V_3_0_PLUS:
        # Airflow 2.x exposes the worker command directly on the task instance.
        return task_instance.command_as_list()

    # Airflow 3+: wrap the task instance in a Task SDK workload and serialize it.
    from airflow.executors import workloads

    return workload_to_command_args(workloads.ExecuteTask.make(task_instance))
98
+
99
+
100
def make_safe_label_value(string: str) -> str:
    """
    Normalize a provided label to be of valid length and characters.

    Valid label values must be 63 characters or less and must be empty or begin and
    end with an alphanumeric character ([a-z0-9A-Z]) with dashes (-), underscores (_),
    dots (.), and alphanumerics between.

    If the label value is greater than 63 chars once made safe, or differs in any
    way from the original value sent to this function, then we need to truncate to
    53 chars, and append it with a unique hash.
    """
    # Strip invalid leading/trailing characters and any disallowed interior ones.
    sanitized = re.sub(r"^[^a-z0-9A-Z]*|[^a-zA-Z0-9_\-\.]|[^a-z0-9A-Z]*$", "", string)

    if len(sanitized) > MAX_LABEL_LEN or string != sanitized:
        # Suffix with a short digest of the *original* value so distinct inputs
        # that sanitize to the same text still map to distinct labels.
        digest = md5(string.encode()).hexdigest()[:9]
        sanitized = sanitized[: MAX_LABEL_LEN - len(digest) - 1] + "-" + digest

    return sanitized
119
+
120
+
121
def datetime_to_label_safe_datestring(datetime_obj: datetime.datetime) -> str:
    """
    Transform a datetime into a string usable as a Kubernetes label value.

    Kubernetes doesn't allow ":" in labels; ISO-8601 timestamps use ":" (and
    "+" for timezone offsets), so ":" is replaced with "_" and "+" with
    "_plus_".

    :param datetime_obj: datetime.datetime object
    :return: ISO-like string representing the datetime
    """
    iso_string = datetime_obj.isoformat()
    return iso_string.replace(":", "_").replace("+", "_plus_")
133
+
134
+
135
def label_safe_datestring_to_datetime(string: str) -> datetime.datetime:
    """
    Transform a label-safe date string back into a datetime object.

    Reverses ``datetime_to_label_safe_datestring``: "_plus_" becomes "+" and
    the remaining "_" characters become ":" before parsing.

    :param string: str
    :return: datetime.datetime object
    """
    restored = string.replace("_plus_", "+").replace("_", ":")
    return parser.parse(restored)
147
+
148
+
149
class PodGenerator:
    """
    Contains Kubernetes Airflow Worker configuration logic.

    Represents a kubernetes pod and manages execution of a single pod.
    Any configuration that is container specific gets applied to
    the first container in the list of containers.

    :param pod: The fully specified pod. Mutually exclusive with `pod_template_file`
    :param pod_template_file: Path to YAML file. Mutually exclusive with `pod`
    :param extract_xcom: Whether to bring up a container for xcom
    """

    def __init__(
        self,
        pod: k8s.V1Pod | None = None,
        pod_template_file: str | None = None,
        extract_xcom: bool = True,
    ):
        # Exactly one of `pod` / `pod_template_file` must be supplied.
        if not pod_template_file and not pod:
            raise AirflowConfigException(
                "Podgenerator requires either a `pod` or a `pod_template_file` argument"
            )
        if pod_template_file and pod:
            raise AirflowConfigException("Cannot pass both `pod` and `pod_template_file` arguments")

        if pod_template_file:
            self.ud_pod = self.deserialize_model_file(pod_template_file)
        else:
            self.ud_pod = pod

        # Attach sidecar
        self.extract_xcom = extract_xcom

    @staticmethod
    def from_obj(obj) -> dict | k8s.V1Pod | None:
        """Convert to pod from obj."""
        if obj is None:
            return None

        k8s_legacy_object = obj.get("KubernetesExecutor", None)
        k8s_object = obj.get("pod_override", None)

        if k8s_legacy_object and k8s_object:
            raise AirflowConfigException(
                "Can not have both a legacy and new"
                "executor_config object. Please delete the KubernetesExecutor"
                "dict and only use the pod_override kubernetes.client.models.V1Pod"
                "object."
            )
        if not k8s_object and not k8s_legacy_object:
            return None

        # NOTE(review): a legacy-only `KubernetesExecutor` dict (no `pod_override`)
        # falls through here with k8s_object=None and raises the TypeError below.
        if isinstance(k8s_object, k8s.V1Pod):
            return k8s_object
        raise TypeError(
            "Cannot convert a non-kubernetes.client.models.V1Pod object into a KubernetesExecutorConfig"
        )

    @staticmethod
    def reconcile_pods(base_pod: k8s.V1Pod, client_pod: k8s.V1Pod | None) -> k8s.V1Pod:
        """
        Merge Kubernetes Pod objects.

        :param base_pod: has the base attributes which are overwritten if they exist
            in the client pod and remain if they do not exist in the client_pod
        :param client_pod: the pod that the client wants to create.
        :return: the merged pods

        This can't be done recursively as certain fields are overwritten and some are concatenated.
        """
        if client_pod is None:
            return base_pod

        # Work on a deep copy so the caller's pod object is never mutated.
        client_pod_cp = copy.deepcopy(client_pod)
        client_pod_cp.spec = PodGenerator.reconcile_specs(base_pod.spec, client_pod_cp.spec)
        client_pod_cp.metadata = PodGenerator.reconcile_metadata(base_pod.metadata, client_pod_cp.metadata)
        client_pod_cp = merge_objects(base_pod, client_pod_cp)

        return client_pod_cp

    @staticmethod
    def reconcile_metadata(base_meta, client_meta):
        """
        Merge Kubernetes Metadata objects.

        :param base_meta: has the base attributes which are overwritten if they exist
            in the client_meta and remain if they do not exist in the client_meta
        :param client_meta: the spec that the client wants to create.
        :return: the merged specs
        """
        if base_meta and not client_meta:
            return base_meta
        if not base_meta and client_meta:
            return client_meta
        if client_meta and base_meta:
            # Labels/annotations are dict-merged (client wins); list-valued
            # fields are concatenated instead of overwritten.
            client_meta.labels = merge_objects(base_meta.labels, client_meta.labels)
            client_meta.annotations = merge_objects(base_meta.annotations, client_meta.annotations)
            extend_object_field(base_meta, client_meta, "managed_fields")
            extend_object_field(base_meta, client_meta, "finalizers")
            extend_object_field(base_meta, client_meta, "owner_references")
            return merge_objects(base_meta, client_meta)

        return None

    @staticmethod
    def reconcile_specs(
        base_spec: k8s.V1PodSpec | None, client_spec: k8s.V1PodSpec | None
    ) -> k8s.V1PodSpec | None:
        """
        Merge Kubernetes PodSpec objects.

        :param base_spec: has the base attributes which are overwritten if they exist
            in the client_spec and remain if they do not exist in the client_spec
        :param client_spec: the spec that the client wants to create.
        :return: the merged specs
        """
        if base_spec and not client_spec:
            return base_spec
        if not base_spec and client_spec:
            return client_spec
        if client_spec and base_spec:
            client_spec.containers = PodGenerator.reconcile_containers(
                base_spec.containers, client_spec.containers
            )
            merged_spec = extend_object_field(base_spec, client_spec, "init_containers")
            merged_spec = extend_object_field(base_spec, merged_spec, "volumes")
            return merge_objects(base_spec, merged_spec)

        return None

    @staticmethod
    def reconcile_containers(
        base_containers: list[k8s.V1Container], client_containers: list[k8s.V1Container]
    ) -> list[k8s.V1Container]:
        """
        Merge Kubernetes Container objects.

        :param base_containers: has the base attributes which are overwritten if they exist
            in the client_containers and remain if they do not exist in the client_containers
        :param client_containers: the containers that the client wants to create.
        :return: the merged containers

        The runs recursively over the list of containers.
        """
        if not base_containers:
            return client_containers
        if not client_containers:
            return base_containers

        # Merge the heads pairwise, then recurse on the tails.
        client_container = client_containers[0]
        base_container = base_containers[0]
        client_container = extend_object_field(base_container, client_container, "volume_mounts")
        client_container = extend_object_field(base_container, client_container, "env")
        client_container = extend_object_field(base_container, client_container, "env_from")
        client_container = extend_object_field(base_container, client_container, "ports")
        client_container = extend_object_field(base_container, client_container, "volume_devices")
        client_container = merge_objects(base_container, client_container)

        return [
            client_container,
            *PodGenerator.reconcile_containers(base_containers[1:], client_containers[1:]),
        ]

    @classmethod
    def construct_pod(
        cls,
        dag_id: str,
        task_id: str,
        pod_id: str,
        try_number: int,
        kube_image: str,
        date: datetime.datetime | None,
        args: list[str],
        pod_override_object: k8s.V1Pod | None,
        base_worker_pod: k8s.V1Pod,
        namespace: str,
        scheduler_job_id: str,
        run_id: str | None = None,
        map_index: int = -1,
        *,
        with_mutation_hook: bool = False,
    ) -> k8s.V1Pod:
        """
        Create a Pod.

        Construct a pod by gathering and consolidating the configuration from 3 places:
        - airflow.cfg
        - executor_config
        - dynamic arguments
        """
        if len(pod_id) > POD_NAME_MAX_LENGTH:
            warnings.warn(
                f"pod_id supplied is longer than {POD_NAME_MAX_LENGTH} characters; "
                f"truncating and adding unique suffix.",
                UserWarning,
                stacklevel=2,
            )
            pod_id = add_unique_suffix(name=pod_id, max_len=POD_NAME_MAX_LENGTH)
        # Prefer the image from the executor_config override, falling back to
        # the configured worker image if the override is absent or empty.
        try:
            image = pod_override_object.spec.containers[0].image  # type: ignore
            if not image:
                image = kube_image
        except Exception:
            image = kube_image

        annotations = {
            "dag_id": dag_id,
            "task_id": task_id,
            "try_number": str(try_number),
        }
        if map_index >= 0:
            annotations["map_index"] = str(map_index)
        if date:
            annotations[get_logical_date_key()] = date.isoformat()
        if run_id:
            annotations["run_id"] = run_id

        main_container = k8s.V1Container(
            name="base",
            args=args,
            image=image,
            env=[
                k8s.V1EnvVar(name="AIRFLOW_IS_K8S_EXECUTOR_POD", value="True"),
            ],
        )
        dynamic_pod = k8s.V1Pod(
            metadata=k8s.V1ObjectMeta(
                namespace=namespace,
                annotations=annotations,
                name=pod_id,
                labels=cls.build_labels_for_k8s_executor_pod(
                    dag_id=dag_id,
                    task_id=task_id,
                    try_number=try_number,
                    airflow_worker=scheduler_job_id,
                    map_index=map_index,
                    logical_date=date,
                    run_id=run_id,
                ),
            ),
        )

        podspec = k8s.V1PodSpec(
            containers=[main_container],
        )

        dynamic_pod.spec = podspec

        # Reconcile the pods starting with the first chronologically,
        # Pod from the pod_template_File -> Pod from the K8s executor -> Pod from executor_config arg
        pod_list = [base_worker_pod, dynamic_pod, pod_override_object]

        try:
            pod = reduce(PodGenerator.reconcile_pods, pod_list)
        except Exception as e:
            raise PodReconciliationError from e

        if with_mutation_hook:
            from airflow.settings import pod_mutation_hook

            try:
                pod_mutation_hook(pod)
            except Exception as e:
                raise PodMutationHookException from e

        return pod

    @classmethod
    def build_selector_for_k8s_executor_pod(
        cls,
        *,
        dag_id,
        task_id,
        try_number,
        map_index=None,
        logical_date=None,
        run_id=None,
        airflow_worker=None,
        include_version=False,
    ):
        """
        Generate selector for kubernetes executor pod.

        :meta private:
        """
        labels = cls.build_labels_for_k8s_executor_pod(
            dag_id=dag_id,
            task_id=task_id,
            try_number=try_number,
            map_index=map_index,
            logical_date=logical_date,
            run_id=run_id,
            airflow_worker=airflow_worker,
            include_version=include_version,
        )
        label_strings = [f"{label_id}={label}" for label_id, label in sorted(labels.items())]
        selector = ",".join(label_strings)
        if not airflow_worker:  # this filters out KPO pods even when we don't know the scheduler job id
            selector += ",airflow-worker"
        return selector

    @classmethod
    def build_labels_for_k8s_executor_pod(
        cls,
        *,
        dag_id,
        task_id,
        try_number,
        airflow_worker=None,
        map_index=None,
        logical_date=None,
        run_id=None,
        include_version=True,
    ):
        """
        Generate labels for kubernetes executor pod.

        :meta private:
        """
        labels = {
            "dag_id": make_safe_label_value(dag_id),
            "task_id": make_safe_label_value(task_id),
            "try_number": str(try_number),
            "kubernetes_executor": "True",
        }
        if include_version:
            # "+" (local-version separator) is not a valid label character.
            labels["airflow_version"] = airflow_version.replace("+", "-")
        if airflow_worker is not None:
            labels["airflow-worker"] = make_safe_label_value(str(airflow_worker))
        if map_index is not None and map_index >= 0:
            labels["map_index"] = str(map_index)
        if logical_date:
            labels[get_logical_date_key()] = datetime_to_label_safe_datestring(logical_date)
        if run_id:
            labels["run_id"] = make_safe_label_value(run_id)
        return labels

    @staticmethod
    def serialize_pod(pod: k8s.V1Pod) -> dict:
        """
        Convert a k8s.V1Pod into a json serializable dictionary.

        :param pod: k8s.V1Pod object
        :return: Serialized version of the pod returned as dict
        """
        api_client = ApiClient()
        return api_client.sanitize_for_serialization(pod)

    @staticmethod
    def deserialize_model_file(path: str) -> k8s.V1Pod:
        """
        Generate a Pod from a file.

        :param path: Path to the file
        :return: a kubernetes.client.models.V1Pod
        """
        if os.path.exists(path):
            with open(path) as stream:
                pod = yaml.safe_load(stream)
        else:
            # Missing files are tolerated: log a warning and deserialize None.
            pod = None
            log.warning("Model file %s does not exist", path)

        return PodGenerator.deserialize_model_dict(pod)

    @staticmethod
    def deserialize_model_dict(pod_dict: dict | None) -> k8s.V1Pod:
        """
        Deserializes a Python dictionary to k8s.V1Pod.

        Unfortunately we need access to the private method
        ``_ApiClient__deserialize_model`` from the kubernetes client.
        This issue is tracked here; https://github.com/kubernetes-client/python/issues/977.

        :param pod_dict: Serialized dict of k8s.V1Pod object
        :return: De-serialized k8s.V1Pod
        """
        api_client = ApiClient()
        return api_client._ApiClient__deserialize_model(pod_dict, k8s.V1Pod)
529
+
530
+
531
def merge_objects(base_obj, client_obj):
    """
    Merge objects.

    :param base_obj: has the base attributes which are overwritten if they exist
        in the client_obj and remain if they do not exist in the client_obj
    :param client_obj: the object that the client wants to create.
    :return: the merged objects
    """
    # A missing/falsy side means there is nothing to merge on that side.
    if not base_obj:
        return client_obj
    if not client_obj:
        return base_obj

    merged = copy.deepcopy(client_obj)

    # Plain dicts: client keys win, base keys fill in the gaps.
    if isinstance(base_obj, dict) and isinstance(merged, dict):
        combined = copy.deepcopy(base_obj)
        combined.update(merged)
        return combined

    # Kubernetes model objects: copy over every base attribute the client left unset.
    for attr in base_obj.to_dict():
        base_val = getattr(base_obj, attr, None)
        if base_val is None or getattr(client_obj, attr, None):
            continue
        if isinstance(merged, dict):
            merged[attr] = base_val
        else:
            setattr(merged, attr, base_val)
    return merged
560
+
561
+
562
def extend_object_field(base_obj, client_obj, field_name):
    """
    Add field values to existing objects.

    :param base_obj: an object which has a property `field_name` that is a list
    :param client_obj: an object which has a property `field_name` that is a list.
        A copy of this object is returned with `field_name` modified
    :param field_name: the name of the list field
    :return: the client_obj with the property `field_name` being the two properties appended
    """
    merged = copy.deepcopy(client_obj)
    base_field = getattr(base_obj, field_name, None)
    client_field = getattr(client_obj, field_name, None)

    # Both sides must be lists (or None); anything else is a caller error.
    base_invalid = base_field is not None and not isinstance(base_field, list)
    client_invalid = client_field is not None and not isinstance(client_field, list)
    if base_invalid or client_invalid:
        raise ValueError(
            f"The chosen field must be a list. Got {type(base_field)} base_object_field "
            f"and {type(client_field)} client_object_field."
        )

    if not base_field:
        # Nothing to extend with; the client copy is already correct.
        return merged
    if not client_field:
        setattr(merged, field_name, base_field)
        return merged

    # Base entries come first, client entries are appended after them.
    setattr(merged, field_name, base_field + client_field)
    return merged
@@ -0,0 +1,16 @@
1
+ # Licensed to the Apache Software Foundation (ASF) under one
2
+ # or more contributor license agreements. See the NOTICE file
3
+ # distributed with this work for additional information
4
+ # regarding copyright ownership. The ASF licenses this file
5
+ # to you under the Apache License, Version 2.0 (the
6
+ # "License"); you may not use this file except in compliance
7
+ # with the License. You may obtain a copy of the License at
8
+ #
9
+ # http://www.apache.org/licenses/LICENSE-2.0
10
+ #
11
+ # Unless required by applicable law or agreed to in writing,
12
+ # software distributed under the License is distributed on an
13
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14
+ # KIND, either express or implied. See the License for the
15
+ # specific language governing permissions and limitations
16
+ # under the License.
@@ -0,0 +1,68 @@
1
+ # Licensed to the Apache Software Foundation (ASF) under one
2
+ # or more contributor license agreements. See the NOTICE file
3
+ # distributed with this work for additional information
4
+ # regarding copyright ownership. The ASF licenses this file
5
+ # to you under the Apache License, Version 2.0 (the
6
+ # "License"); you may not use this file except in compliance
7
+ # with the License. You may obtain a copy of the License at
8
+ #
9
+ # http://www.apache.org/licenses/LICENSE-2.0
10
+ #
11
+ # Unless required by applicable law or agreed to in writing,
12
+ # software distributed under the License is distributed on an
13
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14
+ # KIND, either express or implied. See the License for the
15
+ # specific language governing permissions and limitations
16
+ # under the License.
17
+ #
18
+ # This file is used for documentation purposes. Example can be found at docs/executor/kubernetes.rst
19
+ #
20
+ # [START template_with_dags_in_image]
21
+ ---
22
+ apiVersion: v1
23
+ kind: Pod
24
+ metadata:
25
+ name: placeholder-name
26
+ spec:
27
+ containers:
28
+ - env:
29
+ - name: AIRFLOW__CORE__EXECUTOR
30
+ value: LocalExecutor
31
+ # Hard Coded Airflow Envs
32
+ - name: AIRFLOW__CORE__FERNET_KEY
33
+ valueFrom:
34
+ secretKeyRef:
35
+ name: RELEASE-NAME-fernet-key
36
+ key: fernet-key
37
+ - name: AIRFLOW__DATABASE__SQL_ALCHEMY_CONN
38
+ valueFrom:
39
+ secretKeyRef:
40
+ name: RELEASE-NAME-airflow-metadata
41
+ key: connection
42
+ - name: AIRFLOW_CONN_AIRFLOW_DB
43
+ valueFrom:
44
+ secretKeyRef:
45
+ name: RELEASE-NAME-airflow-metadata
46
+ key: connection
47
+ image: dummy_image
48
+ imagePullPolicy: IfNotPresent
49
+ name: base
50
+ volumeMounts:
51
+ - mountPath: "/opt/airflow/logs"
52
+ name: airflow-logs
53
+ - mountPath: /opt/airflow/airflow.cfg
54
+ name: airflow-config
55
+ readOnly: true
56
+ subPath: airflow.cfg
57
+ restartPolicy: Never
58
+ securityContext:
59
+ runAsUser: 50000
60
+ fsGroup: 50000
61
+ serviceAccountName: "RELEASE-NAME-worker-serviceaccount"
62
+ volumes:
63
+ - emptyDir: {}
64
+ name: airflow-logs
65
+ - configMap:
66
+ name: RELEASE-NAME-airflow-config
67
+ name: airflow-config
68
+ # [END template_with_dags_in_image]