apache-airflow-providers-cncf-kubernetes 10.8.0rc1__py3-none-any.whl → 10.8.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- airflow/providers/cncf/kubernetes/__init__.py +1 -1
- airflow/providers/cncf/kubernetes/operators/job.py +6 -1
- airflow/providers/cncf/kubernetes/operators/kueue.py +6 -1
- airflow/providers/cncf/kubernetes/operators/pod.py +12 -6
- airflow/providers/cncf/kubernetes/operators/resource.py +6 -1
- airflow/providers/cncf/kubernetes/operators/spark_kubernetes.py +79 -33
- airflow/providers/cncf/kubernetes/sensors/spark_kubernetes.py +6 -1
- airflow/providers/cncf/kubernetes/utils/pod_manager.py +10 -3
- airflow/providers/cncf/kubernetes/version_compat.py +4 -6
- {apache_airflow_providers_cncf_kubernetes-10.8.0rc1.dist-info → apache_airflow_providers_cncf_kubernetes-10.8.1.dist-info}/METADATA +7 -7
- {apache_airflow_providers_cncf_kubernetes-10.8.0rc1.dist-info → apache_airflow_providers_cncf_kubernetes-10.8.1.dist-info}/RECORD +13 -13
- {apache_airflow_providers_cncf_kubernetes-10.8.0rc1.dist-info → apache_airflow_providers_cncf_kubernetes-10.8.1.dist-info}/WHEEL +0 -0
- {apache_airflow_providers_cncf_kubernetes-10.8.0rc1.dist-info → apache_airflow_providers_cncf_kubernetes-10.8.1.dist-info}/entry_points.txt +0 -0
airflow/providers/cncf/kubernetes/__init__.py
@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version
 
 __all__ = ["__version__"]
 
-__version__ = "10.8.0rc1"
+__version__ = "10.8.1"
 
 if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
     "2.10.0"
airflow/providers/cncf/kubernetes/operators/job.py
@@ -42,7 +42,12 @@ from airflow.providers.cncf.kubernetes.operators.pod import KubernetesPodOperator
 from airflow.providers.cncf.kubernetes.pod_generator import PodGenerator, merge_objects
 from airflow.providers.cncf.kubernetes.triggers.job import KubernetesJobTrigger
 from airflow.providers.cncf.kubernetes.utils.pod_manager import EMPTY_XCOM_RESULT, PodNotFoundException
-from airflow.providers.cncf.kubernetes.version_compat import BaseOperator
+from airflow.providers.cncf.kubernetes.version_compat import AIRFLOW_V_3_1_PLUS
+
+if AIRFLOW_V_3_1_PLUS:
+    from airflow.sdk import BaseOperator
+else:
+    from airflow.models import BaseOperator
 from airflow.utils import yaml
 from airflow.utils.context import Context
 
airflow/providers/cncf/kubernetes/operators/kueue.py
@@ -27,7 +27,12 @@ from kubernetes.utils import FailToCreateError
 from airflow.exceptions import AirflowException
 from airflow.providers.cncf.kubernetes.hooks.kubernetes import KubernetesHook
 from airflow.providers.cncf.kubernetes.operators.job import KubernetesJobOperator
-from airflow.providers.cncf.kubernetes.version_compat import BaseOperator
+from airflow.providers.cncf.kubernetes.version_compat import AIRFLOW_V_3_1_PLUS
+
+if AIRFLOW_V_3_1_PLUS:
+    from airflow.sdk import BaseOperator
+else:
+    from airflow.models import BaseOperator
 
 
 class KubernetesInstallKueueOperator(BaseOperator):
airflow/providers/cncf/kubernetes/operators/pod.py
@@ -80,7 +80,12 @@ from airflow.providers.cncf.kubernetes.utils.pod_manager import (
     container_is_succeeded,
     get_container_termination_message,
 )
-from airflow.providers.cncf.kubernetes.version_compat import BaseOperator, XCOM_RETURN_KEY
+from airflow.providers.cncf.kubernetes.version_compat import AIRFLOW_V_3_1_PLUS, XCOM_RETURN_KEY
+
+if AIRFLOW_V_3_1_PLUS:
+    from airflow.sdk import BaseOperator
+else:
+    from airflow.models import BaseOperator
 from airflow.settings import pod_mutation_hook
 from airflow.utils import yaml
 from airflow.utils.helpers import prune_dict, validate_key
@@ -833,7 +838,6 @@ class KubernetesPodOperator(BaseOperator):
         ti.xcom_push(key="pod_name", value=self.pod.metadata.name)
         ti.xcom_push(key="pod_namespace", value=self.pod.metadata.namespace)
 
-        self.convert_config_file_to_dict()
         self.invoke_defer_method()
 
     def convert_config_file_to_dict(self):
@@ -847,6 +851,7 @@ class KubernetesPodOperator(BaseOperator):
 
     def invoke_defer_method(self, last_log_time: DateTime | None = None) -> None:
         """Redefine triggers which are being used in child classes."""
+        self.convert_config_file_to_dict()
         trigger_start_time = datetime.datetime.now(tz=datetime.timezone.utc)
         self.defer(
             trigger=KubernetesPodTrigger(
@@ -1088,10 +1093,11 @@ class KubernetesPodOperator(BaseOperator):
         """Will fetch and emit events from pod."""
         with _optionally_suppress(reraise=reraise):
             for event in self.pod_manager.read_pod_events(pod).items:
-                if event.type == PodEventType.
-                    self.log.
+                if event.type == PodEventType.WARNING.value:
+                    self.log.warning("Pod Event: %s - %s", event.reason, event.message)
                 else:
-
+                    # events.k8s.io/v1 at this stage will always be Normal
+                    self.log.info("Pod Event: %s - %s", event.reason, event.message)
 
     def _read_pod_container_states(self, pod, *, reraise=True) -> None:
         """Log detailed container states of pod for debugging."""
@@ -1101,7 +1107,7 @@ class KubernetesPodOperator(BaseOperator):
             pod_reason = getattr(remote_pod.status, "reason", None)
             self.log.info("Pod phase: %s, reason: %s", pod_phase, pod_reason)
 
-            container_statuses = getattr(remote_pod.status, "container_statuses", [])
+            container_statuses = getattr(remote_pod.status, "container_statuses", None) or []
             for status in container_statuses:
                 name = status.name
                 state = status.state
airflow/providers/cncf/kubernetes/operators/resource.py
@@ -32,7 +32,12 @@ from airflow.providers.cncf.kubernetes.hooks.kubernetes import KubernetesHook
 from airflow.providers.cncf.kubernetes.kubernetes_helper_functions import should_retry_creation
 from airflow.providers.cncf.kubernetes.utils.delete_from import delete_from_yaml
 from airflow.providers.cncf.kubernetes.utils.k8s_resource_iterator import k8s_resource_iterator
-from airflow.providers.cncf.kubernetes.version_compat import BaseOperator
+from airflow.providers.cncf.kubernetes.version_compat import AIRFLOW_V_3_1_PLUS
+
+if AIRFLOW_V_3_1_PLUS:
+    from airflow.sdk import BaseOperator
+else:
+    from airflow.models import BaseOperator
 
 if TYPE_CHECKING:
     from kubernetes.client import ApiClient, CustomObjectsApi
airflow/providers/cncf/kubernetes/operators/spark_kubernetes.py
@@ -66,7 +66,9 @@ class SparkKubernetesOperator(KubernetesPodOperator):
     :param success_run_history_limit: Number of past successful runs of the application to keep.
     :param startup_timeout_seconds: timeout in seconds to startup the pod.
    :param log_events_on_failure: Log the pod's events if a failure occurs
-    :param reattach_on_restart: if the scheduler dies while the pod is running, reattach and monitor
+    :param reattach_on_restart: if the scheduler dies while the pod is running, reattach and monitor.
+        When enabled, the operator automatically adds Airflow task context labels (dag_id, task_id, run_id)
+        to the driver and executor pods to enable finding them for reattachment.
     :param delete_on_termination: What to do when the pod reaches its final
         state, or the execution is interrupted. If True (default), delete the
         pod; if False, leave the pod.
@@ -203,17 +205,16 @@ class SparkKubernetesOperator(KubernetesPodOperator):
             "spark_kubernetes_operator": "True",
         }
 
-
-        map_index
-
-            labels["map_index"] = map_index
+        map_index = ti.map_index
+        if map_index is not None and map_index >= 0:
+            labels["map_index"] = str(map_index)
 
         if include_try_number:
-            labels.update(try_number=ti.try_number)
+            labels.update(try_number=str(ti.try_number))
 
         # In the case of sub dags this is just useful
         # TODO: Remove this when the minimum version of Airflow is bumped to 3.0
-        if getattr(context_dict["dag"], "
+        if getattr(context_dict["dag"], "parent_dag", False):
             labels["parent_dag_id"] = context_dict["dag"].parent_dag.dag_id
         # Ensure that label is valid for Kube,
         # and if not truncate/remove invalid chars and replace with short hash.
@@ -226,9 +227,11 @@ class SparkKubernetesOperator(KubernetesPodOperator):
     def pod_manager(self) -> PodManager:
         return PodManager(kube_client=self.client)
 
-
-
-
+    def _try_numbers_match(self, context, pod) -> bool:
+        task_instance = context["task_instance"]
+        task_context_labels = self._get_ti_pod_labels(context)
+        pod_try_number = pod.metadata.labels.get(task_context_labels.get("try_number", ""), "")
+        return str(task_instance.try_number) == str(pod_try_number)
 
     @property
     def template_body(self):
@@ -251,20 +254,9 @@ class SparkKubernetesOperator(KubernetesPodOperator):
                 "Found matching driver pod %s with labels %s", pod.metadata.name, pod.metadata.labels
             )
             self.log.info("`try_number` of task_instance: %s", context["ti"].try_number)
-            self.log.info("`try_number` of pod: %s", pod.metadata.labels
+            self.log.info("`try_number` of pod: %s", pod.metadata.labels.get("try_number", "unknown"))
         return pod
 
-    def get_or_create_spark_crd(self, context) -> k8s.V1Pod:
-        if self.reattach_on_restart:
-            driver_pod = self.find_spark_job(context)
-            if driver_pod:
-                return driver_pod
-
-        driver_pod, spark_obj_spec = self.launcher.start_spark_job(
-            image=self.image, code_path=self.code_path, startup_timeout=self.startup_timeout_seconds
-        )
-        return driver_pod
-
     def process_pod_deletion(self, pod, *, reraise=True):
         if pod is not None:
             if self.delete_on_termination:
@@ -294,25 +286,79 @@ class SparkKubernetesOperator(KubernetesPodOperator):
     def custom_obj_api(self) -> CustomObjectsApi:
         return CustomObjectsApi()
 
-
-
-
-
-
-
-
-
+    def get_or_create_spark_crd(self, launcher: CustomObjectLauncher, context) -> k8s.V1Pod:
+        if self.reattach_on_restart:
+            driver_pod = self.find_spark_job(context)
+            if driver_pod:
+                return driver_pod
+
+        driver_pod, spark_obj_spec = launcher.start_spark_job(
+            image=self.image, code_path=self.code_path, startup_timeout=self.startup_timeout_seconds
         )
-        return
+        return driver_pod
 
     def execute(self, context: Context):
         self.name = self.create_job_name()
 
+        self._setup_spark_configuration(context)
+
+        if self.deferrable:
+            self.execute_async(context)
+
+        return super().execute(context)
+
+    def _setup_spark_configuration(self, context: Context):
+        """Set up Spark-specific configuration including reattach logic."""
+        import copy
+
+        template_body = copy.deepcopy(self.template_body)
+
+        if self.reattach_on_restart:
+            task_context_labels = self._get_ti_pod_labels(context)
+
+            existing_pod = self.find_spark_job(context)
+            if existing_pod:
+                self.log.info(
+                    "Found existing Spark driver pod %s. Reattaching to it.", existing_pod.metadata.name
+                )
+                self.pod = existing_pod
+                self.pod_request_obj = None
+                return
+
+            if "spark" not in template_body:
+                template_body["spark"] = {}
+            if "spec" not in template_body["spark"]:
+                template_body["spark"]["spec"] = {}
+
+            spec_dict = template_body["spark"]["spec"]
+
+            if "labels" not in spec_dict:
+                spec_dict["labels"] = {}
+            spec_dict["labels"].update(task_context_labels)
+
+            for component in ["driver", "executor"]:
+                if component not in spec_dict:
+                    spec_dict[component] = {}
+
+                if "labels" not in spec_dict[component]:
+                    spec_dict[component]["labels"] = {}
+
+                spec_dict[component]["labels"].update(task_context_labels)
+
         self.log.info("Creating sparkApplication.")
-        self.
+        self.launcher = CustomObjectLauncher(
+            name=self.name,
+            namespace=self.namespace,
+            kube_client=self.client,
+            custom_obj_api=self.custom_obj_api,
+            template_body=template_body,
+        )
+        self.pod = self.get_or_create_spark_crd(self.launcher, context)
         self.pod_request_obj = self.launcher.pod_spec
 
-
+    def find_pod(self, namespace: str, context: Context, *, exclude_checked: bool = True):
+        """Override parent's find_pod to use our Spark-specific find_spark_job method."""
+        return self.find_spark_job(context, exclude_checked=exclude_checked)
 
     def on_kill(self) -> None:
         if self.launcher:
airflow/providers/cncf/kubernetes/sensors/spark_kubernetes.py
@@ -25,7 +25,12 @@ from kubernetes import client
 
 from airflow.exceptions import AirflowException
 from airflow.providers.cncf.kubernetes.hooks.kubernetes import KubernetesHook
-from airflow.providers.cncf.kubernetes.version_compat import BaseSensorOperator
+from airflow.providers.cncf.kubernetes.version_compat import AIRFLOW_V_3_0_PLUS
+
+if AIRFLOW_V_3_0_PLUS:
+    from airflow.sdk import BaseSensorOperator
+else:
+    from airflow.sensors.base import BaseSensorOperator  # type: ignore[no-redef]
 
 if TYPE_CHECKING:
     from airflow.utils.context import Context
airflow/providers/cncf/kubernetes/utils/pod_manager.py
@@ -521,13 +521,20 @@ class PodManager(LoggingMixin):
             # can safely resume from a few seconds later
             read_timeout = 60 * 5
             try:
+                since_seconds = None
+                if since_time:
+                    try:
+                        since_seconds = math.ceil((pendulum.now() - since_time).total_seconds())
+                    except TypeError:
+                        self.log.warning(
+                            "Error calculating since_seconds with since_time %s. Using None instead.",
+                            since_time,
+                        )
                 logs = self.read_pod_logs(
                     pod=pod,
                     container_name=container_name,
                     timestamps=True,
-                    since_seconds=(
-                        math.ceil((pendulum.now() - since_time).total_seconds()) if since_time else None
-                    ),
+                    since_seconds=since_seconds,
                    follow=follow,
                     post_termination_timeout=post_termination_timeout,
                     _request_timeout=(connection_timeout, read_timeout),
airflow/providers/cncf/kubernetes/version_compat.py
@@ -37,16 +37,14 @@ AIRFLOW_V_3_1_PLUS = get_base_airflow_version_tuple() >= (3, 1, 0)
 
 if AIRFLOW_V_3_1_PLUS:
     from airflow.models.xcom import XCOM_RETURN_KEY
-    from airflow.sdk import BaseHook, BaseOperator
+    from airflow.sdk import BaseHook
     from airflow.sdk.definitions.context import context_merge
 else:
     from airflow.hooks.base import BaseHook  # type: ignore[attr-defined,no-redef]
-    from airflow.models import BaseOperator
     from airflow.utils.context import context_merge  # type: ignore[attr-defined, no-redef]
     from airflow.utils.xcom import XCOM_RETURN_KEY  # type: ignore[no-redef]
 
 if AIRFLOW_V_3_0_PLUS:
-    from airflow.sdk import BaseSensorOperator
     from airflow.sdk.bases.decorator import DecoratedOperator, TaskDecorator, task_decorator_factory
 else:
     from airflow.decorators.base import (  # type: ignore[no-redef]
@@ -54,14 +52,14 @@ else:
         TaskDecorator,
         task_decorator_factory,
     )
-
+
+# BaseOperator and BaseSensorOperator removed from version_compat to avoid circular imports
+# Import them directly in files that need them instead
 
 __all__ = [
     "AIRFLOW_V_3_0_PLUS",
     "AIRFLOW_V_3_1_PLUS",
     "BaseHook",
-    "BaseOperator",
-    "BaseSensorOperator",
     "DecoratedOperator",
     "TaskDecorator",
     "task_decorator_factory",
{apache_airflow_providers_cncf_kubernetes-10.8.0rc1.dist-info → apache_airflow_providers_cncf_kubernetes-10.8.1.dist-info}/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: apache-airflow-providers-cncf-kubernetes
-Version: 10.8.0rc1
+Version: 10.8.1
 Summary: Provider package apache-airflow-providers-cncf-kubernetes for Apache Airflow
 Keywords: airflow-provider,cncf.kubernetes,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -21,14 +21,14 @@ Classifier: Programming Language :: Python :: 3.12
 Classifier: Programming Language :: Python :: 3.13
 Classifier: Topic :: System :: Monitoring
 Requires-Dist: aiofiles>=23.2.0
-Requires-Dist: apache-airflow>=2.10.
+Requires-Dist: apache-airflow>=2.10.0
 Requires-Dist: asgiref>=3.5.2
 Requires-Dist: cryptography>=41.0.0
 Requires-Dist: kubernetes>=32.0.0,<33.0.0
 Requires-Dist: kubernetes_asyncio>=32.0.0,<33.0.0
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.
-Project-URL: Documentation, https://airflow.
+Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/10.8.1/changelog.html
+Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/10.8.1
 Project-URL: Mastodon, https://fosstodon.org/@airflow
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
@@ -59,7 +59,7 @@ Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/
 
 Package ``apache-airflow-providers-cncf-kubernetes``
 
-Release: ``10.8.0rc1``
+Release: ``10.8.1``
 
 
 `Kubernetes <https://kubernetes.io/>`__
@@ -72,7 +72,7 @@ This is a provider package for ``cncf.kubernetes`` provider. All classes for this
 are in ``airflow.providers.cncf.kubernetes`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/10.8.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/10.8.1/>`_.
 
 Installation
 ------------
@@ -98,5 +98,5 @@ PIP package Version required
 ====================== ====================
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/10.8.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/10.8.1/changelog.html>`_.
 
{apache_airflow_providers_cncf_kubernetes-10.8.0rc1.dist-info → apache_airflow_providers_cncf_kubernetes-10.8.1.dist-info}/RECORD
@@ -1,5 +1,5 @@
 airflow/providers/cncf/kubernetes/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
-airflow/providers/cncf/kubernetes/__init__.py,sha256=
+airflow/providers/cncf/kubernetes/__init__.py,sha256=8cvd55rnWlQ9hA3n7N4Lxy5hyUfN_uRD-Wm5lVIdKfg,1505
 airflow/providers/cncf/kubernetes/callbacks.py,sha256=1nCLXFJKtr5FM9ApB8Drw5VAGSC3TDFsPSTMtRnAR3Q,6085
 airflow/providers/cncf/kubernetes/exceptions.py,sha256=3cNEZTnrltBsqwzHiLfckwYYc_IWY1g4PcRs6zuMWWA,1137
 airflow/providers/cncf/kubernetes/get_provider_info.py,sha256=Git4HycOcHrb4zD9W7ZYsqNDkQSQ4uipSJO_GaPiroE,16041
@@ -12,7 +12,7 @@ airflow/providers/cncf/kubernetes/python_kubernetes_script.jinja2,sha256=I0EHRGw
 airflow/providers/cncf/kubernetes/python_kubernetes_script.py,sha256=KnTlZSWCZhwvj89fSc2kgIRTaI4iLNKPquHc2wXnluo,3460
 airflow/providers/cncf/kubernetes/secret.py,sha256=wj-T9gouqau_X14slAstGmnSxqXJQzdLwUdURzHna0I,5209
 airflow/providers/cncf/kubernetes/template_rendering.py,sha256=WSUBhjGSDhjNtA4IFlbYyX50rvYN6UA4dMk0cPqgOjo,3618
-airflow/providers/cncf/kubernetes/version_compat.py,sha256=
+airflow/providers/cncf/kubernetes/version_compat.py,sha256=1k5uiliHIpi5jqdaGmwxUS_GZBv9N-vyjrfhdlkDbw8,2674
 airflow/providers/cncf/kubernetes/backcompat/__init__.py,sha256=KXF76f3v1jIFUBNz8kwxVMvm7i4mNo35LbIG9IijBNc,1299
 airflow/providers/cncf/kubernetes/backcompat/backwards_compat_converters.py,sha256=FkRRtIEucp2hYrecGVYVgyPI6-b7hE7X7L17Z3r459Y,4303
 airflow/providers/cncf/kubernetes/cli/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
@@ -31,11 +31,11 @@ airflow/providers/cncf/kubernetes/kubernetes_executor_templates/__init__.py,sha2
 airflow/providers/cncf/kubernetes/kubernetes_executor_templates/basic_template.yaml,sha256=yzJmXN4ZyB4aDwI_GIugpL9-f1YMVy__X-LQSbeU95A,2567
 airflow/providers/cncf/kubernetes/operators/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
 airflow/providers/cncf/kubernetes/operators/custom_object_launcher.py,sha256=jTVHQt1vp5gELrLNyM-DrZ1ywgmTy3Hh1i6wyl7AGS0,15314
-airflow/providers/cncf/kubernetes/operators/job.py,sha256=
-airflow/providers/cncf/kubernetes/operators/kueue.py,sha256=
-airflow/providers/cncf/kubernetes/operators/pod.py,sha256=
-airflow/providers/cncf/kubernetes/operators/resource.py,sha256=
-airflow/providers/cncf/kubernetes/operators/spark_kubernetes.py,sha256=
+airflow/providers/cncf/kubernetes/operators/job.py,sha256=B4C3CbcJTnhqJQmMAbvWrvQGAU8_gfyOmYbsJ1NvraA,26896
+airflow/providers/cncf/kubernetes/operators/kueue.py,sha256=iDyw9hYaMWVLtBwjsmSXLsSoWW-uEEvh8stptgKOFVQ,5543
+airflow/providers/cncf/kubernetes/operators/pod.py,sha256=6BSyJNtmkPiLHNMamjDKxWMWSTEYIL0OMqpFtLNr2Do,64090
+airflow/providers/cncf/kubernetes/operators/resource.py,sha256=hm-ZVhqS08CiF1Csmd06KxAr40oelehuxUOwaSh30D0,7695
+airflow/providers/cncf/kubernetes/operators/spark_kubernetes.py,sha256=I_e1Jj4Y_xjapA5MH_sYa1P5ROF10JA5Xf2V4HYg5pQ,15991
 airflow/providers/cncf/kubernetes/pod_template_file_examples/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/cncf/kubernetes/pod_template_file_examples/dags_in_image_template.yaml,sha256=7JdppZ-XDBpv2Bnde2SthhcME8w3b8xQdPAK1fJGW60,2256
 airflow/providers/cncf/kubernetes/pod_template_file_examples/dags_in_volume_template.yaml,sha256=-Pk_EwKpyWRYZKOnumUxVrDeAfFJ0nr3WZ7JNnvppzg,2442
@@ -45,16 +45,16 @@ airflow/providers/cncf/kubernetes/resource_convert/configmap.py,sha256=gf7DdVeD0
 airflow/providers/cncf/kubernetes/resource_convert/env_variable.py,sha256=vBeR__dLHsG619rxHTmY1SSefSTdUhnD4HRKzzQJutM,1462
 airflow/providers/cncf/kubernetes/resource_convert/secret.py,sha256=ElZCMbTWeTKoPeIJ1fTvlqRXM8nGkWj2MrIlVckX6Ag,1494
 airflow/providers/cncf/kubernetes/sensors/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/cncf/kubernetes/sensors/spark_kubernetes.py,sha256=
+airflow/providers/cncf/kubernetes/sensors/spark_kubernetes.py,sha256=43fm2s5yL8Jx5r0g4tLMj9UGUTtGQRjzUjkvLsj2A0Y,5543
 airflow/providers/cncf/kubernetes/triggers/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/cncf/kubernetes/triggers/job.py,sha256=_lLP6ZYRV4kdwb7U0w5QFnlY1E9deZ5wtg-nrlfl6-8,7505
 airflow/providers/cncf/kubernetes/triggers/pod.py,sha256=AVk0-dJN_wjMeZtImMxan4JZ7sSl-allC8ga1o3WhKM,13388
 airflow/providers/cncf/kubernetes/utils/__init__.py,sha256=ClZN0VPjWySdVwS_ktH7rrgL9VLAcs3OSJSB9s3zaYw,863
 airflow/providers/cncf/kubernetes/utils/delete_from.py,sha256=poObZSoEJwQyaYWilEURs8f4CDY2sn_pfwS31Lf579A,5195
 airflow/providers/cncf/kubernetes/utils/k8s_resource_iterator.py,sha256=pl-G-2WhZVbewKkwmL9AxPo1hAQWHHEPK43b-ruF4-w,1937
-airflow/providers/cncf/kubernetes/utils/pod_manager.py,sha256=
+airflow/providers/cncf/kubernetes/utils/pod_manager.py,sha256=p2M-P2x1Vx8L6-V-VjZX79vymSey93AOSEcwWIu-RiY,42768
 airflow/providers/cncf/kubernetes/utils/xcom_sidecar.py,sha256=k6bdmVJ21OrAwGmWwledRrAmaty9ZrmbuM-IbaI4mqo,2519
-apache_airflow_providers_cncf_kubernetes-10.8.0rc1.dist-info/entry_points.txt,sha256=
-apache_airflow_providers_cncf_kubernetes-10.8.0rc1.dist-info/WHEEL,sha256=
-apache_airflow_providers_cncf_kubernetes-10.8.0rc1.dist-info/METADATA,sha256=
-apache_airflow_providers_cncf_kubernetes-10.8.0rc1.dist-info/RECORD,,
+apache_airflow_providers_cncf_kubernetes-10.8.1.dist-info/entry_points.txt,sha256=ByD3QJJyP9CfmTYtpNI1953akD38RUDgpGXLaq9vpOw,111
+apache_airflow_providers_cncf_kubernetes-10.8.1.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
+apache_airflow_providers_cncf_kubernetes-10.8.1.dist-info/METADATA,sha256=l0qloIakNp3bVro5MFdpiUsy0BhaQa-erFlQJ8t9liI,4302
+apache_airflow_providers_cncf_kubernetes-10.8.1.dist-info/RECORD,,
Files without changes:
- {apache_airflow_providers_cncf_kubernetes-10.8.0rc1.dist-info → apache_airflow_providers_cncf_kubernetes-10.8.1.dist-info}/WHEEL
- {apache_airflow_providers_cncf_kubernetes-10.8.0rc1.dist-info → apache_airflow_providers_cncf_kubernetes-10.8.1.dist-info}/entry_points.txt