apache-airflow-providers-cncf-kubernetes 10.12.2rc2 (py3-none-any.whl) → 10.12.3rc1 (py3-none-any.whl)
This diff shows the differences between publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.
- airflow/providers/cncf/kubernetes/__init__.py +1 -1
- airflow/providers/cncf/kubernetes/executors/kubernetes_executor.py +5 -1
- airflow/providers/cncf/kubernetes/hooks/kubernetes.py +126 -52
- airflow/providers/cncf/kubernetes/kubernetes_helper_functions.py +2 -1
- airflow/providers/cncf/kubernetes/operators/spark_kubernetes.py +29 -3
- {apache_airflow_providers_cncf_kubernetes-10.12.2rc2.dist-info → apache_airflow_providers_cncf_kubernetes-10.12.3rc1.dist-info}/METADATA +9 -9
- {apache_airflow_providers_cncf_kubernetes-10.12.2rc2.dist-info → apache_airflow_providers_cncf_kubernetes-10.12.3rc1.dist-info}/RECORD +11 -11
- {apache_airflow_providers_cncf_kubernetes-10.12.2rc2.dist-info → apache_airflow_providers_cncf_kubernetes-10.12.3rc1.dist-info}/WHEEL +0 -0
- {apache_airflow_providers_cncf_kubernetes-10.12.2rc2.dist-info → apache_airflow_providers_cncf_kubernetes-10.12.3rc1.dist-info}/entry_points.txt +0 -0
- {apache_airflow_providers_cncf_kubernetes-10.12.2rc2.dist-info → apache_airflow_providers_cncf_kubernetes-10.12.3rc1.dist-info}/licenses/LICENSE +0 -0
- {apache_airflow_providers_cncf_kubernetes-10.12.2rc2.dist-info → apache_airflow_providers_cncf_kubernetes-10.12.3rc1.dist-info}/licenses/NOTICE +0 -0

airflow/providers/cncf/kubernetes/__init__.py
@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version

 __all__ = ["__version__"]

-__version__ = "10.12.2"
+__version__ = "10.12.3"

 if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
 "2.11.0"
airflow/providers/cncf/kubernetes/executors/kubernetes_executor.py
@@ -367,6 +367,8 @@ class KubernetesExecutor(BaseExecutor):
 namespace = results.namespace
 failure_details = results.failure_details

+termination_reason: str | None = None
+
 if state == TaskInstanceState.FAILED:
 # Use pre-collected failure details from the watcher to avoid additional API calls
 if failure_details:
@@ -380,6 +382,8 @@ class KubernetesExecutor(BaseExecutor):
 container_type = failure_details.get("container_type")
 container_name = failure_details.get("container_name")

+termination_reason = f"Pod failed because of {pod_reason}"
+
 task_key_str = f"{key.dag_id}.{key.task_id}.{key.try_number}"
 self.log.warning(
 "Task %s failed in pod %s/%s. Pod phase: %s, reason: %s, message: %s, "
@@ -447,7 +451,7 @@ class KubernetesExecutor(BaseExecutor):
 state = None
 state = TaskInstanceState(state) if state else None

-self.event_buffer[key] = state,
+self.event_buffer[key] = state, termination_reason

 @staticmethod
 def _get_pod_namespace(ti: TaskInstance):
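The executor change above threads a human-readable termination reason through the result-handling path and stores it next to the task state in the event buffer. Below is a minimal sketch of that pattern, using a plain dict and string states rather than the real executor and `TaskInstanceState`; the helper name and signature are illustrative, not Airflow APIs.

```python
from __future__ import annotations


def record_result(event_buffer: dict, key: str, state: str | None, pod_reason: str | None) -> None:
    """Hypothetical helper mirroring the hunk above (not the executor's real method)."""
    termination_reason: str | None = None
    if state == "failed" and pod_reason:
        # Build the reason from the pre-collected pod failure details.
        termination_reason = f"Pod failed because of {pod_reason}"
    # The buffer value is now a (state, reason) tuple instead of a bare state.
    event_buffer[key] = state, termination_reason


buffer: dict = {}
record_result(buffer, "dag_id.task_id.1", "failed", "Evicted")
print(buffer["dag_id.task_id.1"])  # ('failed', 'Pod failed because of Evicted')
```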
airflow/providers/cncf/kubernetes/hooks/kubernetes.py
@@ -27,7 +27,6 @@ from typing import TYPE_CHECKING, Any, Protocol

 import aiofiles
 import requests
-from asgiref.sync import sync_to_async
 from kubernetes import client, config, utils, watch
 from kubernetes.client.models import V1Deployment
 from kubernetes.config import ConfigException
@@ -46,6 +45,7 @@ from airflow.providers.cncf.kubernetes.utils.container import (
 container_is_completed,
 container_is_running,
 )
+from airflow.providers.common.compat.connection import get_async_connection
 from airflow.providers.common.compat.sdk import AirflowException, AirflowNotFoundException, BaseHook
 from airflow.utils import yaml

@@ -816,48 +816,54 @@ class AsyncKubernetesHook(KubernetesHook):
 self.config_dict = config_dict
 self._extras: dict | None = connection_extras
 self._event_polling_fallback = False
+self._config_loaded = False

 async def _load_config(self):
-"""
+"""Load Kubernetes configuration once per hook instance."""
+if self._config_loaded:
+return
+
 in_cluster = self._coalesce_param(self.in_cluster, await self._get_field("in_cluster"))
 cluster_context = self._coalesce_param(self.cluster_context, await self._get_field("cluster_context"))
 kubeconfig_path = await self._get_field("kube_config_path")
 kubeconfig = await self._get_field("kube_config")
+
 num_selected_configuration = sum(
 1 for o in [in_cluster, kubeconfig, kubeconfig_path, self.config_dict] if o
 )

-async def api_client_from_kubeconfig_file(_kubeconfig_path: str | None):
-await async_config.load_kube_config(
-config_file=_kubeconfig_path,
-client_configuration=self.client_configuration,
-context=cluster_context,
-)
-return _TimeoutAsyncK8sApiClient()
-
 if num_selected_configuration > 1:
 raise AirflowException(
 "Invalid connection configuration. Options kube_config_path, "
-"kube_config, in_cluster are mutually exclusive. "
+"kube_config, in_cluster, and config_dict are mutually exclusive. "
 "You can only use one option at a time."
 )

 if in_cluster:
 self.log.debug(LOADING_KUBE_CONFIG_FILE_RESOURCE.format("within a pod"))
-self._is_in_cluster = True
 async_config.load_incluster_config()
-
+self._is_in_cluster = True
+self._config_loaded = True
+return
+
+# If above block does not return, we are not in a cluster.
+self._is_in_cluster = False

 if self.config_dict:
 self.log.debug(LOADING_KUBE_CONFIG_FILE_RESOURCE.format("config dictionary"))
-self._is_in_cluster = False
 await async_config.load_kube_config_from_dict(self.config_dict, context=cluster_context)
-
+self._config_loaded = True
+return

 if kubeconfig_path is not None:
 self.log.debug("loading kube_config from: %s", kubeconfig_path)
-
-
+await async_config.load_kube_config(
+config_file=kubeconfig_path,
+client_configuration=self.client_configuration,
+context=cluster_context,
+)
+self._config_loaded = True
+return

 if kubeconfig is not None:
 async with aiofiles.tempfile.NamedTemporaryFile() as temp_config:
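The `_load_config` rework in `hooks/kubernetes.py` above makes configuration loading an idempotent, load-once step guarded by `self._config_loaded`, with each branch marking the flag and returning. Below is a minimal sketch of that guard pattern, with a stand-in loader in place of `kubernetes_asyncio`'s `async_config` calls (class and method names here are simplified assumptions).

```python
import asyncio


class ConfigOnceMixin:
    def __init__(self) -> None:
        self._config_loaded = False

    async def _do_load(self) -> None:
        # Placeholder for the real work (in-cluster / config dict / kubeconfig path / file).
        await asyncio.sleep(0)

    async def _load_config(self) -> None:
        # Subsequent callers return immediately once configuration is loaded.
        if self._config_loaded:
            return
        await self._do_load()
        self._config_loaded = True


async def main() -> None:
    hook = ConfigOnceMixin()
    await hook._load_config()
    await hook._load_config()  # no-op on the second call
    print(hook._config_loaded)  # True


asyncio.run(main())
```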
@@ -874,18 +880,32 @@
 kubeconfig = json.dumps(kubeconfig)
 await temp_config.write(kubeconfig.encode())
 await temp_config.flush()
-
-
+
+await async_config.load_kube_config(
+config_file=temp_config.name,
+client_configuration=self.client_configuration,
+context=cluster_context,
+)
+self._config_loaded = True
+return
+
 self.log.debug(LOADING_KUBE_CONFIG_FILE_RESOURCE.format("default configuration file"))
 await async_config.load_kube_config(
 client_configuration=self.client_configuration,
 context=cluster_context,
 )
+self._config_loaded = True

 async def get_conn_extras(self) -> dict:
 if self._extras is None:
 if self.conn_id:
-
+try:
+connection = await get_async_connection(self.conn_id)
+except AirflowNotFoundException:
+if self.conn_id == self.default_conn_name:
+connection = Connection(conn_id=self.default_conn_name)
+else:
+raise
 self._extras = connection.extra_dejson
 else:
 self._extras = {}
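The `get_conn_extras` change above fetches the connection through `get_async_connection` and, only for the default connection id, falls back to an empty connection when none is configured. Below is a small sketch of that fallback logic with stand-in types; the exception class, registry, and fetch function are simplified assumptions, not the provider's real objects.

```python
import asyncio
from types import SimpleNamespace


class NotFound(Exception):
    pass


_REGISTRY = {"my_k8s": SimpleNamespace(extra_dejson={"in_cluster": True})}


async def fetch_connection(conn_id: str):
    """Hypothetical stand-in for get_async_connection."""
    try:
        return _REGISTRY[conn_id]
    except KeyError:
        raise NotFound(conn_id) from None


async def conn_extras(conn_id: str, default_conn_name: str = "kubernetes_default") -> dict:
    try:
        connection = await fetch_connection(conn_id)
    except NotFound:
        # Only the default connection id may silently fall back to an empty connection.
        if conn_id != default_conn_name:
            raise
        return {}
    return connection.extra_dejson


print(asyncio.run(conn_extras("my_k8s")))              # {'in_cluster': True}
print(asyncio.run(conn_extras("kubernetes_default")))  # {}
```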
@@ -907,7 +927,8 @@
 async def get_conn(self) -> AsyncGenerator[async_client.ApiClient, None]:
 kube_client = None
 try:
-
+await self._load_config()
+kube_client = _TimeoutAsyncK8sApiClient()
 yield kube_client
 finally:
 if kube_client is not None:
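The `get_conn` change above makes client creation explicit: configuration is loaded first (a cached no-op after the first call), then a fresh client is yielded and closed in `finally`. A compact sketch of that shape follows, with a stand-in client class and loader in place of `_TimeoutAsyncK8sApiClient` and `_load_config`.

```python
import asyncio
from contextlib import asynccontextmanager


class FakeClient:
    async def close(self) -> None:
        print("client closed")


@asynccontextmanager
async def get_conn(load_config):
    kube_client = None
    try:
        await load_config()          # configuration is guaranteed to be loaded first
        kube_client = FakeClient()   # stand-in for the real API client
        yield kube_client
    finally:
        if kube_client is not None:
            await kube_client.close()


async def main() -> None:
    async def load_config() -> None:
        print("config loaded")

    async with get_conn(load_config) as client:
        print("using", type(client).__name__)


asyncio.run(main())
```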
@@ -1021,48 +1042,101 @@
 timeout_seconds: int = 30,
 ) -> AsyncGenerator[CoreV1Event]:
 """
-Watch
+Watch Kubernetes events for a pod.
+
+Reconnects on watch termination and resumes from the last observed
+resourceVersion. The watch stops when the pod is terminal or deleted,
+and falls back to polling if watch access is denied.

 :param name: Pod name to watch events for
 :param namespace: Kubernetes namespace
 :param resource_version: Only return events not older than this resource version
 :param timeout_seconds: Timeout in seconds for the watch stream. A small additional buffer may be applied internally.
+This does not limit the total duration of event streaming.
 """
-
-async for event_polled in self.watch_pod_events_polling_fallback(
-name, namespace, resource_version, timeout_seconds
-):
-yield event_polled
-
-try:
-w = async_watch.Watch()
-async with self.get_conn() as connection:
-v1_api = async_client.CoreV1Api(connection)
+last_rv = resource_version

-
-
-
-field_selector=f"involvedObject.name={name}",
-resource_version=resource_version,
-timeout_seconds=timeout_seconds,
-):
-event: CoreV1Event = event_watched.get("object")
-yield event
-
-except async_client.exceptions.ApiException as e:
-if hasattr(e, "status") and e.status == 403:
-self.log.warning(
-"Triggerer does not have Kubernetes API permission to 'watch' events: %s Falling back to polling.",
-str(e),
-)
-self._event_polling_fallback = True
+while True:
+# If watch is known to be unavailable, use polling fallback
+if self._event_polling_fallback:
 async for event_polled in self.watch_pod_events_polling_fallback(
-name, namespace,
+name, namespace, last_rv, timeout_seconds
 ):
 yield event_polled
+return

-
-w
+# Watch may not be created if pod inspection triggers early return.
+w = None
+
+try:
+# Pod lifecycle is authoritative; events alone are not.
+pod = await self.get_pod(name=name, namespace=namespace)
+if pod.status and pod.status.phase in ("Succeeded", "Failed"):
+self.log.info(
+"Pod '%s' reached terminal phase '%s'; stopping event watch",
+name,
+pod.status.phase,
+)
+return
+
+w = async_watch.Watch()
+async with self.get_conn() as connection:
+v1_api = async_client.CoreV1Api(connection)
+
+async for event_watched in w.stream(
+v1_api.list_namespaced_event,
+namespace=namespace,
+field_selector=f"involvedObject.name={name}",
+resource_version=last_rv,
+timeout_seconds=timeout_seconds,
+):
+event = event_watched.get("object")
+if not event or not event.metadata:
+continue
+
+if event.metadata.resource_version:
+last_rv = event.metadata.resource_version
+
+yield event
+
+# Never swallow cancellation.
+except asyncio.CancelledError:
+raise
+
+except async_client.exceptions.ApiException as e:
+status = getattr(e, "status", None)
+
+if status == 403:
+# Permanently fall back to polling when watch is not permitted.
+self.log.warning(
+"Kubernetes API does not permit watching events; falling back to polling: %s",
+str(e),
+)
+self._event_polling_fallback = True
+continue
+
+if status == 404:
+# Terminate the watch if pod no longer exists.
+self.log.info("Pod '%s' no longer exists; stopping event watch", name)
+return
+
+if status == 410:
+# Restart watch from current state if resourceVersion is too old.
+self.log.info(
+"resourceVersion too old while watching pod '%s'; restarting watch",
+name,
+)
+last_rv = None
+continue
+
+# Other API errors are either transient or configuration/programming errors.
+# Re-raise so generic_api_retry can apply centralized retry/backoff for
+# transient failures, and fail fast for non-retryable ones.
+raise
+
+finally:
+if w is not None:
+w.stop()

 async def watch_pod_events_polling_fallback(
 self,
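The rewritten `watch_pod_events` above wraps the watch in a `while True` loop so it can resume from the last observed resourceVersion, fall back to polling on 403, stop on 404 or a terminal pod phase, and restart from scratch on 410. Below is a stripped-down sketch of just the reconnect-and-resume mechanics, with a fake event stream standing in for the Kubernetes API; the error type, status handling, and event shape are simplified assumptions.

```python
import asyncio


class ApiError(Exception):
    def __init__(self, status: int) -> None:
        super().__init__(status)
        self.status = status


async def fake_watch(resource_version):
    """Hypothetical stand-in for the Kubernetes event watch stream."""
    if resource_version is None:
        raise ApiError(410)  # simulate "resourceVersion too old" on the first attempt
    for i in (1, 2):
        yield {"resource_version": str(int(resource_version) + i), "message": f"event {i}"}


async def watch_events():
    last_rv = None
    while True:
        try:
            async for event in fake_watch(last_rv):
                last_rv = event["resource_version"]  # remember the resume point
                yield event
            return  # stream ended normally; the real loop also checks pod phase
        except ApiError as e:
            if e.status == 410:
                last_rv = "0"  # restart the watch from current state
                continue
            raise  # other statuses: defer to centralized retry / error handling


async def main() -> None:
    async for ev in watch_events():
        print(ev["message"])  # event 1, event 2


asyncio.run(main())
```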
airflow/providers/cncf/kubernetes/kubernetes_helper_functions.py
@@ -166,7 +166,8 @@ def annotations_to_key(annotations: dict[str, str]) -> TaskInstanceKey:

 # Compat: Look up the run_id from the TI table!
 from airflow.models.dagrun import DagRun
-from airflow.models.taskinstance import TaskInstance
+from airflow.models.taskinstance import TaskInstance
+from airflow.models.taskinstancekey import TaskInstanceKey
 from airflow.settings import Session

 logical_date_key = get_logical_date_key()
airflow/providers/cncf/kubernetes/operators/spark_kubernetes.py
@@ -17,6 +17,7 @@
 # under the License.
 from __future__ import annotations

+from datetime import datetime, timezone
 from functools import cached_property
 from pathlib import Path
 from typing import TYPE_CHECKING, Any, cast
@@ -29,7 +30,7 @@ from airflow.providers.cncf.kubernetes.kubernetes_helper_functions import add_un
 from airflow.providers.cncf.kubernetes.operators.custom_object_launcher import CustomObjectLauncher
 from airflow.providers.cncf.kubernetes.operators.pod import KubernetesPodOperator
 from airflow.providers.cncf.kubernetes.pod_generator import MAX_LABEL_LEN, PodGenerator
-from airflow.providers.cncf.kubernetes.utils.pod_manager import PodManager
+from airflow.providers.cncf.kubernetes.utils.pod_manager import PodManager, PodPhase
 from airflow.providers.common.compat.sdk import AirflowException
 from airflow.utils.helpers import prune_dict

@@ -235,6 +236,14 @@ class SparkKubernetesOperator(KubernetesPodOperator):
 return self.manage_template_specs()

 def find_spark_job(self, context, exclude_checked: bool = True):
+"""
+Find an existing Spark driver pod for this task instance.
+
+The pod is identified using Airflow task context labels. If multiple
+driver pods match the same labels (which can occur if cleanup did not
+run after an abrupt failure), a single pod is selected deterministically
+for reattachment, preferring a Running driver pod when present.
+"""
 label_selector = (
 self._build_find_pod_label_selector(context, exclude_checked=exclude_checked)
 + ",spark-role=driver"
@@ -242,8 +251,25 @@ class SparkKubernetesOperator(KubernetesPodOperator):
 pod_list = self.client.list_namespaced_pod(self.namespace, label_selector=label_selector).items

 pod = None
-if len(pod_list) > 1:
-
+if len(pod_list) > 1:
+# When multiple pods match the same labels, select one deterministically,
+# preferring a Running pod, then creation time, with name as a tie-breaker.
+pod = max(
+pod_list,
+key=lambda p: (
+p.status.phase == PodPhase.RUNNING,
+p.metadata.creation_timestamp or datetime.min.replace(tzinfo=timezone.utc),
+p.metadata.name or "",
+),
+)
+self.log.warning(
+"Found %d Spark driver pods matching labels %s; "
+"selecting pod %s for reattachment based on status and creation time.",
+len(pod_list),
+label_selector,
+pod.metadata.name,
+)
+
 if len(pod_list) == 1:
 pod = pod_list[0]
 self.log.info(
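`find_spark_job` above resolves the "multiple matching driver pods" case by picking one pod with `max()` over a tuple key: Running phase first, then creation timestamp, then name. The same selection rule is shown in isolation below, using a small dataclass in place of `V1Pod` with fields trimmed to what the key needs.

```python
from __future__ import annotations

from dataclasses import dataclass
from datetime import datetime, timezone


@dataclass
class FakePod:
    name: str
    phase: str
    created: datetime | None


def pick_pod(pods: list[FakePod]) -> FakePod:
    # Prefer Running pods, then the most recently created, then the lexically greatest name.
    return max(
        pods,
        key=lambda p: (
            p.phase == "Running",
            p.created or datetime.min.replace(tzinfo=timezone.utc),
            p.name,
        ),
    )


pods = [
    FakePod("driver-a", "Failed", datetime(2024, 1, 2, tzinfo=timezone.utc)),
    FakePod("driver-b", "Running", datetime(2024, 1, 1, tzinfo=timezone.utc)),
]
print(pick_pod(pods).name)  # driver-b: Running wins over the newer-but-failed pod
```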
{apache_airflow_providers_cncf_kubernetes-10.12.2rc2.dist-info → apache_airflow_providers_cncf_kubernetes-10.12.3rc1.dist-info}/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: apache-airflow-providers-cncf-kubernetes
-Version: 10.12.2rc2
+Version: 10.12.3rc1
 Summary: Provider package apache-airflow-providers-cncf-kubernetes for Apache Airflow
 Keywords: airflow-provider,cncf.kubernetes,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -24,15 +24,15 @@ License-File: LICENSE
 License-File: NOTICE
 Requires-Dist: aiofiles>=23.2.0
 Requires-Dist: apache-airflow>=2.11.0rc1
-Requires-Dist: apache-airflow-providers-common-compat>=1.
+Requires-Dist: apache-airflow-providers-common-compat>=1.13.0rc1
 Requires-Dist: asgiref>=3.5.2
 Requires-Dist: cryptography>=41.0.0,<46.0.0
 Requires-Dist: kubernetes>=35.0.0,<36.0.0
 Requires-Dist: urllib3>=2.1.0,!=2.6.0
 Requires-Dist: kubernetes_asyncio>=32.0.0,<35.0.0
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.staged.apache.org/docs/apache-airflow-providers-cncf-kubernetes/10.12.2/changelog.html
-Project-URL: Documentation, https://airflow.staged.apache.org/docs/apache-airflow-providers-cncf-kubernetes/10.12.2
+Project-URL: Changelog, https://airflow.staged.apache.org/docs/apache-airflow-providers-cncf-kubernetes/10.12.3/changelog.html
+Project-URL: Documentation, https://airflow.staged.apache.org/docs/apache-airflow-providers-cncf-kubernetes/10.12.3
 Project-URL: Mastodon, https://fosstodon.org/@airflow
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
@@ -63,7 +63,7 @@ Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/

 Package ``apache-airflow-providers-cncf-kubernetes``

-Release: ``10.12.2``
+Release: ``10.12.3``


 `Kubernetes <https://kubernetes.io/>`__
@@ -76,7 +76,7 @@ This is a provider package for ``cncf.kubernetes`` provider. All classes for thi
 are in ``airflow.providers.cncf.kubernetes`` python package.

 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/10.12.2/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/10.12.3/>`_.

 Installation
 ------------
@@ -95,10 +95,10 @@ PIP package Version required
 ========================================== ====================
 ``aiofiles`` ``>=23.2.0``
 ``apache-airflow`` ``>=2.11.0``
-``apache-airflow-providers-common-compat`` ``>=1.
+``apache-airflow-providers-common-compat`` ``>=1.13.0``
 ``asgiref`` ``>=3.5.2``
 ``cryptography`` ``>=41.0.0,<46.0.0``
-``kubernetes`` ``>=
+``kubernetes`` ``>=35.0.0,<36.0.0``
 ``urllib3`` ``>=2.1.0,!=2.6.0``
 ``kubernetes_asyncio`` ``>=32.0.0,<35.0.0``
 ========================================== ====================
@@ -123,5 +123,5 @@ Dependent package
 ================================================================================================================== =================

 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/10.12.2/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/10.12.3/changelog.html>`_.

{apache_airflow_providers_cncf_kubernetes-10.12.2rc2.dist-info → apache_airflow_providers_cncf_kubernetes-10.12.3rc1.dist-info}/RECORD
@@ -1,11 +1,11 @@
-airflow/providers/cncf/kubernetes/__init__.py,sha256=
+airflow/providers/cncf/kubernetes/__init__.py,sha256=GHZWa4Fwm2LfwekTqYrVDzqmKdqWsuGBiKMzuNnGKo0,1506
 airflow/providers/cncf/kubernetes/callbacks.py,sha256=svvPFkkllJh3Qo7wIG5pnSf_19c3f5kzyKVYHkhyXoE,6456
 airflow/providers/cncf/kubernetes/exceptions.py,sha256=iRrXBxaLPqYwUBt9zbadYgRbEDhGTo6I2mhLOa9F3DI,1707
 airflow/providers/cncf/kubernetes/get_provider_info.py,sha256=_V-bvjuAW1yWM_RlJFLPEdDpjaBkMXFYjRr8dAKQZT0,16138
 airflow/providers/cncf/kubernetes/k8s_model.py,sha256=xmdFhX29DjegoZ-cq8-KDL9soVYXf4OpU6fAGr3cPTU,2101
 airflow/providers/cncf/kubernetes/kube_client.py,sha256=AaTY2UhhKVa-qrhMvpiQjdUJhrQyndwQ_5PoRmWJy3k,5714
 airflow/providers/cncf/kubernetes/kube_config.py,sha256=PFw_n3QHaEEXXYYqAuOCabxWUOgrlUnEp0QpnT2J380,5155
-airflow/providers/cncf/kubernetes/kubernetes_helper_functions.py,sha256=
+airflow/providers/cncf/kubernetes/kubernetes_helper_functions.py,sha256=EnWRyKsOKNDHoIiFJY1JHwnL_RQkOSWgQr41fOEj5EM,7430
 airflow/providers/cncf/kubernetes/pod_generator.py,sha256=0VEcAtT2SzAFwSDsQWe2QdrY2mDV8s4hBw0qLcmIMGw,21038
 airflow/providers/cncf/kubernetes/python_kubernetes_script.jinja2,sha256=I0EHRGwLHjSiX85e51HBIoddRDnC8TJPFrDBqQq_NJg,1776
 airflow/providers/cncf/kubernetes/python_kubernetes_script.py,sha256=KnTlZSWCZhwvj89fSc2kgIRTaI4iLNKPquHc2wXnluo,3460
@@ -21,12 +21,12 @@ airflow/providers/cncf/kubernetes/decorators/__init__.py,sha256=mlJxuZLkd5x-iq2S
 airflow/providers/cncf/kubernetes/decorators/kubernetes.py,sha256=1qUiSHseMS31xU5jqRc2dJFq1Kor0yEMx1KKEULHWR4,6358
 airflow/providers/cncf/kubernetes/decorators/kubernetes_cmd.py,sha256=pdH2TGCYVywY0qPTosq7EoGE0oKd03q9OKka4qSsDI4,4722
 airflow/providers/cncf/kubernetes/executors/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
-airflow/providers/cncf/kubernetes/executors/kubernetes_executor.py,sha256=
+airflow/providers/cncf/kubernetes/executors/kubernetes_executor.py,sha256=nNKIOSNxgLkxb2rnKa2at1FVf83ZU9dHd_5JSM96uhg,32974
 airflow/providers/cncf/kubernetes/executors/kubernetes_executor_types.py,sha256=F0IlLbC6qKMVNZwqnbgUPxwFsZdcRhot2kwBhzc9gSM,2698
 airflow/providers/cncf/kubernetes/executors/kubernetes_executor_utils.py,sha256=kFbUmFDMMO5Xs7ndB8y0juU3T9CblwTboaAInbJ278M,31633
 airflow/providers/cncf/kubernetes/executors/local_kubernetes_executor.py,sha256=0f3Zay9w8iyQbo2kWQ3S1E2wbQ-EgQppktO2Lx7KdkE,12403
 airflow/providers/cncf/kubernetes/hooks/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/cncf/kubernetes/hooks/kubernetes.py,sha256=
+airflow/providers/cncf/kubernetes/hooks/kubernetes.py,sha256=Nr93TO3UvY1d26XJAueyjaO95Ei_OrDLlbLbz9MHc6M,48211
 airflow/providers/cncf/kubernetes/kubernetes_executor_templates/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/cncf/kubernetes/kubernetes_executor_templates/basic_template.yaml,sha256=yzJmXN4ZyB4aDwI_GIugpL9-f1YMVy__X-LQSbeU95A,2567
 airflow/providers/cncf/kubernetes/operators/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
@@ -35,7 +35,7 @@ airflow/providers/cncf/kubernetes/operators/job.py,sha256=nprHUQ0Nmw3U5LuU6KvhVw
 airflow/providers/cncf/kubernetes/operators/kueue.py,sha256=E0ZqMQzH2dtNOAaA2W5bAuaS-zRz_ohfOElQ1N7NSTA,5560
 airflow/providers/cncf/kubernetes/operators/pod.py,sha256=mYLAAwrNnvKIbhaFB27fG_l0dXo4ahvrPttcTKiWY0I,67989
 airflow/providers/cncf/kubernetes/operators/resource.py,sha256=NHU8LtC1B8mq9V6SgIwo1GWZREtmC1-plQb1DALpmCc,7506
-airflow/providers/cncf/kubernetes/operators/spark_kubernetes.py,sha256=
+airflow/providers/cncf/kubernetes/operators/spark_kubernetes.py,sha256=wpjJrKDYFf3yGCfh5hGEBePey5qvA-GPUGOlSWOjBdU,17614
 airflow/providers/cncf/kubernetes/pod_template_file_examples/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/cncf/kubernetes/pod_template_file_examples/dags_in_image_template.yaml,sha256=7JdppZ-XDBpv2Bnde2SthhcME8w3b8xQdPAK1fJGW60,2256
 airflow/providers/cncf/kubernetes/pod_template_file_examples/dags_in_volume_template.yaml,sha256=-Pk_EwKpyWRYZKOnumUxVrDeAfFJ0nr3WZ7JNnvppzg,2442
@@ -55,9 +55,9 @@ airflow/providers/cncf/kubernetes/utils/delete_from.py,sha256=poObZSoEJwQyaYWilE
 airflow/providers/cncf/kubernetes/utils/k8s_resource_iterator.py,sha256=pl-G-2WhZVbewKkwmL9AxPo1hAQWHHEPK43b-ruF4-w,1937
 airflow/providers/cncf/kubernetes/utils/pod_manager.py,sha256=sk09s55ggGTnjlv1K1ZLgWc49CS8Rq5Lixsqc_nG3Ds,45853
 airflow/providers/cncf/kubernetes/utils/xcom_sidecar.py,sha256=k6bdmVJ21OrAwGmWwledRrAmaty9ZrmbuM-IbaI4mqo,2519
-apache_airflow_providers_cncf_kubernetes-10.12.
-apache_airflow_providers_cncf_kubernetes-10.12.
-apache_airflow_providers_cncf_kubernetes-10.12.
-apache_airflow_providers_cncf_kubernetes-10.12.
-apache_airflow_providers_cncf_kubernetes-10.12.
-apache_airflow_providers_cncf_kubernetes-10.12.
+apache_airflow_providers_cncf_kubernetes-10.12.3rc1.dist-info/entry_points.txt,sha256=ByD3QJJyP9CfmTYtpNI1953akD38RUDgpGXLaq9vpOw,111
+apache_airflow_providers_cncf_kubernetes-10.12.3rc1.dist-info/licenses/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
+apache_airflow_providers_cncf_kubernetes-10.12.3rc1.dist-info/licenses/NOTICE,sha256=_cWHznIoUSbLCY_KfmKqetlKlsoH0c2VBjmZjElAzuc,168
+apache_airflow_providers_cncf_kubernetes-10.12.3rc1.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
+apache_airflow_providers_cncf_kubernetes-10.12.3rc1.dist-info/METADATA,sha256=XmkBg7RqGlEjkDeJHvntUL0qK3trvyVXVTi7MXvM-_M,5857
+apache_airflow_providers_cncf_kubernetes-10.12.3rc1.dist-info/RECORD,,
The remaining dist-info files (WHEEL, entry_points.txt, licenses/LICENSE, and licenses/NOTICE) change only in their dist-info path; their contents are unchanged.