apache-airflow-providers-cncf-kubernetes 10.12.0rc1__py3-none-any.whl → 10.12.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- airflow/providers/cncf/kubernetes/__init__.py +1 -1
- airflow/providers/cncf/kubernetes/callbacks.py +15 -1
- airflow/providers/cncf/kubernetes/cli/definition.py +115 -0
- airflow/providers/cncf/kubernetes/cli/kubernetes_command.py +3 -1
- airflow/providers/cncf/kubernetes/executors/kubernetes_executor.py +5 -83
- airflow/providers/cncf/kubernetes/executors/kubernetes_executor_utils.py +7 -2
- airflow/providers/cncf/kubernetes/executors/local_kubernetes_executor.py +6 -3
- airflow/providers/cncf/kubernetes/get_provider_info.py +1 -6
- airflow/providers/cncf/kubernetes/hooks/kubernetes.py +44 -14
- airflow/providers/cncf/kubernetes/kube_config.py +2 -24
- airflow/providers/cncf/kubernetes/kubernetes_helper_functions.py +2 -0
- airflow/providers/cncf/kubernetes/operators/job.py +23 -16
- airflow/providers/cncf/kubernetes/operators/pod.py +64 -27
- airflow/providers/cncf/kubernetes/triggers/pod.py +2 -4
- airflow/providers/cncf/kubernetes/utils/pod_manager.py +37 -25
- {apache_airflow_providers_cncf_kubernetes-10.12.0rc1.dist-info → apache_airflow_providers_cncf_kubernetes-10.12.2.dist-info}/METADATA +9 -9
- {apache_airflow_providers_cncf_kubernetes-10.12.0rc1.dist-info → apache_airflow_providers_cncf_kubernetes-10.12.2.dist-info}/RECORD +21 -20
- {apache_airflow_providers_cncf_kubernetes-10.12.0rc1.dist-info → apache_airflow_providers_cncf_kubernetes-10.12.2.dist-info}/licenses/NOTICE +1 -1
- {apache_airflow_providers_cncf_kubernetes-10.12.0rc1.dist-info → apache_airflow_providers_cncf_kubernetes-10.12.2.dist-info}/WHEEL +0 -0
- {apache_airflow_providers_cncf_kubernetes-10.12.0rc1.dist-info → apache_airflow_providers_cncf_kubernetes-10.12.2.dist-info}/entry_points.txt +0 -0
- {apache_airflow_providers_cncf_kubernetes-10.12.0rc1.dist-info → apache_airflow_providers_cncf_kubernetes-10.12.2.dist-info}/licenses/LICENSE +0 -0
airflow/providers/cncf/kubernetes/__init__.py

@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version
 
 __all__ = ["__version__"]
 
-__version__ = "10.12.
+__version__ = "10.12.2"
 
 if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
     "2.11.0"
airflow/providers/cncf/kubernetes/callbacks.py

@@ -23,6 +23,8 @@ import kubernetes.client as k8s
 import kubernetes_asyncio.client as async_k8s
 
 if TYPE_CHECKING:
+    from pendulum import DateTime
+
     from airflow.providers.cncf.kubernetes.operators.pod import KubernetesPodOperator
     from airflow.sdk import Context
 

@@ -189,12 +191,24 @@ class KubernetesPodOperatorCallback:
         pass
 
     @staticmethod
-    def progress_callback(
+    def progress_callback(
+        *,
+        line: str,
+        client: client_type,
+        mode: str,
+        container_name: str,
+        timestamp: DateTime | None,
+        pod: k8s.V1Pod,
+        **kwargs,
+    ) -> None:
         """
         Invoke this callback to process pod container logs.
 
         :param line: the read line of log.
         :param client: the Kubernetes client that can be used in the callback.
        :param mode: the current execution mode, it's one of (`sync`, `async`).
+        :param container_name: the name of the container from which the log line was read.
+        :param timestamp: the timestamp of the log line.
+        :param pod: the pod from which the log line was read.
         """
         pass
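The callback change above turns `progress_callback` into an all-keyword signature with three extra arguments. As a rough, hypothetical sketch of what a user-defined callback consuming the new arguments could look like (the class name, the `print` forwarding, and the placeholder `client`/`pod` values are illustrative, not the provider's real types):

```python
from datetime import datetime, timezone


class LogForwardingCallback:
    """Hypothetical callback mirroring the new keyword-only progress_callback signature."""

    @staticmethod
    def progress_callback(*, line, client, mode, container_name, timestamp, pod, **kwargs) -> None:
        # Forward each completed log line together with the container it came from and its timestamp.
        print(f"[{timestamp}] {container_name}: {line}")


# The provider now passes every argument by keyword, roughly like this:
LogForwardingCallback.progress_callback(
    line="task finished",
    client=None,            # a real kubernetes ApiClient in the provider
    mode="sync",
    container_name="base",
    timestamp=datetime.now(timezone.utc),
    pod=None,               # a real k8s.V1Pod in the provider
)
```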
airflow/providers/cncf/kubernetes/cli/definition.py (new file)

@@ -0,0 +1,115 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from airflow.cli.cli_config import (
+    ARG_DAG_ID,
+    ARG_OUTPUT_PATH,
+    ARG_VERBOSE,
+    ActionCommand,
+    Arg,
+    GroupCommand,
+    lazy_load_command,
+    positive_int,
+)
+from airflow.configuration import conf
+from airflow.providers.cncf.kubernetes.version_compat import AIRFLOW_V_3_0_PLUS
+
+if TYPE_CHECKING:
+    import argparse
+
+
+try:
+    from airflow.cli.cli_config import ARG_LOGICAL_DATE
+except ImportError:  # 2.x compatibility.
+    from airflow.cli.cli_config import (  # type: ignore[attr-defined, no-redef]
+        ARG_EXECUTION_DATE as ARG_LOGICAL_DATE,
+    )
+
+if AIRFLOW_V_3_0_PLUS:
+    from airflow.cli.cli_config import ARG_BUNDLE_NAME
+
+    ARG_COMPAT = ARG_BUNDLE_NAME
+else:
+    from airflow.cli.cli_config import ARG_SUBDIR  # type: ignore[attr-defined]
+
+    ARG_COMPAT = ARG_SUBDIR
+
+# CLI Args
+ARG_NAMESPACE = Arg(
+    ("--namespace",),
+    default=conf.get("kubernetes_executor", "namespace"),
+    help="Kubernetes Namespace. Default value is `[kubernetes] namespace` in configuration.",
+)
+
+ARG_MIN_PENDING_MINUTES = Arg(
+    ("--min-pending-minutes",),
+    default=30,
+    type=positive_int(allow_zero=False),
+    help=(
+        "Pending pods created before the time interval are to be cleaned up, "
+        "measured in minutes. Default value is 30(m). The minimum value is 5(m)."
+    ),
+)
+
+# CLI Commands
+KUBERNETES_COMMANDS = (
+    ActionCommand(
+        name="cleanup-pods",
+        help=(
+            "Clean up Kubernetes pods "
+            "(created by KubernetesExecutor/KubernetesPodOperator) "
+            "in evicted/failed/succeeded/pending states"
+        ),
+        func=lazy_load_command("airflow.providers.cncf.kubernetes.cli.kubernetes_command.cleanup_pods"),
+        args=(ARG_NAMESPACE, ARG_MIN_PENDING_MINUTES, ARG_VERBOSE),
+    ),
+    ActionCommand(
+        name="generate-dag-yaml",
+        help="Generate YAML files for all tasks in DAG. Useful for debugging tasks without "
+        "launching into a cluster",
+        func=lazy_load_command("airflow.providers.cncf.kubernetes.cli.kubernetes_command.generate_pod_yaml"),
+        args=(ARG_DAG_ID, ARG_LOGICAL_DATE, ARG_COMPAT, ARG_OUTPUT_PATH, ARG_VERBOSE),
+    ),
+)
+
+
+def get_kubernetes_cli_commands() -> list[GroupCommand]:
+    return [
+        GroupCommand(
+            name="kubernetes",
+            help="Tools to help run the KubernetesExecutor",
+            subcommands=KUBERNETES_COMMANDS,
+        )
+    ]
+
+
+def get_parser() -> argparse.ArgumentParser:
+    """
+    Generate documentation; used by Sphinx.
+
+    :meta private:
+    """
+    from airflow.cli.cli_parser import AirflowHelpFormatter, DefaultHelpParser, _add_command
+
+    parser = DefaultHelpParser(prog="airflow", formatter_class=AirflowHelpFormatter)
+    subparsers = parser.add_subparsers(dest="subcommand", metavar="GROUP_OR_COMMAND")
+    for group_command in get_kubernetes_cli_commands():
+        _add_command(subparsers, group_command)
+    return parser
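Assuming apache-airflow and this provider version are installed, the new module can be inspected directly; a minimal sketch (the attribute access relies on `GroupCommand`/`ActionCommand` being the plain named-tuple-style configs shown in the file above):

```python
from airflow.providers.cncf.kubernetes.cli.definition import get_kubernetes_cli_commands

# Inspect the provider-supplied CLI group without building a full parser.
(group,) = get_kubernetes_cli_commands()
print(group.name)                               # "kubernetes"
print([cmd.name for cmd in group.subcommands])  # ["cleanup-pods", "generate-dag-yaml"]
```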
airflow/providers/cncf/kubernetes/cli/kubernetes_command.py

@@ -37,6 +37,8 @@ from airflow.utils import cli as cli_utils, yaml
 from airflow.utils.providers_configuration_loader import providers_configuration_loaded
 from airflow.utils.types import DagRunType
 
+from tests_common.test_utils.taskinstance import create_task_instance
+
 if AIRFLOW_V_3_1_PLUS:
     from airflow.utils.cli import get_bagged_dag
 else:

@@ -73,7 +75,7 @@ def generate_pod_yaml(args):
         if AIRFLOW_V_3_0_PLUS:
             from uuid6 import uuid7
 
-            ti =
+            ti = create_task_instance(task, run_id=dr.run_id, dag_version_id=uuid7())
         else:
             ti = TaskInstance(task, run_id=dr.run_id)
             ti.dag_run = dr
airflow/providers/cncf/kubernetes/executors/kubernetes_executor.py

@@ -39,25 +39,6 @@ from deprecated import deprecated
 from kubernetes.dynamic import DynamicClient
 from sqlalchemy import select
 
-from airflow.providers.cncf.kubernetes.pod_generator import PodGenerator
-from airflow.providers.cncf.kubernetes.version_compat import AIRFLOW_V_3_0_PLUS
-
-try:
-    from airflow.cli.cli_config import ARG_LOGICAL_DATE
-except ImportError:  # 2.x compatibility.
-    from airflow.cli.cli_config import (  # type: ignore[attr-defined, no-redef]
-        ARG_EXECUTION_DATE as ARG_LOGICAL_DATE,
-    )
-from airflow.cli.cli_config import (
-    ARG_DAG_ID,
-    ARG_OUTPUT_PATH,
-    ARG_VERBOSE,
-    ActionCommand,
-    Arg,
-    GroupCommand,
-    lazy_load_command,
-    positive_int,
-)
 from airflow.configuration import conf
 from airflow.exceptions import AirflowProviderDeprecationWarning
 from airflow.executors.base_executor import BaseExecutor

@@ -70,19 +51,21 @@ from airflow.providers.cncf.kubernetes.executors.kubernetes_executor_types impor
 )
 from airflow.providers.cncf.kubernetes.kube_config import KubeConfig
 from airflow.providers.cncf.kubernetes.kubernetes_helper_functions import annotations_to_key
+from airflow.providers.cncf.kubernetes.pod_generator import PodGenerator
+from airflow.providers.cncf.kubernetes.version_compat import AIRFLOW_V_3_0_PLUS
 from airflow.providers.common.compat.sdk import Stats
 from airflow.utils.log.logging_mixin import remove_escape_codes
 from airflow.utils.session import NEW_SESSION, provide_session
 from airflow.utils.state import TaskInstanceState
 
 if TYPE_CHECKING:
-    import argparse
     from collections.abc import Sequence
 
     from kubernetes import client
     from kubernetes.client import models as k8s
     from sqlalchemy.orm import Session
 
+    from airflow.cli.cli_config import GroupCommand
     from airflow.executors import workloads
     from airflow.models.taskinstance import TaskInstance
     from airflow.models.taskinstancekey import TaskInstanceKey

@@ -91,54 +74,6 @@ if TYPE_CHECKING:
     )
 
 
-if AIRFLOW_V_3_0_PLUS:
-    from airflow.cli.cli_config import ARG_BUNDLE_NAME
-
-    ARG_COMPAT = ARG_BUNDLE_NAME
-else:
-    from airflow.cli.cli_config import ARG_SUBDIR  # type: ignore[attr-defined]
-
-    ARG_COMPAT = ARG_SUBDIR
-
-# CLI Args
-ARG_NAMESPACE = Arg(
-    ("--namespace",),
-    default=conf.get("kubernetes_executor", "namespace"),
-    help="Kubernetes Namespace. Default value is `[kubernetes] namespace` in configuration.",
-)
-
-ARG_MIN_PENDING_MINUTES = Arg(
-    ("--min-pending-minutes",),
-    default=30,
-    type=positive_int(allow_zero=False),
-    help=(
-        "Pending pods created before the time interval are to be cleaned up, "
-        "measured in minutes. Default value is 30(m). The minimum value is 5(m)."
-    ),
-)
-
-# CLI Commands
-KUBERNETES_COMMANDS = (
-    ActionCommand(
-        name="cleanup-pods",
-        help=(
-            "Clean up Kubernetes pods "
-            "(created by KubernetesExecutor/KubernetesPodOperator) "
-            "in evicted/failed/succeeded/pending states"
-        ),
-        func=lazy_load_command("airflow.providers.cncf.kubernetes.cli.kubernetes_command.cleanup_pods"),
-        args=(ARG_NAMESPACE, ARG_MIN_PENDING_MINUTES, ARG_VERBOSE),
-    ),
-    ActionCommand(
-        name="generate-dag-yaml",
-        help="Generate YAML files for all tasks in DAG. Useful for debugging tasks without "
-        "launching into a cluster",
-        func=lazy_load_command("airflow.providers.cncf.kubernetes.cli.kubernetes_command.generate_pod_yaml"),
-        args=(ARG_DAG_ID, ARG_LOGICAL_DATE, ARG_COMPAT, ARG_OUTPUT_PATH, ARG_VERBOSE),
-    ),
-)
-
-
 class KubernetesExecutor(BaseExecutor):
     """Executor for Kubernetes."""
 

@@ -812,19 +747,6 @@ class KubernetesExecutor(BaseExecutor):
 
     @staticmethod
     def get_cli_commands() -> list[GroupCommand]:
-
-            GroupCommand(
-                name="kubernetes",
-                help="Tools to help run the KubernetesExecutor",
-                subcommands=KUBERNETES_COMMANDS,
-            )
-        ]
-
-
-    def _get_parser() -> argparse.ArgumentParser:
-        """
-        Generate documentation; used by Sphinx.
+        from airflow.providers.cncf.kubernetes.cli.definition import get_kubernetes_cli_commands
 
-
-        """
-        return KubernetesExecutor._get_parser()
+        return get_kubernetes_cli_commands()
airflow/providers/cncf/kubernetes/executors/kubernetes_executor_utils.py

@@ -47,18 +47,23 @@ from airflow.providers.cncf.kubernetes.kubernetes_helper_functions import (
 from airflow.providers.cncf.kubernetes.pod_generator import PodGenerator, workload_to_command_args
 from airflow.providers.common.compat.sdk import AirflowException
 from airflow.utils.log.logging_mixin import LoggingMixin
-from airflow.utils.singleton import Singleton
 from airflow.utils.state import TaskInstanceState
 
 if TYPE_CHECKING:
     from kubernetes.client import Configuration, models as k8s
 
 
-class ResourceVersion
+class ResourceVersion:
     """Singleton for tracking resourceVersion from Kubernetes."""
 
+    _instance: ResourceVersion | None = None
     resource_version: dict[str, str] = {}
 
+    def __new__(cls):
+        if cls._instance is None:
+            cls._instance = super().__new__(cls)
+        return cls._instance
+
 
 class KubernetesJobWatcher(multiprocessing.Process, LoggingMixin):
     """Watches for Kubernetes jobs."""
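The hunk above replaces the `airflow.utils.singleton.Singleton` metaclass with a plain `__new__`-based singleton. The pattern can be seen in isolation with a standalone class; this sketch is only illustrative and does not touch the provider code:

```python
from __future__ import annotations


class ResourceVersionSketch:
    """Standalone illustration of the __new__-based singleton used above."""

    _instance: ResourceVersionSketch | None = None
    resource_version: dict[str, str] = {}

    def __new__(cls):
        # Create the instance once; every later call returns the same object.
        if cls._instance is None:
            cls._instance = super().__new__(cls)
        return cls._instance


a = ResourceVersionSketch()
b = ResourceVersionSketch()
assert a is b
a.resource_version["default"] = "12345"
print(b.resource_version)  # {'default': '12345'} - shared state, same instance
```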
airflow/providers/cncf/kubernetes/executors/local_kubernetes_executor.py

@@ -25,12 +25,12 @@ from deprecated import deprecated
 from airflow.configuration import conf
 from airflow.exceptions import AirflowProviderDeprecationWarning
 from airflow.executors.base_executor import BaseExecutor
-from airflow.providers.cncf.kubernetes.executors.kubernetes_executor import KubernetesExecutor
 from airflow.providers.cncf.kubernetes.version_compat import AIRFLOW_V_3_0_PLUS
 
 if TYPE_CHECKING:
     from airflow.callbacks.base_callback_sink import BaseCallbackSink
     from airflow.callbacks.callback_requests import CallbackRequest
+    from airflow.cli.cli_config import GroupCommand
     from airflow.executors.base_executor import EventBufferValueType
     from airflow.executors.local_executor import LocalExecutor
     from airflow.models.taskinstance import (  # type: ignore[attr-defined]

@@ -38,6 +38,7 @@ if TYPE_CHECKING:
         TaskInstance,
         TaskInstanceKey,
     )
+    from airflow.providers.cncf.kubernetes.executors.kubernetes_executor import KubernetesExecutor
 
 CommandType = Sequence[str]
 

@@ -302,5 +303,7 @@ class LocalKubernetesExecutor(BaseExecutor):
         self.callback_sink.send(request)
 
     @staticmethod
-    def get_cli_commands() -> list:
-
+    def get_cli_commands() -> list[GroupCommand]:
+        from airflow.providers.cncf.kubernetes.cli.definition import get_kubernetes_cli_commands
+
+        return get_kubernetes_cli_commands()
airflow/providers/cncf/kubernetes/get_provider_info.py

@@ -135,8 +135,6 @@ def get_provider_info():
                     "type": "string",
                     "example": None,
                     "default": "",
-                    "deprecated": True,
-                    "deprecation_reason": "This configuration is deprecated. Use `pod_template_file` to specify container image instead.\n",
                 },
                 "worker_container_tag": {
                     "description": "The tag of the Kubernetes Image for the Worker to Run\n",

@@ -144,8 +142,6 @@ def get_provider_info():
                     "type": "string",
                     "example": None,
                     "default": "",
-                    "deprecated": True,
-                    "deprecation_reason": "This configuration is deprecated. Use `pod_template_file` to specify the image tag instead.\n",
                 },
                 "namespace": {
                     "description": "The Kubernetes namespace where airflow workers should be created. Defaults to ``default``\n",

@@ -153,8 +149,6 @@ def get_provider_info():
                     "type": "string",
                     "example": None,
                     "default": "default",
-                    "deprecated": True,
-                    "deprecation_reason": "This configuration is deprecated. Use `pod_template_file` to specify namespace instead.\n",
                 },
                 "delete_worker_pods": {
                     "description": "If True, all worker pods will be deleted upon termination\n",

@@ -286,4 +280,5 @@ def get_provider_info():
             },
         },
         "executors": ["airflow.providers.cncf.kubernetes.executors.kubernetes_executor.KubernetesExecutor"],
+        "cli": ["airflow.providers.cncf.kubernetes.cli.definition.get_kubernetes_cli_commands"],
     }
airflow/providers/cncf/kubernetes/hooks/kubernetes.py

@@ -20,6 +20,7 @@ import asyncio
 import contextlib
 import json
 import tempfile
+from collections.abc import AsyncGenerator
 from functools import cached_property
 from time import sleep
 from typing import TYPE_CHECKING, Any, Protocol

@@ -36,7 +37,11 @@ from urllib3.exceptions import HTTPError
 from airflow.models import Connection
 from airflow.providers.cncf.kubernetes.exceptions import KubernetesApiError, KubernetesApiPermissionError
 from airflow.providers.cncf.kubernetes.kube_client import _disable_verify_ssl, _enable_tcp_keepalive
-from airflow.providers.cncf.kubernetes.kubernetes_helper_functions import
+from airflow.providers.cncf.kubernetes.kubernetes_helper_functions import (
+    API_TIMEOUT,
+    API_TIMEOUT_OFFSET_SERVER_SIDE,
+    generic_api_retry,
+)
 from airflow.providers.cncf.kubernetes.utils.container import (
     container_is_completed,
     container_is_running,

@@ -68,6 +73,31 @@ def _load_body_to_dict(body: str) -> dict:
     return body_dict
 
 
+def _get_request_timeout(timeout_seconds: int | None) -> float:
+    """Get the client-side request timeout."""
+    if timeout_seconds is not None and timeout_seconds > API_TIMEOUT - API_TIMEOUT_OFFSET_SERVER_SIDE:
+        return timeout_seconds + API_TIMEOUT_OFFSET_SERVER_SIDE
+    return API_TIMEOUT
+
+
+class _TimeoutK8sApiClient(client.ApiClient):
+    """Wrapper around kubernetes sync ApiClient to set default timeout."""
+
+    def call_api(self, *args, **kwargs):
+        timeout_seconds = kwargs.get("timeout_seconds")  # get server-side timeout
+        kwargs.setdefault("_request_timeout", _get_request_timeout(timeout_seconds))  # client-side timeout
+        return super().call_api(*args, **kwargs)
+
+
+class _TimeoutAsyncK8sApiClient(async_client.ApiClient):
+    """Wrapper around kubernetes async ApiClient to set default timeout."""
+
+    async def call_api(self, *args, **kwargs):
+        timeout_seconds = kwargs.get("timeout_seconds")  # server-side timeout
+        kwargs.setdefault("_request_timeout", _get_request_timeout(timeout_seconds))  # client-side timeout
+        return await super().call_api(*args, **kwargs)
+
+
 class PodOperatorHookProtocol(Protocol):
     """
     Protocol to define methods relied upon by KubernetesPodOperator.

@@ -272,7 +302,7 @@ class KubernetesHook(BaseHook, PodOperatorHookProtocol):
             self.log.debug("loading kube_config from: in_cluster configuration")
             self._is_in_cluster = True
             config.load_incluster_config()
-            return
+            return _TimeoutK8sApiClient()
 
         if kubeconfig_path is not None:
             self.log.debug("loading kube_config from: %s", kubeconfig_path)

@@ -282,7 +312,7 @@ class KubernetesHook(BaseHook, PodOperatorHookProtocol):
                 client_configuration=self.client_configuration,
                 context=cluster_context,
             )
-            return
+            return _TimeoutK8sApiClient()
 
         if kubeconfig is not None:
             with tempfile.NamedTemporaryFile() as temp_config:

@@ -297,7 +327,7 @@ class KubernetesHook(BaseHook, PodOperatorHookProtocol):
                     client_configuration=self.client_configuration,
                     context=cluster_context,
                 )
-                return
+                return _TimeoutK8sApiClient()
 
         if self.config_dict:
             self.log.debug(LOADING_KUBE_CONFIG_FILE_RESOURCE.format("config dictionary"))

@@ -307,7 +337,7 @@ class KubernetesHook(BaseHook, PodOperatorHookProtocol):
                 client_configuration=self.client_configuration,
                 context=cluster_context,
             )
-            return
+            return _TimeoutK8sApiClient()
 
         return self._get_default_client(cluster_context=cluster_context)
 

@@ -326,7 +356,7 @@ class KubernetesHook(BaseHook, PodOperatorHookProtocol):
                 client_configuration=self.client_configuration,
                 context=cluster_context,
             )
-            return
+            return _TimeoutK8sApiClient()
 
     @property
     def is_in_cluster(self) -> bool:

@@ -803,7 +833,7 @@ class AsyncKubernetesHook(KubernetesHook):
                 client_configuration=self.client_configuration,
                 context=cluster_context,
             )
-            return
+            return _TimeoutAsyncK8sApiClient()
 
         if num_selected_configuration > 1:
             raise AirflowException(

@@ -816,13 +846,13 @@ class AsyncKubernetesHook(KubernetesHook):
             self.log.debug(LOADING_KUBE_CONFIG_FILE_RESOURCE.format("within a pod"))
             self._is_in_cluster = True
             async_config.load_incluster_config()
-            return
+            return _TimeoutAsyncK8sApiClient()
 
         if self.config_dict:
             self.log.debug(LOADING_KUBE_CONFIG_FILE_RESOURCE.format("config dictionary"))
             self._is_in_cluster = False
             await async_config.load_kube_config_from_dict(self.config_dict, context=cluster_context)
-            return
+            return _TimeoutAsyncK8sApiClient()
 
         if kubeconfig_path is not None:
             self.log.debug("loading kube_config from: %s", kubeconfig_path)

@@ -874,10 +904,10 @@ class AsyncKubernetesHook(KubernetesHook):
         return extras.get(prefixed_name)
 
     @contextlib.asynccontextmanager
-    async def get_conn(self) -> async_client.ApiClient:
+    async def get_conn(self) -> AsyncGenerator[async_client.ApiClient, None]:
         kube_client = None
         try:
-            kube_client = await self._load_config() or
+            kube_client = await self._load_config() or _TimeoutAsyncK8sApiClient()
             yield kube_client
         finally:
             if kube_client is not None:

@@ -951,8 +981,8 @@ class AsyncKubernetesHook(KubernetesHook):
                 timestamps=True,
                 since_seconds=since_seconds,
             )
-
-            return
+            logs_list: list[str] = logs.splitlines()
+            return logs_list
         except HTTPError as e:
             raise KubernetesApiError from e
 

@@ -996,7 +1026,7 @@ class AsyncKubernetesHook(KubernetesHook):
        :param name: Pod name to watch events for
        :param namespace: Kubernetes namespace
        :param resource_version: Only return events not older than this resource version
-        :param timeout_seconds: Timeout in seconds for the watch stream
+        :param timeout_seconds: Timeout in seconds for the watch stream. A small additional buffer may be applied internally.
         """
         if self._event_polling_fallback:
             async for event_polled in self.watch_pod_events_polling_fallback(
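The hook change above reduces to two pieces: derive a client-side timeout that stays slightly ahead of any server-side `timeout_seconds`, and inject it with `kwargs.setdefault` so an explicit `_request_timeout` supplied by the caller is never overridden. A self-contained sketch under those assumptions (the constants mirror the ones added to `kubernetes_helper_functions.py`; `FakeApiClient` is a stand-in, not the real kubernetes `ApiClient`):

```python
from __future__ import annotations

API_TIMEOUT = 60                     # default client-side timeout, seconds
API_TIMEOUT_OFFSET_SERVER_SIDE = 5   # keep the client slightly more patient than the server


def get_request_timeout(timeout_seconds: int | None) -> float:
    if timeout_seconds is not None and timeout_seconds > API_TIMEOUT - API_TIMEOUT_OFFSET_SERVER_SIDE:
        return timeout_seconds + API_TIMEOUT_OFFSET_SERVER_SIDE
    return API_TIMEOUT


class FakeApiClient:
    """Stand-in for kubernetes.client.ApiClient; just echoes the timeout it was given."""

    def call_api(self, *args, **kwargs):
        return kwargs.get("_request_timeout")


class TimeoutApiClient(FakeApiClient):
    def call_api(self, *args, **kwargs):
        # Respect a server-side watch timeout if one was requested...
        timeout_seconds = kwargs.get("timeout_seconds")
        # ...but only set the client-side timeout when the caller did not pass one explicitly.
        kwargs.setdefault("_request_timeout", get_request_timeout(timeout_seconds))
        return super().call_api(*args, **kwargs)


c = TimeoutApiClient()
print(c.call_api())                        # 60  (default)
print(c.call_api(timeout_seconds=120))     # 125 (server timeout + offset)
print(c.call_api(_request_timeout=10))     # 10  (explicit value wins)
```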
airflow/providers/cncf/kubernetes/kube_config.py

@@ -16,10 +16,8 @@
 # under the License.
 from __future__ import annotations
 
-import warnings
-
 from airflow.configuration import conf
-from airflow.exceptions import AirflowConfigException
+from airflow.exceptions import AirflowConfigException
 from airflow.settings import AIRFLOW_HOME
 
 

@@ -55,21 +53,8 @@ class KubeConfig:
             self.kubernetes_section, "worker_pods_creation_batch_size"
         )
         self.worker_container_repository = conf.get(self.kubernetes_section, "worker_container_repository")
-        if self.worker_container_repository:
-            warnings.warn(
-                "Configuration 'worker_container_repository' is deprecated. "
-                "Use 'pod_template_file' to specify the container image repository instead.",
-                AirflowProviderDeprecationWarning,
-                stacklevel=2,
-            )
         self.worker_container_tag = conf.get(self.kubernetes_section, "worker_container_tag")
-
-            warnings.warn(
-                "Configuration 'worker_container_tag' is deprecated. "
-                "Use 'pod_template_file' to specify the container image tag instead.",
-                AirflowProviderDeprecationWarning,
-                stacklevel=2,
-            )
+
         if self.worker_container_repository and self.worker_container_tag:
             self.kube_image = f"{self.worker_container_repository}:{self.worker_container_tag}"
         else:

@@ -80,13 +65,6 @@ class KubeConfig:
         # cluster has RBAC enabled, your scheduler may need service account permissions to
         # create, watch, get, and delete pods in this namespace.
         self.kube_namespace = conf.get(self.kubernetes_section, "namespace")
-        if self.kube_namespace and self.kube_namespace != "default":
-            warnings.warn(
-                "Configuration 'namespace' is deprecated. "
-                "Use 'pod_template_file' to specify the namespace instead.",
-                AirflowProviderDeprecationWarning,
-                stacklevel=2,
-            )
         self.multi_namespace_mode = conf.getboolean(self.kubernetes_section, "multi_namespace_mode")
         if self.multi_namespace_mode and conf.get(
             self.kubernetes_section, "multi_namespace_mode_namespace_list"
airflow/providers/cncf/kubernetes/kubernetes_helper_functions.py

@@ -52,6 +52,8 @@ class KubernetesApiException(AirflowException):
     """When communication with kubernetes API fails."""
 
 
+API_TIMEOUT = 60  # allow 1 min of timeout for kubernetes api calls
+API_TIMEOUT_OFFSET_SERVER_SIDE = 5  # offset to the server side timeout for the client side timeout
 API_RETRIES = conf.getint("workers", "api_retries", fallback=5)
 API_RETRY_WAIT_MIN = conf.getfloat("workers", "api_retry_wait_min", fallback=1)
 API_RETRY_WAIT_MAX = conf.getfloat("workers", "api_retry_wait_max", fallback=15)
airflow/providers/cncf/kubernetes/operators/job.py

@@ -81,6 +81,7 @@ class KubernetesJobOperator(KubernetesPodOperator):
     :param completions: Specifies the desired number of successfully finished pods the job should be run with.
     :param manual_selector: manualSelector controls generation of pod labels and pod selectors.
     :param parallelism: Specifies the maximum desired number of pods the job should run at any given time.
+        The value here must be >=1. Default value is 1
     :param selector: The selector of this V1JobSpec.
     :param suspend: Suspend specifies whether the Job controller should create Pods or not.
     :param ttl_seconds_after_finished: ttlSecondsAfterFinished limits the lifetime of a Job that has finished execution (either Complete or Failed).

@@ -114,7 +115,7 @@ class KubernetesJobOperator(KubernetesPodOperator):
         completion_mode: str | None = None,
         completions: int | None = None,
         manual_selector: bool | None = None,
-        parallelism: int
+        parallelism: int = 1,
         selector: k8s.V1LabelSelector | None = None,
         suspend: bool | None = None,
         ttl_seconds_after_finished: int | None = None,

@@ -188,6 +189,7 @@ class KubernetesJobOperator(KubernetesPodOperator):
         return job_request_obj
 
     def execute(self, context: Context):
+        self.name = self._set_name(self.name)
         if self.deferrable and not self.wait_until_job_complete:
             self.log.warning(
                 "Deferrable mode is available only with parameter `wait_until_job_complete=True`. "

@@ -198,6 +200,16 @@ class KubernetesJobOperator(KubernetesPodOperator):
                 "Getting Logs and pushing to XCom are available only with parameter `wait_until_job_complete=True`. "
                 "Please, set it up."
             )
+        if self.parallelism is None:
+            warnings.warn(
+                "parallelism should be set explicitly. Defaulting to 1.",
+                AirflowProviderDeprecationWarning,
+                stacklevel=2,
+            )
+            self.parallelism = 1
+        elif self.wait_until_job_complete and self.parallelism < 1:
+            # get_pods() will raise an error if parallelism = 0
+            raise AirflowException("parallelism cannot be less than 1 with `wait_until_job_complete=True`.")
         self.job_request_obj = self.build_job_request_obj(context)
         self.job = self.create_job(  # must set `self.job` for `on_kill`
             job_request_obj=self.job_request_obj

@@ -207,22 +219,15 @@ class KubernetesJobOperator(KubernetesPodOperator):
             ti.xcom_push(key="job_name", value=self.job.metadata.name)
             ti.xcom_push(key="job_namespace", value=self.job.metadata.namespace)
 
-        self.
-
-
-
-                pod_request_obj=self.pod_request_obj,
-                context=context,
-            )
-        ]
-        else:
-            self.pods = self.get_pods(pod_request_obj=self.pod_request_obj, context=context)
+        if self.wait_until_job_complete:
+            self.pods: Sequence[k8s.V1Pod] = self.get_pods(
+                pod_request_obj=self.pod_request_obj, context=context
+            )
 
-
-
-
+        if self.deferrable:
+            self.execute_deferrable()
+            return
 
-        if self.wait_until_job_complete:
             if self.do_xcom_push:
                 xcom_result = []
                 for pod in self.pods:

@@ -460,7 +465,9 @@ class KubernetesJobOperator(KubernetesPodOperator):
         pod_list: Sequence[k8s.V1Pod] = []
         retry_number: int = 0
 
-        while
+        while retry_number <= self.discover_pods_retry_number:
+            if len(pod_list) == self.parallelism:
+                break
             pod_list = self.client.list_namespaced_pod(
                 namespace=pod_request_obj.metadata.namespace,
                 label_selector=label_selector,
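The new `parallelism` handling above warns and defaults when `None` is still passed, and rejects values below 1 when pods must be discovered. A hedged, standalone sketch of that decision logic (the function name and the plain `DeprecationWarning`/`ValueError` are illustrative substitutes for the operator's Airflow-specific warning and exception classes):

```python
import warnings


def resolve_parallelism(parallelism, wait_until_job_complete):
    """Illustrative stand-in for the validation KubernetesJobOperator.execute now performs."""
    if parallelism is None:
        # Older DAGs that still pass None keep working, but are nudged toward an explicit value.
        warnings.warn("parallelism should be set explicitly. Defaulting to 1.", DeprecationWarning, stacklevel=2)
        return 1
    if wait_until_job_complete and parallelism < 1:
        # get_pods() cannot discover pods for a job that is not allowed to run any.
        raise ValueError("parallelism cannot be less than 1 with wait_until_job_complete=True.")
    return parallelism


print(resolve_parallelism(None, wait_until_job_complete=False))  # 1, with a DeprecationWarning
print(resolve_parallelism(3, wait_until_job_complete=True))      # 3
```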
airflow/providers/cncf/kubernetes/operators/pod.py

@@ -20,6 +20,7 @@ from __future__ import annotations
 
 import asyncio
 import datetime
+import inspect
 import json
 import logging
 import math

@@ -63,7 +64,7 @@ from airflow.providers.cncf.kubernetes.kubernetes_helper_functions import (
     generic_api_retry,
 )
 from airflow.providers.cncf.kubernetes.pod_generator import PodGenerator
-from airflow.providers.cncf.kubernetes.triggers.pod import KubernetesPodTrigger
+from airflow.providers.cncf.kubernetes.triggers.pod import ContainerState, KubernetesPodTrigger
 from airflow.providers.cncf.kubernetes.utils import xcom_sidecar
 from airflow.providers.cncf.kubernetes.utils.container import (
     container_is_succeeded,

@@ -852,7 +853,14 @@ class KubernetesPodOperator(BaseOperator):
             ti.xcom_push(key="pod_name", value=self.pod.metadata.name)
             ti.xcom_push(key="pod_namespace", value=self.pod.metadata.namespace)
 
-
+        # Check if invoke_defer_method accepts context parameter
+        # This might happen if the KPO is extended by for example old Google
+        # provider where invoke_defer_method does not accept context parameter
+        sig = inspect.signature(self.invoke_defer_method)
+        if "context" in sig.parameters:
+            self.invoke_defer_method(context=context)
+        else:
+            self.invoke_defer_method()
 
     def convert_config_file_to_dict(self):
         """Convert passed config_file to dict representation."""

@@ -863,7 +871,9 @@ class KubernetesPodOperator(BaseOperator):
         else:
             self._config_dict = None
 
-    def invoke_defer_method(
+    def invoke_defer_method(
+        self, last_log_time: DateTime | None = None, context: Context | None = None
+    ) -> None:
         """Redefine triggers which are being used in child classes."""
         self.convert_config_file_to_dict()
 

@@ -882,29 +892,47 @@ class KubernetesPodOperator(BaseOperator):
             self.log.info("Successfully resolved connection extras for deferral.")
 
         trigger_start_time = datetime.datetime.now(tz=datetime.timezone.utc)
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-            ),
-            method_name="trigger_reentry",
+
+        trigger = KubernetesPodTrigger(
+            pod_name=self.pod.metadata.name,  # type: ignore[union-attr]
+            pod_namespace=self.pod.metadata.namespace,  # type: ignore[union-attr]
+            trigger_start_time=trigger_start_time,
+            kubernetes_conn_id=self.kubernetes_conn_id,
+            connection_extras=connection_extras,
+            cluster_context=self.cluster_context,
+            config_dict=self._config_dict,
+            in_cluster=self.in_cluster,
+            poll_interval=self.poll_interval,
+            get_logs=self.get_logs,
+            startup_timeout=self.startup_timeout_seconds,
+            startup_check_interval=self.startup_check_interval_seconds,
+            schedule_timeout=self.schedule_timeout_seconds,
+            base_container_name=self.base_container_name,
+            on_finish_action=self.on_finish_action.value,
+            last_log_time=last_log_time,
+            logging_interval=self.logging_interval,
+            trigger_kwargs=self.trigger_kwargs,
         )
+        container_state = trigger.define_container_state(self.pod) if self.pod else None
+        if context and (
+            container_state == ContainerState.TERMINATED or container_state == ContainerState.FAILED
+        ):
+            self.log.info("Skipping deferral as pod is already in a terminal state")
+            self.trigger_reentry(
+                context=context,
+                event={
+                    "status": "failed" if container_state == ContainerState.FAILED else "success",
+                    "namespace": trigger.pod_namespace,
+                    "name": trigger.pod_name,
+                    "message": "Container failed"
+                    if container_state == ContainerState.FAILED
+                    else "Container succeeded",
+                    "last_log_time": last_log_time,
+                    **(self.trigger_kwargs or {}),
+                },
+            )
+        else:
+            self.defer(trigger=trigger, method_name="trigger_reentry")
 
     def trigger_reentry(self, context: Context, event: dict[str, Any]) -> Any:
         """

@@ -1422,12 +1450,21 @@ class KubernetesPodOperator(BaseOperator):
             self.process_pod_deletion(old_pod)
         return new_pod
 
-
-    def _get_most_recent_pod_index(pod_list: list[k8s.V1Pod]) -> int:
+    def _get_most_recent_pod_index(self, pod_list: list[k8s.V1Pod]) -> int:
         """Loop through a list of V1Pod objects and get the index of the most recent one."""
         pod_start_times: list[datetime.datetime] = [
             pod.to_dict().get("status").get("start_time") for pod in pod_list
         ]
+        if not all(pod_start_times):
+            self.log.info(
+                "Unable to determine most recent pod using start_time (some pods have not started yet). Falling back to creation_timestamp from pod metadata."
+            )
+            pod_start_times: list[datetime.datetime] = [  # type: ignore[no-redef]
+                pod.to_dict()
+                .get("metadata", {})
+                .get("creation_timestamp", datetime.datetime.now(tz=datetime.timezone.utc))
+                for pod in pod_list
+            ]
         most_recent_start_time = max(pod_start_times)
         return pod_start_times.index(most_recent_start_time)
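The `inspect.signature` guard added above is a general back-compat dispatch pattern: only pass the new `context` argument if the (possibly overridden) method accepts it. A minimal standalone sketch with made-up class names:

```python
import inspect


class OldStyleOperator:
    def invoke_defer_method(self):  # subclass written before the `context` parameter existed
        return "deferred without context"


class NewStyleOperator:
    def invoke_defer_method(self, last_log_time=None, context=None):
        return f"deferred with context={context!r}"


def defer(operator, context):
    # Inspect the bound method and only forward `context` when it is accepted.
    sig = inspect.signature(operator.invoke_defer_method)
    if "context" in sig.parameters:
        return operator.invoke_defer_method(context=context)
    return operator.invoke_defer_method()


print(defer(OldStyleOperator(), context={"ti": "..."}))
print(defer(NewStyleOperator(), context={"ti": "..."}))
```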
airflow/providers/cncf/kubernetes/triggers/pod.py

@@ -335,12 +335,10 @@ class KubernetesPodTrigger(BaseTrigger):
         return AsyncPodManager(async_hook=self.hook)
 
     def define_container_state(self, pod: V1Pod) -> ContainerState:
-
-
-        if pod_containers is None:
+        if pod.status is None or pod.status.container_statuses is None:
             return ContainerState.UNDEFINED
 
-        container = next(c for c in
+        container = next(c for c in pod.status.container_statuses if c.name == self.base_container_name)
 
         for state in (ContainerState.RUNNING, ContainerState.WAITING, ContainerState.TERMINATED):
             state_obj = getattr(container.state, state)
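The guard above makes the state lookup safe for pods whose `status` or `container_statuses` has not been populated yet. A hedged sketch of just that guard, using `SimpleNamespace` objects and plain strings in place of the real `V1Pod` and `ContainerState` enum:

```python
from types import SimpleNamespace


def container_state(pod, base_container_name="base"):
    """Illustrative version of the guard define_container_state now applies."""
    if pod.status is None or pod.status.container_statuses is None:
        return "undefined"
    container = next(c for c in pod.status.container_statuses if c.name == base_container_name)
    return container.state


# A pod whose status has not been populated yet no longer raises AttributeError.
pending_pod = SimpleNamespace(status=None)
print(container_state(pending_pod))  # "undefined"

running_pod = SimpleNamespace(
    status=SimpleNamespace(container_statuses=[SimpleNamespace(name="base", state="running")])
)
print(container_state(running_pod))  # "running"
```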
airflow/providers/cncf/kubernetes/utils/pod_manager.py

@@ -51,9 +51,8 @@ from airflow.providers.cncf.kubernetes.utils.container import (
     get_container_status,
 )
 from airflow.providers.cncf.kubernetes.utils.xcom_sidecar import PodDefaults
-from airflow.providers.common.compat.sdk import AirflowException
+from airflow.providers.common.compat.sdk import AirflowException, timezone
 from airflow.utils.log.logging_mixin import LoggingMixin
-from airflow.utils.timezone import utcnow
 
 if TYPE_CHECKING:
     from kubernetes.client.models.core_v1_event import CoreV1Event

@@ -76,6 +75,10 @@ Sentinel for no xcom result.
 """
 
 
+class XComRetrievalError(AirflowException):
+    """When not possible to get xcom."""
+
+
 class PodPhase:
     """
     Possible pod phases.

@@ -281,11 +284,11 @@ class PodLogsConsumer:
         if terminated:
             termination_time = terminated.finished_at
             if termination_time:
-                return termination_time + timedelta(seconds=self.post_termination_timeout) > utcnow()
+                return termination_time + timedelta(seconds=self.post_termination_timeout) > timezone.utcnow()
         return False
 
     def read_pod(self):
-        _now = utcnow()
+        _now = timezone.utcnow()
         if (
             self.read_pod_cache is None
             or self.last_read_pod_at + timedelta(seconds=self.read_pod_cache_timeout) < _now

@@ -476,7 +479,6 @@ class PodManager(LoggingMixin):
         )
         message_to_log = None
         message_timestamp = None
-        progress_callback_lines = []
         try:
             for raw_line in logs:
                 line = raw_line.decode("utf-8", errors="backslashreplace")

@@ -485,35 +487,39 @@ class PodManager(LoggingMixin):
                 if message_to_log is None:  # first line in the log
                     message_to_log = message
                     message_timestamp = line_timestamp
-                    progress_callback_lines.append(line)
                 else:  # previous log line is complete
-                    for
-
-
-
-
-
-
-
-                        container_name,
-                        container_name_log_prefix_enabled,
-                        log_formatter,
+                    for callback in self._callbacks:
+                        callback.progress_callback(
+                            line=message_to_log,
+                            client=self._client,
+                            mode=ExecutionMode.SYNC,
+                            container_name=container_name,
+                            timestamp=message_timestamp,
+                            pod=pod,
                         )
+                    self._log_message(
+                        message_to_log,
+                        container_name,
+                        container_name_log_prefix_enabled,
+                        log_formatter,
+                    )
                     last_captured_timestamp = message_timestamp
                     message_to_log = message
                     message_timestamp = line_timestamp
-                    progress_callback_lines = [line]
                 else:  # continuation of the previous log line
                     message_to_log = f"{message_to_log}\n{message}"
-                    progress_callback_lines.append(line)
         finally:
             # log the last line and update the last_captured_timestamp
-
+            if message_to_log is not None:
                 for callback in self._callbacks:
                     callback.progress_callback(
-                        line=
+                        line=message_to_log,
+                        client=self._client,
+                        mode=ExecutionMode.SYNC,
+                        container_name=container_name,
+                        timestamp=message_timestamp,
+                        pod=pod,
                     )
-            if message_to_log is not None:
                 self._log_message(
                     message_to_log, container_name, container_name_log_prefix_enabled, log_formatter
                 )

@@ -527,9 +533,9 @@ class PodManager(LoggingMixin):
             exception = e
             self._http_error_timestamps = getattr(self, "_http_error_timestamps", [])
             self._http_error_timestamps = [
-                t for t in self._http_error_timestamps if t > utcnow() - timedelta(seconds=60)
+                t for t in self._http_error_timestamps if t > timezone.utcnow() - timedelta(seconds=60)
             ]
-            self._http_error_timestamps.append(utcnow())
+            self._http_error_timestamps.append(timezone.utcnow())
             # Log only if more than 2 errors occurred in the last 60 seconds
             if len(self._http_error_timestamps) > 2:
                 self.log.exception(

@@ -843,6 +849,12 @@ class PodManager(LoggingMixin):
 
     def extract_xcom(self, pod: V1Pod) -> str:
         """Retrieve XCom value and kill xcom sidecar container."""
+        # make sure that xcom sidecar container is still running
+        if not self.container_is_running(pod, PodDefaults.SIDECAR_CONTAINER_NAME):
+            raise XComRetrievalError(
+                f"{PodDefaults.SIDECAR_CONTAINER_NAME} container is not running! Not possible to read xcom from pod: {pod.metadata.name}"
+            )
+
         try:
             result = self.extract_xcom_json(pod)
             return result

@@ -887,7 +899,7 @@ class PodManager(LoggingMixin):
                 json.loads(result)
 
         if result is None:
-            raise
+            raise XComRetrievalError(f"Failed to extract xcom from pod: {pod.metadata.name}")
         return result
 
     @generic_api_retry
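The log-consumption refactor above stops buffering raw lines and instead invokes the callbacks once per completed (possibly multi-line) message, plus one final flush in `finally`. A simplified, self-contained sketch of that grouping-and-flush shape (the timestamp parsing and sample lines are made up; the real code uses the provider's log-line parser and callback objects):

```python
def emit_messages(raw_lines, on_message):
    """Group continuation lines into one message and flush each completed message exactly once."""
    message = None
    for line in raw_lines:
        timestamp, text = line.split(" ", 1) if line[0].isdigit() else (None, line)
        if message is None:            # first line of the stream
            message = text
        elif timestamp is not None:    # a new timestamped line completes the previous message
            on_message(message)
            message = text
        else:                          # continuation (e.g. a traceback) of the previous message
            message = f"{message}\n{text}"
    if message is not None:            # flush the trailing message once, as the finally block does
        on_message(message)


emit_messages(
    ["2024-01-01T00:00:00 starting", "Traceback (most recent call last):", "2024-01-01T00:00:01 done"],
    on_message=print,
)
```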
apache_airflow_providers_cncf_kubernetes-10.12.2.dist-info/METADATA

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: apache-airflow-providers-cncf-kubernetes
-Version: 10.12.
+Version: 10.12.2
 Summary: Provider package apache-airflow-providers-cncf-kubernetes for Apache Airflow
 Keywords: airflow-provider,cncf.kubernetes,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>

@@ -23,16 +23,16 @@ Classifier: Topic :: System :: Monitoring
 License-File: LICENSE
 License-File: NOTICE
 Requires-Dist: aiofiles>=23.2.0
-Requires-Dist: apache-airflow>=2.11.
-Requires-Dist: apache-airflow-providers-common-compat>=1.10.
+Requires-Dist: apache-airflow>=2.11.0
+Requires-Dist: apache-airflow-providers-common-compat>=1.10.1
 Requires-Dist: asgiref>=3.5.2
 Requires-Dist: cryptography>=41.0.0,<46.0.0
-Requires-Dist: kubernetes>=
+Requires-Dist: kubernetes>=35.0.0,<36.0.0
 Requires-Dist: urllib3>=2.1.0,!=2.6.0
 Requires-Dist: kubernetes_asyncio>=32.0.0,<35.0.0
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.
-Project-URL: Documentation, https://airflow.
+Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/10.12.2/changelog.html
+Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/10.12.2
 Project-URL: Mastodon, https://fosstodon.org/@airflow
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow

@@ -63,7 +63,7 @@ Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/
 
 Package ``apache-airflow-providers-cncf-kubernetes``
 
-Release: ``10.12.
+Release: ``10.12.2``
 
 
 `Kubernetes <https://kubernetes.io/>`__

@@ -76,7 +76,7 @@ This is a provider package for ``cncf.kubernetes`` provider. All classes for thi
 are in ``airflow.providers.cncf.kubernetes`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/10.12.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/10.12.2/>`_.
 
 Installation
 ------------

@@ -123,5 +123,5 @@ Dependent package
 ================================================================================================================== =================
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/10.12.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/10.12.2/changelog.html>`_.
 
apache_airflow_providers_cncf_kubernetes-10.12.2.dist-info/RECORD

@@ -1,11 +1,11 @@
-airflow/providers/cncf/kubernetes/__init__.py,sha256=
-airflow/providers/cncf/kubernetes/callbacks.py,sha256=
+airflow/providers/cncf/kubernetes/__init__.py,sha256=6kcIzXO5-9ErEJ4ETdkUU7lJTGClhNoxTqtzlNSwOyA,1506
+airflow/providers/cncf/kubernetes/callbacks.py,sha256=svvPFkkllJh3Qo7wIG5pnSf_19c3f5kzyKVYHkhyXoE,6456
 airflow/providers/cncf/kubernetes/exceptions.py,sha256=iRrXBxaLPqYwUBt9zbadYgRbEDhGTo6I2mhLOa9F3DI,1707
-airflow/providers/cncf/kubernetes/get_provider_info.py,sha256=
+airflow/providers/cncf/kubernetes/get_provider_info.py,sha256=_V-bvjuAW1yWM_RlJFLPEdDpjaBkMXFYjRr8dAKQZT0,16138
 airflow/providers/cncf/kubernetes/k8s_model.py,sha256=xmdFhX29DjegoZ-cq8-KDL9soVYXf4OpU6fAGr3cPTU,2101
 airflow/providers/cncf/kubernetes/kube_client.py,sha256=AaTY2UhhKVa-qrhMvpiQjdUJhrQyndwQ_5PoRmWJy3k,5714
-airflow/providers/cncf/kubernetes/kube_config.py,sha256=
-airflow/providers/cncf/kubernetes/kubernetes_helper_functions.py,sha256=
+airflow/providers/cncf/kubernetes/kube_config.py,sha256=PFw_n3QHaEEXXYYqAuOCabxWUOgrlUnEp0QpnT2J380,5155
+airflow/providers/cncf/kubernetes/kubernetes_helper_functions.py,sha256=6fe6TpFVase7EOLxu4_zSOE2cUjnnSAUnq-tFxTu-_8,7384
 airflow/providers/cncf/kubernetes/pod_generator.py,sha256=0VEcAtT2SzAFwSDsQWe2QdrY2mDV8s4hBw0qLcmIMGw,21038
 airflow/providers/cncf/kubernetes/python_kubernetes_script.jinja2,sha256=I0EHRGwLHjSiX85e51HBIoddRDnC8TJPFrDBqQq_NJg,1776
 airflow/providers/cncf/kubernetes/python_kubernetes_script.py,sha256=KnTlZSWCZhwvj89fSc2kgIRTaI4iLNKPquHc2wXnluo,3460

@@ -15,24 +15,25 @@ airflow/providers/cncf/kubernetes/version_compat.py,sha256=MpWxT1g5WGhlmooHPsjyF
 airflow/providers/cncf/kubernetes/backcompat/__init__.py,sha256=KXF76f3v1jIFUBNz8kwxVMvm7i4mNo35LbIG9IijBNc,1299
 airflow/providers/cncf/kubernetes/backcompat/backwards_compat_converters.py,sha256=3YOZHuAbFe-w1LM2r4w9xmbtIaIdp6ObehXwvh-7iTk,4320
 airflow/providers/cncf/kubernetes/cli/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/cncf/kubernetes/cli/
+airflow/providers/cncf/kubernetes/cli/definition.py,sha256=2LfXAmZHzP87W4TWpzecwbdEMtSr7j91r5cbHXkwEjY,3781
+airflow/providers/cncf/kubernetes/cli/kubernetes_command.py,sha256=8drrenFqiS9GXB818PkUkdSH_oaUD4UIVY-NAnRAYTE,8376
 airflow/providers/cncf/kubernetes/decorators/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
 airflow/providers/cncf/kubernetes/decorators/kubernetes.py,sha256=1qUiSHseMS31xU5jqRc2dJFq1Kor0yEMx1KKEULHWR4,6358
 airflow/providers/cncf/kubernetes/decorators/kubernetes_cmd.py,sha256=pdH2TGCYVywY0qPTosq7EoGE0oKd03q9OKka4qSsDI4,4722
 airflow/providers/cncf/kubernetes/executors/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
-airflow/providers/cncf/kubernetes/executors/kubernetes_executor.py,sha256=
+airflow/providers/cncf/kubernetes/executors/kubernetes_executor.py,sha256=4SMUjuXO3hDVdGfWVGlG50j_2n5zoIOYNGmw7zp-3ig,32837
 airflow/providers/cncf/kubernetes/executors/kubernetes_executor_types.py,sha256=F0IlLbC6qKMVNZwqnbgUPxwFsZdcRhot2kwBhzc9gSM,2698
-airflow/providers/cncf/kubernetes/executors/kubernetes_executor_utils.py,sha256=
-airflow/providers/cncf/kubernetes/executors/local_kubernetes_executor.py,sha256=
+airflow/providers/cncf/kubernetes/executors/kubernetes_executor_utils.py,sha256=kFbUmFDMMO5Xs7ndB8y0juU3T9CblwTboaAInbJ278M,31633
+airflow/providers/cncf/kubernetes/executors/local_kubernetes_executor.py,sha256=0f3Zay9w8iyQbo2kWQ3S1E2wbQ-EgQppktO2Lx7KdkE,12403
 airflow/providers/cncf/kubernetes/hooks/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/cncf/kubernetes/hooks/kubernetes.py,sha256=
+airflow/providers/cncf/kubernetes/hooks/kubernetes.py,sha256=O1bDKFwArk9ANPy_ORWBq059up-pkYQYcVZwF8_cnHk,45445
 airflow/providers/cncf/kubernetes/kubernetes_executor_templates/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/cncf/kubernetes/kubernetes_executor_templates/basic_template.yaml,sha256=yzJmXN4ZyB4aDwI_GIugpL9-f1YMVy__X-LQSbeU95A,2567
 airflow/providers/cncf/kubernetes/operators/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
 airflow/providers/cncf/kubernetes/operators/custom_object_launcher.py,sha256=ha3dC4DjAIs2wtmGC504EvViGA-GGce1iOdeS3y1ol0,15464
-airflow/providers/cncf/kubernetes/operators/job.py,sha256=
+airflow/providers/cncf/kubernetes/operators/job.py,sha256=nprHUQ0Nmw3U5LuU6KvhVwLf-CR6Ix6uwjbtSFQMegQ,27482
 airflow/providers/cncf/kubernetes/operators/kueue.py,sha256=E0ZqMQzH2dtNOAaA2W5bAuaS-zRz_ohfOElQ1N7NSTA,5560
-airflow/providers/cncf/kubernetes/operators/pod.py,sha256=
+airflow/providers/cncf/kubernetes/operators/pod.py,sha256=mYLAAwrNnvKIbhaFB27fG_l0dXo4ahvrPttcTKiWY0I,67989
 airflow/providers/cncf/kubernetes/operators/resource.py,sha256=NHU8LtC1B8mq9V6SgIwo1GWZREtmC1-plQb1DALpmCc,7506
 airflow/providers/cncf/kubernetes/operators/spark_kubernetes.py,sha256=9DZnzju7KMXN9SG4JgHEKUAaxKXmR-XyImgN-GnIDnU,16513
 airflow/providers/cncf/kubernetes/pod_template_file_examples/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785

@@ -47,16 +48,16 @@ airflow/providers/cncf/kubernetes/sensors/__init__.py,sha256=9hdXHABrVpkbpjZgUft
 airflow/providers/cncf/kubernetes/sensors/spark_kubernetes.py,sha256=BGB5HzaSU1w1bDN3QnopiyJ_M-Gz2_QEwcCpOPfTS9g,5331
 airflow/providers/cncf/kubernetes/triggers/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/cncf/kubernetes/triggers/job.py,sha256=_lLP6ZYRV4kdwb7U0w5QFnlY1E9deZ5wtg-nrlfl6-8,7505
-airflow/providers/cncf/kubernetes/triggers/pod.py,sha256=
+airflow/providers/cncf/kubernetes/triggers/pod.py,sha256=KGyQS-jNXdyUUJw9waBRpbr7lKF1j5KPmNrbOf91_8I,15290
 airflow/providers/cncf/kubernetes/utils/__init__.py,sha256=ClZN0VPjWySdVwS_ktH7rrgL9VLAcs3OSJSB9s3zaYw,863
 airflow/providers/cncf/kubernetes/utils/container.py,sha256=tuhWyMZrqCGDUT4kzwjhEgJrr0JvD9lMXbFeuMDoh-4,4813
 airflow/providers/cncf/kubernetes/utils/delete_from.py,sha256=poObZSoEJwQyaYWilEURs8f4CDY2sn_pfwS31Lf579A,5195
 airflow/providers/cncf/kubernetes/utils/k8s_resource_iterator.py,sha256=pl-G-2WhZVbewKkwmL9AxPo1hAQWHHEPK43b-ruF4-w,1937
-airflow/providers/cncf/kubernetes/utils/pod_manager.py,sha256=
+airflow/providers/cncf/kubernetes/utils/pod_manager.py,sha256=sk09s55ggGTnjlv1K1ZLgWc49CS8Rq5Lixsqc_nG3Ds,45853
 airflow/providers/cncf/kubernetes/utils/xcom_sidecar.py,sha256=k6bdmVJ21OrAwGmWwledRrAmaty9ZrmbuM-IbaI4mqo,2519
-apache_airflow_providers_cncf_kubernetes-10.12.
-apache_airflow_providers_cncf_kubernetes-10.12.
-apache_airflow_providers_cncf_kubernetes-10.12.
-apache_airflow_providers_cncf_kubernetes-10.12.
-apache_airflow_providers_cncf_kubernetes-10.12.
-apache_airflow_providers_cncf_kubernetes-10.12.
+apache_airflow_providers_cncf_kubernetes-10.12.2.dist-info/entry_points.txt,sha256=ByD3QJJyP9CfmTYtpNI1953akD38RUDgpGXLaq9vpOw,111
+apache_airflow_providers_cncf_kubernetes-10.12.2.dist-info/licenses/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
+apache_airflow_providers_cncf_kubernetes-10.12.2.dist-info/licenses/NOTICE,sha256=_cWHznIoUSbLCY_KfmKqetlKlsoH0c2VBjmZjElAzuc,168
+apache_airflow_providers_cncf_kubernetes-10.12.2.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
+apache_airflow_providers_cncf_kubernetes-10.12.2.dist-info/METADATA,sha256=2mllqfPqucV8Aihg0UFXxZCc7ESWgEBP4sO4M0-teY4,5834
+apache_airflow_providers_cncf_kubernetes-10.12.2.dist-info/RECORD,,

File without changes
File without changes