dagster-k8s 0.24.9__py3-none-any.whl → 0.24.11__py3-none-any.whl
This diff shows the contents of two publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between the package versions as they appear in the public registry.
Potentially problematic release: this version of dagster-k8s has been flagged as potentially problematic.
- dagster_k8s/client.py +41 -2
- dagster_k8s/container_context.py +56 -21
- dagster_k8s/executor.py +2 -2
- dagster_k8s/job.py +34 -3
- dagster_k8s/kubernetes_version.py +1 -0
- dagster_k8s/models.py +13 -1
- dagster_k8s/ops/k8s_job_op.py +13 -5
- dagster_k8s/version.py +1 -1
- {dagster_k8s-0.24.9.dist-info → dagster_k8s-0.24.11.dist-info}/METADATA +3 -3
- dagster_k8s-0.24.11.dist-info/RECORD +20 -0
- dagster_k8s-0.24.9.dist-info/RECORD +0 -19
- {dagster_k8s-0.24.9.dist-info → dagster_k8s-0.24.11.dist-info}/LICENSE +0 -0
- {dagster_k8s-0.24.9.dist-info → dagster_k8s-0.24.11.dist-info}/WHEEL +0 -0
- {dagster_k8s-0.24.9.dist-info → dagster_k8s-0.24.11.dist-info}/top_level.txt +0 -0
dagster_k8s/client.py
CHANGED
@@ -6,11 +6,13 @@ from typing import Any, Callable, List, Optional, Set, TypeVar
 
 import kubernetes.client
 import kubernetes.client.rest
+import six
 from dagster import (
     DagsterInstance,
     _check as check,
 )
 from dagster._core.storage.dagster_run import DagsterRunStatus
+from kubernetes.client.api_client import ApiClient
 from kubernetes.client.models import V1Job, V1JobStatus
 
 try:
@@ -91,6 +93,39 @@ WHITELISTED_TRANSIENT_K8S_STATUS_CODES = [
 ]
 
 
+class PatchedApiClient(ApiClient):
+    # Forked from ApiClient implementation to pass configuration object down into created model
+    # objects, avoiding lock contention issues. See https://github.com/kubernetes-client/python/issues/2284
+    def __deserialize_model(self, data, klass):
+        """Deserializes list or dict to model.
+
+        :param data: dict, list.
+        :param klass: class literal.
+        :return: model object.
+        """
+        if not klass.openapi_types and not hasattr(klass, "get_real_child_model"):
+            return data
+
+        # Below is the only change from the base ApiClient implementation - pass through the
+        # Configuration object to each newly created model so that each one does not have to create
+        # one and acquire a lock
+        kwargs = {"local_vars_configuration": self.configuration}
+
+        if data is not None and klass.openapi_types is not None and isinstance(data, (list, dict)):
+            for attr, attr_type in six.iteritems(klass.openapi_types):
+                if klass.attribute_map[attr] in data:
+                    value = data[klass.attribute_map[attr]]
+                    kwargs[attr] = self.__deserialize(value, attr_type)
+
+        instance = klass(**kwargs)
+
+        if hasattr(instance, "get_real_child_model"):
+            klass_name = instance.get_real_child_model(data)
+            if klass_name:
+                instance = self.__deserialize(data, klass_name)
+        return instance
+
+
 def k8s_api_retry(
     fn: Callable[..., T],
     max_retries: int,
@@ -209,8 +244,12 @@ class DagsterKubernetesClient:
     @staticmethod
     def production_client(batch_api_override=None, core_api_override=None):
         return DagsterKubernetesClient(
-            batch_api=batch_api_override or kubernetes.client.BatchV1Api(),
-            core_api=core_api_override or kubernetes.client.CoreV1Api(),
+            batch_api=(
+                batch_api_override or kubernetes.client.BatchV1Api(api_client=PatchedApiClient())
+            ),
+            core_api=(
+                core_api_override or kubernetes.client.CoreV1Api(api_client=PatchedApiClient())
+            ),
             logger=logging.info,
             sleeper=time.sleep,
             timer=time.time,
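Note: the PatchedApiClient above exists to avoid lock contention when many Kubernetes models are deserialized concurrently (each model otherwise builds its own Configuration, which acquires a global lock). A minimal usage sketch, not taken from the diff, assuming a kubeconfig is already available:

    import kubernetes
    from dagster_k8s.client import DagsterKubernetesClient, PatchedApiClient

    kubernetes.config.load_kube_config()

    # Default path: BatchV1Api/CoreV1Api are now built on top of PatchedApiClient,
    # so model deserialization reuses one Configuration object per client.
    client = DagsterKubernetesClient.production_client()

    # Overrides still take precedence, e.g. for tests or a custom ApiClient.
    client_with_override = DagsterKubernetesClient.production_client(
        batch_api_override=kubernetes.client.BatchV1Api(api_client=PatchedApiClient()),
    )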
dagster_k8s/container_context.py
CHANGED
@@ -51,7 +51,6 @@ class K8sContainerContext(
             ("server_k8s_config", UserDefinedDagsterK8sConfig),
             ("run_k8s_config", UserDefinedDagsterK8sConfig),
             ("namespace", Optional[str]),
-            ("labels", Mapping[str, str]),
         ],
     )
 ):
@@ -106,12 +105,19 @@ class K8sContainerContext(
         )
 
         run_k8s_config = K8sContainerContext._merge_k8s_config(
-            top_level_k8s_config,
+            top_level_k8s_config._replace(  # remove k8s service/deployment fields
+                deployment_metadata={},
+                service_metadata={},
+            ),
             run_k8s_config or UserDefinedDagsterK8sConfig.from_dict({}),
         )
 
         server_k8s_config = K8sContainerContext._merge_k8s_config(
-            top_level_k8s_config,
+            top_level_k8s_config._replace(  # remove k8s job fields
+                job_config={},
+                job_metadata={},
+                job_spec_config={},
+            ),
             server_k8s_config or UserDefinedDagsterK8sConfig.from_dict({}),
         )
 
@@ -120,7 +126,6 @@ class K8sContainerContext(
             run_k8s_config=run_k8s_config,
             server_k8s_config=server_k8s_config,
             namespace=namespace,
-            labels=check.opt_mapping_param(labels, "labels"),
         )
 
     @staticmethod
@@ -143,6 +148,8 @@ class K8sContainerContext(
         pod_spec_config = {}
         pod_template_spec_metadata = {}
         job_metadata = {}
+        deployment_metadata = {}
+        service_metadata = {}
 
         if volume_mounts:
             container_config["volume_mounts"] = volume_mounts
@@ -159,6 +166,8 @@ class K8sContainerContext(
         if labels:
             pod_template_spec_metadata["labels"] = labels
             job_metadata["labels"] = labels
+            deployment_metadata["labels"] = labels
+            service_metadata["labels"] = labels
 
         if image_pull_policy:
             container_config["image_pull_policy"] = image_pull_policy
@@ -196,6 +205,8 @@ class K8sContainerContext(
             pod_spec_config=pod_spec_config,
             pod_template_spec_metadata=pod_template_spec_metadata,
             job_metadata=job_metadata,
+            service_metadata=service_metadata,
+            deployment_metadata=deployment_metadata,
         )
 
     @staticmethod
@@ -260,7 +271,6 @@ class K8sContainerContext(
             ),
             run_k8s_config=self._merge_k8s_config(self.run_k8s_config, other.run_k8s_config),
             namespace=other.namespace if other.namespace else self.namespace,
-            labels={**self.labels, **other.labels},
         )
 
     def _snake_case_allowed_fields(
@@ -269,9 +279,18 @@ class K8sContainerContext(
         result = {}
 
         for key in only_allow_user_defined_k8s_config_fields:
+            if key == "namespace":
+                result[key] = only_allow_user_defined_k8s_config_fields[key]
+                continue
+
             if key == "container_config":
                 model_class = kubernetes.client.V1Container
-            elif key in {"job_metadata", "pod_template_spec_metadata"}:
+            elif key in {
+                "job_metadata",
+                "pod_template_spec_metadata",
+                "deployment_metadata",
+                "service_metadata",
+            }:
                 model_class = kubernetes.client.V1ObjectMeta
             elif key == "pod_spec_config":
                 model_class = kubernetes.client.V1PodSpec
@@ -284,12 +303,35 @@ class K8sContainerContext(
         )
         return result
 
-    def validate_user_k8s_config(
+    def validate_user_k8s_config_for_run(
         self,
         only_allow_user_defined_k8s_config_fields: Optional[Mapping[str, Any]],
         only_allow_user_defined_env_vars: Optional[Sequence[str]],
+    ):
+        return self._validate_user_k8s_config(
+            self.run_k8s_config,
+            only_allow_user_defined_k8s_config_fields,
+            only_allow_user_defined_env_vars,
+        )
+
+    def validate_user_k8s_config_for_code_server(
+        self,
+        only_allow_user_defined_k8s_config_fields: Optional[Mapping[str, Any]],
+        only_allow_user_defined_env_vars: Optional[Sequence[str]],
+    ):
+        return self._validate_user_k8s_config(
+            self.server_k8s_config,
+            only_allow_user_defined_k8s_config_fields,
+            only_allow_user_defined_env_vars,
+        )
+
+    def _validate_user_k8s_config(
+        self,
+        user_defined_k8s_config: UserDefinedDagsterK8sConfig,
+        only_allow_user_defined_k8s_config_fields: Optional[Mapping[str, Any]],
+        only_allow_user_defined_env_vars: Optional[Sequence[str]],
     ) -> "K8sContainerContext":
-        used_fields = self._get_used_k8s_config_fields()
+        used_fields = self._get_used_k8s_config_fields(user_defined_k8s_config)
 
         if only_allow_user_defined_k8s_config_fields is not None:
             snake_case_allowlist = self._snake_case_allowed_fields(
@@ -383,17 +425,11 @@ class K8sContainerContext(
             server_k8s_config=new_server_k8s_config,
         )
 
-    def _get_used_k8s_config_fields(self) -> Mapping[str, Mapping[str, Set[str]]]:
+    def _get_used_k8s_config_fields(
+        self, user_defined_k8s_config: UserDefinedDagsterK8sConfig
+    ) -> Mapping[str, Mapping[str, Set[str]]]:
         used_fields = {}
-        for key, fields in self.run_k8s_config.to_dict().items():
-            if key == "merge_behavior":
-                continue
-
-            used_fields[key] = used_fields.get(key, set()).union(
-                {field_key for field_key in fields}
-            )
-
-        for key, fields in self.server_k8s_config.to_dict().items():
+        for key, fields in user_defined_k8s_config.to_dict().items():
             if key == "merge_behavior":
                 continue
 
@@ -402,8 +438,7 @@ class K8sContainerContext(
             )
 
         if self.namespace:
-            used_fields["pod_template_spec_metadata"].add("namespace")
-            used_fields["job_metadata"].add("namespace")
+            used_fields["namespace"] = True
 
         return used_fields
 
@@ -457,7 +492,7 @@ class K8sContainerContext(
         # If there's an allowlist, make sure user_defined_container_context doesn't violate it
         if run_launcher:
             user_defined_container_context = (
-                user_defined_container_context.validate_user_k8s_config(
+                user_defined_container_context.validate_user_k8s_config_for_run(
                     run_launcher.only_allow_user_defined_k8s_config_fields,
                     run_launcher.only_allow_user_defined_env_vars,
                 )
dagster_k8s/executor.py
CHANGED
@@ -27,7 +27,7 @@ from dagster._utils.merger import merge_dicts
 from dagster_k8s.client import DagsterKubernetesClient
 from dagster_k8s.container_context import K8sContainerContext
 from dagster_k8s.job import (
-    USER_DEFINED_K8S_CONFIG_SCHEMA,
+    USER_DEFINED_K8S_JOB_CONFIG_SCHEMA,
     DagsterK8sJobConfig,
     UserDefinedDagsterK8sConfig,
     construct_dagster_k8s_job,
@@ -69,7 +69,7 @@ _K8S_EXECUTOR_CONFIG_SCHEMA = merge_dicts(
     ),
     "tag_concurrency_limits": get_tag_concurrency_limits_config(),
     "step_k8s_config": Field(
-        USER_DEFINED_K8S_CONFIG_SCHEMA,
+        USER_DEFINED_K8S_JOB_CONFIG_SCHEMA,
        is_required=False,
        description="Raw Kubernetes configuration for each step launched by the executor.",
    ),
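Note: step_k8s_config on the executor now points at the renamed USER_DEFINED_K8S_JOB_CONFIG_SCHEMA. An illustrative sketch of executor configuration matching that schema, not from the diff; resource values and labels are made up:

    from dagster import job, op
    from dagster_k8s import k8s_job_executor

    @op
    def my_op():
        ...

    @job(
        executor_def=k8s_job_executor.configured(
            {
                "step_k8s_config": {
                    "container_config": {
                        "resources": {"limits": {"cpu": "500m", "memory": "1Gi"}}
                    },
                    "pod_template_spec_metadata": {"labels": {"team": "data"}},
                }
            }
        )
    )
    def my_k8s_job():
        my_op()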
dagster_k8s/job.py
CHANGED
@@ -63,7 +63,7 @@ class K8sConfigMergeBehavior(Enum):
 
 
 USER_DEFINED_K8S_CONFIG_KEY = "dagster-k8s/config"
-USER_DEFINED_K8S_CONFIG_SCHEMA = Shape(
+USER_DEFINED_K8S_JOB_CONFIG_SCHEMA = Shape(
     {
         "container_config": Permissive(),
         "pod_template_spec_metadata": Permissive(),
@@ -94,18 +94,23 @@ class UserDefinedDagsterK8sConfig(
             ("job_config", Mapping[str, Any]),
             ("job_metadata", Mapping[str, Any]),
             ("job_spec_config", Mapping[str, Any]),
+            ("deployment_metadata", Mapping[str, Any]),
+            ("service_metadata", Mapping[str, Any]),
             ("merge_behavior", K8sConfigMergeBehavior),
         ],
     )
 ):
     def __new__(
         cls,
+        *,
         container_config: Optional[Mapping[str, Any]] = None,
         pod_template_spec_metadata: Optional[Mapping[str, Any]] = None,
         pod_spec_config: Optional[Mapping[str, Any]] = None,
         job_config: Optional[Mapping[str, Any]] = None,
         job_metadata: Optional[Mapping[str, Any]] = None,
         job_spec_config: Optional[Mapping[str, Any]] = None,
+        deployment_metadata: Optional[Mapping[str, Any]] = None,
+        service_metadata: Optional[Mapping[str, Any]] = None,
         merge_behavior: K8sConfigMergeBehavior = K8sConfigMergeBehavior.DEEP,
     ):
         container_config = check.opt_mapping_param(
@@ -119,6 +124,13 @@ class UserDefinedDagsterK8sConfig(
         job_metadata = check.opt_mapping_param(job_metadata, "job_metadata", key_type=str)
         job_spec_config = check.opt_mapping_param(job_spec_config, "job_spec_config", key_type=str)
 
+        deployment_metadata = check.opt_mapping_param(
+            deployment_metadata, "deployment_metadata", key_type=str
+        )
+        service_metadata = check.opt_mapping_param(
+            service_metadata, "service_metadata", key_type=str
+        )
+
         if container_config:
             container_config = k8s_snake_case_dict(kubernetes.client.V1Container, container_config)
 
@@ -139,6 +151,14 @@ class UserDefinedDagsterK8sConfig(
         if job_spec_config:
             job_spec_config = k8s_snake_case_dict(kubernetes.client.V1JobSpec, job_spec_config)
 
+        if deployment_metadata:
+            deployment_metadata = k8s_snake_case_dict(
+                kubernetes.client.V1ObjectMeta, deployment_metadata
+            )
+
+        if service_metadata:
+            service_metadata = k8s_snake_case_dict(kubernetes.client.V1ObjectMeta, service_metadata)
+
         return super(UserDefinedDagsterK8sConfig, cls).__new__(
             cls,
             container_config=container_config,
@@ -147,6 +167,8 @@ class UserDefinedDagsterK8sConfig(
             job_config=job_config,
             job_metadata=job_metadata,
             job_spec_config=job_spec_config,
+            deployment_metadata=deployment_metadata,
+            service_metadata=service_metadata,
             merge_behavior=check.inst_param(
                 merge_behavior, "merge_behavior", K8sConfigMergeBehavior
             ),
@@ -160,6 +182,8 @@ class UserDefinedDagsterK8sConfig(
             "job_config": self.job_config,
             "job_metadata": self.job_metadata,
             "job_spec_config": self.job_spec_config,
+            "deployment_metadata": self.deployment_metadata,
+            "service_metadata": self.service_metadata,
             "merge_behavior": self.merge_behavior.value,
         }
 
@@ -172,6 +196,8 @@ class UserDefinedDagsterK8sConfig(
             job_config=config_dict.get("job_config"),
             job_metadata=config_dict.get("job_metadata"),
             job_spec_config=config_dict.get("job_spec_config"),
+            deployment_metadata=config_dict.get("deployment_metadata"),
+            service_metadata=config_dict.get("service_metadata"),
             merge_behavior=K8sConfigMergeBehavior(
                 config_dict.get("merge_behavior", K8sConfigMergeBehavior.DEEP.value)
             ),
@@ -205,7 +231,7 @@ def get_user_defined_k8s_config(tags: Mapping[str, str]):
 
     if USER_DEFINED_K8S_CONFIG_KEY in tags:
         user_defined_k8s_config_value = json.loads(tags[USER_DEFINED_K8S_CONFIG_KEY])
-        result = validate_config(USER_DEFINED_K8S_CONFIG_SCHEMA, user_defined_k8s_config_value)
+        result = validate_config(USER_DEFINED_K8S_JOB_CONFIG_SCHEMA, user_defined_k8s_config_value)
 
         if not result.success:
             raise DagsterInvalidConfigError(
@@ -232,6 +258,8 @@ def get_user_defined_k8s_config(tags: Mapping[str, str]):
         job_config=user_defined_k8s_config.get("job_config"),
         job_metadata=user_defined_k8s_config.get("job_metadata"),
         job_spec_config=user_defined_k8s_config.get("job_spec_config"),
+        deployment_metadata=user_defined_k8s_config.get("deployment_metadata"),
+        service_metadata=user_defined_k8s_config.get("service_metadata"),
         merge_behavior=K8sConfigMergeBehavior(
             user_defined_k8s_config.get("merge_behavior", K8sConfigMergeBehavior.DEEP.value)
         ),
@@ -465,6 +493,7 @@ class DagsterK8sJobConfig(
                 "job_spec_config": Field(
                     Map(key_type=str, inner_type=bool), is_required=False
                 ),
+                "namespace": Field(BoolSource, is_required=False),
             }
         ),
         is_required=False,
@@ -647,7 +676,7 @@
                 ),
             ),
             "run_k8s_config": Field(
-                USER_DEFINED_K8S_CONFIG_SCHEMA,
+                USER_DEFINED_K8S_JOB_CONFIG_SCHEMA,
                 is_required=False,
                 description="Raw Kubernetes configuration for launched runs.",
             ),
@@ -661,6 +690,8 @@
                     DagsterEnum.from_python_enum(K8sConfigMergeBehavior),
                     is_required=False,
                 ),
+                "deployment_metadata": Permissive(),
+                "service_metadata": Permissive(),
            }
        ),
        is_required=False,
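Note: UserDefinedDagsterK8sConfig now carries deployment_metadata and service_metadata through its constructor, to_dict(), and from_dict(). A small round-trip sketch, not from the diff, with arbitrary label values:

    from dagster_k8s.job import UserDefinedDagsterK8sConfig

    cfg = UserDefinedDagsterK8sConfig(
        job_metadata={"labels": {"owner": "data-platform"}},
        deployment_metadata={"labels": {"owner": "data-platform"}},
        service_metadata={"labels": {"owner": "data-platform"}},
    )

    # The new keys survive serialization in both directions.
    assert UserDefinedDagsterK8sConfig.from_dict(cfg.to_dict()).service_metadata == {
        "labels": {"owner": "data-platform"}
    }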
dagster_k8s/kubernetes_version.py
ADDED
@@ -0,0 +1 @@
+KUBERNETES_VERSION_UPPER_BOUND = "32"
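Note: this new module only pins an upper bound for the kubernetes client, and is presumably what produces the "kubernetes <32" requirement visible in the METADATA diff below. A hypothetical sketch of how a setup script might consume it (the actual build wiring is not part of this diff):

    from dagster_k8s.kubernetes_version import KUBERNETES_VERSION_UPPER_BOUND

    install_requires = [f"kubernetes<{KUBERNETES_VERSION_UPPER_BOUND}"]  # -> "kubernetes<32"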
dagster_k8s/models.py
CHANGED
@@ -7,11 +7,20 @@ import kubernetes
 import kubernetes.client.models
 from dagster._vendored.dateutil.parser import parse
 from kubernetes.client.api_client import ApiClient
+from kubernetes.client.configuration import Configuration
 
 # Unclear what the correct type is to use for a bound here.
 T_KubernetesModel = TypeVar("T_KubernetesModel")
 
 
+# Create a single Configuration object to pass through to each model creation -
+# the default otherwise in the OpenAPI version currently in use by the k8s
+# client will create one on each model creation otherwise, which can cause
+# lock contention since it acquires the global python logger lock
+# see: https://github.com/kubernetes-client/python/issues/1921
+shared_k8s_model_configuration = Configuration()
+
+
 def _get_k8s_class(classname: str) -> Type[Any]:
     if classname in ApiClient.NATIVE_TYPES_MAPPING:
         return ApiClient.NATIVE_TYPES_MAPPING[classname]
@@ -149,7 +158,10 @@ def k8s_model_from_dict(
     if len(invalid_keys):
         raise Exception(f"Unexpected keys in model class {model_class.__name__}: {invalid_keys}")
 
-    kwargs = {}
+    # Pass through the configuration object since the default implementation creates a new one
+    # in the constructor, which can create lock contention if multiple threads are calling this
+    # simultaneously
+    kwargs = {"local_vars_configuration": shared_k8s_model_configuration}
     for attr, attr_type in model_class.openapi_types.items():  # type: ignore
         # e.g. config_map => configMap
         if attr in model_dict:
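Note: k8s_model_from_dict now reuses the module-level shared_k8s_model_configuration instead of letting every constructed model build its own Configuration. A minimal usage sketch, not from the diff; the container spec is an arbitrary example:

    import kubernetes
    from dagster_k8s.models import k8s_model_from_dict

    container = k8s_model_from_dict(
        kubernetes.client.V1Container,
        {"name": "main", "image": "busybox:1.36", "command": ["echo", "hello"]},
    )
    # The resulting V1Container references the shared Configuration rather than a
    # freshly constructed one, sidestepping the logger-lock contention noted above.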
dagster_k8s/ops/k8s_job_op.py
CHANGED
@@ -163,6 +163,8 @@ def execute_k8s_job(
     job_spec_config: Optional[Dict[str, Any]] = None,
     k8s_job_name: Optional[str] = None,
     merge_behavior: K8sConfigMergeBehavior = K8sConfigMergeBehavior.DEEP,
+    delete_failed_k8s_jobs: Optional[bool] = True,
+    _kubeconfig_file_context: Optional[str] = None,
 ):
     """This function is a utility for executing a Kubernetes job from within a Dagster op.
 
@@ -238,6 +240,11 @@
             are recursively merged, appending list fields together and merging dictionary fields.
             Setting it to SHALLOW will make the dictionaries shallowly merged - any shared values
             in the dictionaries will be replaced by the values set on this op.
+        delete_failed_k8s_jobs (bool): Whether to immediately delete failed Kubernetes jobs. If False,
+            failed jobs will remain accessible through the Kubernetes API until deleted by a user or cleaned up by the
+            .spec.ttlSecondsAfterFinished parameter of the job.
+            (https://kubernetes.io/docs/concepts/workloads/controllers/ttlafterfinished/).
+            Defaults to True.
     """
     run_container_context = K8sContainerContext.create_for_run(
         context.dagster_run,
@@ -320,7 +327,7 @@
     if load_incluster_config:
         kubernetes.config.load_incluster_config()
     else:
-        kubernetes.config.load_kube_config(kubeconfig_file)
+        kubernetes.config.load_kube_config(kubeconfig_file, context=_kubeconfig_file_context)
 
     # changing this to be able to be passed in will allow for unit testing
     api_client = DagsterKubernetesClient.production_client()
@@ -409,10 +416,11 @@
             num_pods_to_wait_for=num_pods_to_wait_for,
         )
     except (DagsterExecutionInterruptedError, Exception) as e:
-        context.log.info(
-            f"Deleting Kubernetes job {job_name} in namespace {namespace} due to exception"
-        )
-        api_client.delete_job(job_name=job_name, namespace=namespace)
+        if delete_failed_k8s_jobs:
+            context.log.info(
+                f"Deleting Kubernetes job {job_name} in namespace {namespace} due to exception"
+            )
+            api_client.delete_job(job_name=job_name, namespace=namespace)
         raise e
 
 
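Note: execute_k8s_job gains a delete_failed_k8s_jobs flag (and a private _kubeconfig_file_context argument). An illustrative sketch, not from the diff, of keeping a failed Job around for inspection; the image, command, and args are arbitrary:

    from dagster import OpExecutionContext, op
    from dagster_k8s import execute_k8s_job

    @op
    def run_batch_step(context: OpExecutionContext):
        execute_k8s_job(
            context,
            image="busybox:1.36",
            command=["/bin/sh", "-c"],
            args=["exit 1"],
            delete_failed_k8s_jobs=False,  # leave the failed Job for kubectl inspection
        )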
dagster_k8s/version.py
CHANGED
@@ -1 +1 @@
-__version__ = "0.24.9"
+__version__ = "0.24.11"
{dagster_k8s-0.24.9.dist-info → dagster_k8s-0.24.11.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: dagster-k8s
-Version: 0.24.9
+Version: 0.24.11
 Summary: A Dagster integration for k8s
 Home-page: https://github.com/dagster-io/dagster/tree/master/python_modules/libraries/dagster-k8s
 Author: Dagster Labs
@@ -15,7 +15,7 @@ Classifier: License :: OSI Approved :: Apache Software License
 Classifier: Operating System :: OS Independent
 Requires-Python: >=3.8,<3.13
 License-File: LICENSE
-Requires-Dist: dagster ==1.8.9
-Requires-Dist: kubernetes
+Requires-Dist: dagster ==1.8.11
+Requires-Dist: kubernetes <32
 Requires-Dist: google-auth !=2.23.1
 
dagster_k8s-0.24.11.dist-info/RECORD
ADDED
@@ -0,0 +1,20 @@
+dagster_k8s/__init__.py,sha256=7LyrMxxhXKdGQYClq7OJwMiVJW0KYPP-8lTDrWLwyzU,750
+dagster_k8s/client.py,sha256=YyYeghDN4R6jCMvDG76RWEvv8S5dxHBBKEJ8kE9AFaQ,39532
+dagster_k8s/container_context.py,sha256=47gts4BKPrlL624rT8MMKOn8kQtY3_f2FI2nmgsDPvU,22637
+dagster_k8s/executor.py,sha256=0I0l-8xcL9wFmoEHR8lARLuTZUIuAxZOxnD7_1WIpek,13942
+dagster_k8s/job.py,sha256=-GzUcz80cwF9NwDAv3QdE6GezSSpUzF0kLRkj1oTO34,43046
+dagster_k8s/kubernetes_version.py,sha256=jIBF12yvVweYUCmhC5AJ2Lb1JPcHxJfohJ1_hHfzS4o,38
+dagster_k8s/launcher.py,sha256=R1OPsJUpjjR-B2m5a0duCc0eKOG1vdxmhBzj6Kqj1Nw,16625
+dagster_k8s/models.py,sha256=OPksL8WjsdqJtGcw4WLkvsnrm1WA6vCUjqHL9dA2oYQ,6270
+dagster_k8s/pipes.py,sha256=lsf657zbvxAH71zjEOIc-Oxo2oRvN-mp2XBjbahPUyU,17195
+dagster_k8s/py.typed,sha256=la67KBlbjXN-_-DfGNcdOcjYumVpKG_Tkw-8n5dnGB4,8
+dagster_k8s/test.py,sha256=cNtcbzxytiZtd01wY5ip7KPi01y0BUQuQhohoIfAFUM,684
+dagster_k8s/utils.py,sha256=c1bHqh5f1p5RZ0JCT6WEbPPjDvbgUp3pl4nYZRaaI4s,786
+dagster_k8s/version.py,sha256=XoAmHmUmn_stAwPPPU1_t-UuvmY7DsPg9-UreBts-RU,24
+dagster_k8s/ops/__init__.py,sha256=ur-9GrE_DRfnsFCpYan03qOY9cWbjagC8KHZFZuiCmc,113
+dagster_k8s/ops/k8s_job_op.py,sha256=MZ807LHeRWPs-yYucHZoJoiTe_W8EhmGaq8z_ZSoBuM,21128
+dagster_k8s-0.24.11.dist-info/LICENSE,sha256=TMatHW4_G9ldRdodEAp-l2Xa2WvsdeOh60E3v1R2jis,11349
+dagster_k8s-0.24.11.dist-info/METADATA,sha256=4F59lzGXslQQWRm_QzVznsHuItXzIqxeUcOf1ksLsU8,784
+dagster_k8s-0.24.11.dist-info/WHEEL,sha256=yQN5g4mg4AybRjkgi-9yy4iQEFibGQmlz78Pik5Or-A,92
+dagster_k8s-0.24.11.dist-info/top_level.txt,sha256=wFPjskoWPlk2hOLugYCaoZhSiZdUcbCA1QZe9I4dals,12
+dagster_k8s-0.24.11.dist-info/RECORD,,
dagster_k8s-0.24.9.dist-info/RECORD
REMOVED
@@ -1,19 +0,0 @@
-dagster_k8s/__init__.py,sha256=7LyrMxxhXKdGQYClq7OJwMiVJW0KYPP-8lTDrWLwyzU,750
-dagster_k8s/client.py,sha256=9stIMp196v-vaVct8kBFp5pveo0lAup78y9Gi7g9VMM,37873
-dagster_k8s/container_context.py,sha256=gyFS0FgdupEUHIIsfYzS-TKHzXOd2mx-yX7onw2XuiY,21353
-dagster_k8s/executor.py,sha256=IZMWEtzXXd37INbU4gxkMU46cX4qoZw4MdgszxngZuQ,13934
-dagster_k8s/job.py,sha256=2tCIYlGUIt3ZydbZ9WRyPgr7YWz8t_2vaD20u7h5efw,41512
-dagster_k8s/launcher.py,sha256=R1OPsJUpjjR-B2m5a0duCc0eKOG1vdxmhBzj6Kqj1Nw,16625
-dagster_k8s/models.py,sha256=AAHdyfoxzz360bNnrvD-30BidTK2M_qT-mRQdpwys0Y,5532
-dagster_k8s/pipes.py,sha256=lsf657zbvxAH71zjEOIc-Oxo2oRvN-mp2XBjbahPUyU,17195
-dagster_k8s/py.typed,sha256=la67KBlbjXN-_-DfGNcdOcjYumVpKG_Tkw-8n5dnGB4,8
-dagster_k8s/test.py,sha256=cNtcbzxytiZtd01wY5ip7KPi01y0BUQuQhohoIfAFUM,684
-dagster_k8s/utils.py,sha256=c1bHqh5f1p5RZ0JCT6WEbPPjDvbgUp3pl4nYZRaaI4s,786
-dagster_k8s/version.py,sha256=9weWImOclCBqHuCBLYEnR7Ab-QgQ2rysqid5TzPUNBk,23
-dagster_k8s/ops/__init__.py,sha256=ur-9GrE_DRfnsFCpYan03qOY9cWbjagC8KHZFZuiCmc,113
-dagster_k8s/ops/k8s_job_op.py,sha256=cIrIn30Ew1MQDVJOf4A4LsERG60m64Vp9J4q90enebw,20533
-dagster_k8s-0.24.9.dist-info/LICENSE,sha256=TMatHW4_G9ldRdodEAp-l2Xa2WvsdeOh60E3v1R2jis,11349
-dagster_k8s-0.24.9.dist-info/METADATA,sha256=xe_Q50aL5A6xIpGjMFAdop5l48lm2zDP4FUKq4FLqJg,778
-dagster_k8s-0.24.9.dist-info/WHEEL,sha256=yQN5g4mg4AybRjkgi-9yy4iQEFibGQmlz78Pik5Or-A,92
-dagster_k8s-0.24.9.dist-info/top_level.txt,sha256=wFPjskoWPlk2hOLugYCaoZhSiZdUcbCA1QZe9I4dals,12
-dagster_k8s-0.24.9.dist-info/RECORD,,
{dagster_k8s-0.24.9.dist-info → dagster_k8s-0.24.11.dist-info}/LICENSE
File without changes
{dagster_k8s-0.24.9.dist-info → dagster_k8s-0.24.11.dist-info}/WHEEL
File without changes
{dagster_k8s-0.24.9.dist-info → dagster_k8s-0.24.11.dist-info}/top_level.txt
File without changes