runnable 0.18.0-py3-none-any.whl → 0.19.1-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- extensions/job_executor/k8s.py +150 -26
- extensions/job_executor/local.py +1 -0
- extensions/job_executor/local_container.py +1 -0
- extensions/pipeline_executor/local_container.py +0 -1
- runnable/__init__.py +1 -0
- runnable/catalog.py +1 -2
- runnable/entrypoints.py +1 -5
- runnable/executor.py +1 -1
- runnable/parameters.py +0 -9
- runnable/utils.py +5 -25
- {runnable-0.18.0.dist-info → runnable-0.19.1.dist-info}/METADATA +1 -1
- {runnable-0.18.0.dist-info → runnable-0.19.1.dist-info}/RECORD +15 -15
- {runnable-0.18.0.dist-info → runnable-0.19.1.dist-info}/entry_points.txt +1 -0
- {runnable-0.18.0.dist-info → runnable-0.19.1.dist-info}/WHEEL +0 -0
- {runnable-0.18.0.dist-info → runnable-0.19.1.dist-info}/licenses/LICENSE +0 -0
extensions/job_executor/k8s.py
CHANGED
@@ -101,23 +101,40 @@ class HostPath(BaseModel):
     path: str


-class
+class HostPathVolume(BaseModel):
     name: str
     host_path: HostPath


-class
+class PVCClaim(BaseModel):
+    claim_name: str
+
+    model_config = ConfigDict(
+        alias_generator=to_camel,
+        populate_by_name=True,
+        from_attributes=True,
+    )
+
+
+class PVCVolume(BaseModel):
+    name: str
+    persistent_volume_claim: PVCClaim
+
+
+class K8sTemplateSpec(BaseModel):
     active_deadline_seconds: int = Field(default=60 * 60 * 2)  # 2 hours
     node_selector: Optional[dict[str, str]] = None
     tolerations: Optional[list[dict[str, str]]] = None
-    volumes: Optional[list[
+    volumes: Optional[list[HostPathVolume | PVCVolume]] = Field(
+        default_factory=lambda: []
+    )
     service_account_name: Optional[str] = "default"
     restart_policy: RestartPolicy = RestartPolicy.NEVER
     container: Container


-class
-    spec:
+class K8sTemplate(BaseModel):
+    spec: K8sTemplateSpec
     metadata: Optional[ObjectMetaData] = None


@@ -125,32 +142,25 @@ class Spec(BaseModel):
     active_deadline_seconds: Optional[int] = Field(default=60 * 60 * 2)  # 2 hours
     backoff_limit: int = 6
     selector: Optional[LabelSelector] = None
-    template:
+    template: K8sTemplate
     ttl_seconds_after_finished: Optional[int] = Field(default=60 * 60 * 24)  # 24 hours


-class
+class GenericK8sJobExecutor(GenericJobExecutor):
     service_name: str = "k8s-job"
     config_path: Optional[str] = None
     job_spec: Spec
     mock: bool = False
-
-    # The location the mount of .run_log_store is mounted to in minikube
-    # ensure that minikube mount $HOME/workspace/runnable/.run_log_store:/volume/run_logs is executed first
-    # $HOME/workspace/runnable/.catalog:/volume/catalog
-    # Ensure that the docker build is done with eval $(minikube docker-env)
-    mini_k8s_run_log_location: str = Field(default="/volume/run_logs/")
-    mini_k8s_catalog_location: str = Field(default="/volume/catalog/")
+    namespace: str = Field(default="default")

     _is_local: bool = PrivateAttr(default=False)
+    _volume_mounts: list[VolumeMount] = PrivateAttr(default_factory=lambda: [])
+    _volumes: list[HostPathVolume | PVCVolume] = PrivateAttr(default_factory=lambda: [])

     _container_log_location: str = PrivateAttr(default="/tmp/run_logs/")
     _container_catalog_location: str = PrivateAttr(default="/tmp/catalog/")
     _container_secrets_location: str = PrivateAttr(default="/tmp/dotenv")

-    _volumes: list[Volume] = []
-    _volume_mounts: list[VolumeMount] = []
-
     model_config = ConfigDict(
         alias_generator=to_camel,
         populate_by_name=True,
@@ -180,6 +190,7 @@ class K8sJobExecutor(GenericJobExecutor):
         self._use_volumes()

         job_log = self._context.run_log_store.get_job_log(run_id=self._context.run_id)
+        self.add_code_identities(job_log)

         attempt_log = job.execute_command(
             attempt_number=self.step_attempt_number,
@@ -287,14 +298,17 @@ class K8sJobExecutor(GenericJobExecutor):
         )

         logger.info(f"Submitting job: {job.__dict__}")
+        if self.mock:
+            print(job.__dict__)
+            return

         try:
             k8s_batch = self._client.BatchV1Api()
             response = k8s_batch.create_namespaced_job(
                 body=job,
-                namespace="default",
                 _preload_content=False,
                 pretty=True,
+                namespace=self.namespace,
             )
             logger.debug(f"Kubernetes job response: {response}")
         except Exception as e:
@@ -302,6 +316,43 @@ class K8sJobExecutor(GenericJobExecutor):
             print(e)
             raise

+    def _create_volumes(self): ...
+
+    def _use_volumes(self):
+        match self._context.run_log_store.service_name:
+            case "file-system":
+                self._context.run_log_store.log_folder = self._container_log_location
+            case "chunked-fs":
+                self._context.run_log_store.log_folder = self._container_log_location
+
+        match self._context.catalog_handler.service_name:
+            case "file-system":
+                self._context.catalog_handler.catalog_location = (
+                    self._container_catalog_location
+                )
+
+
+class MiniK8sJobExecutor(GenericK8sJobExecutor):
+    service_name: str = "k8s-job"
+    config_path: Optional[str] = None
+    job_spec: Spec
+    mock: bool = False
+
+    # The location the mount of .run_log_store is mounted to in minikube
+    # ensure that minikube mount $HOME/workspace/runnable/.run_log_store:/volume/run_logs is executed first
+    # $HOME/workspace/runnable/.catalog:/volume/catalog
+    # Ensure that the docker build is done with eval $(minikube docker-env)
+    mini_k8s_run_log_location: str = Field(default="/volume/run_logs/")
+    mini_k8s_catalog_location: str = Field(default="/volume/catalog/")
+
+    _is_local: bool = PrivateAttr(default=False)
+
+    model_config = ConfigDict(
+        alias_generator=to_camel,
+        populate_by_name=True,
+        from_attributes=True,
+    )
+
     def _create_volumes(self):
         match self._context.run_log_store.service_name:
             case "file-system":
@@ -311,7 +362,7 @@ class K8sJobExecutor(GenericJobExecutor):
                 # You then are creating a volume that is mounted to /tmp/run_logs in the container
                 # You are then referring to it.
                 # https://stackoverflow.com/questions/57411456/minikube-mounted-host-folders-are-not-working
-
+                    HostPathVolume(
                         name="run-logs",
                         host_path=HostPath(path=self.mini_k8s_run_log_location),
                     )
@@ -323,7 +374,7 @@ class K8sJobExecutor(GenericJobExecutor):
                 )
             case "chunked-fs":
                 self._volumes.append(
-
+                    HostPathVolume(
                         name="run-logs",
                         host_path=HostPath(path=self.mini_k8s_run_log_location),
                     )
@@ -337,7 +388,7 @@ class K8sJobExecutor(GenericJobExecutor):
         match self._context.catalog_handler.service_name:
             case "file-system":
                 self._volumes.append(
-
+                    HostPathVolume(
                         name="catalog",
                         host_path=HostPath(path=self.mini_k8s_catalog_location),
                     )
@@ -348,15 +399,88 @@ class K8sJobExecutor(GenericJobExecutor):
                 )
             )

-
+
+
+class K8sJobExecutor(GenericK8sJobExecutor):
+    service_name: str = "k8s-job"
+    config_path: Optional[str] = None
+    job_spec: Spec
+    mock: bool = False
+    pvc_claim_name: str
+
+    # change the spec to pull image if not present
+    def model_post_init(self, __context):
+        self.job_spec.template.spec.container.image_pull_policy = ImagePullPolicy.ALWAYS
+
+    _is_local: bool = PrivateAttr(default=False)
+
+    model_config = ConfigDict(
+        alias_generator=to_camel,
+        populate_by_name=True,
+        from_attributes=True,
+    )
+
+    def execute_job(self, job: BaseTaskType, catalog_settings=Optional[List[str]]):
+        self._use_volumes()
+        self._set_up_run_log()
+
+        job_log = self._context.run_log_store.create_job_log()
+        self._context.run_log_store.add_job_log(
+            run_id=self._context.run_id, job_log=job_log
+        )
+
+        job_log = self._context.run_log_store.get_job_log(run_id=self._context.run_id)
+        self.add_code_identities(job_log)
+
+        attempt_log = job.execute_command(
+            attempt_number=self.step_attempt_number,
+            mock=self.mock,
+        )
+
+        job_log.status = attempt_log.status
+        job_log.attempts.append(attempt_log)
+
+        data_catalogs_put: Optional[List[DataCatalog]] = self._sync_catalog(
+            catalog_settings=catalog_settings
+        )
+        logger.debug(f"data_catalogs_put: {data_catalogs_put}")
+
+        job_log.add_data_catalogs(data_catalogs_put or [])
+
+        console.print("Summary of job")
+        console.print(job_log.get_summary())
+
+        self._context.run_log_store.add_job_log(
+            run_id=self._context.run_id, job_log=job_log
+        )
+
+    def _create_volumes(self):
+        self._volumes.append(
+            PVCVolume(
+                name=self.pvc_claim_name,
+                persistent_volume_claim=PVCClaim(claim_name=self.pvc_claim_name),
+            )
+        )
         match self._context.run_log_store.service_name:
             case "file-system":
-                self.
+                self._volume_mounts.append(
+                    VolumeMount(
+                        name=self.pvc_claim_name,
+                        mount_path=self._container_log_location,
+                    )
+                )
             case "chunked-fs":
-                self.
+                self._volume_mounts.append(
+                    VolumeMount(
+                        name=self.pvc_claim_name,
+                        mount_path=self._container_log_location,
+                    )
+                )

         match self._context.catalog_handler.service_name:
             case "file-system":
-                self.
-
+                self._volume_mounts.append(
+                    VolumeMount(
+                        name=self.pvc_claim_name,
+                        mount_path=self._container_catalog_location,
+                    )
                 )

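The PVC-backed volume models added above are plain Pydantic models; only PVCClaim carries the camelCase alias configuration. Below is a minimal, self-contained sketch (the models are re-declared here and the values are illustrative, not taken from the package) showing how the claim serializes into a Kubernetes-style key:

# Sketch only: mirrors the PVCClaim/PVCVolume definitions from the diff above.
from pydantic import BaseModel, ConfigDict
from pydantic.alias_generators import to_camel


class PVCClaim(BaseModel):
    claim_name: str

    model_config = ConfigDict(
        alias_generator=to_camel,
        populate_by_name=True,
        from_attributes=True,
    )


class PVCVolume(BaseModel):
    name: str
    persistent_volume_claim: PVCClaim


volume = PVCVolume(
    name="runnable-pvc",  # illustrative value, not from the diff
    persistent_volume_claim=PVCClaim(claim_name="runnable-pvc"),
)
# by_alias=True turns claim_name into the Kubernetes-style claimName key
print(volume.model_dump(by_alias=True))
# {'name': 'runnable-pvc', 'persistent_volume_claim': {'claimName': 'runnable-pvc'}}
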
extensions/job_executor/local.py
CHANGED
@@ -37,6 +37,7 @@ class LocalJobExecutor(GenericJobExecutor):
         logger.info("Trying to execute job")

         job_log = self._context.run_log_store.get_job_log(run_id=self._context.run_id)
+        self.add_code_identities(job_log)

         attempt_log = job.execute_command(
             attempt_number=self.step_attempt_number,

extensions/job_executor/local_container.py
CHANGED
@@ -52,6 +52,7 @@ class LocalContainerJobExecutor(GenericJobExecutor):
         logger.info("Trying to execute job")

         job_log = self._context.run_log_store.get_job_log(run_id=self._context.run_id)
+        self.add_code_identities(job_log)

         attempt_log = job.execute_command(
             attempt_number=self.step_attempt_number,

extensions/pipeline_executor/local_container.py
CHANGED
@@ -268,7 +268,6 @@ class LocalContainerExecutor(GenericPipelineExecutor):
                 f"Please provide a docker_image using executor_config of the step {node.name} or at global config"
             )

-        # TODO: Should consider using getpass.getuser() when running the docker container? Volume permissions
         container = client.containers.create(
             image=docker_image,
             command=command,

runnable/__init__.py
CHANGED
runnable/catalog.py
CHANGED
@@ -10,8 +10,6 @@ from runnable.datastore import DataCatalog

 logger = logging.getLogger(defaults.LOGGER_NAME)

-# TODO: Should ** be allowed as glob pattern as it can potentially copy everything to catalog
-

 def is_catalog_out_of_sync(
     catalog, synced_catalogs=Optional[List[DataCatalog]]
@@ -170,3 +168,4 @@ class DoNothingCatalog(BaseCatalog):
         Does nothing
         """
         logger.info("Using a do-nothing catalog, doing nothing while sync between runs")
+        logger.info("Using a do-nothing catalog, doing nothing while sync between runs")

runnable/entrypoints.py
CHANGED
@@ -16,9 +16,6 @@ from runnable.executor import BaseJobExecutor, BasePipelineExecutor
 logger = logging.getLogger(defaults.LOGGER_NAME)


-print("")  # removes the buffer print
-
-
 def get_default_configs() -> RunnableConfig:
     """
     User can provide extensions as part of their code base, runnable-config.yaml provides the place to put them.
@@ -128,11 +125,10 @@ def prepare_configurations(
         "job-executor", None
     )  # type: ignore
     if not job_executor_config:
-
+        job_executor_config = cast(
             ServiceConfig,
             runnable_defaults.get("job-executor", defaults.DEFAULT_JOB_EXECUTOR),
         )
-
     assert job_executor_config, "Job executor is not provided"
     configured_executor = utils.get_provider_by_name_and_type(
         "job_executor", job_executor_config

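The change above also tidies the fallback for a missing job-executor configuration: when the user config has no "job-executor" block, the packaged default is taken and cast for the type checker. A small illustration of that pattern, with stand-in types (ServiceConfig and DEFAULT_JOB_EXECUTOR here are simplified assumptions, not runnable's real definitions):

from typing import TypedDict, cast


class ServiceConfig(TypedDict):
    type: str
    config: dict


DEFAULT_JOB_EXECUTOR: ServiceConfig = {"type": "local", "config": {}}

user_config: dict = {}  # pretend runnable-config.yaml had no job-executor block

job_executor_config = user_config.get("job-executor", None)
if not job_executor_config:
    # cast() only informs the type checker; at runtime the default is returned unchanged
    job_executor_config = cast(ServiceConfig, DEFAULT_JOB_EXECUTOR)
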
runnable/executor.py
CHANGED
@@ -11,9 +11,9 @@ import runnable.context as context
 from runnable import defaults
 from runnable.datastore import DataCatalog, JobLog, StepLog
 from runnable.defaults import TypeMapVariable
-from runnable.graph import Graph

 if TYPE_CHECKING:  # pragma: no cover
+    from runnable.graph import Graph
     from runnable.nodes import BaseNode
     from runnable.tasks import BaseTaskType

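Moving the Graph import under TYPE_CHECKING means it is only evaluated by type checkers, never at runtime, which avoids an unnecessary runtime import (and any import-cycle risk) while keeping annotations intact. A short sketch of the pattern, with an illustrative function signature:

from __future__ import annotations

from typing import TYPE_CHECKING

if TYPE_CHECKING:  # pragma: no cover
    from runnable.graph import Graph  # resolved by the type checker only


def run(dag: Graph) -> None:  # illustrative signature, not runnable's API
    # The annotation never triggers an import of runnable.graph at runtime,
    # because `from __future__ import annotations` keeps annotations lazy.
    ...
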
runnable/parameters.py
CHANGED
@@ -15,8 +15,6 @@ from runnable.utils import remove_prefix

 logger = logging.getLogger(defaults.LOGGER_NAME)

-# TODO: Revisit this, it might be a bit too complicated than required
-

 def get_user_set_parameters(remove: bool = False) -> Dict[str, JsonParameter]:
     """
@@ -50,13 +48,6 @@ def get_user_set_parameters(remove: bool = False) -> Dict[str, JsonParameter]:
     return parameters


-def serialize_parameter_as_str(value: Any) -> str:
-    if isinstance(value, BaseModel):
-        return json.dumps(value.model_dump())
-
-    return json.dumps(value)
-
-
 def filter_arguments_for_func(
     func: Callable[..., Any],
     params: Dict[str, Any],

runnable/utils.py
CHANGED
@@ -17,7 +17,7 @@ from ruamel.yaml import YAML
 from stevedore import driver

 import runnable.context as context
-from runnable import defaults, names
+from runnable import console, defaults, names
 from runnable.defaults import TypeMapVariable

 if TYPE_CHECKING:  # pragma: no cover
@@ -176,7 +176,7 @@ def is_a_git_repo() -> bool:
         logger.info("Found the code to be git versioned")
         return True
     except BaseException:  # pylint: disable=W0702
-
+        console.print("Not a git repo", style="bold red")

     return False

@@ -195,27 +195,7 @@ def get_current_code_commit() -> Union[str, None]:
         logger.info("Found the git commit to be: %s", label)
         return label
     except BaseException:  # pylint: disable=W0702
-
-        raise
-
-
-def archive_git_tracked(name: str):
-    """Generate a git archive of the tracked files.
-
-    Args:
-        name (str): The name to give the archive
-
-    Raises:
-        Exception: If its not a git repo
-    """
-    command = f"git archive -v -o {name}.tar.gz --format=tar.gz HEAD"
-
-    if not is_a_git_repo():
-        raise Exception("Not a git repo")
-    try:
-        subprocess.check_output(command.split()).strip().decode("utf-8")
-    except BaseException:  # pylint: disable=W0702
-        logger.exception("Error archiving repo")
+        console.print("Not a git repo, error getting hash", style="bold red")
         raise

@@ -234,7 +214,7 @@ def is_git_clean() -> Tuple[bool, Union[None, str]]:
         return True, None
         return False, label
     except BaseException:  # pylint: disable=W0702
-
+        console.print("Not a git repo, not clean", style="bold red")

     return False, None

@@ -253,7 +233,7 @@ def get_git_remote() -> Union[str, None]:
         logger.info("Found the git remote to be: %s", label)
         return label
     except BaseException:  # pylint: disable=W0702
-
+        console.print("Not a git repo, no remote", style="bold red")
         raise

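The utils.py changes above replace silently swallowed git failures with messages on the shared rich console. A sketch of the resulting behaviour for one helper (the console object and the exact git command below are stand-ins, not copied from the package):

import subprocess

from rich.console import Console

console = Console()


def get_current_code_commit() -> str:
    command = "git rev-parse HEAD"  # assumed command, for illustration only
    try:
        # Capture the current commit hash of the working directory
        return subprocess.check_output(command.split()).strip().decode("utf-8")
    except BaseException:
        # Instead of failing silently, tell the user why no commit hash was recorded
        console.print("Not a git repo, error getting hash", style="bold red")
        raise
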
{runnable-0.18.0.dist-info → runnable-0.19.1.dist-info}/RECORD
CHANGED
@@ -5,10 +5,10 @@ extensions/catalog/file_system.py,sha256=VZEUx4X-GDSM8rJ_2kiCOyw1eek3roN0CiSB8wd
 extensions/catalog/pyproject.toml,sha256=lLNxY6v04c8I5QK_zKw_E6sJTArSJRA_V-79ktaA3Hk,279
 extensions/job_executor/README.md,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 extensions/job_executor/__init__.py,sha256=HINaPjBWz04Ni7GqhuDLi0lS0-gYzq52HcOioYueYJE,5513
-extensions/job_executor/k8s.py,sha256=
+extensions/job_executor/k8s.py,sha256=V5k6Rnf_sAFqptVbCrWs_x5sl3x3fSHwO96IZoiJxKU,15342
 extensions/job_executor/k8s_job_spec.yaml,sha256=7aFpxHdO_p6Hkc3YxusUOuAQTD1Myu0yTPX9DrhxbOg,1158
-extensions/job_executor/local.py,sha256=
-extensions/job_executor/local_container.py,sha256=
+extensions/job_executor/local.py,sha256=FvxTk0vyxdrbLOAyNkLyjvmmowypabWOSITQBK_ffVE,1907
+extensions/job_executor/local_container.py,sha256=hyFnpicCp3_87mZsW64P6KSVbz7XMLjwJUWVjeCJ0_I,6627
 extensions/job_executor/pyproject.toml,sha256=UIEgiCYHTXcRWSByNMFuKJFKgxTBpQqTqyUecIsb_Vc,286
 extensions/nodes/README.md,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 extensions/nodes/nodes.py,sha256=ib68QE737ihGLIVp3V2wea13u7lmMZdRvK80bgUkRtA,34645
@@ -18,7 +18,7 @@ extensions/pipeline_executor/__init__.py,sha256=YnKILiy-SxfnG3rYUoinjh1lfkuAF5QX
 extensions/pipeline_executor/argo.py,sha256=ClfuU_Of_2f5mvqVgY1QQwwJwXHB0LbzwNArG1x2Axc,44666
 extensions/pipeline_executor/argo_specification.yaml,sha256=wXQcm2gOQYqy-IOQIhucohS32ZrHKCfGA5zZ0RraPYc,1276
 extensions/pipeline_executor/local.py,sha256=H8s6AdML_9_f-vdGG_6k0y9FbLqAqvA1S_7xMNyARzY,1946
-extensions/pipeline_executor/local_container.py,sha256=
+extensions/pipeline_executor/local_container.py,sha256=UCap8wCbHrtTN5acECBBkvcXkA3SXtrAOGW88JT7ofw,13853
 extensions/pipeline_executor/mocked.py,sha256=SuObJ6Myt7p8duW8sylIp1cYIAnFutsJW1avWaOUY3c,5798
 extensions/pipeline_executor/pyproject.toml,sha256=ykTX7srR10PBYb8LsIwEj8vIPPIEZQ5V_R7VYbZ-ido,291
 extensions/pipeline_executor/retry.py,sha256=KGenhWrLLmOQgzMvqloXHDRJyoNs91t05rRW8aLW6FA,6969
@@ -33,26 +33,26 @@ extensions/run_log_store/db/integration_FF.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeR
 extensions/secrets/README.md,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 extensions/secrets/dotenv.py,sha256=FbYYd_pVuJuVuIDIvXbzKuSSQ9GPq7xJXTDbJMTQbhM,1583
 extensions/secrets/pyproject.toml,sha256=mLJNImNcBlbLKHh-0ugVWT9V83R4RibyyYDtBCSqVF4,282
-runnable/__init__.py,sha256=
-runnable/catalog.py,sha256=
+runnable/__init__.py,sha256=fYkOrbsb-E1rGkrof7kOJ3KboTFH-HriGa-8npn4-50,625
+runnable/catalog.py,sha256=b9N40kTv1IBidzlWjkHcBGyYhq6qIDHZfBuFenzjsMI,4924
 runnable/cli.py,sha256=01zmzOdynEmLI4vWDtSHQ6y1od_Jlc8G1RF69fi2L8g,8446
 runnable/context.py,sha256=by5uepmuCP0dmM9BmsliXihSes5QEFejwAsmekcqylE,1388
 runnable/datastore.py,sha256=9y5enzn6AXLHLdwvgkdjGPrBkVlrcjfbaAHsst-lJzg,32466
 runnable/defaults.py,sha256=3o9IVGryyCE6PoQTOoaIaHHTbJGEzmdXMcwzOhwAYoI,3518
-runnable/entrypoints.py,sha256=
+runnable/entrypoints.py,sha256=P958nFz5WAsgTwd9sW04Q30vtjweYpr3rPsHVY4gh2U,18876
 runnable/exceptions.py,sha256=LFbp0-Qxg2PAMLEVt7w2whhBxSG-5pzUEv5qN-Rc4_c,3003
-runnable/executor.py,sha256=
+runnable/executor.py,sha256=ZPpfKwjDJnta03M2cWIINXcwke2ZDVc_QrIw7kwpHDQ,15547
 runnable/graph.py,sha256=jVjikRLR-so3b2ufmNKpEQ_Ny68qN4bcGDAdXBRKiCY,16574
 runnable/names.py,sha256=vn92Kv9ANROYSZX6Z4z1v_WA3WiEdIYmG6KEStBFZug,8134
 runnable/nodes.py,sha256=YU9u7r1ESzui1uVtJ1dgwdv1ozyJnF2k-MCFieT8CLI,17519
-runnable/parameters.py,sha256=
+runnable/parameters.py,sha256=LyQb1d0SaFeI4PJ_yDYt9wArm9ThSPASWb36TwIdDUs,5213
 runnable/pickler.py,sha256=ydJ_eti_U1F4l-YacFp7BWm6g5vTn04UXye25S1HVok,2684
 runnable/sdk.py,sha256=xN5F4XX8r5wCN131kgN2xG7MkNm0bSGJ3Ukw8prHYJ8,31444
 runnable/secrets.py,sha256=PXcEJw-4WPzeWRLfsatcPPyr1zkqgHzdRWRcS9vvpvM,2354
 runnable/tasks.py,sha256=JnIIYQf3YUidHXIN6hiUIfDnegc7_rJMNXuHW4WS9ig,29378
-runnable/utils.py,sha256=
-runnable-0.
-runnable-0.
-runnable-0.
-runnable-0.
-runnable-0.
+runnable/utils.py,sha256=Kwf54tHMVXYK7MCmvAi_FG08U_bHDKIQO-HDpM9X0QI,19500
+runnable-0.19.1.dist-info/METADATA,sha256=INBkKmT9vbToqxJs3UCH4G2Db1G8Gk7mR-Jsk-r99EE,9945
+runnable-0.19.1.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+runnable-0.19.1.dist-info/entry_points.txt,sha256=seek5WVGvwYALm8lZ0TfPXwG5NaCeUKjU8urF8k3gvY,1621
+runnable-0.19.1.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+runnable-0.19.1.dist-info/RECORD,,

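The RECORD entries above follow the standard wheel format: each line is "path,sha256=<urlsafe-base64 digest with padding stripped>,<size in bytes>". A small sketch of recomputing such an entry for a file on disk:

import base64
import hashlib
from pathlib import Path


def record_entry(path: str) -> str:
    data = Path(path).read_bytes()
    # sha256 digest, urlsafe-base64 encoded, trailing '=' padding removed (PEP 376/427 style)
    digest = base64.urlsafe_b64encode(hashlib.sha256(data).digest()).rstrip(b"=")
    return f"{path},sha256={digest.decode()},{len(data)}"


# For a file taken from the installed 0.19.1 wheel, e.g. record_entry("runnable/utils.py"),
# the output should match the corresponding line in RECORD above.
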
{runnable-0.18.0.dist-info → runnable-0.19.1.dist-info}/entry_points.txt
CHANGED
@@ -9,6 +9,7 @@ file-system = extensions.catalog.file_system:FileSystemCatalog
 k8s-job = extensions.job_executor.k8s:K8sJobExecutor
 local = extensions.job_executor.local:LocalJobExecutor
 local-container = extensions.job_executor.local_container:LocalContainerJobExecutor
+mini-k8s-job = extensions.job_executor.k8s:MiniK8sJobExecutor

 [nodes]
 dag = extensions.nodes.nodes:DagNode

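The new mini-k8s-job entry point registered above can be resolved through stevedore, which runnable already imports in utils.py. A hedged sketch of that lookup (the namespace string is an assumption for illustration; runnable builds it internally via get_provider_by_name_and_type):

from stevedore import driver

manager = driver.DriverManager(
    namespace="job_executor",  # assumed entry-point group name
    name="mini-k8s-job",
    invoke_on_load=False,  # return the plugin class rather than an instance
)
MiniK8sJobExecutor = manager.driver  # extensions.job_executor.k8s:MiniK8sJobExecutor
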
{runnable-0.18.0.dist-info → runnable-0.19.1.dist-info}/WHEEL
File without changes
{runnable-0.18.0.dist-info → runnable-0.19.1.dist-info}/licenses/LICENSE
File without changes
|