fractal-server 2.14.0a6__py3-none-any.whl → 2.14.0a8__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fractal_server/__init__.py +1 -1
- fractal_server/app/history/__init__.py +0 -4
- fractal_server/app/routes/api/v2/_aux_functions.py +35 -45
- fractal_server/app/routes/api/v2/_aux_functions_history.py +109 -0
- fractal_server/app/routes/api/v2/history.py +42 -71
- fractal_server/app/routes/api/v2/job.py +30 -0
- fractal_server/app/routes/api/v2/status_legacy.py +5 -5
- fractal_server/app/routes/api/v2/task.py +3 -10
- fractal_server/app/routes/api/v2/workflowtask.py +13 -2
- fractal_server/app/runner/executors/base_runner.py +53 -26
- fractal_server/app/runner/executors/local/runner.py +6 -5
- fractal_server/app/runner/executors/slurm_ssh/executor.py +3 -4
- fractal_server/app/runner/executors/slurm_sudo/runner.py +5 -60
- fractal_server/app/runner/v2/runner.py +68 -33
- fractal_server/app/runner/v2/runner_functions.py +256 -14
- fractal_server/app/schemas/v2/__init__.py +7 -1
- fractal_server/app/schemas/v2/dumps.py +20 -4
- fractal_server/app/schemas/v2/history.py +54 -0
- fractal_server/app/schemas/v2/manifest.py +11 -0
- fractal_server/app/schemas/v2/status_legacy.py +35 -0
- fractal_server/app/schemas/v2/task.py +32 -1
- fractal_server/app/schemas/v2/workflowtask.py +2 -21
- fractal_server/tasks/v2/utils_database.py +1 -16
- {fractal_server-2.14.0a6.dist-info → fractal_server-2.14.0a8.dist-info}/METADATA +2 -2
- {fractal_server-2.14.0a6.dist-info → fractal_server-2.14.0a8.dist-info}/RECORD +28 -29
- {fractal_server-2.14.0a6.dist-info → fractal_server-2.14.0a8.dist-info}/WHEEL +1 -1
- fractal_server/app/history/image_updates.py +0 -124
- fractal_server/app/history/status_enum.py +0 -16
- fractal_server/app/schemas/v2/status.py +0 -16
- {fractal_server-2.14.0a6.dist-info → fractal_server-2.14.0a8.dist-info}/LICENSE +0 -0
- {fractal_server-2.14.0a6.dist-info → fractal_server-2.14.0a8.dist-info}/entry_points.txt +0 -0
fractal_server/app/runner/executors/base_runner.py

```diff
@@ -1,6 +1,20 @@
 from typing import Any
 
 from fractal_server.app.runner.components import _COMPONENT_KEY_
+from fractal_server.app.schemas.v2.task import TaskTypeType
+
+
+TASK_TYPES_SUBMIT: list[TaskTypeType] = [
+    "compound",
+    "converter_compound",
+    "non_parallel",
+    "converter_non_parallel",
+]
+TASK_TYPES_MULTISUBMIT: list[TaskTypeType] = [
+    "compound",
+    "converter_compound",
+    "parallel",
+]
 
 
 class BaseRunner(object):
@@ -16,7 +30,7 @@ class BaseRunner(object):
         func: callable,
         parameters: dict[str, Any],
         history_item_id: int,
-        in_compound_task: bool = False,
+        task_type: TaskTypeType,
         **kwargs,
     ) -> tuple[Any, BaseException]:
         """
@@ -25,16 +39,13 @@
         # FIXME: Describe more in detail
 
         Args:
-            func:
-                Function to be executed.
+            func: Function to be executed.
             parameters:
                 Dictionary of parameters. Must include `zarr_urls` key.
             history_item_id:
                 Database ID of the corresponding `HistoryItemV2` entry.
-            in_compound_task:
-                Whether this is the compute part of a compound task.
-            kwargs:
-                Runner-specific parameters.
+            task_type: Task type.
+            kwargs: Runner-specific parameters.
         """
         raise NotImplementedError()
 
@@ -43,7 +54,7 @@
         func: callable,
         list_parameters: list[dict[str, Any]],
         history_item_id: int,
-        in_compound_task: bool = False,
+        task_type: TaskTypeType,
         **kwargs,
     ) -> tuple[dict[int, Any], dict[int, BaseException]]:
         """
@@ -52,33 +63,44 @@
         # FIXME: Describe more in detail
 
         Args:
-            func:
-                Function to be executed.
+            func: Function to be executed.
             list_parameters:
                 List of dictionaries of parameters. Each one must include a
                 `zarr_url` key.
             history_item_id:
                 Database ID of the corresponding `HistoryItemV2` entry.
-            in_compound_task:
-                Whether this is the compute part of a compound task.
-            kwargs:
-                Runner-specific parameters.
+            task_type: Task type.
+            kwargs: Runner-specific parameters.
         """
         raise NotImplementedError()
 
-    def validate_submit_parameters(self, parameters: dict[str, Any]) -> None:
+    def validate_submit_parameters(
+        self,
+        parameters: dict[str, Any],
+        task_type: TaskTypeType,
+    ) -> None:
         """
         Validate parameters for `submit` method
 
         Args:
             parameters: Parameters dictionary.
+            task_type: Task type.s
         """
+        if task_type not in TASK_TYPES_SUBMIT:
+            raise ValueError(f"Invalid {task_type=} for `submit`.")
         if not isinstance(parameters, dict):
             raise ValueError("`parameters` must be a dictionary.")
-        if "zarr_urls" not in parameters.keys():
-            raise ValueError(
-                f"No 'zarr_urls' key in in {list(parameters.keys())}"
-            )
+        if task_type in ["non_parallel", "compound"]:
+            if "zarr_urls" not in parameters.keys():
+                raise ValueError(
+                    f"No 'zarr_urls' key in in {list(parameters.keys())}"
+                )
+        elif task_type in ["converter_non_parallel", "converter_compound"]:
+            if "zarr_urls" in parameters.keys():
+                raise ValueError(
+                    f"Forbidden 'zarr_urls' key in {list(parameters.keys())}"
+                )
+
         if _COMPONENT_KEY_ not in parameters.keys():
             raise ValueError(
                 f"No '{_COMPONENT_KEY_}' key in in {list(parameters.keys())}"
@@ -87,21 +109,26 @@ class BaseRunner(object):
     def validate_multisubmit_parameters(
         self,
         list_parameters: list[dict[str, Any]],
-        in_compound_task: bool = False,
+        task_type: TaskTypeType,
     ) -> None:
         """
         Validate parameters for `multi_submit` method
 
         Args:
             list_parameters: List of parameters dictionaries.
-            in_compound_task:
-                Whether this is the compute part of a compound task.
+            task_type: Task type.
         """
+        if task_type not in TASK_TYPES_MULTISUBMIT:
+            raise ValueError(f"Invalid {task_type=} for `multisubmit`.")
+
+        if not isinstance(list_parameters, list):
+            raise ValueError("`parameters` must be a list.")
+
         for single_kwargs in list_parameters:
             if not isinstance(single_kwargs, dict):
-                raise
+                raise ValueError("kwargs itemt must be a dictionary.")
             if "zarr_url" not in single_kwargs.keys():
-                raise
+                raise ValueError(
                     f"No 'zarr_url' key in in {list(single_kwargs.keys())}"
                 )
             if _COMPONENT_KEY_ not in single_kwargs.keys():
@@ -109,7 +136,7 @@ class BaseRunner(object):
                     f"No '{_COMPONENT_KEY_}' key "
                     f"in {list(single_kwargs.keys())}"
                 )
-        if not in_compound_task:
+        if task_type == "parallel":
             zarr_urls = [kwargs["zarr_url"] for kwargs in list_parameters]
             if len(zarr_urls) != len(set(zarr_urls)):
-                raise
+                raise ValueError("Non-unique zarr_urls")
```
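Taken together, the `base_runner.py` changes replace the old `in_compound_task` boolean with an explicit `task_type` argument and make the `zarr_urls` requirement depend on it: `non_parallel` and `compound` tasks must receive the list of input images, while the new converter types must not. A standalone sketch of the new gating (plain Python, with the fractal-server imports and the `_COMPONENT_KEY_` check omitted):

```python
# Minimal reproduction of the new task-type validation; not the packaged code.
TASK_TYPES_SUBMIT = [
    "compound",
    "converter_compound",
    "non_parallel",
    "converter_non_parallel",
]


def validate_submit_parameters(parameters: dict, task_type: str) -> None:
    if task_type not in TASK_TYPES_SUBMIT:
        raise ValueError(f"Invalid {task_type=} for `submit`.")
    if task_type in ["non_parallel", "compound"]:
        # Regular tasks consume existing images ...
        if "zarr_urls" not in parameters:
            raise ValueError("No 'zarr_urls' key")
    elif task_type in ["converter_non_parallel", "converter_compound"]:
        # ... while converters create images, so the key is forbidden.
        if "zarr_urls" in parameters:
            raise ValueError("Forbidden 'zarr_urls' key")


validate_submit_parameters({"zarr_urls": []}, task_type="non_parallel")  # ok
validate_submit_parameters({}, task_type="converter_non_parallel")  # ok
try:
    validate_submit_parameters({}, task_type="parallel")
except ValueError as err:
    print(err)  # Invalid task_type='parallel' for `submit`.
```

Note that `compound` and `converter_compound` appear in both the `submit` and `multisubmit` whitelists, presumably because a compound task goes through `submit` for its init half and `multisubmit` for its compute half.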
fractal_server/app/runner/executors/local/runner.py

```diff
@@ -9,9 +9,9 @@ from ._local_config import LocalBackendConfig
 from fractal_server.app.runner.components import _COMPONENT_KEY_
 from fractal_server.app.runner.executors.base_runner import BaseRunner
 from fractal_server.app.runner.task_files import TaskFiles
+from fractal_server.app.schemas.v2.task import TaskTypeType
 from fractal_server.logger import set_logger
 
-
 logger = set_logger(__name__)
 
 
@@ -50,6 +50,7 @@ class LocalRunner(BaseRunner):
         func: callable,
         parameters: dict[str, Any],
         task_files: TaskFiles,
+        task_type: TaskTypeType,
         local_backend_config: Optional[LocalBackendConfig] = None,
     ) -> tuple[Any, Exception]:
         logger.debug("[submit] START")
@@ -61,7 +62,7 @@
             component=parameters[_COMPONENT_KEY_],
         )
 
-        self.validate_submit_parameters(parameters)
+        self.validate_submit_parameters(parameters, task_type=task_type)
         workdir_local = current_task_files.wftask_subfolder_local
         workdir_local.mkdir()
 
@@ -83,18 +84,18 @@
         func: callable,
         list_parameters: list[dict],
         task_files: TaskFiles,
-        in_compound_task: bool = False,
+        task_type: TaskTypeType,
         local_backend_config: Optional[LocalBackendConfig] = None,
     ):
         logger.debug(f"[multisubmit] START, {len(list_parameters)=}")
 
         self.validate_multisubmit_parameters(
             list_parameters=list_parameters,
-            in_compound_task=in_compound_task,
+            task_type=task_type,
         )
 
         workdir_local = task_files.wftask_subfolder_local
-        if not in_compound_task:
+        if task_type not in ["compound", "converter_compound"]:
             workdir_local.mkdir()
 
         # Get local_backend_config
```
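The `LocalRunner` changes are mostly mechanical threading of `task_type`, but the `mkdir` guard in `multisubmit` is worth noting: for the two compound types, `submit` has already created the working subfolder, so the compute half must not recreate it. A small sketch of that control flow, using a hypothetical `maybe_make_workdir` helper (not part of fractal-server):

```python
import tempfile
from pathlib import Path


def maybe_make_workdir(workdir_local: Path, task_type: str) -> None:
    # Mirrors the new LocalRunner.multisubmit logic: the compute half of a
    # compound task reuses the folder already created by `submit`.
    if task_type not in ["compound", "converter_compound"]:
        workdir_local.mkdir()


base = Path(tempfile.mkdtemp())
maybe_make_workdir(base / "wftask", task_type="parallel")  # folder is created
maybe_make_workdir(base / "wftask", task_type="compound")  # mkdir is skipped
```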
fractal_server/app/runner/executors/slurm_ssh/executor.py

```diff
@@ -9,7 +9,6 @@ from concurrent.futures import InvalidStateError
 from copy import copy
 from pathlib import Path
 from typing import Any
-from typing import Callable
 from typing import Optional
 from typing import Sequence
 
@@ -216,7 +215,7 @@ class FractalSlurmSSHExecutor(Executor):
 
     def submit(
         self,
-        fun: Callable,
+        fun: callable,
         *fun_args: Sequence[Any],
         slurm_config: SlurmConfig,
         task_files: TaskFiles,
@@ -278,7 +277,7 @@ class FractalSlurmSSHExecutor(Executor):
 
     def map(
         self,
-        fn: Callable,
+        fn: callable,
         iterable: list[Sequence[Any]],
         *,
         slurm_config: SlurmConfig,
@@ -446,7 +445,7 @@ class FractalSlurmSSHExecutor(Executor):
 
     def _prepare_job(
         self,
-        fun: Callable,
+        fun: callable,
         slurm_file_prefix: str,
         task_files: TaskFiles,
         slurm_config: SlurmConfig,
```
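The only change in the SSH executor is replacing `typing.Callable` annotations with the builtin `callable` and dropping the now-unused import. This is valid at runtime, since Python stores annotations without interpreting them, but `callable` is a predicate function rather than a type, so static checkers treat the two differently. A quick illustration:

```python
from typing import Callable


def f(fun: callable) -> None:  # runtime-valid: annotations are arbitrary objects
    fun()


def g(fun: Callable[[], None]) -> None:  # what a static type checker can verify
    fun()


f(lambda: None)
g(lambda: None)
print(f.__annotations__["fun"])  # <built-in function callable>
```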
fractal_server/app/runner/executors/slurm_sudo/runner.py

```diff
@@ -30,15 +30,11 @@ from fractal_server.app.runner.executors.slurm_common._slurm_config import (
 )
 from fractal_server.app.runner.filenames import SHUTDOWN_FILENAME
 from fractal_server.app.runner.task_files import TaskFiles
+from fractal_server.app.schemas.v2.task import TaskTypeType
 from fractal_server.config import get_settings
 from fractal_server.logger import set_logger
 from fractal_server.syringe import Inject
 
-# from fractal_server.app.history import ImageStatus
-# from fractal_server.app.history import update_all_images
-# from fractal_server.app.history import update_single_image
-# from fractal_server.app.history import update_single_image_logfile
-
 
 logger = set_logger(__name__)
 
@@ -426,7 +422,7 @@ class RunnerSlurmSudo(BaseRunner):
         history_item_id: int,
         task_files: TaskFiles,
         slurm_config: SlurmConfig,
-        in_compound_task: bool = False,
+        task_type: TaskTypeType,
     ) -> tuple[Any, Exception]:
         workdir_local = task_files.wftask_subfolder_local
         workdir_remote = task_files.wftask_subfolder_remote
@@ -439,21 +435,9 @@
         )
 
         if self.jobs != {}:
-            if not in_compound_task:
-                pass
-                # update_all_images(
-                #     history_item_id=history_item_id,
-                #     status=ImageStatus.FAILED,
-                # )
             raise JobExecutionError("Unexpected branch: jobs should be empty.")
 
         if self.is_shutdown():
-            if not in_compound_task:
-                pass
-                # update_all_images(
-                #     history_item_id=history_item_id,
-                #     status=ImageStatus.FAILED,
-                # )
             raise JobExecutionError("Cannot continue after shutdown.")
 
         # Validation phase
@@ -505,22 +489,6 @@ class RunnerSlurmSudo(BaseRunner):
             )
             time.sleep(self.slurm_poll_interval)
 
-        if not in_compound_task:
-            if exception is None:
-                pass
-                # update_all_images(
-                #     history_item_id=history_item_id,
-                #     status=ImageStatus.DONE,
-                #     logfile=LOGFILE,
-                # )
-            else:
-                pass
-                # update_all_images(
-                #     history_item_id=history_item_id,
-                #     status=ImageStatus.FAILED,
-                #     logfile=LOGFILE,
-                # )
-
         return result, exception
 
     def multisubmit(
@@ -530,20 +498,19 @@ class RunnerSlurmSudo(BaseRunner):
         history_item_id: int,
         task_files: TaskFiles,
         slurm_config: SlurmConfig,
-        in_compound_task: bool = False,
+        task_type: TaskTypeType,
     ):
         # self.scancel_jobs()
 
         self.validate_multisubmit_parameters(
-            list_parameters=list_parameters,
-            in_compound_task=in_compound_task,
+            list_parameters=list_parameters, task_type=task_type
         )
 
         workdir_local = task_files.wftask_subfolder_local
         workdir_remote = task_files.wftask_subfolder_remote
 
         # Create local&remote task subfolders
-        if not in_compound_task:
+        if task_type not in ["converter_compound", "compound"]:
             original_umask = os.umask(0)
             workdir_local.mkdir(parents=True, mode=0o755)
             os.umask(original_umask)
@@ -640,28 +607,6 @@ class RunnerSlurmSudo(BaseRunner):
             result, exception = self._postprocess_single_task(
                 task=task
             )
-            if not in_compound_task:
-                pass
-                # update_single_image_logfile(
-                #     history_item_id=history_item_id,
-                #     zarr_url=task.zarr_url,
-                #     logfile=task.task_files.log_file_local,
-                # )
-            if not in_compound_task:
-                if exception is None:
-                    pass
-                    # update_single_image(
-                    #     zarr_url=task.zarr_url,
-                    #     history_item_id=history_item_id,
-                    #     status=ImageStatus.DONE,
-                    # )
-                else:
-                    pass
-                    # update_single_image(
-                    #     zarr_url=task.zarr_url,
-                    #     history_item_id=history_item_id,
-                    #     status=ImageStatus.FAILED,
-                    # )
             if exception is None:
                 results[task.index] = result
             else:
```
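Besides threading `task_type` through, this diff deletes the commented-out `update_*_image*` bookkeeping (its replacement lives in the new history machinery, per the file list above). The umask handling around `mkdir`, which survives unchanged inside the new guard, is the standard trick for applying `mode=0o755` verbatim: `Path.mkdir(mode=...)` is otherwise masked by the process umask. A self-contained POSIX example:

```python
import os
import tempfile
from pathlib import Path

workdir = Path(tempfile.mkdtemp()) / "wftask-subfolder"

# Clear the umask so mode=0o755 is applied exactly, then restore it
# (same pattern as in RunnerSlurmSudo.multisubmit).
original_umask = os.umask(0)
workdir.mkdir(parents=True, mode=0o755)
os.umask(original_umask)

print(oct(workdir.stat().st_mode & 0o777))  # 0o755
```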
fractal_server/app/runner/v2/runner.py

```diff
@@ -2,7 +2,6 @@ import logging
 from copy import copy
 from copy import deepcopy
 from pathlib import Path
-from typing import Callable
 from typing import Optional
 
 from sqlalchemy.orm.attributes import flag_modified
@@ -14,17 +13,21 @@ from ....images.tools import find_image_by_zarr_url
 from ..exceptions import JobExecutionError
 from .runner_functions import no_op_submit_setup_call
 from .runner_functions import run_v2_task_compound
+from .runner_functions import run_v2_task_converter_compound
+from .runner_functions import run_v2_task_converter_non_parallel
 from .runner_functions import run_v2_task_non_parallel
 from .runner_functions import run_v2_task_parallel
 from .task_interface import TaskOutput
 from fractal_server.app.db import get_sync_db
-from fractal_server.app.history.status_enum import XXXStatus
 from fractal_server.app.models.v2 import AccountingRecord
 from fractal_server.app.models.v2 import DatasetV2
 from fractal_server.app.models.v2 import HistoryRun
 from fractal_server.app.models.v2 import TaskGroupV2
 from fractal_server.app.models.v2 import WorkflowTaskV2
 from fractal_server.app.runner.executors.base_runner import BaseRunner
+from fractal_server.app.schemas.v2 import HistoryUnitStatus
+from fractal_server.app.schemas.v2 import TaskDumpV2
+from fractal_server.app.schemas.v2 import TaskGroupDumpV2
 from fractal_server.images.models import AttributeFiltersType
 from fractal_server.images.tools import merge_type_filters
 
@@ -38,7 +41,7 @@ def execute_tasks_v2(
     workflow_dir_local: Path,
     workflow_dir_remote: Optional[Path] = None,
     logger_name: Optional[str] = None,
-    submit_setup_call: Callable = no_op_submit_setup_call,
+    submit_setup_call: callable = no_op_submit_setup_call,
     job_attribute_filters: AttributeFiltersType,
 ) -> None:
     logger = logging.getLogger(logger_name)
@@ -63,45 +66,45 @@
         # PRE TASK EXECUTION
 
         # Filter images by types and attributes (in two steps)
-        type_filters = copy(current_dataset_type_filters)
-        type_filters_patch = merge_type_filters(
-            task_input_types=task.input_types,
-            wftask_type_filters=wftask.type_filters,
-        )
-        type_filters.update(type_filters_patch)
-        type_filtered_images = filter_image_list(
-            images=tmp_images,
-            type_filters=type_filters,
-            attribute_filters=None,
-        )
-        filtered_images = filter_image_list(
-            images=type_filtered_images,
-            type_filters=None,
-            attribute_filters=job_attribute_filters,
-        )
+        if wftask.task_type in ["compound", "parallel", "non_parallel"]:
+            type_filters = copy(current_dataset_type_filters)
+            type_filters_patch = merge_type_filters(
+                task_input_types=task.input_types,
+                wftask_type_filters=wftask.type_filters,
+            )
+            type_filters.update(type_filters_patch)
+            type_filtered_images = filter_image_list(
+                images=tmp_images,
+                type_filters=type_filters,
+                attribute_filters=None,
+            )
+            num_available_images = len(type_filtered_images)
+            filtered_images = filter_image_list(
+                images=type_filtered_images,
+                type_filters=None,
+                attribute_filters=job_attribute_filters,
+            )
+        else:
+            num_available_images = 0
 
-        # Create history item
         with next(get_sync_db()) as db:
+            # Create dumps for workflowtask and taskgroup
             workflowtask_dump = dict(
                 **wftask.model_dump(exclude={"task"}),
-                task=wftask.task.model_dump(),
+                task=TaskDumpV2(**wftask.task.model_dump()).model_dump(),
             )
-
-            # Exclude timestamps since they'd need to be serialized properly
             task_group = db.get(TaskGroupV2, wftask.task.taskgroupv2_id)
-            task_group_dump = task_group.model_dump(
-                exclude={
-                    "timestamp_created",
-                    "timestamp_last_used",
-                }
-            )
+            task_group_dump = TaskGroupDumpV2(
+                **task_group.model_dump()
+            ).model_dump()
+            # Create HistoryRun
             history_run = HistoryRun(
                 dataset_id=dataset.id,
                 workflowtask_id=wftask.id,
                 workflowtask_dump=workflowtask_dump,
                 task_group_dump=task_group_dump,
-                num_available_images=len(type_filtered_images),
-                status=XXXStatus.SUBMITTED,
+                num_available_images=num_available_images,
+                status=HistoryUnitStatus.SUBMITTED,
             )
             db.add(history_run)
             db.commit()
```
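The dump construction now round-trips through the Pydantic schemas (`TaskDumpV2`, `TaskGroupDumpV2`) instead of calling `model_dump()` with a hand-maintained `exclude=` set: constructing the dump model keeps only its declared fields, which removes the need to drop timestamps by hand. A generic sketch of the pattern, assuming Pydantic v2 and a made-up model (not the fractal-server schemas):

```python
from datetime import datetime, timezone

from pydantic import BaseModel


class ThingDumpV2(BaseModel):
    # Dump schema: declares exactly the fields that should be persisted.
    id: int
    name: str


full_record = {
    "id": 1,
    "name": "example",
    "timestamp_created": datetime.now(timezone.utc),  # silently dropped
}

# Same shape as TaskGroupDumpV2(**task_group.model_dump()).model_dump()
dump = ThingDumpV2(**full_record).model_dump()
print(dump)  # {'id': 1, 'name': 'example'}
```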
fractal_server/app/runner/v2/runner.py (continued)

```diff
@@ -126,6 +129,22 @@
             history_run_id=history_run_id,
             dataset_id=dataset.id,
         )
+        elif task.type == "converter_non_parallel":
+            (
+                current_task_output,
+                num_tasks,
+                exceptions,
+            ) = run_v2_task_converter_non_parallel(
+                zarr_dir=zarr_dir,
+                wftask=wftask,
+                task=task,
+                workflow_dir_local=workflow_dir_local,
+                workflow_dir_remote=workflow_dir_remote,
+                executor=runner,
+                submit_setup_call=submit_setup_call,
+                history_run_id=history_run_id,
+                dataset_id=dataset.id,
+            )
         elif task.type == "parallel":
             current_task_output, num_tasks, exceptions = run_v2_task_parallel(
                 images=filtered_images,
@@ -151,6 +170,22 @@
             history_run_id=history_run_id,
             dataset_id=dataset.id,
         )
+        elif task.type == "converter_compound":
+            (
+                current_task_output,
+                num_tasks,
+                exceptions,
+            ) = run_v2_task_converter_compound(
+                zarr_dir=zarr_dir,
+                wftask=wftask,
+                task=task,
+                workflow_dir_local=workflow_dir_local,
+                workflow_dir_remote=workflow_dir_remote,
+                executor=runner,
+                submit_setup_call=submit_setup_call,
+                history_run_id=history_run_id,
+                dataset_id=dataset.id,
+            )
         else:
             raise ValueError(f"Unexpected error: Invalid {task.type=}.")
 
```
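With the two converter branches added, `execute_tasks_v2` now dispatches over all five task types and falls back to a `ValueError` for anything else. The if/elif chain is equivalent to a lookup table; a stub sketch with placeholder functions (not the real `run_v2_task_*` helpers):

```python
def run_non_parallel(): ...
def run_converter_non_parallel(): ...
def run_parallel(): ...
def run_compound(): ...
def run_converter_compound(): ...


# Placeholder dispatch table equivalent to the if/elif chain above.
RUNNERS = {
    "non_parallel": run_non_parallel,
    "converter_non_parallel": run_converter_non_parallel,
    "parallel": run_parallel,
    "compound": run_compound,
    "converter_compound": run_converter_compound,
}

task_type = "converter_compound"
try:
    RUNNERS[task_type]()
except KeyError:
    raise ValueError(f"Unexpected error: Invalid {task_type=}.")
```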
fractal_server/app/runner/v2/runner.py (continued)

```diff
@@ -323,14 +358,14 @@
                 db.execute(
                     update(HistoryRun)
                     .where(HistoryRun.id == history_run_id)
-                    .values(status=XXXStatus.DONE)
+                    .values(status=HistoryUnitStatus.DONE)
                 )
                 db.commit()
             else:
                 db.execute(
                     update(HistoryRun)
                     .where(HistoryRun.id == history_run_id)
-                    .values(status=XXXStatus.FAILED)
+                    .values(status=HistoryUnitStatus.FAILED)
                 )
                 db.commit()
                 logger.error(
```
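The closing status update keeps the same SQLAlchemy 2.x bulk-update shape and only swaps the enum: `XXXStatus`, whose `app/history/status_enum.py` module is deleted in this release per the file list, becomes `HistoryUnitStatus` from the new v2 schemas. A minimal runnable sketch of that construct against in-memory SQLite, with a stand-in table and enum rather than the fractal-server models:

```python
from enum import Enum

from sqlalchemy import Column, Integer, String, create_engine, update
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()


class HistoryUnitStatus(str, Enum):  # stand-in for the real enum
    SUBMITTED = "submitted"
    DONE = "done"
    FAILED = "failed"


class HistoryRun(Base):  # stand-in table, not the fractal-server model
    __tablename__ = "history_run"
    id = Column(Integer, primary_key=True)
    status = Column(String)


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as db:
    db.add(HistoryRun(id=1, status=HistoryUnitStatus.SUBMITTED))
    db.commit()
    # Same update(...).where(...).values(...) shape as in execute_tasks_v2
    db.execute(
        update(HistoryRun)
        .where(HistoryRun.id == 1)
        .values(status=HistoryUnitStatus.DONE)
    )
    db.commit()
    print(db.get(HistoryRun, 1).status)  # done
```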
|