fractal-server 2.10.5__py3-none-any.whl → 2.11.0__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package, as published to a supported registry. It is provided for informational purposes only and reflects the package versions exactly as they appear in their respective public registries.
- fractal_server/__init__.py +1 -1
- fractal_server/app/models/v2/dataset.py +9 -6
- fractal_server/app/models/v2/job.py +5 -0
- fractal_server/app/models/v2/workflowtask.py +5 -8
- fractal_server/app/routes/api/v1/dataset.py +2 -2
- fractal_server/app/routes/api/v2/_aux_functions.py +3 -10
- fractal_server/app/routes/api/v2/_aux_functions_tasks.py +21 -0
- fractal_server/app/routes/api/v2/images.py +30 -7
- fractal_server/app/routes/api/v2/job.py +14 -1
- fractal_server/app/routes/api/v2/status.py +20 -20
- fractal_server/app/routes/api/v2/submit.py +11 -4
- fractal_server/app/routes/api/v2/workflow.py +95 -0
- fractal_server/app/routes/api/v2/workflow_import.py +8 -0
- fractal_server/app/routes/api/v2/workflowtask.py +45 -26
- fractal_server/app/runner/{async_wrap.py → async_wrap_v1.py} +1 -1
- fractal_server/app/runner/executors/slurm/_slurm_config.py +1 -1
- fractal_server/app/runner/executors/slurm/ssh/executor.py +2 -2
- fractal_server/app/runner/filenames.py +2 -4
- fractal_server/app/runner/v1/_common.py +4 -4
- fractal_server/app/runner/v1/_local/__init__.py +2 -2
- fractal_server/app/runner/v1/_slurm/__init__.py +2 -2
- fractal_server/app/runner/v1/handle_failed_job.py +4 -4
- fractal_server/app/runner/v2/__init__.py +12 -66
- fractal_server/app/runner/v2/_local/__init__.py +17 -47
- fractal_server/app/runner/v2/_local_experimental/__init__.py +27 -61
- fractal_server/app/runner/v2/_slurm_ssh/__init__.py +26 -65
- fractal_server/app/runner/v2/_slurm_sudo/__init__.py +24 -66
- fractal_server/app/runner/v2/handle_failed_job.py +31 -130
- fractal_server/app/runner/v2/merge_outputs.py +6 -17
- fractal_server/app/runner/v2/runner.py +51 -89
- fractal_server/app/runner/v2/task_interface.py +0 -2
- fractal_server/app/schemas/_filter_validators.py +43 -0
- fractal_server/app/schemas/_validators.py +13 -2
- fractal_server/app/schemas/v2/dataset.py +85 -12
- fractal_server/app/schemas/v2/dumps.py +6 -8
- fractal_server/app/schemas/v2/job.py +14 -0
- fractal_server/app/schemas/v2/task.py +9 -9
- fractal_server/app/schemas/v2/task_group.py +2 -2
- fractal_server/app/schemas/v2/workflowtask.py +69 -20
- fractal_server/data_migrations/2_11_0.py +168 -0
- fractal_server/images/__init__.py +0 -1
- fractal_server/images/models.py +12 -35
- fractal_server/images/tools.py +53 -14
- fractal_server/logger.py +4 -1
- fractal_server/migrations/versions/db09233ad13a_split_filters_and_keep_old_columns.py +96 -0
- fractal_server/tasks/v2/local/collect.py +2 -2
- fractal_server/tasks/v2/local/deactivate.py +2 -2
- fractal_server/tasks/v2/local/reactivate.py +2 -3
- fractal_server/tasks/v2/ssh/collect.py +2 -2
- fractal_server/tasks/v2/ssh/deactivate.py +2 -2
- fractal_server/tasks/v2/ssh/reactivate.py +2 -2
- fractal_server/utils.py +9 -7
- {fractal_server-2.10.5.dist-info → fractal_server-2.11.0.dist-info}/METADATA +1 -1
- {fractal_server-2.10.5.dist-info → fractal_server-2.11.0.dist-info}/RECORD +57 -54
- {fractal_server-2.10.5.dist-info → fractal_server-2.11.0.dist-info}/LICENSE +0 -0
- {fractal_server-2.10.5.dist-info → fractal_server-2.11.0.dist-info}/WHEEL +0 -0
- {fractal_server-2.10.5.dist-info → fractal_server-2.11.0.dist-info}/entry_points.txt +0 -0
fractal_server/app/runner/v2/runner.py

@@ -1,4 +1,3 @@
-import json
 import logging
 from concurrent.futures import ThreadPoolExecutor
 from copy import copy
@@ -7,27 +6,28 @@ from pathlib import Path
 from typing import Callable
 from typing import Optional
 
-from ....images import Filters
+from sqlalchemy.orm.attributes import flag_modified
+
 from ....images import SingleImage
 from ....images.tools import filter_image_list
 from ....images.tools import find_image_by_zarr_url
-from ....images.tools import match_filter
 from ..exceptions import JobExecutionError
-from ..filenames import FILTERS_FILENAME
-from ..filenames import HISTORY_FILENAME
-from ..filenames import IMAGES_FILENAME
 from .runner_functions import no_op_submit_setup_call
 from .runner_functions import run_v2_task_compound
 from .runner_functions import run_v2_task_non_parallel
 from .runner_functions import run_v2_task_parallel
 from .task_interface import TaskOutput
+from fractal_server.app.db import get_sync_db
 from fractal_server.app.models.v2 import DatasetV2
 from fractal_server.app.models.v2 import WorkflowTaskV2
 from fractal_server.app.schemas.v2.dataset import _DatasetHistoryItemV2
 from fractal_server.app.schemas.v2.workflowtask import WorkflowTaskStatusTypeV2
+from fractal_server.images.models import AttributeFiltersType
+from fractal_server.images.tools import merge_type_filters
 
 
 def execute_tasks_v2(
+    *,
     wf_task_list: list[WorkflowTaskV2],
     dataset: DatasetV2,
     executor: ThreadPoolExecutor,
@@ -35,20 +35,21 @@ def execute_tasks_v2(
     workflow_dir_remote: Optional[Path] = None,
     logger_name: Optional[str] = None,
     submit_setup_call: Callable = no_op_submit_setup_call,
-
-
+    job_attribute_filters: AttributeFiltersType,
+) -> None:
     logger = logging.getLogger(logger_name)
 
-    if (
-
-
+    if not workflow_dir_local.exists():
+        logger.warning(
+            f"Now creating {workflow_dir_local}, "
+            "but it should have already happened."
+        )
         workflow_dir_local.mkdir()
 
     # Initialize local dataset attributes
    zarr_dir = dataset.zarr_dir
     tmp_images = deepcopy(dataset.images)
-
-    tmp_history = []
+    current_dataset_type_filters = deepcopy(dataset.type_filters)
 
     for wftask in wf_task_list:
         task = wftask.task
@@ -58,26 +59,30 @@ def execute_tasks_v2(
         # PRE TASK EXECUTION
 
         # Get filtered images
-
-
-
+        type_filters = copy(current_dataset_type_filters)
+        type_filters_patch = merge_type_filters(
+            task_input_types=task.input_types,
+            wftask_type_filters=wftask.type_filters,
         )
-
-        pre_filters["attributes"].update(wftask.input_filters["attributes"])
+        type_filters.update(type_filters_patch)
         filtered_images = filter_image_list(
             images=tmp_images,
-
+            type_filters=type_filters,
+            attribute_filters=job_attribute_filters,
        )
-        # Verify that filtered images comply with task input_types
-        for image in filtered_images:
-            if not match_filter(image, Filters(types=task.input_types)):
-                raise JobExecutionError(
-                    "Invalid filtered image list\n"
-                    f"Task input types: {task.input_types=}\n"
-                    f'Image zarr_url: {image["zarr_url"]}\n'
-                    f'Image types: {image["types"]}\n'
-                )
 
+        # First, set status SUBMITTED in dataset.history for each wftask
+        with next(get_sync_db()) as db:
+            db_dataset = db.get(DatasetV2, dataset.id)
+            new_history_item = _DatasetHistoryItemV2(
+                workflowtask=wftask,
+                status=WorkflowTaskStatusTypeV2.SUBMITTED,
+                parallelization=dict(),  # FIXME: re-include parallelization
+            ).dict()
+            db_dataset.history.append(new_history_item)
+            flag_modified(db_dataset, "history")
+            db.merge(db_dataset)
+            db.commit()
         # TASK EXECUTION (V2)
         if task.type == "non_parallel":
             current_task_output = run_v2_task_non_parallel(
@@ -249,69 +254,26 @@ def execute_tasks_v2(
             else:
                 tmp_images.pop(img_search["index"])
 
-        # Update
-
-
-        tmp_filters["attributes"].update(
-            current_task_output.filters.attributes
-        )
-
-        # Find manifest ouptut types
-        types_from_manifest = task.output_types
+        # Update type_filters based on task-manifest output_types
+        type_filters_from_task_manifest = task.output_types
+        current_dataset_type_filters.update(type_filters_from_task_manifest)
 
-        #
-
-            types_from_task = current_task_output.filters.types
-        else:
-            types_from_task = {}
-
-        # Check that key sets are disjoint
-        set_types_from_manifest = set(types_from_manifest.keys())
-        set_types_from_task = set(types_from_task.keys())
-        if not set_types_from_manifest.isdisjoint(set_types_from_task):
-            overlap = set_types_from_manifest.intersection(set_types_from_task)
-            raise JobExecutionError(
-                "Some type filters are being set twice, "
-                f"for task '{task_name}'.\n"
-                f"Types from task output: {types_from_task}\n"
-                f"Types from task maniest: {types_from_manifest}\n"
-                f"Overlapping keys: {overlap}"
-            )
-
-        # Update filters.types
-        tmp_filters["types"].update(types_from_manifest)
-        tmp_filters["types"].update(types_from_task)
-
-        # Update history (based on _DatasetHistoryItemV2)
-        history_item = _DatasetHistoryItemV2(
-            workflowtask=wftask,
-            status=WorkflowTaskStatusTypeV2.DONE,
-            parallelization=dict(
-                # task_type=wftask.task.type, # FIXME: breaks for V1 tasks
-                # component_list=fil, #FIXME
-            ),
-        ).dict()
-        tmp_history.append(history_item)
-
-        # Write current dataset attributes (history, images, filters) into
-        # temporary files which can be used (1) to retrieve the latest state
+        # Write current dataset attributes (history, images, filters) into the
+        # database. They can be used (1) to retrieve the latest state
         # when the job fails, (2) from within endpoints that need up-to-date
         # information
-        with
-
-
-
-
-
+        with next(get_sync_db()) as db:
+            db_dataset = db.get(DatasetV2, dataset.id)
+            db_dataset.history[-1]["status"] = WorkflowTaskStatusTypeV2.DONE
+            db_dataset.type_filters = current_dataset_type_filters
+            db_dataset.images = tmp_images
+            for attribute_name in [
+                "type_filters",
+                "history",
+                "images",
+            ]:
+                flag_modified(db_dataset, attribute_name)
+            db.merge(db_dataset)
+            db.commit()
 
         logger.debug(f'END {wftask.order}-th task (name="{task_name}")')
-
-    # NOTE: tmp_history only contains the newly-added history items (to be
-    # appended to the original history), while tmp_filters and tmp_images
-    # represent the new attributes (to replace the original ones)
-    result = dict(
-        history=tmp_history,
-        filters=tmp_filters,
-        images=tmp_images,
-    )
-    return result
fractal_server/app/runner/v2/task_interface.py

@@ -6,7 +6,6 @@ from pydantic import Field
 from pydantic import validator
 
 from ....images import SingleImageTaskOutput
-from fractal_server.images import Filters
 from fractal_server.urls import normalize_url
 
 
@@ -16,7 +15,6 @@ class TaskOutput(BaseModel, extra=Extra.forbid):
         default_factory=list
     )
     image_list_removals: list[str] = Field(default_factory=list)
-    filters: Filters = Field(default_factory=Filters)
 
     def check_zarr_urls_are_unique(self) -> None:
         zarr_urls = [img.zarr_url for img in self.image_list_updates]
fractal_server/app/schemas/_filter_validators.py (new file)

@@ -0,0 +1,43 @@
+from typing import Optional
+
+from ._validators import valdict_keys
+from fractal_server.images.models import AttributeFiltersType
+
+
+def validate_type_filters(
+    type_filters: Optional[dict[str, bool]]
+) -> dict[str, bool]:
+    if type_filters is None:
+        raise ValueError("'type_filters' cannot be 'None'.")
+
+    type_filters = valdict_keys("type_filters")(type_filters)
+    return type_filters
+
+
+def validate_attribute_filters(
+    attribute_filters: Optional[AttributeFiltersType],
+) -> AttributeFiltersType:
+    if attribute_filters is None:
+        raise ValueError("'attribute_filters' cannot be 'None'.")
+
+    attribute_filters = valdict_keys("attribute_filters")(attribute_filters)
+    for key, values in attribute_filters.items():
+        if values == []:
+            raise ValueError(
+                f"attribute_filters[{key}] cannot be an empty list."
+            )
+        else:
+            # values is a non-empty list, and its items must all be of the
+            # same scalar non-None type
+            _type = type(values[0])
+            if not all(type(value) is _type for value in values):
+                raise ValueError(
+                    f"attribute_filters[{key}] has values with "
+                    f"non-homogeneous types: {values}."
+                )
+            if _type not in (int, float, str, bool):
+                raise ValueError(
+                    f"attribute_filters[{key}] has values with "
+                    f"invalid types: {values}."
+                )
+    return attribute_filters
fractal_server/app/schemas/_validators.py

@@ -27,7 +27,7 @@ def valstr(attribute: str, accept_none: bool = False):
     return val
 
 
-def valdictkeys(attribute: str):
+def valdict_keys(attribute: str):
     def val(d: Optional[dict[str, Any]]) -> Optional[dict[str, Any]]:
         """
         Apply valstr to every key of the dictionary, and fail if there are
@@ -38,7 +38,7 @@ def valdictkeys(attribute: str):
         new_keys = [valstr(f"{attribute}[{key}]")(key) for key in old_keys]
         if len(new_keys) != len(set(new_keys)):
             raise ValueError(
-                f"Dictionary contains multiple identical keys: {d}."
+                f"Dictionary contains multiple identical keys: '{d}'."
             )
         for old_key, new_key in zip(old_keys, new_keys):
             if new_key != old_key:
@@ -101,3 +101,14 @@ def val_unique_list(attribute: str):
         return must_be_unique
 
     return val
+
+
+def root_validate_dict_keys(cls, object: dict) -> dict:
+    """
+    For each dictionary in `object.values()`,
+    checks that that dictionary has only keys of type str.
+    """
+    for dictionary in (v for v in object.values() if isinstance(v, dict)):
+        if not all(isinstance(key, str) for key in dictionary.keys()):
+            raise ValueError("Dictionary keys must be strings.")
+    return object
fractal_server/app/schemas/v2/dataset.py

@@ -1,17 +1,22 @@
 from datetime import datetime
+from typing import Any
 from typing import Optional
 
 from pydantic import BaseModel
 from pydantic import Extra
 from pydantic import Field
+from pydantic import root_validator
 from pydantic import validator
 
+from .._filter_validators import validate_attribute_filters
+from .._filter_validators import validate_type_filters
+from .._validators import root_validate_dict_keys
 from .._validators import valstr
 from .dumps import WorkflowTaskDumpV2
 from .project import ProjectReadV2
 from .workflowtask import WorkflowTaskStatusTypeV2
-from fractal_server.images import Filters
 from fractal_server.images import SingleImage
+from fractal_server.images.models import AttributeFiltersType
 from fractal_server.urls import normalize_url
 
 
@@ -34,17 +39,29 @@ class DatasetCreateV2(BaseModel, extra=Extra.forbid):
 
     zarr_dir: Optional[str] = None
 
-
+    type_filters: dict[str, bool] = Field(default_factory=dict)
+    attribute_filters: AttributeFiltersType = Field(default_factory=dict)
 
     # Validators
+
+    _dict_keys = root_validator(pre=True, allow_reuse=True)(
+        root_validate_dict_keys
+    )
+    _type_filters = validator("type_filters", allow_reuse=True)(
+        validate_type_filters
+    )
+    _attribute_filters = validator("attribute_filters", allow_reuse=True)(
+        validate_attribute_filters
+    )
+
+    _name = validator("name", allow_reuse=True)(valstr("name"))
+
     @validator("zarr_dir")
     def normalize_zarr_dir(cls, v: Optional[str]) -> Optional[str]:
         if v is not None:
             return normalize_url(v)
         return v
 
-    _name = validator("name", allow_reuse=True)(valstr("name"))
-
 
 class DatasetReadV2(BaseModel):
 
@@ -59,24 +76,37 @@ class DatasetReadV2(BaseModel):
     timestamp_created: datetime
 
     zarr_dir: str
-
+    type_filters: dict[str, bool]
+    attribute_filters: AttributeFiltersType
 
 
 class DatasetUpdateV2(BaseModel, extra=Extra.forbid):
 
     name: Optional[str]
     zarr_dir: Optional[str]
-
+    type_filters: Optional[dict[str, bool]]
+    attribute_filters: Optional[dict[str, list[Any]]]
 
     # Validators
+
+    _dict_keys = root_validator(pre=True, allow_reuse=True)(
+        root_validate_dict_keys
+    )
+    _type_filters = validator("type_filters", allow_reuse=True)(
+        validate_type_filters
+    )
+    _attribute_filters = validator("attribute_filters", allow_reuse=True)(
+        validate_attribute_filters
+    )
+
+    _name = validator("name", allow_reuse=True)(valstr("name"))
+
     @validator("zarr_dir")
     def normalize_zarr_dir(cls, v: Optional[str]) -> Optional[str]:
         if v is not None:
             return normalize_url(v)
         return v
 
-    _name = validator("name", allow_reuse=True)(valstr("name"))
-
 
 class DatasetImportV2(BaseModel, extra=Extra.forbid):
     """
@@ -87,14 +117,55 @@ class DatasetImportV2(BaseModel, extra=Extra.forbid):
         zarr_dir:
         images:
         filters:
+        type_filters:
+        attribute_filters:
     """
 
     name: str
     zarr_dir: str
     images: list[SingleImage] = Field(default_factory=list)
-    filters: Filters = Field(default_factory=Filters)
 
-
+    filters: Optional[dict[str, Any]] = None
+    type_filters: dict[str, bool] = Field(default_factory=dict)
+    attribute_filters: AttributeFiltersType = Field(default_factory=dict)
+
+    @root_validator(pre=True)
+    def update_legacy_filters(cls, values: dict):
+        """
+        Transform legacy filters (created with fractal-server<2.11.0)
+        into attribute/type filters
+        """
+        if values.get("filters") is not None:
+            if (
+                "type_filters" in values.keys()
+                or "attribute_filters" in values.keys()
+            ):
+                raise ValueError(
+                    "Cannot set filters both through the legacy field "
+                    "('filters') and the new ones ('type_filters' and/or "
+                    "'attribute_filters')."
+                )
+
+            else:
+                # Convert legacy filters.types into new type_filters
+                values["type_filters"] = values["filters"].get("types", {})
+                values["attribute_filters"] = {
+                    key: [value]
+                    for key, value in values["filters"]
+                    .get("attributes", {})
+                    .items()
+                }
+                values["filters"] = None
+
+        return values
+
+    _type_filters = validator("type_filters", allow_reuse=True)(
+        validate_type_filters
+    )
+    _attribute_filters = validator("attribute_filters", allow_reuse=True)(
+        validate_attribute_filters
+    )
+
     @validator("zarr_dir")
     def normalize_zarr_dir(cls, v: str) -> str:
         return normalize_url(v)
@@ -108,10 +179,12 @@ class DatasetExportV2(BaseModel):
         name:
         zarr_dir:
         images:
-
+        type_filters:
+        attribute_filters:
     """
 
     name: str
     zarr_dir: str
     images: list[SingleImage]
-
+    type_filters: dict[str, bool]
+    attribute_filters: AttributeFiltersType
fractal_server/app/schemas/v2/dumps.py

@@ -13,7 +13,7 @@ from typing import Optional
 from pydantic import BaseModel
 from pydantic import Extra
 
-from fractal_server.images import Filters
+from fractal_server.images.models import AttributeFiltersType
 
 
 class ProjectDumpV2(BaseModel, extra=Extra.forbid):
@@ -39,19 +39,16 @@ class TaskDumpV2(BaseModel):
 
 class WorkflowTaskDumpV2(BaseModel):
     """
-
-
-    may still exist in the database after version updates, we are setting
-    `task_id` and `task` to `Optional` to avoid response-validation errors
+    We do not include 'extra=Extra.forbid' because legacy data may include
+    'input_filters' field and we want to avoid response-validation errors
     for the endpoints that GET datasets.
-    Ref issue #1783.
     """
 
     id: int
     workflow_id: int
     order: Optional[int]
 
-
+    type_filters: dict[str, bool]
 
     task_id: Optional[int]
     task: Optional[TaskDumpV2]
@@ -71,4 +68,5 @@ class DatasetDumpV2(BaseModel, extra=Extra.forbid):
     timestamp_created: str
 
     zarr_dir: str
-
+    type_filters: dict[str, bool]
+    attribute_filters: AttributeFiltersType
fractal_server/app/schemas/v2/job.py

@@ -4,13 +4,18 @@ from typing import Optional
 
 from pydantic import BaseModel
 from pydantic import Extra
+from pydantic import Field
+from pydantic import root_validator
 from pydantic import validator
 from pydantic.types import StrictStr
 
+from .._filter_validators import validate_attribute_filters
+from .._validators import root_validate_dict_keys
 from .._validators import valstr
 from .dumps import DatasetDumpV2
 from .dumps import ProjectDumpV2
 from .dumps import WorkflowDumpV2
+from fractal_server.images.models import AttributeFiltersType
 
 
 class JobStatusTypeV2(str, Enum):
@@ -41,10 +46,18 @@ class JobCreateV2(BaseModel, extra=Extra.forbid):
     slurm_account: Optional[StrictStr] = None
     worker_init: Optional[str]
 
+    attribute_filters: AttributeFiltersType = Field(default_factory=dict)
+
     # Validators
     _worker_init = validator("worker_init", allow_reuse=True)(
         valstr("worker_init")
     )
+    _dict_keys = root_validator(pre=True, allow_reuse=True)(
+        root_validate_dict_keys
+    )
+    _attribute_filters = validator("attribute_filters", allow_reuse=True)(
+        validate_attribute_filters
+    )
 
     @validator("first_task_index", always=True)
     def first_task_index_non_negative(cls, v, values):
@@ -99,6 +112,7 @@ class JobReadV2(BaseModel):
     first_task_index: Optional[int]
     last_task_index: Optional[int]
     worker_init: Optional[str]
+    attribute_filters: AttributeFiltersType
 
 
 class JobUpdateV2(BaseModel, extra=Extra.forbid):
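With these schema changes, attribute filters are set per job rather than stored on the dataset. An illustrative job-creation payload (field names follow `JobCreateV2`; the attribute names and values are made up):

```python
# Hypothetical request body for job submission after 2.11.0: attribute-filter
# values are lists of allowed values for the corresponding image attribute.
job_payload = {
    "worker_init": "module load my-environment",
    "attribute_filters": {
        "well": ["A01", "B03"],
        "illumination_corrected": [True],
    },
}
```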
|
@@ -10,7 +10,7 @@ from pydantic import root_validator
|
|
10
10
|
from pydantic import validator
|
11
11
|
|
12
12
|
from fractal_server.app.schemas._validators import val_unique_list
|
13
|
-
from fractal_server.app.schemas._validators import
|
13
|
+
from fractal_server.app.schemas._validators import valdict_keys
|
14
14
|
from fractal_server.app.schemas._validators import valstr
|
15
15
|
from fractal_server.string_tools import validate_cmd
|
16
16
|
|
@@ -66,25 +66,25 @@ class TaskCreateV2(BaseModel, extra=Extra.forbid):
|
|
66
66
|
_version = validator("version", allow_reuse=True)(valstr("version"))
|
67
67
|
|
68
68
|
_meta_non_parallel = validator("meta_non_parallel", allow_reuse=True)(
|
69
|
-
|
69
|
+
valdict_keys("meta_non_parallel")
|
70
70
|
)
|
71
71
|
_meta_parallel = validator("meta_parallel", allow_reuse=True)(
|
72
|
-
|
72
|
+
valdict_keys("meta_parallel")
|
73
73
|
)
|
74
74
|
_args_schema_non_parallel = validator(
|
75
75
|
"args_schema_non_parallel", allow_reuse=True
|
76
|
-
)(
|
76
|
+
)(valdict_keys("args_schema_non_parallel"))
|
77
77
|
_args_schema_parallel = validator(
|
78
78
|
"args_schema_parallel", allow_reuse=True
|
79
|
-
)(
|
79
|
+
)(valdict_keys("args_schema_parallel"))
|
80
80
|
_args_schema_version = validator("args_schema_version", allow_reuse=True)(
|
81
81
|
valstr("args_schema_version")
|
82
82
|
)
|
83
83
|
_input_types = validator("input_types", allow_reuse=True)(
|
84
|
-
|
84
|
+
valdict_keys("input_types")
|
85
85
|
)
|
86
86
|
_output_types = validator("output_types", allow_reuse=True)(
|
87
|
-
|
87
|
+
valdict_keys("output_types")
|
88
88
|
)
|
89
89
|
|
90
90
|
_category = validator("category", allow_reuse=True)(
|
@@ -158,10 +158,10 @@ class TaskUpdateV2(BaseModel, extra=Extra.forbid):
|
|
158
158
|
"command_non_parallel", allow_reuse=True
|
159
159
|
)(valstr("command_non_parallel"))
|
160
160
|
_input_types = validator("input_types", allow_reuse=True)(
|
161
|
-
|
161
|
+
valdict_keys("input_types")
|
162
162
|
)
|
163
163
|
_output_types = validator("output_types", allow_reuse=True)(
|
164
|
-
|
164
|
+
valdict_keys("output_types")
|
165
165
|
)
|
166
166
|
|
167
167
|
_category = validator("category", allow_reuse=True)(
|
fractal_server/app/schemas/v2/task_group.py

@@ -8,7 +8,7 @@ from pydantic import Field
 from pydantic import validator
 
 from .._validators import val_absolute_path
-from .._validators import valdictkeys
+from .._validators import valdict_keys
 from .._validators import valstr
 from .task import TaskReadV2
 
@@ -57,7 +57,7 @@ class TaskGroupCreateV2(BaseModel, extra=Extra.forbid):
     )
     _pinned_package_versions = validator(
         "pinned_package_versions", allow_reuse=True
-    )(valdictkeys("pinned_package_versions"))
+    )(valdict_keys("pinned_package_versions"))
     _pip_extras = validator("pip_extras", allow_reuse=True)(
         valstr("pip_extras")
     )