fractal-server 2.10.5__py3-none-any.whl → 2.11.0a2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fractal_server/__init__.py +1 -1
- fractal_server/app/models/v2/dataset.py +9 -6
- fractal_server/app/models/v2/job.py +5 -0
- fractal_server/app/models/v2/workflowtask.py +5 -8
- fractal_server/app/routes/api/v1/dataset.py +2 -2
- fractal_server/app/routes/api/v2/_aux_functions.py +3 -10
- fractal_server/app/routes/api/v2/images.py +29 -6
- fractal_server/app/routes/api/v2/status.py +20 -20
- fractal_server/app/routes/api/v2/submit.py +5 -1
- fractal_server/app/routes/api/v2/workflowtask.py +3 -3
- fractal_server/app/runner/filenames.py +2 -4
- fractal_server/app/runner/v1/_common.py +4 -4
- fractal_server/app/runner/v1/handle_failed_job.py +4 -4
- fractal_server/app/runner/v2/__init__.py +11 -65
- fractal_server/app/runner/v2/_local/__init__.py +12 -17
- fractal_server/app/runner/v2/_local_experimental/__init__.py +11 -20
- fractal_server/app/runner/v2/_slurm_ssh/__init__.py +14 -16
- fractal_server/app/runner/v2/_slurm_sudo/__init__.py +12 -14
- fractal_server/app/runner/v2/handle_failed_job.py +31 -130
- fractal_server/app/runner/v2/merge_outputs.py +13 -16
- fractal_server/app/runner/v2/runner.py +63 -72
- fractal_server/app/runner/v2/task_interface.py +41 -2
- fractal_server/app/schemas/_filter_validators.py +47 -0
- fractal_server/app/schemas/_validators.py +13 -2
- fractal_server/app/schemas/v2/dataset.py +58 -12
- fractal_server/app/schemas/v2/dumps.py +6 -8
- fractal_server/app/schemas/v2/job.py +14 -0
- fractal_server/app/schemas/v2/task.py +9 -9
- fractal_server/app/schemas/v2/task_group.py +2 -2
- fractal_server/app/schemas/v2/workflowtask.py +42 -19
- fractal_server/data_migrations/2_11_0.py +67 -0
- fractal_server/images/__init__.py +0 -1
- fractal_server/images/models.py +12 -35
- fractal_server/images/tools.py +29 -13
- fractal_server/migrations/versions/db09233ad13a_split_filters_and_keep_old_columns.py +96 -0
- {fractal_server-2.10.5.dist-info → fractal_server-2.11.0a2.dist-info}/METADATA +1 -1
- {fractal_server-2.10.5.dist-info → fractal_server-2.11.0a2.dist-info}/RECORD +40 -37
- {fractal_server-2.10.5.dist-info → fractal_server-2.11.0a2.dist-info}/LICENSE +0 -0
- {fractal_server-2.10.5.dist-info → fractal_server-2.11.0a2.dist-info}/WHEEL +0 -0
- {fractal_server-2.10.5.dist-info → fractal_server-2.11.0a2.dist-info}/entry_points.txt +0 -0

fractal_server/app/runner/v2/runner.py

@@ -1,4 +1,3 @@
-import json
 import logging
 from concurrent.futures import ThreadPoolExecutor
 from copy import copy
@@ -7,27 +6,28 @@ from pathlib import Path
 from typing import Callable
 from typing import Optional

-from
+from sqlalchemy.orm.attributes import flag_modified
+
 from ....images import SingleImage
 from ....images.tools import filter_image_list
 from ....images.tools import find_image_by_zarr_url
 from ....images.tools import match_filter
 from ..exceptions import JobExecutionError
-from ..filenames import FILTERS_FILENAME
-from ..filenames import HISTORY_FILENAME
-from ..filenames import IMAGES_FILENAME
 from .runner_functions import no_op_submit_setup_call
 from .runner_functions import run_v2_task_compound
 from .runner_functions import run_v2_task_non_parallel
 from .runner_functions import run_v2_task_parallel
 from .task_interface import TaskOutput
+from fractal_server.app.db import get_sync_db
 from fractal_server.app.models.v2 import DatasetV2
 from fractal_server.app.models.v2 import WorkflowTaskV2
 from fractal_server.app.schemas.v2.dataset import _DatasetHistoryItemV2
 from fractal_server.app.schemas.v2.workflowtask import WorkflowTaskStatusTypeV2
+from fractal_server.images.models import AttributeFiltersType


 def execute_tasks_v2(
+    *,
     wf_task_list: list[WorkflowTaskV2],
     dataset: DatasetV2,
     executor: ThreadPoolExecutor,
@@ -35,20 +35,21 @@ def execute_tasks_v2(
     workflow_dir_remote: Optional[Path] = None,
     logger_name: Optional[str] = None,
     submit_setup_call: Callable = no_op_submit_setup_call,
-
-
+    job_attribute_filters: AttributeFiltersType,
+) -> None:
     logger = logging.getLogger(logger_name)

-    if (
-
-
+    if not workflow_dir_local.exists():
+        logger.warning(
+            f"Now creating {workflow_dir_local}, "
+            "but it should have already happened."
+        )
         workflow_dir_local.mkdir()

     # Initialize local dataset attributes
     zarr_dir = dataset.zarr_dir
     tmp_images = deepcopy(dataset.images)
-
-    tmp_history = []
+    tmp_type_filters = deepcopy(dataset.type_filters)

     for wftask in wf_task_list:
         task = wftask.task
@@ -58,26 +59,38 @@ def execute_tasks_v2(
         # PRE TASK EXECUTION

         # Get filtered images
-
-
-            attributes=copy(tmp_filters["attributes"]),
-        )
-        pre_filters["types"].update(wftask.input_filters["types"])
-        pre_filters["attributes"].update(wftask.input_filters["attributes"])
+        pre_type_filters = copy(tmp_type_filters)
+        pre_type_filters.update(wftask.type_filters)
         filtered_images = filter_image_list(
             images=tmp_images,
-
+            type_filters=pre_type_filters,
+            attribute_filters=job_attribute_filters,
         )
         # Verify that filtered images comply with task input_types
         for image in filtered_images:
-            if not match_filter(
+            if not match_filter(
+                image=image,
+                type_filters=task.input_types,
+                attribute_filters={},
+            ):
                 raise JobExecutionError(
                     "Invalid filtered image list\n"
                     f"Task input types: {task.input_types=}\n"
                     f'Image zarr_url: {image["zarr_url"]}\n'
                     f'Image types: {image["types"]}\n'
                 )
-
+        # First, set status SUBMITTED in dataset.history for each wftask
+        with next(get_sync_db()) as db:
+            db_dataset = db.get(DatasetV2, dataset.id)
+            new_history_item = _DatasetHistoryItemV2(
+                workflowtask=wftask,
+                status=WorkflowTaskStatusTypeV2.SUBMITTED,
+                parallelization=dict(),  # FIXME: re-include parallelization
+            ).dict()
+            db_dataset.history.append(new_history_item)
+            flag_modified(db_dataset, "history")
+            db.merge(db_dataset)
+            db.commit()
         # TASK EXECUTION (V2)
         if task.type == "non_parallel":
             current_task_output = run_v2_task_non_parallel(
@@ -249,69 +262,47 @@ def execute_tasks_v2(
             else:
                 tmp_images.pop(img_search["index"])

-        # Update
-        # current + (task_output: not really, in current examples..)
-        if current_task_output.filters is not None:
-            tmp_filters["attributes"].update(
-                current_task_output.filters.attributes
-            )
-
-        # Find manifest ouptut types
-        types_from_manifest = task.output_types
+        # Update type_filters

-        #
-
-
-
-        types_from_task = {}
+        # Assign the type filters based on different sources
+        # (task manifest and post-execution task output)
+        type_filters_from_task_manifest = task.output_types
+        type_filters_from_task_output = current_task_output.type_filters

         # Check that key sets are disjoint
-
-
-        if not
-        overlap =
+        keys_from_manifest = set(type_filters_from_task_manifest.keys())
+        keys_from_task_output = set(type_filters_from_task_output.keys())
+        if not keys_from_manifest.isdisjoint(keys_from_task_output):
+            overlap = keys_from_manifest.intersection(keys_from_task_output)
             raise JobExecutionError(
                 "Some type filters are being set twice, "
                 f"for task '{task_name}'.\n"
-                f"Types from task output: {
-
+                f"Types from task output: {type_filters_from_task_output}\n"
+                "Types from task manifest: "
+                f"{type_filters_from_task_manifest}\n"
                 f"Overlapping keys: {overlap}"
             )

         # Update filters.types
-
-
+        tmp_type_filters.update(type_filters_from_task_manifest)
+        tmp_type_filters.update(type_filters_from_task_output)

-        #
-
-            workflowtask=wftask,
-            status=WorkflowTaskStatusTypeV2.DONE,
-            parallelization=dict(
-                # task_type=wftask.task.type, # FIXME: breaks for V1 tasks
-                # component_list=fil, #FIXME
-            ),
-        ).dict()
-        tmp_history.append(history_item)
-
-        # Write current dataset attributes (history, images, filters) into
-        # temporary files which can be used (1) to retrieve the latest state
+        # Write current dataset attributes (history, images, filters) into the
+        # database. They can be used (1) to retrieve the latest state
         # when the job fails, (2) from within endpoints that need up-to-date
         # information
-        with
-
-
-
-
-
+        with next(get_sync_db()) as db:
+            db_dataset = db.get(DatasetV2, dataset.id)
+            db_dataset.history[-1]["status"] = WorkflowTaskStatusTypeV2.DONE
+            db_dataset.type_filters = tmp_type_filters
+            db_dataset.images = tmp_images
+            for attribute_name in [
+                "type_filters",
+                "history",
+                "images",
+            ]:
+                flag_modified(db_dataset, attribute_name)
+            db.merge(db_dataset)
+            db.commit()

         logger.debug(f'END {wftask.order}-th task (name="{task_name}")')
-
-    # NOTE: tmp_history only contains the newly-added history items (to be
-    # appended to the original history), while tmp_filters and tmp_images
-    # represent the new attributes (to replace the original ones)
-    result = dict(
-        history=tmp_history,
-        filters=tmp_filters,
-        images=tmp_images,
-    )
-    return result
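
The hunks above replace the single legacy filters object with two separate inputs: per-workflow-task `type_filters`, which are merged into the dataset-level `tmp_type_filters`, and job-level `attribute_filters`, which are passed straight to `filter_image_list`. The following standalone sketch illustrates that pre-task filtering step; the image dicts and the two helper functions are simplified stand-ins for illustration only, not the actual `fractal_server.images.tools` implementations.

    # Standalone sketch of the pre-task filtering flow (simplified helpers).
    from copy import copy

    def match_filter(image, type_filters, attribute_filters):
        # An image matches if every requested type flag and every requested
        # attribute value is satisfied.
        for key, flag in type_filters.items():
            if image["types"].get(key, False) != flag:
                return False
        for key, values in attribute_filters.items():
            if values is None:
                continue  # no filtering on this attribute
            if image["attributes"].get(key) not in values:
                return False
        return True

    def filter_image_list(images, type_filters, attribute_filters):
        return [
            img
            for img in images
            if match_filter(img, type_filters, attribute_filters)
        ]

    dataset_type_filters = {"3D": True}
    wftask_type_filters = {"illumination_corrected": True}
    job_attribute_filters = {"well": ["A01", "A02"]}

    images = [
        {
            "zarr_url": "/zarr/A01/0",
            "types": {"3D": True, "illumination_corrected": True},
            "attributes": {"well": "A01"},
        },
        {
            "zarr_url": "/zarr/B03/0",
            "types": {"3D": True, "illumination_corrected": True},
            "attributes": {"well": "B03"},
        },
    ]

    # Same composition as in the new runner: dataset filters + wftask filters,
    # with job-level attribute filters applied on top.
    pre_type_filters = copy(dataset_type_filters)
    pre_type_filters.update(wftask_type_filters)
    filtered = filter_image_list(images, pre_type_filters, job_attribute_filters)
    print([img["zarr_url"] for img in filtered])  # ['/zarr/A01/0']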

fractal_server/app/runner/v2/task_interface.py

@@ -1,22 +1,47 @@
 from typing import Any
+from typing import Optional

 from pydantic import BaseModel
 from pydantic import Extra
 from pydantic import Field
+from pydantic import root_validator
 from pydantic import validator

 from ....images import SingleImageTaskOutput
-from fractal_server.
+from fractal_server.app.schemas._filter_validators import validate_type_filters
+from fractal_server.app.schemas._validators import root_validate_dict_keys
 from fractal_server.urls import normalize_url


+class LegacyFilters(BaseModel, extra=Extra.forbid):
+    """
+    For fractal-server<2.11, task output could include both
+    `filters["attributes"]` and `filters["types"]`. In the new version
+    there is a single field, named `type_filters`.
+    The current schema is only used to convert old type filters into the
+    new form, but it will reject any attribute filters.
+    """
+
+    types: dict[str, bool] = Field(default_factory=dict)
+    _types = validator("types", allow_reuse=True)(validate_type_filters)
+
+
 class TaskOutput(BaseModel, extra=Extra.forbid):

     image_list_updates: list[SingleImageTaskOutput] = Field(
         default_factory=list
     )
     image_list_removals: list[str] = Field(default_factory=list)
-
+
+    filters: Optional[LegacyFilters] = None
+    type_filters: dict[str, bool] = Field(default_factory=dict)
+
+    _dict_keys = root_validator(pre=True, allow_reuse=True)(
+        root_validate_dict_keys
+    )
+    _type_filters = validator("type_filters", allow_reuse=True)(
+        validate_type_filters
+    )

     def check_zarr_urls_are_unique(self) -> None:
         zarr_urls = [img.zarr_url for img in self.image_list_updates]
@@ -37,6 +62,20 @@ class TaskOutput(BaseModel, extra=Extra.forbid):
                 msg = f"{msg}\n{duplicate}"
             raise ValueError(msg)

+    @root_validator()
+    def update_legacy_filters(cls, values):
+        if values["filters"] is not None:
+            if values["type_filters"] != {}:
+                raise ValueError(
+                    "Cannot set both (legacy) 'filters' and 'type_filters'."
+                )
+            else:
+                # Convert legacy filters.types into new type_filters
+                values["type_filters"] = values["filters"].types
+                values["filters"] = None
+
+        return values
+
     @validator("image_list_removals")
     def normalize_paths(cls, v: list[str]) -> list[str]:
         return [normalize_url(zarr_url) for zarr_url in v]
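
The new `LegacyFilters` model plus the `update_legacy_filters` root validator keep pre-2.11 task outputs working: a payload with `filters={"types": ...}` is converted into `type_filters`, while setting both fields is rejected. A minimal sketch of the same conversion, assuming pydantic v1 (as the `root_validator`/`validator` imports above imply); the classes below are simplified stand-ins, not the actual `TaskOutput`.

    from typing import Optional

    from pydantic import BaseModel, Field, root_validator

    class LegacyFiltersSketch(BaseModel):
        types: dict = Field(default_factory=dict)

    class TaskOutputSketch(BaseModel):
        filters: Optional[LegacyFiltersSketch] = None
        type_filters: dict = Field(default_factory=dict)

        @root_validator()
        def update_legacy_filters(cls, values):
            # Same logic as in the diff: legacy 'filters.types' populates
            # 'type_filters', and mixing the two fields is an error.
            if values["filters"] is not None:
                if values["type_filters"] != {}:
                    raise ValueError(
                        "Cannot set both (legacy) 'filters' and 'type_filters'."
                    )
                values["type_filters"] = values["filters"].types
                values["filters"] = None
            return values

    # A legacy payload is converted transparently
    legacy = TaskOutputSketch(filters={"types": {"3D": False}})
    assert legacy.type_filters == {"3D": False} and legacy.filters is None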

fractal_server/app/schemas/_filter_validators.py (new file)

@@ -0,0 +1,47 @@
+from typing import Optional
+
+from ._validators import valdict_keys
+from fractal_server.images.models import AttributeFiltersType
+
+
+def validate_type_filters(
+    type_filters: Optional[dict[str, bool]]
+) -> dict[str, bool]:
+    if type_filters is None:
+        raise ValueError("'type_filters' cannot be 'None'.")
+
+    type_filters = valdict_keys("type_filters")(type_filters)
+    return type_filters
+
+
+def validate_attribute_filters(
+    attribute_filters: Optional[AttributeFiltersType],
+) -> AttributeFiltersType:
+    if attribute_filters is None:
+        raise ValueError("'attribute_filters' cannot be 'None'.")
+
+    attribute_filters = valdict_keys("attribute_filters")(attribute_filters)
+    for key, values in attribute_filters.items():
+        if values is None:
+            # values=None corresponds to not applying any filter for
+            # attribute `key`
+            pass
+        elif values == []:
+            # WARNING: in this case, no image can match with the current
+            # filter. In the future we may deprecate this possibility.
+            pass
+        else:
+            # values is a non-empty list, and its items must all be of the
+            # same scalar non-None type
+            _type = type(values[0])
+            if not all(isinstance(value, _type) for value in values):
+                raise ValueError(
+                    f"attribute_filters[{key}] has values with "
+                    f"non-homogeneous types: {values}."
+                )
+            if _type not in (int, float, str, bool):
+                raise ValueError(
+                    f"attribute_filters[{key}] has values with "
+                    f"invalid types: {values}."
+                )
+    return attribute_filters
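
`validate_attribute_filters` accepts `None` (no filtering on that attribute) and non-empty lists of homogeneous scalar values, and rejects mixed-type or non-scalar lists. A short usage sketch, assuming fractal-server 2.11.0a2 is installed:

    from fractal_server.app.schemas._filter_validators import (
        validate_attribute_filters,
    )

    # Homogeneous scalar lists and None values pass through unchanged
    ok = validate_attribute_filters({"well": ["A01", "A02"], "plate": None})
    assert ok["well"] == ["A01", "A02"] and ok["plate"] is None

    # Mixed types within one list are rejected
    try:
        validate_attribute_filters({"well": ["A01", 1]})
    except ValueError as e:
        print(e)  # attribute_filters[well] has values with non-homogeneous types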

fractal_server/app/schemas/_validators.py

@@ -27,7 +27,7 @@ def valstr(attribute: str, accept_none: bool = False):
     return val


-def
+def valdict_keys(attribute: str):
    def val(d: Optional[dict[str, Any]]) -> Optional[dict[str, Any]]:
        """
        Apply valstr to every key of the dictionary, and fail if there are
@@ -38,7 +38,7 @@ def valdictkeys(attribute: str):
        new_keys = [valstr(f"{attribute}[{key}]")(key) for key in old_keys]
        if len(new_keys) != len(set(new_keys)):
            raise ValueError(
-                f"Dictionary contains multiple identical keys: {d}."
+                f"Dictionary contains multiple identical keys: '{d}'."
            )
        for old_key, new_key in zip(old_keys, new_keys):
            if new_key != old_key:
@@ -101,3 +101,14 @@ def val_unique_list(attribute: str):
        return must_be_unique

    return val
+
+
+def root_validate_dict_keys(cls, object: dict) -> dict:
+    """
+    For each dictionary in `object.values()`,
+    checks that that dictionary has only keys of type str.
+    """
+    for dictionary in (v for v in object.values() if isinstance(v, dict)):
+        if not all(isinstance(key, str) for key in dictionary.keys()):
+            raise ValueError("Dictionary keys must be strings.")
+    return object
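
`root_validate_dict_keys` is designed to be attached as a `pre=True` root validator (as the schema hunks below do), so it receives the raw input dict and rejects any nested dict whose keys are not strings. Usage sketch, assuming fractal-server 2.11.0a2 is installed:

    from fractal_server.app.schemas._validators import root_validate_dict_keys

    payload = {"attribute_filters": {1: ["A01"]}}  # non-string key
    try:
        root_validate_dict_keys(None, payload)  # `cls` is unused here
    except ValueError as e:
        print(e)  # Dictionary keys must be strings.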

fractal_server/app/schemas/v2/dataset.py

@@ -1,17 +1,22 @@
 from datetime import datetime
+from typing import Any
 from typing import Optional

 from pydantic import BaseModel
 from pydantic import Extra
 from pydantic import Field
+from pydantic import root_validator
 from pydantic import validator

+from .._filter_validators import validate_attribute_filters
+from .._filter_validators import validate_type_filters
+from .._validators import root_validate_dict_keys
 from .._validators import valstr
 from .dumps import WorkflowTaskDumpV2
 from .project import ProjectReadV2
 from .workflowtask import WorkflowTaskStatusTypeV2
-from fractal_server.images import Filters
 from fractal_server.images import SingleImage
+from fractal_server.images.models import AttributeFiltersType
 from fractal_server.urls import normalize_url


@@ -34,17 +39,29 @@ class DatasetCreateV2(BaseModel, extra=Extra.forbid):

     zarr_dir: Optional[str] = None

-
+    type_filters: dict[str, bool] = Field(default_factory=dict)
+    attribute_filters: AttributeFiltersType = Field(default_factory=dict)

     # Validators
+
+    _dict_keys = root_validator(pre=True, allow_reuse=True)(
+        root_validate_dict_keys
+    )
+    _type_filters = validator("type_filters", allow_reuse=True)(
+        validate_type_filters
+    )
+    _attribute_filters = validator("attribute_filters", allow_reuse=True)(
+        validate_attribute_filters
+    )
+
+    _name = validator("name", allow_reuse=True)(valstr("name"))
+
     @validator("zarr_dir")
     def normalize_zarr_dir(cls, v: Optional[str]) -> Optional[str]:
         if v is not None:
             return normalize_url(v)
         return v

-    _name = validator("name", allow_reuse=True)(valstr("name"))
-

 class DatasetReadV2(BaseModel):

@@ -59,24 +76,37 @@ class DatasetReadV2(BaseModel):
     timestamp_created: datetime

     zarr_dir: str
-
+    type_filters: dict[str, bool]
+    attribute_filters: AttributeFiltersType


 class DatasetUpdateV2(BaseModel, extra=Extra.forbid):

     name: Optional[str]
     zarr_dir: Optional[str]
-
+    type_filters: Optional[dict[str, bool]]
+    attribute_filters: Optional[dict[str, list[Any]]]

     # Validators
+
+    _dict_keys = root_validator(pre=True, allow_reuse=True)(
+        root_validate_dict_keys
+    )
+    _type_filters = validator("type_filters", allow_reuse=True)(
+        validate_type_filters
+    )
+    _attribute_filters = validator("attribute_filters", allow_reuse=True)(
+        validate_attribute_filters
+    )
+
+    _name = validator("name", allow_reuse=True)(valstr("name"))
+
     @validator("zarr_dir")
     def normalize_zarr_dir(cls, v: Optional[str]) -> Optional[str]:
         if v is not None:
             return normalize_url(v)
         return v

-    _name = validator("name", allow_reuse=True)(valstr("name"))
-

 class DatasetImportV2(BaseModel, extra=Extra.forbid):
     """
@@ -86,15 +116,29 @@ class DatasetImportV2(BaseModel, extra=Extra.forbid):
         name:
         zarr_dir:
         images:
-
+        type_filters:
+        attribute_filters:
     """

     name: str
     zarr_dir: str
     images: list[SingleImage] = Field(default_factory=list)
-
+
+    type_filters: dict[str, bool] = Field(default_factory=dict)
+    attribute_filters: AttributeFiltersType = Field(default_factory=dict)

     # Validators
+
+    _dict_keys = root_validator(pre=True, allow_reuse=True)(
+        root_validate_dict_keys
+    )
+    _type_filters = validator("type_filters", allow_reuse=True)(
+        validate_type_filters
+    )
+    _attribute_filters = validator("attribute_filters", allow_reuse=True)(
+        validate_attribute_filters
+    )
+
     @validator("zarr_dir")
     def normalize_zarr_dir(cls, v: str) -> str:
         return normalize_url(v)
@@ -108,10 +152,12 @@ class DatasetExportV2(BaseModel):
         name:
         zarr_dir:
         images:
-
+        type_filters:
+        attribute_filters:
     """

     name: str
     zarr_dir: str
     images: list[SingleImage]
-
+    type_filters: dict[str, bool]
+    attribute_filters: AttributeFiltersType

fractal_server/app/schemas/v2/dumps.py

@@ -13,7 +13,7 @@ from typing import Optional
 from pydantic import BaseModel
 from pydantic import Extra

-from fractal_server.images import
+from fractal_server.images.models import AttributeFiltersType


 class ProjectDumpV2(BaseModel, extra=Extra.forbid):
@@ -39,19 +39,16 @@ class TaskDumpV2(BaseModel):

 class WorkflowTaskDumpV2(BaseModel):
     """
-
-
-    may still exist in the database after version updates, we are setting
-    `task_id` and `task` to `Optional` to avoid response-validation errors
+    We do not include 'extra=Extra.forbid' because legacy data may include
+    'input_filters' field and we want to avoid response-validation errors
     for the endpoints that GET datasets.
-    Ref issue #1783.
     """

     id: int
     workflow_id: int
     order: Optional[int]

-
+    type_filters: dict[str, bool]

     task_id: Optional[int]
     task: Optional[TaskDumpV2]
@@ -71,4 +68,5 @@ class DatasetDumpV2(BaseModel, extra=Extra.forbid):
     timestamp_created: str

     zarr_dir: str
-
+    type_filters: dict[str, bool]
+    attribute_filters: AttributeFiltersType
|
@@ -4,13 +4,18 @@ from typing import Optional
|
|
4
4
|
|
5
5
|
from pydantic import BaseModel
|
6
6
|
from pydantic import Extra
|
7
|
+
from pydantic import Field
|
8
|
+
from pydantic import root_validator
|
7
9
|
from pydantic import validator
|
8
10
|
from pydantic.types import StrictStr
|
9
11
|
|
12
|
+
from .._filter_validators import validate_attribute_filters
|
13
|
+
from .._validators import root_validate_dict_keys
|
10
14
|
from .._validators import valstr
|
11
15
|
from .dumps import DatasetDumpV2
|
12
16
|
from .dumps import ProjectDumpV2
|
13
17
|
from .dumps import WorkflowDumpV2
|
18
|
+
from fractal_server.images.models import AttributeFiltersType
|
14
19
|
|
15
20
|
|
16
21
|
class JobStatusTypeV2(str, Enum):
|
@@ -41,10 +46,18 @@ class JobCreateV2(BaseModel, extra=Extra.forbid):
|
|
41
46
|
slurm_account: Optional[StrictStr] = None
|
42
47
|
worker_init: Optional[str]
|
43
48
|
|
49
|
+
attribute_filters: AttributeFiltersType = Field(default_factory=dict)
|
50
|
+
|
44
51
|
# Validators
|
45
52
|
_worker_init = validator("worker_init", allow_reuse=True)(
|
46
53
|
valstr("worker_init")
|
47
54
|
)
|
55
|
+
_dict_keys = root_validator(pre=True, allow_reuse=True)(
|
56
|
+
root_validate_dict_keys
|
57
|
+
)
|
58
|
+
_attribute_filters = validator("attribute_filters", allow_reuse=True)(
|
59
|
+
validate_attribute_filters
|
60
|
+
)
|
48
61
|
|
49
62
|
@validator("first_task_index", always=True)
|
50
63
|
def first_task_index_non_negative(cls, v, values):
|
@@ -99,6 +112,7 @@ class JobReadV2(BaseModel):
|
|
99
112
|
first_task_index: Optional[int]
|
100
113
|
last_task_index: Optional[int]
|
101
114
|
worker_init: Optional[str]
|
115
|
+
attribute_filters: AttributeFiltersType
|
102
116
|
|
103
117
|
|
104
118
|
class JobUpdateV2(BaseModel, extra=Extra.forbid):
|
@@ -10,7 +10,7 @@ from pydantic import root_validator
|
|
10
10
|
from pydantic import validator
|
11
11
|
|
12
12
|
from fractal_server.app.schemas._validators import val_unique_list
|
13
|
-
from fractal_server.app.schemas._validators import
|
13
|
+
from fractal_server.app.schemas._validators import valdict_keys
|
14
14
|
from fractal_server.app.schemas._validators import valstr
|
15
15
|
from fractal_server.string_tools import validate_cmd
|
16
16
|
|
@@ -66,25 +66,25 @@ class TaskCreateV2(BaseModel, extra=Extra.forbid):
|
|
66
66
|
_version = validator("version", allow_reuse=True)(valstr("version"))
|
67
67
|
|
68
68
|
_meta_non_parallel = validator("meta_non_parallel", allow_reuse=True)(
|
69
|
-
|
69
|
+
valdict_keys("meta_non_parallel")
|
70
70
|
)
|
71
71
|
_meta_parallel = validator("meta_parallel", allow_reuse=True)(
|
72
|
-
|
72
|
+
valdict_keys("meta_parallel")
|
73
73
|
)
|
74
74
|
_args_schema_non_parallel = validator(
|
75
75
|
"args_schema_non_parallel", allow_reuse=True
|
76
|
-
)(
|
76
|
+
)(valdict_keys("args_schema_non_parallel"))
|
77
77
|
_args_schema_parallel = validator(
|
78
78
|
"args_schema_parallel", allow_reuse=True
|
79
|
-
)(
|
79
|
+
)(valdict_keys("args_schema_parallel"))
|
80
80
|
_args_schema_version = validator("args_schema_version", allow_reuse=True)(
|
81
81
|
valstr("args_schema_version")
|
82
82
|
)
|
83
83
|
_input_types = validator("input_types", allow_reuse=True)(
|
84
|
-
|
84
|
+
valdict_keys("input_types")
|
85
85
|
)
|
86
86
|
_output_types = validator("output_types", allow_reuse=True)(
|
87
|
-
|
87
|
+
valdict_keys("output_types")
|
88
88
|
)
|
89
89
|
|
90
90
|
_category = validator("category", allow_reuse=True)(
|
@@ -158,10 +158,10 @@ class TaskUpdateV2(BaseModel, extra=Extra.forbid):
|
|
158
158
|
"command_non_parallel", allow_reuse=True
|
159
159
|
)(valstr("command_non_parallel"))
|
160
160
|
_input_types = validator("input_types", allow_reuse=True)(
|
161
|
-
|
161
|
+
valdict_keys("input_types")
|
162
162
|
)
|
163
163
|
_output_types = validator("output_types", allow_reuse=True)(
|
164
|
-
|
164
|
+
valdict_keys("output_types")
|
165
165
|
)
|
166
166
|
|
167
167
|
_category = validator("category", allow_reuse=True)(
|

fractal_server/app/schemas/v2/task_group.py

@@ -8,7 +8,7 @@ from pydantic import Field
 from pydantic import validator

 from .._validators import val_absolute_path
-from .._validators import
+from .._validators import valdict_keys
 from .._validators import valstr
 from .task import TaskReadV2

@@ -57,7 +57,7 @@ class TaskGroupCreateV2(BaseModel, extra=Extra.forbid):
     )
     _pinned_package_versions = validator(
         "pinned_package_versions", allow_reuse=True
-    )(
+    )(valdict_keys("pinned_package_versions"))
     _pip_extras = validator("pip_extras", allow_reuse=True)(
         valstr("pip_extras")
     )