fractal-server 2.11.0a0__py3-none-any.whl → 2.11.0a3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fractal_server/__init__.py +1 -1
- fractal_server/app/models/v2/dataset.py +9 -6
- fractal_server/app/models/v2/job.py +5 -0
- fractal_server/app/models/v2/workflowtask.py +5 -8
- fractal_server/app/routes/api/v2/_aux_functions.py +3 -10
- fractal_server/app/routes/api/v2/images.py +29 -6
- fractal_server/app/routes/api/v2/submit.py +5 -1
- fractal_server/app/routes/api/v2/workflowtask.py +3 -3
- fractal_server/app/runner/v2/__init__.py +1 -0
- fractal_server/app/runner/v2/_local/__init__.py +5 -0
- fractal_server/app/runner/v2/_local_experimental/__init__.py +5 -0
- fractal_server/app/runner/v2/_slurm_ssh/__init__.py +7 -3
- fractal_server/app/runner/v2/_slurm_sudo/__init__.py +5 -0
- fractal_server/app/runner/v2/merge_outputs.py +13 -16
- fractal_server/app/runner/v2/runner.py +33 -34
- fractal_server/app/runner/v2/task_interface.py +41 -2
- fractal_server/app/schemas/_filter_validators.py +47 -0
- fractal_server/app/schemas/_validators.py +13 -2
- fractal_server/app/schemas/v2/dataset.py +85 -12
- fractal_server/app/schemas/v2/dumps.py +6 -8
- fractal_server/app/schemas/v2/job.py +14 -0
- fractal_server/app/schemas/v2/task.py +9 -9
- fractal_server/app/schemas/v2/task_group.py +2 -2
- fractal_server/app/schemas/v2/workflowtask.py +67 -20
- fractal_server/data_migrations/2_11_0.py +67 -0
- fractal_server/images/__init__.py +0 -1
- fractal_server/images/models.py +12 -35
- fractal_server/images/tools.py +29 -13
- fractal_server/migrations/versions/db09233ad13a_split_filters_and_keep_old_columns.py +96 -0
- {fractal_server-2.11.0a0.dist-info → fractal_server-2.11.0a3.dist-info}/METADATA +1 -1
- {fractal_server-2.11.0a0.dist-info → fractal_server-2.11.0a3.dist-info}/RECORD +34 -31
- {fractal_server-2.11.0a0.dist-info → fractal_server-2.11.0a3.dist-info}/LICENSE +0 -0
- {fractal_server-2.11.0a0.dist-info → fractal_server-2.11.0a3.dist-info}/WHEEL +0 -0
- {fractal_server-2.11.0a0.dist-info → fractal_server-2.11.0a3.dist-info}/entry_points.txt +0 -0
fractal_server/app/schemas/_filter_validators.py (new file)

```diff
@@ -0,0 +1,47 @@
+from typing import Optional
+
+from ._validators import valdict_keys
+from fractal_server.images.models import AttributeFiltersType
+
+
+def validate_type_filters(
+    type_filters: Optional[dict[str, bool]]
+) -> dict[str, bool]:
+    if type_filters is None:
+        raise ValueError("'type_filters' cannot be 'None'.")
+
+    type_filters = valdict_keys("type_filters")(type_filters)
+    return type_filters
+
+
+def validate_attribute_filters(
+    attribute_filters: Optional[AttributeFiltersType],
+) -> AttributeFiltersType:
+    if attribute_filters is None:
+        raise ValueError("'attribute_filters' cannot be 'None'.")
+
+    attribute_filters = valdict_keys("attribute_filters")(attribute_filters)
+    for key, values in attribute_filters.items():
+        if values is None:
+            # values=None corresponds to not applying any filter for
+            # attribute `key`
+            pass
+        elif values == []:
+            # WARNING: in this case, no image can match with the current
+            # filter. In the future we may deprecate this possibility.
+            pass
+        else:
+            # values is a non-empty list, and its items must all be of the
+            # same scalar non-None type
+            _type = type(values[0])
+            if not all(isinstance(value, _type) for value in values):
+                raise ValueError(
+                    f"attribute_filters[{key}] has values with "
+                    f"non-homogeneous types: {values}."
+                )
+            if _type not in (int, float, str, bool):
+                raise ValueError(
+                    f"attribute_filters[{key}] has values with "
+                    f"invalid types: {values}."
+                )
+    return attribute_filters
```
fractal_server/app/schemas/_validators.py

```diff
@@ -27,7 +27,7 @@ def valstr(attribute: str, accept_none: bool = False):
     return val


-def valdictkeys(attribute: str):
+def valdict_keys(attribute: str):
     def val(d: Optional[dict[str, Any]]) -> Optional[dict[str, Any]]:
         """
         Apply valstr to every key of the dictionary, and fail if there are
@@ -38,7 +38,7 @@ def valdictkeys(attribute: str):
         new_keys = [valstr(f"{attribute}[{key}]")(key) for key in old_keys]
         if len(new_keys) != len(set(new_keys)):
             raise ValueError(
-                f"Dictionary contains multiple identical keys: {d}."
+                f"Dictionary contains multiple identical keys: '{d}'."
             )
         for old_key, new_key in zip(old_keys, new_keys):
             if new_key != old_key:
@@ -101,3 +101,14 @@ def val_unique_list(attribute: str):
         return must_be_unique

     return val
+
+
+def root_validate_dict_keys(cls, object: dict) -> dict:
+    """
+    For each dictionary in `object.values()`,
+    checks that that dictionary has only keys of type str.
+    """
+    for dictionary in (v for v in object.values() if isinstance(v, dict)):
+        if not all(isinstance(key, str) for key in dictionary.keys()):
+            raise ValueError("Dictionary keys must be strings.")
+    return object
```
fractal_server/app/schemas/v2/dataset.py

```diff
@@ -1,17 +1,22 @@
 from datetime import datetime
+from typing import Any
 from typing import Optional

 from pydantic import BaseModel
 from pydantic import Extra
 from pydantic import Field
+from pydantic import root_validator
 from pydantic import validator

+from .._filter_validators import validate_attribute_filters
+from .._filter_validators import validate_type_filters
+from .._validators import root_validate_dict_keys
 from .._validators import valstr
 from .dumps import WorkflowTaskDumpV2
 from .project import ProjectReadV2
 from .workflowtask import WorkflowTaskStatusTypeV2
-from fractal_server.images import Filters
 from fractal_server.images import SingleImage
+from fractal_server.images.models import AttributeFiltersType
 from fractal_server.urls import normalize_url


@@ -34,17 +39,29 @@ class DatasetCreateV2(BaseModel, extra=Extra.forbid):

     zarr_dir: Optional[str] = None

-    filters: Filters = Field(default_factory=Filters)
+    type_filters: dict[str, bool] = Field(default_factory=dict)
+    attribute_filters: AttributeFiltersType = Field(default_factory=dict)

     # Validators
+
+    _dict_keys = root_validator(pre=True, allow_reuse=True)(
+        root_validate_dict_keys
+    )
+    _type_filters = validator("type_filters", allow_reuse=True)(
+        validate_type_filters
+    )
+    _attribute_filters = validator("attribute_filters", allow_reuse=True)(
+        validate_attribute_filters
+    )
+
+    _name = validator("name", allow_reuse=True)(valstr("name"))
+
     @validator("zarr_dir")
     def normalize_zarr_dir(cls, v: Optional[str]) -> Optional[str]:
         if v is not None:
             return normalize_url(v)
         return v

-    _name = validator("name", allow_reuse=True)(valstr("name"))
-

 class DatasetReadV2(BaseModel):

```
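The net effect on dataset creation: the single legacy `filters` field is replaced by `type_filters` and `attribute_filters`. A quick sketch (assuming fractal-server 2.11.0a3 is importable; names and paths are invented):

```python
from fractal_server.app.schemas.v2.dataset import DatasetCreateV2

ds = DatasetCreateV2(
    name="my-dataset",
    zarr_dir="/data/my-dataset.zarr",
    type_filters={"is_3D": False},
    attribute_filters={"plate": ["plate_1.zarr"]},
)
print(ds.type_filters)       # {'is_3D': False}
print(ds.attribute_filters)  # {'plate': ['plate_1.zarr']}
```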
fractal_server/app/schemas/v2/dataset.py (continued)

```diff
@@ -59,24 +76,37 @@ class DatasetReadV2(BaseModel):
     timestamp_created: datetime

     zarr_dir: str
-    filters: Filters = Field(default_factory=Filters)
+    type_filters: dict[str, bool]
+    attribute_filters: AttributeFiltersType


 class DatasetUpdateV2(BaseModel, extra=Extra.forbid):

     name: Optional[str]
     zarr_dir: Optional[str]
-    filters: Optional[Filters]
+    type_filters: Optional[dict[str, bool]]
+    attribute_filters: Optional[dict[str, list[Any]]]

     # Validators
+
+    _dict_keys = root_validator(pre=True, allow_reuse=True)(
+        root_validate_dict_keys
+    )
+    _type_filters = validator("type_filters", allow_reuse=True)(
+        validate_type_filters
+    )
+    _attribute_filters = validator("attribute_filters", allow_reuse=True)(
+        validate_attribute_filters
+    )
+
+    _name = validator("name", allow_reuse=True)(valstr("name"))
+
     @validator("zarr_dir")
     def normalize_zarr_dir(cls, v: Optional[str]) -> Optional[str]:
         if v is not None:
             return normalize_url(v)
         return v

-    _name = validator("name", allow_reuse=True)(valstr("name"))
-

 class DatasetImportV2(BaseModel, extra=Extra.forbid):
     """
@@ -87,14 +117,55 @@ class DatasetImportV2(BaseModel, extra=Extra.forbid):
         zarr_dir:
         images:
         filters:
+        type_filters:
+        attribute_filters:
     """

     name: str
     zarr_dir: str
     images: list[SingleImage] = Field(default_factory=list)
-    filters: Filters = Field(default_factory=Filters)

-    [...]
+    filters: Optional[dict[str, Any]] = None
+    type_filters: dict[str, bool] = Field(default_factory=dict)
+    attribute_filters: AttributeFiltersType = Field(default_factory=dict)
+
+    @root_validator(pre=True)
+    def update_legacy_filters(cls, values: dict):
+        """
+        Transform legacy filters (created with fractal-server<2.11.0)
+        into attribute/type filters
+        """
+        if values.get("filters") is not None:
+            if (
+                "type_filters" in values.keys()
+                or "attribute_filters" in values.keys()
+            ):
+                raise ValueError(
+                    "Cannot set filters both through the legacy field "
+                    "('filters') and the new ones ('type_filters' and/or "
+                    "'attribute_filters')."
+                )
+
+            else:
+                # Convert legacy filters.types into new type_filters
+                values["type_filters"] = values["filters"].get("types", {})
+                values["attribute_filters"] = {
+                    key: [value]
+                    for key, value in values["filters"]
+                    .get("attributes", {})
+                    .items()
+                }
+                values["filters"] = None
+
+        return values
+
+    _type_filters = validator("type_filters", allow_reuse=True)(
+        validate_type_filters
+    )
+    _attribute_filters = validator("attribute_filters", allow_reuse=True)(
+        validate_attribute_filters
+    )
+
     @validator("zarr_dir")
     def normalize_zarr_dir(cls, v: str) -> str:
         return normalize_url(v)
```
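The pre-validator above keeps imports of pre-2.11.0 dataset exports working by translating the legacy `filters` block; a sketch with made-up values (again assuming fractal-server 2.11.0a3 is importable):

```python
from fractal_server.app.schemas.v2.dataset import DatasetImportV2

ds = DatasetImportV2(
    name="imported-dataset",
    zarr_dir="/data/imported.zarr",
    filters={"types": {"is_3D": True}, "attributes": {"plate": "plate_1.zarr"}},
)
print(ds.type_filters)       # {'is_3D': True}
print(ds.attribute_filters)  # {'plate': ['plate_1.zarr']}
print(ds.filters)            # None
```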
fractal_server/app/schemas/v2/dataset.py (continued)

```diff
@@ -108,10 +179,12 @@ class DatasetExportV2(BaseModel):
         name:
         zarr_dir:
         images:
-        filters:
+        type_filters:
+        attribute_filters:
     """

     name: str
     zarr_dir: str
     images: list[SingleImage]
-    filters: Filters
+    type_filters: dict[str, bool]
+    attribute_filters: AttributeFiltersType
```
fractal_server/app/schemas/v2/dumps.py

```diff
@@ -13,7 +13,7 @@ from typing import Optional
 from pydantic import BaseModel
 from pydantic import Extra

-from fractal_server.images import Filters
+from fractal_server.images.models import AttributeFiltersType


 class ProjectDumpV2(BaseModel, extra=Extra.forbid):
@@ -39,19 +39,16 @@ class TaskDumpV2(BaseModel):

 class WorkflowTaskDumpV2(BaseModel):
     """
-    [...]
-    [...]
-    may still exist in the database after version updates, we are setting
-    `task_id` and `task` to `Optional` to avoid response-validation errors
+    We do not include 'extra=Extra.forbid' because legacy data may include
+    'input_filters' field and we want to avoid response-validation errors
     for the endpoints that GET datasets.
-    Ref issue #1783.
     """

     id: int
     workflow_id: int
     order: Optional[int]

-    input_filters: Filters
+    type_filters: dict[str, bool]

     task_id: Optional[int]
     task: Optional[TaskDumpV2]
@@ -71,4 +68,5 @@ class DatasetDumpV2(BaseModel, extra=Extra.forbid):
     timestamp_created: str

     zarr_dir: str
-    filters: Filters
+    type_filters: dict[str, bool]
+    attribute_filters: AttributeFiltersType
```
fractal_server/app/schemas/v2/job.py

```diff
@@ -4,13 +4,18 @@ from typing import Optional

 from pydantic import BaseModel
 from pydantic import Extra
+from pydantic import Field
+from pydantic import root_validator
 from pydantic import validator
 from pydantic.types import StrictStr

+from .._filter_validators import validate_attribute_filters
+from .._validators import root_validate_dict_keys
 from .._validators import valstr
 from .dumps import DatasetDumpV2
 from .dumps import ProjectDumpV2
 from .dumps import WorkflowDumpV2
+from fractal_server.images.models import AttributeFiltersType


 class JobStatusTypeV2(str, Enum):
@@ -41,10 +46,18 @@ class JobCreateV2(BaseModel, extra=Extra.forbid):
     slurm_account: Optional[StrictStr] = None
     worker_init: Optional[str]

+    attribute_filters: AttributeFiltersType = Field(default_factory=dict)
+
     # Validators
     _worker_init = validator("worker_init", allow_reuse=True)(
         valstr("worker_init")
     )
+    _dict_keys = root_validator(pre=True, allow_reuse=True)(
+        root_validate_dict_keys
+    )
+    _attribute_filters = validator("attribute_filters", allow_reuse=True)(
+        validate_attribute_filters
+    )

     @validator("first_task_index", always=True)
     def first_task_index_non_negative(cls, v, values):
```
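With this change, attribute filters are supplied per job at submission time. A minimal sketch (it assumes the remaining `JobCreateV2` fields are all optional, as in the fragment above):

```python
from fractal_server.app.schemas.v2.job import JobCreateV2

job = JobCreateV2(attribute_filters={"well": ["A01", "B02"]})
print(job.attribute_filters)  # {'well': ['A01', 'B02']}
```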
fractal_server/app/schemas/v2/job.py (continued)

```diff
@@ -99,6 +112,7 @@ class JobReadV2(BaseModel):
     first_task_index: Optional[int]
     last_task_index: Optional[int]
     worker_init: Optional[str]
+    attribute_filters: AttributeFiltersType


 class JobUpdateV2(BaseModel, extra=Extra.forbid):
```
fractal_server/app/schemas/v2/task.py

```diff
@@ -10,7 +10,7 @@ from pydantic import root_validator
 from pydantic import validator

 from fractal_server.app.schemas._validators import val_unique_list
-from fractal_server.app.schemas._validators import valdictkeys
+from fractal_server.app.schemas._validators import valdict_keys
 from fractal_server.app.schemas._validators import valstr
 from fractal_server.string_tools import validate_cmd

@@ -66,25 +66,25 @@ class TaskCreateV2(BaseModel, extra=Extra.forbid):
     _version = validator("version", allow_reuse=True)(valstr("version"))

     _meta_non_parallel = validator("meta_non_parallel", allow_reuse=True)(
-        valdictkeys("meta_non_parallel")
+        valdict_keys("meta_non_parallel")
     )
     _meta_parallel = validator("meta_parallel", allow_reuse=True)(
-        valdictkeys("meta_parallel")
+        valdict_keys("meta_parallel")
     )
     _args_schema_non_parallel = validator(
         "args_schema_non_parallel", allow_reuse=True
-    )(valdictkeys("args_schema_non_parallel"))
+    )(valdict_keys("args_schema_non_parallel"))
     _args_schema_parallel = validator(
         "args_schema_parallel", allow_reuse=True
-    )(valdictkeys("args_schema_parallel"))
+    )(valdict_keys("args_schema_parallel"))
     _args_schema_version = validator("args_schema_version", allow_reuse=True)(
         valstr("args_schema_version")
     )
     _input_types = validator("input_types", allow_reuse=True)(
-        valdictkeys("input_types")
+        valdict_keys("input_types")
     )
     _output_types = validator("output_types", allow_reuse=True)(
-        valdictkeys("output_types")
+        valdict_keys("output_types")
     )

     _category = validator("category", allow_reuse=True)(
@@ -158,10 +158,10 @@ class TaskUpdateV2(BaseModel, extra=Extra.forbid):
         "command_non_parallel", allow_reuse=True
     )(valstr("command_non_parallel"))
     _input_types = validator("input_types", allow_reuse=True)(
-        valdictkeys("input_types")
+        valdict_keys("input_types")
     )
     _output_types = validator("output_types", allow_reuse=True)(
-        valdictkeys("output_types")
+        valdict_keys("output_types")
     )

     _category = validator("category", allow_reuse=True)(
```
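The renamed helper keeps its behavior: every key is passed through `valstr`, which (in these schemas) strips surrounding whitespace and rejects empty strings, and keys that collide after normalization are refused. A sketch, assuming 2.11.0a3 is installed:

```python
from fractal_server.app.schemas._validators import valdict_keys

print(valdict_keys("meta_parallel")({"threads": 4}))  # {'threads': 4}

try:
    valdict_keys("meta_parallel")({"threads": 4, " threads ": 8})
except ValueError as err:
    print(err)  # Dictionary contains multiple identical keys: ...
```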
fractal_server/app/schemas/v2/task_group.py

```diff
@@ -8,7 +8,7 @@ from pydantic import Field
 from pydantic import validator

 from .._validators import val_absolute_path
-from .._validators import valdictkeys
+from .._validators import valdict_keys
 from .._validators import valstr
 from .task import TaskReadV2

@@ -57,7 +57,7 @@ class TaskGroupCreateV2(BaseModel, extra=Extra.forbid):
     )
     _pinned_package_versions = validator(
         "pinned_package_versions", allow_reuse=True
-    )(valdictkeys("pinned_package_versions"))
+    )(valdict_keys("pinned_package_versions"))
     _pip_extras = validator("pip_extras", allow_reuse=True)(
         valstr("pip_extras")
     )
```
fractal_server/app/schemas/v2/workflowtask.py

```diff
@@ -6,14 +6,16 @@ from typing import Union
 from pydantic import BaseModel
 from pydantic import Extra
 from pydantic import Field
+from pydantic import root_validator
 from pydantic import validator

-from .._validators import valdictkeys
+from .._filter_validators import validate_type_filters
+from .._validators import root_validate_dict_keys
+from .._validators import valdict_keys
 from .task import TaskExportV2
 from .task import TaskImportV2
 from .task import TaskImportV2Legacy
 from .task import TaskReadV2
-from fractal_server.images import Filters

 RESERVED_ARGUMENTS = {"zarr_dir", "zarr_url", "zarr_urls", "init_args"}

@@ -43,21 +45,28 @@ class WorkflowTaskCreateV2(BaseModel, extra=Extra.forbid):
     meta_parallel: Optional[dict[str, Any]]
     args_non_parallel: Optional[dict[str, Any]]
     args_parallel: Optional[dict[str, Any]]
-    input_filters: Filters = Field(default_factory=Filters)
+    type_filters: dict[str, bool] = Field(default_factory=dict)

     # Validators
+    _dict_keys = root_validator(pre=True, allow_reuse=True)(
+        root_validate_dict_keys
+    )
+    _type_filters = validator("type_filters", allow_reuse=True)(
+        validate_type_filters
+    )
+
     _meta_non_parallel = validator("meta_non_parallel", allow_reuse=True)(
-        valdictkeys("meta_non_parallel")
+        valdict_keys("meta_non_parallel")
     )
     _meta_parallel = validator("meta_parallel", allow_reuse=True)(
-        valdictkeys("meta_parallel")
+        valdict_keys("meta_parallel")
     )

     @validator("args_non_parallel")
     def validate_args_non_parallel(cls, value):
         if value is None:
             return
-        valdictkeys("args_non_parallel")(value)
+        valdict_keys("args_non_parallel")(value)
         args_keys = set(value.keys())
         intersect_keys = RESERVED_ARGUMENTS.intersection(args_keys)
         if intersect_keys:
@@ -71,7 +80,7 @@ class WorkflowTaskCreateV2(BaseModel, extra=Extra.forbid):
     def validate_args_parallel(cls, value):
         if value is None:
             return
-        valdictkeys("args_parallel")(value)
+        valdict_keys("args_parallel")(value)
         args_keys = set(value.keys())
         intersect_keys = RESERVED_ARGUMENTS.intersection(args_keys)
         if intersect_keys:
@@ -101,7 +110,7 @@ class WorkflowTaskReadV2(BaseModel):
     args_non_parallel: Optional[dict[str, Any]]
     args_parallel: Optional[dict[str, Any]]

-    input_filters: Filters
+    type_filters: dict[str, bool]

     task_type: str
     task_id: int
@@ -118,21 +127,28 @@ class WorkflowTaskUpdateV2(BaseModel, extra=Extra.forbid):
     meta_parallel: Optional[dict[str, Any]]
     args_non_parallel: Optional[dict[str, Any]]
     args_parallel: Optional[dict[str, Any]]
-    input_filters: Optional[Filters]
+    type_filters: Optional[dict[str, bool]]

     # Validators
+    _dict_keys = root_validator(pre=True, allow_reuse=True)(
+        root_validate_dict_keys
+    )
+    _type_filters = validator("type_filters", allow_reuse=True)(
+        validate_type_filters
+    )
+
     _meta_non_parallel = validator("meta_non_parallel", allow_reuse=True)(
-        valdictkeys("meta_non_parallel")
+        valdict_keys("meta_non_parallel")
     )
     _meta_parallel = validator("meta_parallel", allow_reuse=True)(
-        valdictkeys("meta_parallel")
+        valdict_keys("meta_parallel")
     )

     @validator("args_non_parallel")
     def validate_args_non_parallel(cls, value):
         if value is None:
             return
-        valdictkeys("args_non_parallel")(value)
+        valdict_keys("args_non_parallel")(value)
         args_keys = set(value.keys())
         intersect_keys = RESERVED_ARGUMENTS.intersection(args_keys)
         if intersect_keys:
@@ -146,7 +162,7 @@ class WorkflowTaskUpdateV2(BaseModel, extra=Extra.forbid):
     def validate_args_parallel(cls, value):
         if value is None:
             return
-        valdictkeys("args_parallel")(value)
+        valdict_keys("args_parallel")(value)
         args_keys = set(value.keys())
         intersect_keys = RESERVED_ARGUMENTS.intersection(args_keys)
         if intersect_keys:
@@ -163,22 +179,53 @@ class WorkflowTaskImportV2(BaseModel, extra=Extra.forbid):
     meta_parallel: Optional[dict[str, Any]] = None
     args_non_parallel: Optional[dict[str, Any]] = None
     args_parallel: Optional[dict[str, Any]] = None
-    [...]
-    [...]
+    type_filters: Optional[dict[str, bool]] = None
+    filters: Optional[dict[str, Any]] = None

     task: Union[TaskImportV2, TaskImportV2Legacy]

+    # Validators
+    @root_validator(pre=True)
+    def update_legacy_filters(cls, values: dict):
+        """
+        Transform legacy filters (created with fractal-server<2.11.0)
+        into type filters
+        """
+        if values.get("filters") is not None:
+            if "type_filters" in values.keys():
+                raise ValueError(
+                    "Cannot set filters both through the legacy field "
+                    "('filters') and the new one ('type_filters')."
+                )
+
+            else:
+                # As of 2.11.0, WorkflowTask do not have attribute filters
+                # any more.
+                if values["filters"]["attributes"] != {}:
+                    raise ValueError(
+                        "Cannot set attribute filters for WorkflowTasks."
+                    )
+                # Convert legacy filters.types into new type_filters
+                values["type_filters"] = values["filters"].get("types", {})
+                values["filters"] = None
+
+        return values
+
+    _type_filters = validator("type_filters", allow_reuse=True)(
+        validate_type_filters
+    )
+
     _meta_non_parallel = validator("meta_non_parallel", allow_reuse=True)(
-        valdictkeys("meta_non_parallel")
+        valdict_keys("meta_non_parallel")
     )
     _meta_parallel = validator("meta_parallel", allow_reuse=True)(
-        valdictkeys("meta_parallel")
+        valdict_keys("meta_parallel")
     )
     _args_non_parallel = validator("args_non_parallel", allow_reuse=True)(
-        valdictkeys("args_non_parallel")
+        valdict_keys("args_non_parallel")
     )
     _args_parallel = validator("args_parallel", allow_reuse=True)(
-        valdictkeys("args_parallel")
+        valdict_keys("args_parallel")
     )


```
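A standalone sketch of what `update_legacy_filters` does for workflow tasks; the helper name below is invented and only mirrors the logic shown above:

```python
def convert_legacy_wftask_filters(values: dict) -> dict:
    """Mirror of WorkflowTaskImportV2.update_legacy_filters (illustrative)."""
    if values.get("filters") is not None:
        if "type_filters" in values:
            raise ValueError(
                "Cannot set filters both through the legacy field "
                "('filters') and the new one ('type_filters')."
            )
        # As of 2.11.0, WorkflowTasks no longer carry attribute filters.
        if values["filters"].get("attributes"):
            raise ValueError("Cannot set attribute filters for WorkflowTasks.")
        values["type_filters"] = values["filters"].get("types", {})
        values["filters"] = None
    return values


print(convert_legacy_wftask_filters(
    {"filters": {"types": {"is_3D": False}, "attributes": {}}}
))
# {'filters': None, 'type_filters': {'is_3D': False}}
```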
fractal_server/app/schemas/v2/workflowtask.py (continued)

```diff
@@ -188,6 +235,6 @@ class WorkflowTaskExportV2(BaseModel):
     meta_parallel: Optional[dict[str, Any]] = None
     args_non_parallel: Optional[dict[str, Any]] = None
     args_parallel: Optional[dict[str, Any]] = None
-    input_filters: Filters = Field(default_factory=Filters)
+    type_filters: dict[str, bool] = Field(default_factory=dict)

     task: TaskExportV2
```
fractal_server/data_migrations/2_11_0.py (new file)

```diff
@@ -0,0 +1,67 @@
+import logging
+
+from sqlalchemy.orm.attributes import flag_modified
+from sqlmodel import select
+
+from fractal_server.app.db import get_sync_db
+from fractal_server.app.models import DatasetV2
+from fractal_server.app.models import JobV2
+from fractal_server.app.models import WorkflowTaskV2
+
+logger = logging.getLogger("fix_db")
+logger.setLevel(logging.INFO)
+
+
+def fix_db():
+
+    logger.info("START execution of fix_db function")
+
+    with next(get_sync_db()) as db:
+
+        # DatasetV2.filters
+        # DatasetV2.history[].workflowtask.input_filters
+        stm = select(DatasetV2).order_by(DatasetV2.id)
+        datasets = db.execute(stm).scalars().all()
+        for ds in datasets:
+            ds.attribute_filters = ds.filters["attributes"]
+            ds.type_filters = ds.filters["types"]
+            ds.filters = None
+            for i, h in enumerate(ds.history):
+                ds.history[i]["workflowtask"]["type_filters"] = h[
+                    "workflowtask"
+                ]["input_filters"]["types"]
+            flag_modified(ds, "history")
+            db.add(ds)
+            logger.info(f"Fixed filters in DatasetV2[{ds.id}]")
+
+        # WorkflowTaskV2.input_filters
+        stm = select(WorkflowTaskV2).order_by(WorkflowTaskV2.id)
+        wftasks = db.execute(stm).scalars().all()
+        for wft in wftasks:
+            wft.type_filters = wft.input_filters["types"]
+            if wft.input_filters["attributes"]:
+                logger.warning(
+                    f"Removing WorkflowTaskV2[{wft.id}].input_filters"
+                    f"['attributes'] = {wft.input_filters['attributes']}"
+                )
+            wft.input_filters = None
+            flag_modified(wft, "input_filters")
+            db.add(wft)
+            logger.info(f"Fixed filters in WorkflowTaskV2[{wft.id}]")
+
+        # JOBS V2
+        stm = select(JobV2).order_by(JobV2.id)
+        jobs = db.execute(stm).scalars().all()
+        for job in jobs:
+            job.dataset_dump["type_filters"] = job.dataset_dump["filters"][
+                "types"
+            ]
+            job.dataset_dump["attribute_filters"] = job.dataset_dump[
+                "filters"
+            ]["attributes"]
+            job.dataset_dump.pop("filters")
+            flag_modified(job, "dataset_dump")
+            logger.info(f"Fixed filters in JobV2[{job.id}].datasetdump")
+
+        db.commit()
+        logger.info("Changes committed.")
```