fractal-server 2.10.6__py3-none-any.whl → 2.11.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (50)
  1. fractal_server/__init__.py +1 -1
  2. fractal_server/app/models/v2/dataset.py +9 -6
  3. fractal_server/app/models/v2/job.py +5 -0
  4. fractal_server/app/models/v2/workflowtask.py +5 -8
  5. fractal_server/app/routes/api/v1/dataset.py +2 -2
  6. fractal_server/app/routes/api/v2/_aux_functions.py +3 -10
  7. fractal_server/app/routes/api/v2/_aux_functions_tasks.py +21 -0
  8. fractal_server/app/routes/api/v2/images.py +30 -7
  9. fractal_server/app/routes/api/v2/job.py +14 -1
  10. fractal_server/app/routes/api/v2/status.py +20 -20
  11. fractal_server/app/routes/api/v2/submit.py +11 -4
  12. fractal_server/app/routes/api/v2/workflow.py +95 -0
  13. fractal_server/app/routes/api/v2/workflow_import.py +8 -0
  14. fractal_server/app/routes/api/v2/workflowtask.py +45 -26
  15. fractal_server/app/runner/{async_wrap.py → async_wrap_v1.py} +1 -1
  16. fractal_server/app/runner/executors/slurm/_slurm_config.py +1 -1
  17. fractal_server/app/runner/executors/slurm/ssh/executor.py +2 -2
  18. fractal_server/app/runner/filenames.py +2 -4
  19. fractal_server/app/runner/v1/_common.py +4 -4
  20. fractal_server/app/runner/v1/_local/__init__.py +2 -2
  21. fractal_server/app/runner/v1/_slurm/__init__.py +2 -2
  22. fractal_server/app/runner/v1/handle_failed_job.py +4 -4
  23. fractal_server/app/runner/v2/__init__.py +12 -66
  24. fractal_server/app/runner/v2/_local/__init__.py +17 -47
  25. fractal_server/app/runner/v2/_local_experimental/__init__.py +27 -61
  26. fractal_server/app/runner/v2/_slurm_ssh/__init__.py +26 -65
  27. fractal_server/app/runner/v2/_slurm_sudo/__init__.py +24 -66
  28. fractal_server/app/runner/v2/handle_failed_job.py +31 -130
  29. fractal_server/app/runner/v2/merge_outputs.py +6 -17
  30. fractal_server/app/runner/v2/runner.py +51 -89
  31. fractal_server/app/runner/v2/task_interface.py +0 -2
  32. fractal_server/app/schemas/_filter_validators.py +43 -0
  33. fractal_server/app/schemas/_validators.py +13 -2
  34. fractal_server/app/schemas/v2/dataset.py +85 -12
  35. fractal_server/app/schemas/v2/dumps.py +6 -8
  36. fractal_server/app/schemas/v2/job.py +14 -0
  37. fractal_server/app/schemas/v2/task.py +9 -9
  38. fractal_server/app/schemas/v2/task_group.py +2 -2
  39. fractal_server/app/schemas/v2/workflowtask.py +69 -20
  40. fractal_server/data_migrations/2_11_0.py +168 -0
  41. fractal_server/images/__init__.py +0 -1
  42. fractal_server/images/models.py +12 -35
  43. fractal_server/images/tools.py +53 -14
  44. fractal_server/migrations/versions/db09233ad13a_split_filters_and_keep_old_columns.py +96 -0
  45. fractal_server/utils.py +9 -7
  46. {fractal_server-2.10.6.dist-info → fractal_server-2.11.0.dist-info}/METADATA +1 -1
  47. {fractal_server-2.10.6.dist-info → fractal_server-2.11.0.dist-info}/RECORD +50 -47
  48. {fractal_server-2.10.6.dist-info → fractal_server-2.11.0.dist-info}/LICENSE +0 -0
  49. {fractal_server-2.10.6.dist-info → fractal_server-2.11.0.dist-info}/WHEEL +0 -0
  50. {fractal_server-2.10.6.dist-info → fractal_server-2.11.0.dist-info}/entry_points.txt +0 -0
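The common thread across these changes is the split of the old `Filters` model (one object holding both `attributes` and `types`) into two independent fields: boolean `type_filters` and list-valued `attribute_filters` (see `AttributeFiltersType` in `fractal_server/images/models.py` below). A minimal sketch of the two shapes; the keys and values are invented for illustration:

```python
# Sketch of the data shapes involved in the 2.10.6 -> 2.11.0 filter split.
# Keys and values here ("3D", "well", ...) are invented for illustration.

# fractal-server <= 2.10.6: a single Filters object per dataset / workflowtask
legacy_filters = {
    "attributes": {"well": "A01"},  # scalar attribute values
    "types": {"3D": True},          # boolean type flags
}

# fractal-server 2.11.0: two separate fields
type_filters = {"3D": True}            # dict[str, bool]
attribute_filters = {"well": ["A01"]}  # AttributeFiltersType = dict[str, list[Any]]
```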
fractal_server/app/schemas/v2/workflowtask.py CHANGED
@@ -6,14 +6,16 @@ from typing import Union
 from pydantic import BaseModel
 from pydantic import Extra
 from pydantic import Field
+from pydantic import root_validator
 from pydantic import validator
 
-from .._validators import valdictkeys
+from .._filter_validators import validate_type_filters
+from .._validators import root_validate_dict_keys
+from .._validators import valdict_keys
 from .task import TaskExportV2
 from .task import TaskImportV2
 from .task import TaskImportV2Legacy
 from .task import TaskReadV2
-from fractal_server.images import Filters
 
 RESERVED_ARGUMENTS = {"zarr_dir", "zarr_url", "zarr_urls", "init_args"}
 
@@ -43,21 +45,28 @@ class WorkflowTaskCreateV2(BaseModel, extra=Extra.forbid):
     meta_parallel: Optional[dict[str, Any]]
     args_non_parallel: Optional[dict[str, Any]]
     args_parallel: Optional[dict[str, Any]]
-    input_filters: Filters = Field(default_factory=Filters)
+    type_filters: dict[str, bool] = Field(default_factory=dict)
 
     # Validators
+    _dict_keys = root_validator(pre=True, allow_reuse=True)(
+        root_validate_dict_keys
+    )
+    _type_filters = validator("type_filters", allow_reuse=True)(
+        validate_type_filters
+    )
+
     _meta_non_parallel = validator("meta_non_parallel", allow_reuse=True)(
-        valdictkeys("meta_non_parallel")
+        valdict_keys("meta_non_parallel")
     )
     _meta_parallel = validator("meta_parallel", allow_reuse=True)(
-        valdictkeys("meta_parallel")
+        valdict_keys("meta_parallel")
     )
 
     @validator("args_non_parallel")
     def validate_args_non_parallel(cls, value):
         if value is None:
             return
-        valdictkeys("args_non_parallel")(value)
+        valdict_keys("args_non_parallel")(value)
         args_keys = set(value.keys())
         intersect_keys = RESERVED_ARGUMENTS.intersection(args_keys)
         if intersect_keys:
@@ -71,7 +80,7 @@ class WorkflowTaskCreateV2(BaseModel, extra=Extra.forbid):
     def validate_args_parallel(cls, value):
         if value is None:
             return
-        valdictkeys("args_parallel")(value)
+        valdict_keys("args_parallel")(value)
         args_keys = set(value.keys())
         intersect_keys = RESERVED_ARGUMENTS.intersection(args_keys)
         if intersect_keys:
@@ -101,7 +110,7 @@ class WorkflowTaskReadV2(BaseModel):
     args_non_parallel: Optional[dict[str, Any]]
     args_parallel: Optional[dict[str, Any]]
 
-    input_filters: Filters
+    type_filters: dict[str, bool]
 
     task_type: str
     task_id: int
@@ -118,21 +127,28 @@ class WorkflowTaskUpdateV2(BaseModel, extra=Extra.forbid):
     meta_parallel: Optional[dict[str, Any]]
     args_non_parallel: Optional[dict[str, Any]]
     args_parallel: Optional[dict[str, Any]]
-    input_filters: Optional[Filters]
+    type_filters: Optional[dict[str, bool]]
 
     # Validators
+    _dict_keys = root_validator(pre=True, allow_reuse=True)(
+        root_validate_dict_keys
+    )
+    _type_filters = validator("type_filters", allow_reuse=True)(
+        validate_type_filters
+    )
+
     _meta_non_parallel = validator("meta_non_parallel", allow_reuse=True)(
-        valdictkeys("meta_non_parallel")
+        valdict_keys("meta_non_parallel")
     )
     _meta_parallel = validator("meta_parallel", allow_reuse=True)(
-        valdictkeys("meta_parallel")
+        valdict_keys("meta_parallel")
     )
 
     @validator("args_non_parallel")
     def validate_args_non_parallel(cls, value):
         if value is None:
             return
-        valdictkeys("args_non_parallel")(value)
+        valdict_keys("args_non_parallel")(value)
         args_keys = set(value.keys())
         intersect_keys = RESERVED_ARGUMENTS.intersection(args_keys)
         if intersect_keys:
@@ -146,7 +162,7 @@ class WorkflowTaskUpdateV2(BaseModel, extra=Extra.forbid):
     def validate_args_parallel(cls, value):
         if value is None:
             return
-        valdictkeys("args_parallel")(value)
+        valdict_keys("args_parallel")(value)
        args_keys = set(value.keys())
        intersect_keys = RESERVED_ARGUMENTS.intersection(args_keys)
        if intersect_keys:
@@ -163,22 +179,55 @@ class WorkflowTaskImportV2(BaseModel, extra=Extra.forbid):
     meta_parallel: Optional[dict[str, Any]] = None
     args_non_parallel: Optional[dict[str, Any]] = None
     args_parallel: Optional[dict[str, Any]] = None
-
-    input_filters: Optional[Filters] = None
+    type_filters: Optional[dict[str, bool]] = None
+    input_filters: Optional[dict[str, Any]] = None
 
     task: Union[TaskImportV2, TaskImportV2Legacy]
 
+    # Validators
+    @root_validator(pre=True)
+    def update_legacy_filters(cls, values: dict):
+        """
+        Transform legacy filters (created with fractal-server<2.11.0)
+        into type filters
+        """
+        if values.get("input_filters") is not None:
+            if "type_filters" in values.keys():
+                raise ValueError(
+                    "Cannot set filters both through the legacy field "
+                    "('filters') and the new one ('type_filters')."
+                )
+
+            else:
+                # As of 2.11.0, WorkflowTask do not have attribute filters
+                # any more.
+                if values["input_filters"]["attributes"] != {}:
+                    raise ValueError(
+                        "Cannot set attribute filters for WorkflowTasks."
+                    )
+                # Convert legacy filters.types into new type_filters
+                values["type_filters"] = values["input_filters"].get(
+                    "types", {}
+                )
+                values["input_filters"] = None
+
+        return values
+
+    _type_filters = validator("type_filters", allow_reuse=True)(
+        validate_type_filters
+    )
+
     _meta_non_parallel = validator("meta_non_parallel", allow_reuse=True)(
-        valdictkeys("meta_non_parallel")
+        valdict_keys("meta_non_parallel")
     )
     _meta_parallel = validator("meta_parallel", allow_reuse=True)(
-        valdictkeys("meta_parallel")
+        valdict_keys("meta_parallel")
     )
     _args_non_parallel = validator("args_non_parallel", allow_reuse=True)(
-        valdictkeys("args_non_parallel")
+        valdict_keys("args_non_parallel")
     )
     _args_parallel = validator("args_parallel", allow_reuse=True)(
-        valdictkeys("args_parallel")
+        valdict_keys("args_parallel")
     )
 
 
@@ -188,6 +237,6 @@ class WorkflowTaskExportV2(BaseModel):
     meta_parallel: Optional[dict[str, Any]] = None
     args_non_parallel: Optional[dict[str, Any]] = None
     args_parallel: Optional[dict[str, Any]] = None
-    input_filters: Filters = Field(default_factory=Filters)
+    type_filters: dict[str, bool] = Field(default_factory=dict)
 
     task: TaskExportV2
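The `update_legacy_filters` root validator above is what keeps pre-2.11.0 workflow exports importable. A standalone, simplified re-statement of its logic (not the pydantic-bound original), with an invented payload:

```python
# Simplified re-statement of the update_legacy_filters logic shown above,
# outside of pydantic, to illustrate how a legacy import payload is converted.
from typing import Any


def update_legacy_filters(values: dict[str, Any]) -> dict[str, Any]:
    if values.get("input_filters") is not None:
        if "type_filters" in values:
            raise ValueError(
                "Cannot set filters both through the legacy field "
                "('filters') and the new one ('type_filters')."
            )
        if values["input_filters"]["attributes"] != {}:
            raise ValueError("Cannot set attribute filters for WorkflowTasks.")
        # Legacy input_filters["types"] becomes the new type_filters
        values["type_filters"] = values["input_filters"].get("types", {})
        values["input_filters"] = None
    return values


# Invented legacy (<2.11.0) workflow-task import payload:
legacy = {"input_filters": {"attributes": {}, "types": {"3D": False}}}
print(update_legacy_filters(legacy))
# {'input_filters': None, 'type_filters': {'3D': False}}
```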
fractal_server/data_migrations/2_11_0.py ADDED
@@ -0,0 +1,168 @@
+import logging
+from typing import Union
+
+from sqlalchemy.orm.attributes import flag_modified
+from sqlmodel import select
+
+from fractal_server.app.db import get_sync_db
+from fractal_server.app.models import DatasetV2
+from fractal_server.app.models import JobV2
+from fractal_server.app.models import ProjectV2
+from fractal_server.app.models import WorkflowTaskV2
+from fractal_server.app.models import WorkflowV2
+from fractal_server.app.schemas.v2 import DatasetReadV2
+from fractal_server.app.schemas.v2 import JobReadV2
+from fractal_server.app.schemas.v2 import ProjectReadV2
+from fractal_server.app.schemas.v2 import TaskReadV2
+from fractal_server.app.schemas.v2 import WorkflowTaskReadV2
+from fractal_server.images.models import AttributeFiltersType
+
+logger = logging.getLogger("fix_db")
+logger.setLevel(logging.INFO)
+
+
+def dict_values_to_list(
+    input_dict: dict[str, Union[int, float, bool, str, None]],
+    identifier: str,
+) -> tuple[AttributeFiltersType, bool]:
+    was_there_a_warning = False
+    for k, v in input_dict.items():
+        if not isinstance(v, (int, float, bool, str, type(None))):
+            error_msg = (
+                f"Attribute '{k}' from '{identifier}' "
+                "has invalid type '{type(v)}'."
+            )
+            logger.error(error_msg)
+            raise RuntimeError(error_msg)
+        elif v is None:
+            logger.warning(
+                f"Attribute '{k}' from '{identifier}' is "
+                "None and it will be removed."
+            )
+            was_there_a_warning = True
+        else:
+            input_dict[k] = [v]
+    return input_dict, was_there_a_warning
+
+
+def fix_db():
+    logger.info("START execution of fix_db function")
+
+    with next(get_sync_db()) as db:
+        # DatasetV2.filters
+        stm = select(DatasetV2).order_by(DatasetV2.id)
+        datasets = db.execute(stm).scalars().all()
+        for ds in datasets:
+            logger.info(f"DatasetV2[{ds.id}] START")
+            if ds.filters is None:
+                logger.info(f"DatasetV2[{ds.id}] SKIP")
+                continue
+
+            ds.attribute_filters, warning = dict_values_to_list(
+                ds.filters["attributes"],
+                f"Dataset[{ds.id}].filters.attributes",
+            )
+            if warning:
+                proj = db.get(ProjectV2, ds.project_id)
+                logger.warning(
+                    "Additional information: "
+                    f"{proj.id=}, "
+                    f"{proj.name=}, "
+                    f"{proj.user_list[0].email=}, "
+                    f"{ds.id=}, "
+                    f"{ds.name=}"
+                )
+            ds.type_filters = ds.filters["types"]
+            ds.filters = None
+            for i, h in enumerate(ds.history):
+                ds.history[i]["workflowtask"]["type_filters"] = h[
+                    "workflowtask"
+                ]["input_filters"]["types"]
+                ds.history[i]["workflowtask"].pop("input_filters")
+            flag_modified(ds, "history")
+            DatasetReadV2(
+                **ds.model_dump(),
+                project=ProjectReadV2(**ds.project.model_dump()),
+            )
+            db.add(ds)
+            logger.info(f"DatasetV2[{ds.id}] END - fixed filters")
+
+        logger.info("------ switch from dataset to workflowtasks ------")
+
+        # WorkflowTaskV2.input_filters
+        stm = select(WorkflowTaskV2).order_by(WorkflowTaskV2.id)
+        wftasks = db.execute(stm).scalars().all()
+        for wft in wftasks:
+            logger.info(f"WorkflowTaskV2[{wft.id}] START")
+            if wft.input_filters is None:
+                logger.info(f"WorkflowTaskV2[{wft.id}] SKIP")
+                continue
+            wft.type_filters = wft.input_filters["types"]
+            if wft.input_filters["attributes"]:
+                logger.warning(
+                    "Removing input_filters['attributes']. "
+                    f"(previous value: {wft.input_filters['attributes']})"
+                )
+                wf = db.get(WorkflowV2, wft.workflow_id)
+                proj = db.get(ProjectV2, wf.project_id)
+                logger.warning(
+                    "Additional information: "
+                    f"{proj.id=}, "
+                    f"{proj.name=}, "
+                    f"{proj.user_list[0].email=}, "
+                    f"{wf.id=}, "
+                    f"{wf.name=}, "
+                    f"{wft.task.name=}"
+                )
+            wft.input_filters = None
+            flag_modified(wft, "input_filters")
+            WorkflowTaskReadV2(
+                **wft.model_dump(),
+                task=TaskReadV2(**wft.task.model_dump()),
+            )
+            db.add(wft)
+            logger.info(f"WorkflowTaskV2[{wft.id}] END - fixed filters")
+
+        logger.info("------ switch from workflowtasks to jobs ------")
+
+        # JOBS V2
+        stm = select(JobV2).order_by(JobV2.id)
+        jobs = db.execute(stm).scalars().all()
+        for job in jobs:
+            logger.info(f"JobV2[{job.id}] START")
+            if "filters" not in job.dataset_dump.keys():
+                logger.info(f"JobV2[{job.id}] SKIP")
+                continue
+            job.dataset_dump["type_filters"] = job.dataset_dump["filters"][
+                "types"
+            ]
+            (
+                job.dataset_dump["attribute_filters"],
+                warning,
+            ) = dict_values_to_list(
+                job.dataset_dump["filters"]["attributes"],
+                f"JobV2[{job.id}].dataset_dump.filters.attributes",
+            )
+            if warning and job.project_id is not None:
+                proj = db.get(ProjectV2, job.project_id)
+                logger.warning(
+                    "Additional information: "
+                    f"{proj.id=}, "
+                    f"{proj.name=}, "
+                    f"{proj.user_list[0].email=}, "
+                    f"{job.id=}, "
+                    f"{job.start_timestamp=}, "
+                    f"{job.end_timestamp=}, "
+                    f"{job.dataset_id=}, "
+                    f"{job.workflow_id=}."
+                )
+            job.dataset_dump.pop("filters")
+            flag_modified(job, "dataset_dump")
+            JobReadV2(**job.model_dump())
+            db.add(job)
+            logger.info(f"JobV2[{job.id}] END - fixed filters")
+
+        db.commit()
+        logger.info("Changes committed.")
+
+    logger.info("END execution of fix_db function")
fractal_server/images/__init__.py CHANGED
@@ -1,4 +1,3 @@
-from .models import Filters  # noqa: F401
 from .models import SingleImage  # noqa: F401
 from .models import SingleImageTaskOutput  # noqa: F401
 from .models import SingleImageUpdate  # noqa: F401
fractal_server/images/models.py CHANGED
@@ -3,15 +3,16 @@ from typing import Optional
 from typing import Union
 
 from pydantic import BaseModel
-from pydantic import Extra
 from pydantic import Field
 from pydantic import validator
 
-from fractal_server.app.schemas._validators import valdictkeys
+from fractal_server.app.schemas._validators import valdict_keys
 from fractal_server.urls import normalize_url
 
+AttributeFiltersType = dict[str, list[Any]]
 
-class SingleImageBase(BaseModel):
+
+class _SingleImageBase(BaseModel):
     """
     Base for SingleImage and SingleImageTaskOutput.
 
@@ -30,9 +31,9 @@ class SingleImageBase(BaseModel):
 
     # Validators
     _attributes = validator("attributes", allow_reuse=True)(
-        valdictkeys("attributes")
+        valdict_keys("attributes")
     )
-    _types = validator("types", allow_reuse=True)(valdictkeys("types"))
+    _types = validator("types", allow_reuse=True)(valdict_keys("types"))
 
     @validator("zarr_url")
     def normalize_zarr_url(cls, v: str) -> str:
@@ -44,7 +45,7 @@ class SingleImageBase(BaseModel):
         return normalize_url(v)
 
 
-class SingleImageTaskOutput(SingleImageBase):
+class SingleImageTaskOutput(_SingleImageBase):
     """
     `SingleImageBase`, with scalar `attributes` values (`None` included).
     """
@@ -63,7 +64,7 @@ class SingleImageTaskOutput(SingleImageBase):
         return v
 
 
-class SingleImage(SingleImageBase):
+class SingleImage(_SingleImageBase):
     """
     `SingleImageBase`, with scalar `attributes` values (`None` excluded).
     """
@@ -83,8 +84,8 @@ class SingleImage(SingleImageBase):
 
 class SingleImageUpdate(BaseModel):
     zarr_url: str
-    attributes: Optional[dict[str, Any]]
-    types: Optional[dict[str, bool]]
+    attributes: Optional[dict[str, Any]] = None
+    types: Optional[dict[str, bool]] = None
 
     @validator("zarr_url")
     def normalize_zarr_url(cls, v: str) -> str:
@@ -96,7 +97,7 @@ class SingleImageUpdate(BaseModel):
     ) -> dict[str, Union[int, float, str, bool]]:
         if v is not None:
             # validate keys
-            valdictkeys("attributes")(v)
+            valdict_keys("attributes")(v)
             # validate values
             for key, value in v.items():
                 if not isinstance(value, (int, float, str, bool)):
@@ -107,28 +108,4 @@ class SingleImageUpdate(BaseModel):
                     )
         return v
 
-    _types = validator("types", allow_reuse=True)(valdictkeys("types"))
-
-
-class Filters(BaseModel, extra=Extra.forbid):
-    attributes: dict[str, Any] = Field(default_factory=dict)
-    types: dict[str, bool] = Field(default_factory=dict)
-
-    # Validators
-    _attributes = validator("attributes", allow_reuse=True)(
-        valdictkeys("attributes")
-    )
-    _types = validator("types", allow_reuse=True)(valdictkeys("types"))
-
-    @validator("attributes")
-    def validate_attributes(
-        cls, v: dict[str, Any]
-    ) -> dict[str, Union[int, float, str, bool, None]]:
-        for key, value in v.items():
-            if not isinstance(value, (int, float, str, bool, type(None))):
-                raise ValueError(
-                    f"Filters.attributes[{key}] must be a scalar "
-                    "(int, float, str, bool, or None). "
-                    f"Given {value} ({type(value)})"
-                )
-        return v
+    _types = validator("types", allow_reuse=True)(valdict_keys("types"))
fractal_server/images/tools.py CHANGED
@@ -4,8 +4,7 @@ from typing import Literal
 from typing import Optional
 from typing import Union
 
-from fractal_server.images import Filters
-
+from fractal_server.images.models import AttributeFiltersType
 
 ImageSearch = dict[Literal["image", "index"], Union[int, dict[str, Any]]]
 
@@ -33,52 +32,92 @@ def find_image_by_zarr_url(
     return dict(image=copy(images[ind]), index=ind)
 
 
-def match_filter(image: dict[str, Any], filters: Filters) -> bool:
+def match_filter(
+    *,
+    image: dict[str, Any],
+    type_filters: dict[str, bool],
+    attribute_filters: AttributeFiltersType,
+) -> bool:
     """
     Find whether an image matches a filter set.
 
     Arguments:
         image: A single image.
-        filters: A set of filters.
+        type_filters:
+        attribute_filters:
 
     Returns:
         Whether the image matches the filter set.
     """
+
     # Verify match with types (using a False default)
-    for key, value in filters.types.items():
+    for key, value in type_filters.items():
         if image["types"].get(key, False) != value:
             return False
-    # Verify match with attributes (only for non-None filters)
-    for key, value in filters.attributes.items():
-        if value is None:
-            continue
-        if image["attributes"].get(key) != value:
+
+    # Verify match with attributes (only for not-None filters)
+    for key, values in attribute_filters.items():
+        if image["attributes"].get(key) not in values:
             return False
+
     return True
 
 
 def filter_image_list(
     images: list[dict[str, Any]],
-    filters: Filters,
+    type_filters: Optional[dict[str, bool]] = None,
+    attribute_filters: Optional[AttributeFiltersType] = None,
 ) -> list[dict[str, Any]]:
     """
     Compute a sublist with images that match a filter set.
 
     Arguments:
         images: A list of images.
-        filters: A set of filters.
+        type_filters:
+        attribute_filters:
 
     Returns:
         List of the `images` elements which match the filter set.
     """
 
     # When no filter is provided, return all images
-    if filters.attributes == {} and filters.types == {}:
+    if type_filters is None and attribute_filters is None:
         return images
+    actual_type_filters = type_filters or {}
+    actual_attribute_filters = attribute_filters or {}
 
     filtered_images = [
         copy(this_image)
         for this_image in images
-        if match_filter(this_image, filters=filters)
+        if match_filter(
+            image=this_image,
+            type_filters=actual_type_filters,
+            attribute_filters=actual_attribute_filters,
+        )
     ]
     return filtered_images
+
+
+def merge_type_filters(
+    *,
+    task_input_types: dict[str, bool],
+    wftask_type_filters: dict[str, bool],
+) -> dict[str, bool]:
+    """
+    Merge two type-filters sets, if they are compatible.
+    """
+    all_keys = set(task_input_types.keys()) | set(wftask_type_filters.keys())
+    for key in all_keys:
+        if (
+            key in task_input_types.keys()
+            and key in wftask_type_filters.keys()
+            and task_input_types[key] != wftask_type_filters[key]
+        ):
+            raise ValueError(
+                "Cannot merge type filters "
+                f"`{task_input_types}` (from task) "
+                f"and `{wftask_type_filters}` (from workflowtask)."
+            )
+    merged_dict = task_input_types
+    merged_dict.update(wftask_type_filters)
+    return merged_dict
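A short usage sketch for the new keyword-based signatures; the image data below is invented, but the function names and module path are the ones added or changed above:

```python
# Usage sketch for the 2.11.0 image-filtering helpers; image data is invented.
from fractal_server.images.tools import filter_image_list, merge_type_filters

images = [
    {"zarr_url": "/zarr/A01", "attributes": {"well": "A01"}, "types": {"3D": True}},
    {"zarr_url": "/zarr/B02", "attributes": {"well": "B02"}, "types": {"3D": False}},
]

# Attribute filters now map each key to a list of accepted values.
selected = filter_image_list(
    images,
    type_filters={"3D": True},
    attribute_filters={"well": ["A01", "A03"]},
)
# -> only the "/zarr/A01" image matches

# Task-declared input types and workflowtask-level type filters are merged;
# a ValueError is raised if they disagree on any key.
merged = merge_type_filters(
    task_input_types={"3D": True},
    wftask_type_filters={"illumination_corrected": True},
)
# -> {"3D": True, "illumination_corrected": True}
```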
fractal_server/migrations/versions/db09233ad13a_split_filters_and_keep_old_columns.py ADDED
@@ -0,0 +1,96 @@
+"""split filters and keep old columns
+
+Revision ID: db09233ad13a
+Revises: 316140ff7ee1
+Create Date: 2025-01-14 14:50:46.007222
+
+"""
+import sqlalchemy as sa
+from alembic import op
+from sqlalchemy.dialects import postgresql
+
+# revision identifiers, used by Alembic.
+revision = "db09233ad13a"
+down_revision = "316140ff7ee1"
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("datasetv2", schema=None) as batch_op:
+        batch_op.add_column(
+            sa.Column(
+                "type_filters", sa.JSON(), server_default="{}", nullable=False
+            )
+        )
+        batch_op.add_column(
+            sa.Column(
+                "attribute_filters",
+                sa.JSON(),
+                server_default="{}",
+                nullable=False,
+            )
+        )
+        batch_op.alter_column(
+            "filters",
+            existing_type=postgresql.JSON(astext_type=sa.Text()),
+            nullable=True,
+            server_default="null",
+        )
+
+    with op.batch_alter_table("jobv2", schema=None) as batch_op:
+        batch_op.add_column(
+            sa.Column(
+                "attribute_filters",
+                sa.JSON(),
+                server_default="{}",
+                nullable=False,
+            )
+        )
+
+    with op.batch_alter_table("workflowtaskv2", schema=None) as batch_op:
+        batch_op.add_column(
+            sa.Column(
+                "type_filters", sa.JSON(), server_default="{}", nullable=False
+            )
+        )
+        batch_op.alter_column(
+            "input_filters",
+            existing_type=postgresql.JSON(astext_type=sa.Text()),
+            nullable=True,
+            server_default="null",
+        )
+
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("workflowtaskv2", schema=None) as batch_op:
+        batch_op.alter_column(
+            "input_filters",
+            existing_type=postgresql.JSON(astext_type=sa.Text()),
+            nullable=False,
+            existing_server_default=sa.text(
+                '\'{"attributes": {}, "types": {}}\'::json'
+            ),
+        )
+        batch_op.drop_column("type_filters")
+
+    with op.batch_alter_table("jobv2", schema=None) as batch_op:
+        batch_op.drop_column("attribute_filters")
+
+    with op.batch_alter_table("datasetv2", schema=None) as batch_op:
+        batch_op.alter_column(
+            "filters",
+            existing_type=postgresql.JSON(astext_type=sa.Text()),
+            nullable=False,
+            existing_server_default=sa.text(
+                '\'{"attributes": {}, "types": {}}\'::json'
+            ),
+        )
+        batch_op.drop_column("attribute_filters")
+        batch_op.drop_column("type_filters")
+
+    # ### end Alembic commands ###
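Assuming a standard Alembic setup with an `alembic.ini` pointing at the target database (an assumption; fractal-server normally drives migrations through its own startup/CLI machinery), the revision above can be applied or rolled back with the Alembic Python API:

```python
# Hypothetical driver for this revision via the Alembic API; the alembic.ini
# path and its configuration are assumptions.
from alembic import command
from alembic.config import Config

cfg = Config("alembic.ini")
command.upgrade(cfg, "db09233ad13a")      # add type_filters / attribute_filters columns
# command.downgrade(cfg, "316140ff7ee1")  # drop them again
```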
fractal_server/utils.py CHANGED
@@ -107,18 +107,20 @@ def execute_command_sync(
     returncode = res.returncode
     stdout = res.stdout
     stderr = res.stderr
-    logger.debug(f"{returncode=}")
-    logger.debug("STDOUT:")
-    logger.debug(stdout)
-    logger.debug("STDERR:")
-    logger.debug(stderr)
     if res.returncode != 0:
         logger.debug(f"ERROR in subprocess call to '{command}'")
         raise RuntimeError(
             f"Command {command} failed.\n"
             f"returncode={res.returncode}\n"
-            f"{stdout=}\n"
-            f"{stderr=}\n"
+            "STDOUT:\n"
+            f"{stdout}\n"
+            "STDERR:\n"
+            f"{stderr}\n"
         )
+    logger.debug(f"{returncode=}")
+    logger.debug("STDOUT:")
+    logger.debug(stdout)
+    logger.debug("STDERR:")
+    logger.debug(stderr)
     logger.debug(f"END subprocess call to '{command}'")
     return stdout
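With this change, a failing command surfaces its stdout and stderr directly in the raised exception instead of only in debug logs. A standalone approximation of the new failure path (not the real `execute_command_sync`, which has its own argument handling):

```python
# Standalone approximation of the new error message format; not the real
# execute_command_sync implementation.
import subprocess

command = "ls /nonexistent-path"
res = subprocess.run(command, shell=True, capture_output=True, encoding="utf-8")
if res.returncode != 0:
    raise RuntimeError(
        f"Command {command} failed.\n"
        f"returncode={res.returncode}\n"
        "STDOUT:\n"
        f"{res.stdout}\n"
        "STDERR:\n"
        f"{res.stderr}\n"
    )
```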