fractal-server 2.11.0a0__py3-none-any.whl → 2.11.0a3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (34)
  1. fractal_server/__init__.py +1 -1
  2. fractal_server/app/models/v2/dataset.py +9 -6
  3. fractal_server/app/models/v2/job.py +5 -0
  4. fractal_server/app/models/v2/workflowtask.py +5 -8
  5. fractal_server/app/routes/api/v2/_aux_functions.py +3 -10
  6. fractal_server/app/routes/api/v2/images.py +29 -6
  7. fractal_server/app/routes/api/v2/submit.py +5 -1
  8. fractal_server/app/routes/api/v2/workflowtask.py +3 -3
  9. fractal_server/app/runner/v2/__init__.py +1 -0
  10. fractal_server/app/runner/v2/_local/__init__.py +5 -0
  11. fractal_server/app/runner/v2/_local_experimental/__init__.py +5 -0
  12. fractal_server/app/runner/v2/_slurm_ssh/__init__.py +7 -3
  13. fractal_server/app/runner/v2/_slurm_sudo/__init__.py +5 -0
  14. fractal_server/app/runner/v2/merge_outputs.py +13 -16
  15. fractal_server/app/runner/v2/runner.py +33 -34
  16. fractal_server/app/runner/v2/task_interface.py +41 -2
  17. fractal_server/app/schemas/_filter_validators.py +47 -0
  18. fractal_server/app/schemas/_validators.py +13 -2
  19. fractal_server/app/schemas/v2/dataset.py +85 -12
  20. fractal_server/app/schemas/v2/dumps.py +6 -8
  21. fractal_server/app/schemas/v2/job.py +14 -0
  22. fractal_server/app/schemas/v2/task.py +9 -9
  23. fractal_server/app/schemas/v2/task_group.py +2 -2
  24. fractal_server/app/schemas/v2/workflowtask.py +67 -20
  25. fractal_server/data_migrations/2_11_0.py +67 -0
  26. fractal_server/images/__init__.py +0 -1
  27. fractal_server/images/models.py +12 -35
  28. fractal_server/images/tools.py +29 -13
  29. fractal_server/migrations/versions/db09233ad13a_split_filters_and_keep_old_columns.py +96 -0
  30. {fractal_server-2.11.0a0.dist-info → fractal_server-2.11.0a3.dist-info}/METADATA +1 -1
  31. {fractal_server-2.11.0a0.dist-info → fractal_server-2.11.0a3.dist-info}/RECORD +34 -31
  32. {fractal_server-2.11.0a0.dist-info → fractal_server-2.11.0a3.dist-info}/LICENSE +0 -0
  33. {fractal_server-2.11.0a0.dist-info → fractal_server-2.11.0a3.dist-info}/WHEEL +0 -0
  34. {fractal_server-2.11.0a0.dist-info → fractal_server-2.11.0a3.dist-info}/entry_points.txt +0 -0
fractal_server/__init__.py
@@ -1 +1 @@
- __VERSION__ = "2.11.0a0"
+ __VERSION__ = "2.11.0a3"
fractal_server/app/models/v2/dataset.py
@@ -11,6 +11,7 @@ from sqlmodel import Relationship
  from sqlmodel import SQLModel

  from ....utils import get_timestamp
+ from fractal_server.images.models import AttributeFiltersType


  class DatasetV2(SQLModel, table=True):
@@ -41,12 +42,14 @@ class DatasetV2(SQLModel, table=True):
          sa_column=Column(JSON, server_default="[]", nullable=False)
      )

-     filters: dict[Literal["attributes", "types"], dict[str, Any]] = Field(
-         sa_column=Column(
-             JSON,
-             nullable=False,
-             server_default='{"attributes": {}, "types": {}}',
-         )
+     filters: Optional[
+         dict[Literal["attributes", "types"], dict[str, Any]]
+     ] = Field(sa_column=Column(JSON, nullable=True, server_default="null"))
+     type_filters: dict[str, bool] = Field(
+         sa_column=Column(JSON, nullable=False, server_default="{}")
+     )
+     attribute_filters: AttributeFiltersType = Field(
+         sa_column=Column(JSON, nullable=False, server_default="{}")
      )

      @property
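For reference, a minimal sketch (not taken from this diff) of how a pre-2.11 `filters` payload maps onto the two new columns, assuming `AttributeFiltersType` is a mapping from attribute names to lists of accepted values; the actual conversion is performed by the `2_11_0.py` data migration listed above, whose body is not shown in this section.

```python
# Illustrative sketch only; the shape of the new attribute_filters column is an
# assumption, not quoted from the diff.
old_filters = {
    "types": {"is_3D": True},
    "attributes": {"well": "A01"},
}

# Type filters keep the same dict[str, bool] shape.
type_filters: dict[str, bool] = old_filters["types"]

# Attribute filters are assumed to become lists of accepted values per key.
attribute_filters = {key: [value] for key, value in old_filters["attributes"].items()}

assert type_filters == {"is_3D": True}
assert attribute_filters == {"well": ["A01"]}
```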
fractal_server/app/models/v2/job.py
@@ -10,6 +10,7 @@ from sqlmodel import SQLModel

  from ....utils import get_timestamp
  from ...schemas.v2 import JobStatusTypeV2
+ from fractal_server.images.models import AttributeFiltersType


  class JobV2(SQLModel, table=True):
@@ -49,3 +50,7 @@ class JobV2(SQLModel, table=True):
      )
      status: str = JobStatusTypeV2.SUBMITTED
      log: Optional[str] = None
+
+     attribute_filters: AttributeFiltersType = Field(
+         sa_column=Column(JSON, nullable=False, server_default="{}")
+     )
fractal_server/app/models/v2/workflowtask.py
@@ -25,14 +25,11 @@ class WorkflowTaskV2(SQLModel, table=True):
      args_parallel: Optional[dict[str, Any]] = Field(sa_column=Column(JSON))
      args_non_parallel: Optional[dict[str, Any]] = Field(sa_column=Column(JSON))

-     input_filters: dict[
-         Literal["attributes", "types"], dict[str, Any]
-     ] = Field(
-         sa_column=Column(
-             JSON,
-             nullable=False,
-             server_default='{"attributes": {}, "types": {}}',
-         )
+     input_filters: Optional[
+         dict[Literal["attributes", "types"], dict[str, Any]]
+     ] = Field(sa_column=Column(JSON, nullable=True, server_default="null"))
+     type_filters: dict[str, bool] = Field(
+         sa_column=Column(JSON, nullable=False, server_default="{}")
      )

      # Task
fractal_server/app/routes/api/v2/_aux_functions.py
@@ -21,7 +21,6 @@ from ....models.v2 import TaskV2
  from ....models.v2 import WorkflowTaskV2
  from ....models.v2 import WorkflowV2
  from ....schemas.v2 import JobStatusTypeV2
- from fractal_server.images import Filters


  async def _get_project_check_owner(
@@ -336,7 +335,7 @@ async def _workflow_insert_task(
      meta_non_parallel: Optional[dict[str, Any]] = None,
      args_non_parallel: Optional[dict[str, Any]] = None,
      args_parallel: Optional[dict[str, Any]] = None,
-     input_filters: Optional[Filters] = None,
+     type_filters: Optional[dict[str, bool]] = None,
      db: AsyncSession,
  ) -> WorkflowTaskV2:
      """
@@ -350,7 +349,7 @@ async def _workflow_insert_task(
          meta_non_parallel:
          args_non_parallel:
          args_parallel:
-         input_filters:
+         type_filters:
          db:
      """
      db_workflow = await db.get(WorkflowV2, workflow_id)
@@ -376,12 +375,6 @@ async def _workflow_insert_task(
      if final_meta_non_parallel == {}:
          final_meta_non_parallel = None

-     # Prepare input_filters attribute
-     if input_filters is None:
-         input_filters_kwarg = {}
-     else:
-         input_filters_kwarg = dict(input_filters=input_filters)
-
      # Create DB entry
      wf_task = WorkflowTaskV2(
          task_type=task_type,
@@ -390,7 +383,7 @@ async def _workflow_insert_task(
          args_parallel=args_parallel,
          meta_parallel=final_meta_parallel,
          meta_non_parallel=final_meta_non_parallel,
-         **input_filters_kwarg,
+         type_filters=(type_filters or dict()),
      )
      db_workflow.task_list.append(wf_task)
      flag_modified(db_workflow, "task_list")
fractal_server/app/routes/api/v2/images.py
@@ -8,6 +8,8 @@ from fastapi import Response
  from fastapi import status
  from pydantic import BaseModel
  from pydantic import Field
+ from pydantic import root_validator
+ from pydantic import validator
  from sqlalchemy.orm.attributes import flag_modified

  from ._aux_functions import _get_dataset_check_owner
@@ -15,9 +17,14 @@ from fractal_server.app.db import AsyncSession
  from fractal_server.app.db import get_async_db
  from fractal_server.app.models import UserOAuth
  from fractal_server.app.routes.auth import current_active_user
- from fractal_server.images import Filters
+ from fractal_server.app.schemas._filter_validators import (
+     validate_attribute_filters,
+ )
+ from fractal_server.app.schemas._filter_validators import validate_type_filters
+ from fractal_server.app.schemas._validators import root_validate_dict_keys
  from fractal_server.images import SingleImage
  from fractal_server.images import SingleImageUpdate
+ from fractal_server.images.models import AttributeFiltersType
  from fractal_server.images.tools import find_image_by_zarr_url
  from fractal_server.images.tools import match_filter

@@ -38,7 +45,18 @@ class ImagePage(BaseModel):

  class ImageQuery(BaseModel):
      zarr_url: Optional[str]
-     filters: Filters = Field(default_factory=Filters)
+     type_filters: dict[str, bool] = Field(default_factory=dict)
+     attribute_filters: AttributeFiltersType = Field(default_factory=dict)
+
+     _dict_keys = root_validator(pre=True, allow_reuse=True)(
+         root_validate_dict_keys
+     )
+     _type_filters = validator("type_filters", allow_reuse=True)(
+         validate_type_filters
+     )
+     _attribute_filters = validator("attribute_filters", allow_reuse=True)(
+         validate_attribute_filters
+     )


  @router.post(
@@ -124,7 +142,11 @@ async def query_dataset_images(
          images = [
              image
              for image in images
-             if match_filter(image, Filters(**dataset.filters))
+             if match_filter(
+                 image=image,
+                 type_filters=dataset.type_filters,
+                 attribute_filters=dataset.attribute_filters,
+             )
          ]

      attributes = {}
@@ -154,13 +176,14 @@ async def query_dataset_images(
      else:
          images = [image]

-     if query.filters.attributes or query.filters.types:
+     if query.attribute_filters or query.type_filters:
          images = [
              image
              for image in images
              if match_filter(
-                 image,
-                 Filters(**query.filters.dict()),
+                 image=image,
+                 type_filters=query.type_filters,
+                 attribute_filters=query.attribute_filters,
              )
          ]
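The two call sites above now pass explicit keyword arguments to `match_filter` instead of a `Filters` object. Below is a rough, hypothetical sketch of the matching semantics those calls imply; `match_filter_sketch` is a stand-in written for illustration, not the real `fractal_server.images.tools.match_filter` (whose changes appear in `images/tools.py` in the file list but are not shown in this section), and the list-of-accepted-values shape for attribute filters is an assumption.

```python
from typing import Any


# Hypothetical stand-in, for illustration only.
def match_filter_sketch(
    *,
    image: dict[str, Any],
    type_filters: dict[str, bool],
    attribute_filters: dict[str, list[Any]],
) -> bool:
    # Every requested type must match; missing types are treated as False.
    for key, required in type_filters.items():
        if image["types"].get(key, False) != required:
            return False
    # Every requested attribute must take one of the accepted values.
    for key, accepted in attribute_filters.items():
        if image["attributes"].get(key) not in accepted:
            return False
    return True


image = {"types": {"is_3D": True}, "attributes": {"well": "A01"}}
assert match_filter_sketch(
    image=image,
    type_filters={"is_3D": True},
    attribute_filters={"well": ["A01", "A02"]},
)
```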
fractal_server/app/routes/api/v2/submit.py
@@ -159,7 +159,11 @@ async def apply_workflow(
      dataset_id=dataset_id,
      workflow_id=workflow_id,
      user_email=user.email,
-     dataset_dump=json.loads(dataset.json(exclude={"images", "history"})),
+     # The 'filters' field is not supported any more but still exists as a
+     # database column, therefore we manually exclude it from dumps.
+     dataset_dump=json.loads(
+         dataset.json(exclude={"images", "history", "filters"})
+     ),
      workflow_dump=json.loads(workflow.json(exclude={"task_list"})),
      project_dump=json.loads(project.json(exclude={"user_list"})),
      **job_create.dict(),
fractal_server/app/routes/api/v2/workflowtask.py
@@ -109,7 +109,7 @@ async def replace_workflowtask(
      task_type=task.type,
      task=task,
      # old-task values
-     input_filters=old_workflow_task.input_filters,
+     type_filters=old_workflow_task.type_filters,
      # possibly new values
      args_non_parallel=_args_non_parallel,
      args_parallel=_args_parallel,
@@ -183,7 +183,7 @@ async def create_workflowtask(
      meta_parallel=new_task.meta_parallel,
      args_non_parallel=new_task.args_non_parallel,
      args_parallel=new_task.args_parallel,
-     input_filters=new_task.input_filters,
+     type_filters=new_task.type_filters,
      db=db,
  )

@@ -274,7 +274,7 @@ async def update_workflowtask(
          if not actual_args:
              actual_args = None
          setattr(db_wf_task, key, actual_args)
-     elif key in ["meta_parallel", "meta_non_parallel", "input_filters"]:
+     elif key in ["meta_parallel", "meta_non_parallel", "type_filters"]:
          setattr(db_wf_task, key, value)
      else:
          raise HTTPException(
fractal_server/app/runner/v2/__init__.py
@@ -327,6 +327,7 @@ async def submit_workflow(
      worker_init=worker_init,
      first_task_index=job.first_task_index,
      last_task_index=job.last_task_index,
+     job_attribute_filters=job.attribute_filters,
      **backend_specific_kwargs,
  )

fractal_server/app/runner/v2/_local/__init__.py
@@ -29,6 +29,7 @@ from ...set_start_and_last_task_index import set_start_and_last_task_index
  from ..runner import execute_tasks_v2
  from ._submit_setup import _local_submit_setup
  from .executor import FractalThreadPoolExecutor
+ from fractal_server.images.models import AttributeFiltersType


  def _process_workflow(
@@ -39,6 +40,7 @@ def _process_workflow(
      workflow_dir_local: Path,
      first_task_index: int,
      last_task_index: int,
+     job_attribute_filters: AttributeFiltersType,
  ) -> None:
      """
      Run the workflow using a `FractalThreadPoolExecutor`.
@@ -54,6 +56,7 @@ def _process_workflow(
          workflow_dir_remote=workflow_dir_local,
          logger_name=logger_name,
          submit_setup_call=_local_submit_setup,
+         job_attribute_filters=job_attribute_filters,
      )


@@ -66,6 +69,7 @@ async def process_workflow(
      first_task_index: Optional[int] = None,
      last_task_index: Optional[int] = None,
      logger_name: str,
+     job_attribute_filters: AttributeFiltersType,
      # Slurm-specific
      user_cache_dir: Optional[str] = None,
      slurm_user: Optional[str] = None,
@@ -146,4 +150,5 @@ async def process_workflow(
          workflow_dir_local=workflow_dir_local,
          first_task_index=first_task_index,
          last_task_index=last_task_index,
+         job_attribute_filters=job_attribute_filters,
      )
fractal_server/app/runner/v2/_local_experimental/__init__.py
@@ -11,6 +11,7 @@ from ...set_start_and_last_task_index import set_start_and_last_task_index
  from ..runner import execute_tasks_v2
  from ._submit_setup import _local_submit_setup
  from .executor import FractalProcessPoolExecutor
+ from fractal_server.images.models import AttributeFiltersType


  def _process_workflow(
@@ -21,6 +22,7 @@ def _process_workflow(
      workflow_dir_local: Path,
      first_task_index: int,
      last_task_index: int,
+     job_attribute_filters: AttributeFiltersType,
  ) -> None:
      """
      Run the workflow using a `FractalProcessPoolExecutor`.
@@ -39,6 +41,7 @@ def _process_workflow(
              workflow_dir_remote=workflow_dir_local,
              logger_name=logger_name,
              submit_setup_call=_local_submit_setup,
+             job_attribute_filters=job_attribute_filters,
          )
      except BrokenProcessPool as e:
          raise JobExecutionError(
@@ -58,6 +61,7 @@ async def process_workflow(
      first_task_index: Optional[int] = None,
      last_task_index: Optional[int] = None,
      logger_name: str,
+     job_attribute_filters: AttributeFiltersType,
      # Slurm-specific
      user_cache_dir: Optional[str] = None,
      slurm_user: Optional[str] = None,
@@ -138,4 +142,5 @@ async def process_workflow(
          workflow_dir_local=workflow_dir_local,
          first_task_index=first_task_index,
          last_task_index=last_task_index,
+         job_attribute_filters=job_attribute_filters,
      )
fractal_server/app/runner/v2/_slurm_ssh/__init__.py
@@ -29,9 +29,9 @@ from ...executors.slurm.ssh.executor import FractalSlurmSSHExecutor
  from ...set_start_and_last_task_index import set_start_and_last_task_index
  from ..runner import execute_tasks_v2
  from ._submit_setup import _slurm_submit_setup
+ from fractal_server.images.models import AttributeFiltersType
  from fractal_server.logger import set_logger

-
  logger = set_logger(__name__)


@@ -46,6 +46,7 @@ def _process_workflow(
      last_task_index: int,
      fractal_ssh: FractalSSH,
      worker_init: Optional[Union[str, list[str]]] = None,
+     job_attribute_filters: AttributeFiltersType,
  ) -> None:
      """
      Run the workflow using a `FractalSlurmSSHExecutor`.
@@ -86,6 +87,7 @@ def _process_workflow(
          workflow_dir_remote=workflow_dir_remote,
          logger_name=logger_name,
          submit_setup_call=_slurm_submit_setup,
+         job_attribute_filters=job_attribute_filters,
      )


@@ -98,12 +100,13 @@ async def process_workflow(
      first_task_index: Optional[int] = None,
      last_task_index: Optional[int] = None,
      logger_name: str,
-     # Not used
+     job_attribute_filters: AttributeFiltersType,
      fractal_ssh: FractalSSH,
+     worker_init: Optional[str] = None,
+     # Not used
      user_cache_dir: Optional[str] = None,
      slurm_user: Optional[str] = None,
      slurm_account: Optional[str] = None,
-     worker_init: Optional[str] = None,
  ) -> None:
      """
      Process workflow (SLURM backend public interface)
@@ -127,4 +130,5 @@ async def process_workflow(
          last_task_index=last_task_index,
          worker_init=worker_init,
          fractal_ssh=fractal_ssh,
+         job_attribute_filters=job_attribute_filters,
      )
fractal_server/app/runner/v2/_slurm_sudo/__init__.py
@@ -27,6 +27,7 @@ from ...executors.slurm.sudo.executor import FractalSlurmExecutor
  from ...set_start_and_last_task_index import set_start_and_last_task_index
  from ..runner import execute_tasks_v2
  from ._submit_setup import _slurm_submit_setup
+ from fractal_server.images.models import AttributeFiltersType


  def _process_workflow(
@@ -42,6 +43,7 @@ def _process_workflow(
      slurm_account: Optional[str] = None,
      user_cache_dir: str,
      worker_init: Optional[Union[str, list[str]]] = None,
+     job_attribute_filters: AttributeFiltersType,
  ) -> None:
      """
      Run the workflow using a `FractalSlurmExecutor`.
@@ -79,6 +81,7 @@ def _process_workflow(
          workflow_dir_remote=workflow_dir_remote,
          logger_name=logger_name,
          submit_setup_call=_slurm_submit_setup,
+         job_attribute_filters=job_attribute_filters,
      )


@@ -91,6 +94,7 @@ async def process_workflow(
      first_task_index: Optional[int] = None,
      last_task_index: Optional[int] = None,
      logger_name: str,
+     job_attribute_filters: AttributeFiltersType,
      # Slurm-specific
      user_cache_dir: Optional[str] = None,
      slurm_user: Optional[str] = None,
@@ -120,4 +124,5 @@ async def process_workflow(
          slurm_user=slurm_user,
          slurm_account=slurm_account,
          worker_init=worker_init,
+         job_attribute_filters=job_attribute_filters,
      )
fractal_server/app/runner/v2/merge_outputs.py
@@ -1,38 +1,35 @@
- from copy import copy
-
  from fractal_server.app.runner.v2.deduplicate_list import deduplicate_list
  from fractal_server.app.runner.v2.task_interface import TaskOutput


  def merge_outputs(task_outputs: list[TaskOutput]) -> TaskOutput:

+     if len(task_outputs) == 0:
+         return TaskOutput()
+
      final_image_list_updates = []
      final_image_list_removals = []
-     last_new_filters = None

-     for ind, task_output in enumerate(task_outputs):
+     for task_output in task_outputs:

          final_image_list_updates.extend(task_output.image_list_updates)
          final_image_list_removals.extend(task_output.image_list_removals)

-         # Check that all filters are the same
-         current_new_filters = task_output.filters
-         if ind == 0:
-             last_new_filters = copy(current_new_filters)
-         if current_new_filters != last_new_filters:
-             raise ValueError(f"{current_new_filters=} but {last_new_filters=}")
-         last_new_filters = copy(current_new_filters)
+         # Check that all type_filters are the same
+         if task_output.type_filters != task_outputs[0].type_filters:
+             raise ValueError(
+                 f"{task_output.type_filters=} "
+                 f"but {task_outputs[0].type_filters=}"
+             )

+     # Note: the ordering of `image_list_removals` is not guaranteed
      final_image_list_updates = deduplicate_list(final_image_list_updates)
-
-     additional_args = {}
-     if last_new_filters is not None:
-         additional_args["filters"] = last_new_filters
+     final_image_list_removals = list(set(final_image_list_removals))

      final_output = TaskOutput(
          image_list_updates=final_image_list_updates,
          image_list_removals=final_image_list_removals,
-         **additional_args,
+         type_filters=task_outputs[0].type_filters,
      )

      return final_output
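As the rewritten function above shows, all task outputs must now agree on `type_filters`, and removals are deduplicated through a set, which is why their ordering is not guaranteed. A minimal sketch of that merge semantics, using plain dicts and lists instead of the real `TaskOutput` model:

```python
# Plain-Python sketch of the merge semantics; values are illustrative only.
outputs = [
    {"image_list_removals": ["/zarr/a"], "type_filters": {"is_3D": False}},
    {"image_list_removals": ["/zarr/a", "/zarr/b"], "type_filters": {"is_3D": False}},
]

# All outputs must agree on type_filters, otherwise merge_outputs raises ValueError.
assert all(o["type_filters"] == outputs[0]["type_filters"] for o in outputs)

# Removals are merged through a set, so duplicates vanish and order is not guaranteed.
removals = list({url for o in outputs for url in o["image_list_removals"]})
assert sorted(removals) == ["/zarr/a", "/zarr/b"]
```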
fractal_server/app/runner/v2/runner.py
@@ -8,7 +8,6 @@ from typing import Optional

  from sqlalchemy.orm.attributes import flag_modified

- from ....images import Filters
  from ....images import SingleImage
  from ....images.tools import filter_image_list
  from ....images.tools import find_image_by_zarr_url
@@ -24,9 +23,11 @@ from fractal_server.app.models.v2 import DatasetV2
  from fractal_server.app.models.v2 import WorkflowTaskV2
  from fractal_server.app.schemas.v2.dataset import _DatasetHistoryItemV2
  from fractal_server.app.schemas.v2.workflowtask import WorkflowTaskStatusTypeV2
+ from fractal_server.images.models import AttributeFiltersType


  def execute_tasks_v2(
+     *,
      wf_task_list: list[WorkflowTaskV2],
      dataset: DatasetV2,
      executor: ThreadPoolExecutor,
@@ -34,6 +35,7 @@ def execute_tasks_v2(
      workflow_dir_remote: Optional[Path] = None,
      logger_name: Optional[str] = None,
      submit_setup_call: Callable = no_op_submit_setup_call,
+     job_attribute_filters: AttributeFiltersType,
  ) -> None:
      logger = logging.getLogger(logger_name)

@@ -47,7 +49,7 @@ def execute_tasks_v2(
      # Initialize local dataset attributes
      zarr_dir = dataset.zarr_dir
      tmp_images = deepcopy(dataset.images)
-     tmp_filters = deepcopy(dataset.filters)
+     tmp_type_filters = deepcopy(dataset.type_filters)

      for wftask in wf_task_list:
          task = wftask.task
@@ -57,19 +59,20 @@ def execute_tasks_v2(
          # PRE TASK EXECUTION

          # Get filtered images
-         pre_filters = dict(
-             types=copy(tmp_filters["types"]),
-             attributes=copy(tmp_filters["attributes"]),
-         )
-         pre_filters["types"].update(wftask.input_filters["types"])
-         pre_filters["attributes"].update(wftask.input_filters["attributes"])
+         pre_type_filters = copy(tmp_type_filters)
+         pre_type_filters.update(wftask.type_filters)
          filtered_images = filter_image_list(
              images=tmp_images,
-             filters=Filters(**pre_filters),
+             type_filters=pre_type_filters,
+             attribute_filters=job_attribute_filters,
          )
          # Verify that filtered images comply with task input_types
          for image in filtered_images:
-             if not match_filter(image, Filters(types=task.input_types)):
+             if not match_filter(
+                 image=image,
+                 type_filters=task.input_types,
+                 attribute_filters={},
+             ):
                  raise JobExecutionError(
                      "Invalid filtered image list\n"
                      f"Task input types: {task.input_types=}\n"
@@ -259,38 +262,30 @@ def execute_tasks_v2(
          else:
              tmp_images.pop(img_search["index"])

-         # Update filters.attributes:
-         # current + (task_output: not really, in current examples..)
-         if current_task_output.filters is not None:
-             tmp_filters["attributes"].update(
-                 current_task_output.filters.attributes
-             )
-
-         # Find manifest ouptut types
-         types_from_manifest = task.output_types
+         # Update type_filters

-         # Find task-output types
-         if current_task_output.filters is not None:
-             types_from_task = current_task_output.filters.types
-         else:
-             types_from_task = {}
+         # Assign the type filters based on different sources
+         # (task manifest and post-execution task output)
+         type_filters_from_task_manifest = task.output_types
+         type_filters_from_task_output = current_task_output.type_filters

          # Check that key sets are disjoint
-         set_types_from_manifest = set(types_from_manifest.keys())
-         set_types_from_task = set(types_from_task.keys())
-         if not set_types_from_manifest.isdisjoint(set_types_from_task):
-             overlap = set_types_from_manifest.intersection(set_types_from_task)
+         keys_from_manifest = set(type_filters_from_task_manifest.keys())
+         keys_from_task_output = set(type_filters_from_task_output.keys())
+         if not keys_from_manifest.isdisjoint(keys_from_task_output):
+             overlap = keys_from_manifest.intersection(keys_from_task_output)
              raise JobExecutionError(
                  "Some type filters are being set twice, "
                  f"for task '{task_name}'.\n"
-                 f"Types from task output: {types_from_task}\n"
-                 f"Types from task maniest: {types_from_manifest}\n"
+                 f"Types from task output: {type_filters_from_task_output}\n"
+                 "Types from task manifest: "
+                 f"{type_filters_from_task_manifest}\n"
                  f"Overlapping keys: {overlap}"
              )

          # Update filters.types
-         tmp_filters["types"].update(types_from_manifest)
-         tmp_filters["types"].update(types_from_task)
+         tmp_type_filters.update(type_filters_from_task_manifest)
+         tmp_type_filters.update(type_filters_from_task_output)

      # Write current dataset attributes (history, images, filters) into the
      # database. They can be used (1) to retrieve the latest state
@@ -299,9 +294,13 @@ def execute_tasks_v2(
      with next(get_sync_db()) as db:
          db_dataset = db.get(DatasetV2, dataset.id)
          db_dataset.history[-1]["status"] = WorkflowTaskStatusTypeV2.DONE
-         db_dataset.filters = tmp_filters
+         db_dataset.type_filters = tmp_type_filters
          db_dataset.images = tmp_images
-         for attribute_name in ["filters", "history", "images"]:
+         for attribute_name in [
+             "type_filters",
+             "history",
+             "images",
+         ]:
              flag_modified(db_dataset, attribute_name)
          db.merge(db_dataset)
          db.commit()
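Taken together, these hunks change how the runner selects images for each task: dataset-level `type_filters` are combined with the workflow-task `type_filters` (which take precedence on overlapping keys), while attribute filtering now comes only from the job-level `attribute_filters`. A minimal sketch of that combination step, with illustrative values:

```python
from copy import copy

# Illustrative values; names mirror the runner.py hunks above.
dataset_type_filters = {"is_3D": True, "illumination_corrected": False}
wftask_type_filters = {"illumination_corrected": True}
job_attribute_filters = {"well": ["A01", "A02"]}

# Workflow-task type filters override the dataset-level ones on shared keys.
pre_type_filters = copy(dataset_type_filters)
pre_type_filters.update(wftask_type_filters)
assert pre_type_filters == {"is_3D": True, "illumination_corrected": True}

# The runner then calls (not executed here):
# filter_image_list(
#     images=tmp_images,
#     type_filters=pre_type_filters,
#     attribute_filters=job_attribute_filters,
# )
```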
fractal_server/app/runner/v2/task_interface.py
@@ -1,22 +1,47 @@
  from typing import Any
+ from typing import Optional

  from pydantic import BaseModel
  from pydantic import Extra
  from pydantic import Field
+ from pydantic import root_validator
  from pydantic import validator

  from ....images import SingleImageTaskOutput
- from fractal_server.images import Filters
+ from fractal_server.app.schemas._filter_validators import validate_type_filters
+ from fractal_server.app.schemas._validators import root_validate_dict_keys
  from fractal_server.urls import normalize_url


+ class LegacyFilters(BaseModel, extra=Extra.forbid):
+     """
+     For fractal-server<2.11, task output could include both
+     `filters["attributes"]` and `filters["types"]`. In the new version
+     there is a single field, named `type_filters`.
+     The current schema is only used to convert old type filters into the
+     new form, but it will reject any attribute filters.
+     """
+
+     types: dict[str, bool] = Field(default_factory=dict)
+     _types = validator("types", allow_reuse=True)(validate_type_filters)
+
+
  class TaskOutput(BaseModel, extra=Extra.forbid):

      image_list_updates: list[SingleImageTaskOutput] = Field(
          default_factory=list
      )
      image_list_removals: list[str] = Field(default_factory=list)
-     filters: Filters = Field(default_factory=Filters)
+
+     filters: Optional[LegacyFilters] = None
+     type_filters: dict[str, bool] = Field(default_factory=dict)
+
+     _dict_keys = root_validator(pre=True, allow_reuse=True)(
+         root_validate_dict_keys
+     )
+     _type_filters = validator("type_filters", allow_reuse=True)(
+         validate_type_filters
+     )

      def check_zarr_urls_are_unique(self) -> None:
          zarr_urls = [img.zarr_url for img in self.image_list_updates]
@@ -37,6 +62,20 @@ class TaskOutput(BaseModel, extra=Extra.forbid):
              msg = f"{msg}\n{duplicate}"
          raise ValueError(msg)

+     @root_validator()
+     def update_legacy_filters(cls, values):
+         if values["filters"] is not None:
+             if values["type_filters"] != {}:
+                 raise ValueError(
+                     "Cannot set both (legacy) 'filters' and 'type_filters'."
+                 )
+             else:
+                 # Convert legacy filters.types into new type_filters
+                 values["type_filters"] = values["filters"].types
+                 values["filters"] = None
+
+         return values
+
      @validator("image_list_removals")
      def normalize_paths(cls, v: list[str]) -> list[str]:
          return [normalize_url(zarr_url) for zarr_url in v]
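The `update_legacy_filters` root validator above converts a pre-2.11 task output that only used `filters["types"]` into the new `type_filters` field, while `LegacyFilters` (with `extra=Extra.forbid`) rejects any leftover attribute filters. A minimal plain-Python sketch of that conversion, without importing fractal-server:

```python
# Sketch of the legacy-output conversion; payloads are illustrative only.
legacy_task_output = {"filters": {"types": {"registered": True}}}

# What update_legacy_filters effectively does: move filters["types"] into
# type_filters and clear the legacy field.
converted = {
    "type_filters": legacy_task_output["filters"]["types"],
    "filters": None,
}
assert converted == {"type_filters": {"registered": True}, "filters": None}

# A payload that also sets filters["attributes"], or that sets both `filters`
# and `type_filters`, is rejected with a validation error instead.
```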