fractal-server 2.0.0a0__py3-none-any.whl → 2.0.0a2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (52)
  1. fractal_server/__init__.py +1 -1
  2. fractal_server/app/models/__init__.py +0 -1
  3. fractal_server/app/models/v1/__init__.py +1 -0
  4. fractal_server/app/routes/admin/v2.py +0 -1
  5. fractal_server/app/routes/api/v1/job.py +3 -3
  6. fractal_server/app/routes/api/v2/__init__.py +2 -2
  7. fractal_server/app/routes/api/v2/dataset.py +0 -1
  8. fractal_server/app/routes/api/v2/images.py +4 -9
  9. fractal_server/app/routes/api/v2/project.py +0 -3
  10. fractal_server/app/routes/api/v2/{apply.py → submit.py} +1 -1
  11. fractal_server/app/routes/api/v2/workflow.py +0 -1
  12. fractal_server/app/runner/executors/slurm/executor.py +23 -10
  13. fractal_server/app/runner/task_files.py +0 -2
  14. fractal_server/app/runner/v1/__init__.py +2 -2
  15. fractal_server/app/runner/v1/_local/__init__.py +1 -1
  16. fractal_server/app/runner/{executors/local → v1/_local}/executor.py +2 -2
  17. fractal_server/app/runner/v2/__init__.py +0 -1
  18. fractal_server/app/runner/v2/_local/__init__.py +1 -3
  19. fractal_server/app/runner/v2/_local/executor.py +100 -0
  20. fractal_server/app/runner/v2/_slurm/__init__.py +68 -86
  21. fractal_server/app/runner/v2/deduplicate_list.py +7 -9
  22. fractal_server/app/runner/v2/merge_outputs.py +1 -4
  23. fractal_server/app/runner/v2/runner.py +19 -16
  24. fractal_server/app/runner/v2/runner_functions.py +14 -12
  25. fractal_server/app/runner/v2/runner_functions_low_level.py +1 -1
  26. fractal_server/app/schemas/v2/dataset.py +2 -1
  27. fractal_server/app/schemas/v2/job.py +2 -1
  28. fractal_server/app/schemas/v2/manifest.py +51 -1
  29. fractal_server/app/schemas/v2/project.py +2 -1
  30. fractal_server/app/schemas/v2/task.py +2 -3
  31. fractal_server/app/schemas/v2/workflow.py +2 -1
  32. fractal_server/app/schemas/v2/workflowtask.py +2 -1
  33. fractal_server/images/__init__.py +2 -50
  34. fractal_server/images/models.py +50 -0
  35. fractal_server/images/tools.py +35 -36
  36. fractal_server/migrations/env.py +0 -2
  37. fractal_server/migrations/versions/{56af171b0159_v2.py → d71e732236cd_v2.py} +29 -7
  38. fractal_server/tasks/v2/background_operations.py +0 -1
  39. {fractal_server-2.0.0a0.dist-info → fractal_server-2.0.0a2.dist-info}/METADATA +1 -1
  40. {fractal_server-2.0.0a0.dist-info → fractal_server-2.0.0a2.dist-info}/RECORD +44 -50
  41. fractal_server/app/runner/executors/local/__init__.py +0 -3
  42. fractal_server/migrations/versions/4b35c5cefbe3_tmp_is_v2_compatible.py +0 -39
  43. fractal_server/migrations/versions/876f28db9d4e_tmp_split_task_and_wftask_meta.py +0 -68
  44. fractal_server/migrations/versions/974c802f0dd0_tmp_workflowtaskv2_type_in_db.py +0 -37
  45. fractal_server/migrations/versions/9cd305cd6023_tmp_workflowtaskv2.py +0 -40
  46. fractal_server/migrations/versions/a6231ed6273c_tmp_args_schemas_in_taskv2.py +0 -42
  47. fractal_server/migrations/versions/b9e9eed9d442_tmp_taskv2_type.py +0 -37
  48. fractal_server/migrations/versions/e3e639454d4b_tmp_make_task_meta_non_optional.py +0 -50
  49. fractal_server/app/runner/{v2/components.py → components.py} +0 -0
  50. {fractal_server-2.0.0a0.dist-info → fractal_server-2.0.0a2.dist-info}/LICENSE +0 -0
  51. {fractal_server-2.0.0a0.dist-info → fractal_server-2.0.0a2.dist-info}/WHEEL +0 -0
  52. {fractal_server-2.0.0a0.dist-info → fractal_server-2.0.0a2.dist-info}/entry_points.txt +0 -0

fractal_server/app/runner/v2/runner.py
@@ -8,7 +8,7 @@ from typing import Optional

 from ....images import Filters
 from ....images import SingleImage
-from ....images.tools import _filter_image_list
+from ....images.tools import filter_image_list
 from ....images.tools import find_image_by_path
 from ....images.tools import match_filter
 from ..filenames import FILTERS_FILENAME
@@ -52,16 +52,15 @@ def execute_tasks_v2(
     # PRE TASK EXECUTION

     # Get filtered images
-    pre_type_filters = copy(tmp_filters["types"])
-    pre_type_filters.update(wftask.input_filters["types"])
-    pre_attribute_filters = copy(tmp_filters["attributes"])
-    pre_attribute_filters.update(wftask.input_filters["attributes"])
-    filtered_images = _filter_image_list(
+    pre_filters = dict(
+        types=copy(tmp_filters["types"]),
+        attributes=copy(tmp_filters["attributes"]),
+    )
+    pre_filters["types"].update(wftask.input_filters["types"])
+    pre_filters["attributes"].update(wftask.input_filters["attributes"])
+    filtered_images = filter_image_list(
         images=tmp_images,
-        filters=Filters(
-            types=pre_type_filters,
-            attributes=pre_attribute_filters,
-        ),
+        filters=Filters(**pre_filters),
     )
     # Verify that filtered images comply with task input_types
     for image in filtered_images:
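
A self-contained sketch (made-up filter values, not fractal-server code) of the merging step above: dataset-level filters are copied first, then the workflow-task `input_filters` take precedence on conflicts.

from copy import copy

# Hypothetical current dataset filters and workflow-task input filters
tmp_filters = dict(types={"is_3D": True}, attributes={"plate": "p1"})
input_filters = dict(types={"illum_corr": True}, attributes={"plate": "p2"})

pre_filters = dict(
    types=copy(tmp_filters["types"]),
    attributes=copy(tmp_filters["attributes"]),
)
pre_filters["types"].update(input_filters["types"])
pre_filters["attributes"].update(input_filters["attributes"])

assert pre_filters["types"] == {"is_3D": True, "illum_corr": True}
assert pre_filters["attributes"] == {"plate": "p2"}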
@@ -183,22 +182,26 @@ def execute_tasks_v2(
             updated_attributes.update(image["attributes"])
             updated_types.update(image["types"])
             updated_types.update(task.output_types)
-            new_image = SingleImage(
+            new_image = dict(
                 path=image["path"],
                 origin=image["origin"],
                 attributes=updated_attributes,
                 types=updated_types,
             )
+            # Validate new image
+            SingleImage(**new_image)
             # Add image into the dataset image list
-            tmp_images.append(new_image.dict())
+            tmp_images.append(new_image)

     # Remove images from tmp_images
-    for image in current_task_output.image_list_removals:
+    for image_path in current_task_output.image_list_removals:
         image_search = find_image_by_path(
-            images=tmp_images, path=image["path"]
+            images=tmp_images, path=image_path
         )
-        if image_search["index"] is None:
-            raise
+        if image_search is None:
+            raise ValueError(
+                f"Cannot remove missing image with path {image_path=}"
+            )
         else:
             tmp_images.pop(image_search["index"])

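A hedged usage sketch of the new removal branch (paths are made up; per the `fractal_server/images/tools.py` changes below, `find_image_by_path` returns `None` when no image matches):

from fractal_server.images.tools import find_image_by_path

tmp_images = [dict(path="/zarr/plate.zarr/A/01/0", attributes={}, types={})]

image_search = find_image_by_path(images=tmp_images, path="/zarr/missing")
if image_search is None:
    print("Cannot remove missing image")  # this branch runs
else:
    tmp_images.pop(image_search["index"])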

fractal_server/app/runner/v2/runner_functions.py
@@ -1,24 +1,24 @@
 import functools
+import logging
 import traceback
 from concurrent.futures import Executor
 from pathlib import Path
+from typing import Any
 from typing import Callable
 from typing import Literal
 from typing import Optional

-from ....images import SingleImage
 from .deduplicate_list import deduplicate_list
 from .merge_outputs import merge_outputs
 from .runner_functions_low_level import run_single_task
-from .task_interface import InitArgsModel
 from .task_interface import InitTaskOutput
 from .task_interface import TaskOutput
 from .v1_compat import convert_v2_args_into_v1
 from fractal_server.app.models.v1 import Task as TaskV1
 from fractal_server.app.models.v2 import TaskV2
 from fractal_server.app.models.v2 import WorkflowTaskV2
-from fractal_server.app.runner.v2.components import _COMPONENT_KEY_
-from fractal_server.app.runner.v2.components import _index_to_component
+from fractal_server.app.runner.components import _COMPONENT_KEY_
+from fractal_server.app.runner.components import _index_to_component


 __all__ = [
@@ -80,7 +80,7 @@ def _check_parallelization_list_size(my_list):

 def run_v2_task_non_parallel(
     *,
-    images: list[SingleImage],
+    images: list[dict[str, Any]],
     zarr_dir: str,
     task: TaskV2,
     wftask: WorkflowTaskV2,
@@ -94,7 +94,11 @@
     This runs server-side (see `executor` argument)
     """

-    if not workflow_dir_user:
+    if workflow_dir_user is None:
+        workflow_dir_user = workflow_dir
+        logging.warning(
+            "In `run_single_task`, workflow_dir_user=None. Is this right?"
+        )
         workflow_dir_user = workflow_dir

     executor_options = _get_executor_options(
@@ -132,7 +136,7 @@

 def run_v2_task_parallel(
     *,
-    images: list[SingleImage],
+    images: list[dict[str, Any]],
     task: TaskV2,
     wftask: WorkflowTaskV2,
     executor: Executor,
@@ -191,7 +195,7 @@

 def run_v2_task_compound(
     *,
-    images: list[SingleImage],
+    images: list[dict[str, Any]],
     zarr_dir: str,
     task: TaskV2,
     wftask: WorkflowTaskV2,
@@ -240,9 +244,7 @@
     else:
         init_task_output = InitTaskOutput(**output)
     parallelization_list = init_task_output.parallelization_list
-    parallelization_list = deduplicate_list(
-        parallelization_list, PydanticModel=InitArgsModel
-    )
+    parallelization_list = deduplicate_list(parallelization_list)

     # 3/B: parallel part of a compound task
     _check_parallelization_list_size(parallelization_list)
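
The `PydanticModel` argument is gone from this call (see `deduplicate_list.py`, item 21 in the file list). A minimal sketch, not the actual implementation, of the order-preserving deduplication the simplified call relies on; pydantic v1 models compare by field values, so `in` works directly:

from pydantic import BaseModel


def deduplicate_list_sketch(this_list: list[BaseModel]) -> list[BaseModel]:
    # Keep only the first occurrence of each value-equal item
    new_list: list[BaseModel] = []
    for this_item in this_list:
        if this_item not in new_list:
            new_list.append(this_item)
    return new_list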
@@ -287,7 +289,7 @@

 def run_v1_task_parallel(
     *,
-    images: list[SingleImage],
+    images: list[dict[str, Any]],
     task_legacy: TaskV1,
     wftask: WorkflowTaskV2,
     executor: Executor,

fractal_server/app/runner/v2/runner_functions_low_level.py
@@ -7,9 +7,9 @@ from shlex import split as shlex_split
 from typing import Any
 from typing import Optional

+from ..components import _COMPONENT_KEY_
 from ..exceptions import JobExecutionError
 from ..exceptions import TaskExecutionError
-from .components import _COMPONENT_KEY_
 from fractal_server.app.models.v2 import WorkflowTaskV2
 from fractal_server.app.runner.task_files import get_task_file_paths


fractal_server/app/schemas/v2/dataset.py
@@ -2,6 +2,7 @@ from datetime import datetime
 from typing import Optional

 from pydantic import BaseModel
+from pydantic import Extra
 from pydantic import Field
 from pydantic import validator

@@ -40,7 +41,7 @@ class DatasetStatusReadV2(BaseModel):
 # CRUD


-class DatasetCreateV2(BaseModel):
+class DatasetCreateV2(BaseModel, extra=Extra.forbid):

     name: str

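The same one-line change (a nested `Config` replaced by pydantic v1's class-keyword form) recurs below for `JobCreateV2`, `ProjectCreateV2`, `TaskCreateV2`, `WorkflowCreateV2` and `WorkflowTaskCreateV2`. A self-contained sketch with a hypothetical model, showing the effect:

from pydantic import BaseModel, Extra, ValidationError


class DummyCreate(BaseModel, extra=Extra.forbid):
    name: str


DummyCreate(name="ok")
try:
    DummyCreate(name="ok", unexpected_field=1)
except ValidationError as e:
    print(e)  # "extra fields not permitted"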

fractal_server/app/schemas/v2/job.py
@@ -3,6 +3,7 @@ from enum import Enum
 from typing import Optional

 from pydantic import BaseModel
+from pydantic import Extra
 from pydantic import validator
 from pydantic.types import StrictStr

@@ -34,7 +35,7 @@ class JobStatusTypeV2(str, Enum):
     FAILED = "failed"


-class JobCreateV2(BaseModel):
+class JobCreateV2(BaseModel, extra=Extra.forbid):

     first_task_index: Optional[int] = None
     last_task_index: Optional[int] = None

fractal_server/app/schemas/v2/manifest.py
@@ -43,13 +43,63 @@ class TaskManifestV2(BaseModel):
     executable_parallel: Optional[str] = None
     input_types: dict[str, bool] = Field(default_factory=dict)
     output_types: dict[str, bool] = Field(default_factory=dict)
-    meta_parallel: dict[str, Any] = Field(default_factory=dict)
     meta_non_parallel: dict[str, Any] = Field(default_factory=dict)
+    meta_parallel: dict[str, Any] = Field(default_factory=dict)
     args_schema_non_parallel: Optional[dict[str, Any]] = None
     args_schema_parallel: Optional[dict[str, Any]] = None
     docs_info: Optional[str] = None
     docs_link: Optional[HttpUrl] = None

+    @root_validator
+    def validate_executable_args_meta(cls, values):
+
+        executable_non_parallel = values.get("executable_non_parallel")
+        executable_parallel = values.get("executable_parallel")
+        if (executable_non_parallel is None) and (executable_parallel is None):
+
+            raise ValueError(
+                "`TaskManifestV2.executable_non_parallel` and "
+                "`TaskManifestV2.executable_parallel` cannot be both None."
+            )
+
+        elif executable_non_parallel is None:
+
+            meta_non_parallel = values.get("meta_non_parallel")
+            if meta_non_parallel != {}:
+                raise ValueError(
+                    "`TaskManifestV2.meta_non_parallel` must be an empty dict "
+                    "if `TaskManifestV2.executable_non_parallel` is None. "
+                    f"Given: {meta_non_parallel}."
+                )
+
+            args_schema_non_parallel = values.get("args_schema_non_parallel")
+            if args_schema_non_parallel is not None:
+                raise ValueError(
+                    "`TaskManifestV2.args_schema_non_parallel` must be None "
+                    "if `TaskManifestV2.executable_non_parallel` is None. "
+                    f"Given: {args_schema_non_parallel}."
+                )
+
+        elif executable_parallel is None:
+
+            meta_parallel = values.get("meta_parallel")
+            if meta_parallel != {}:
+                raise ValueError(
+                    "`TaskManifestV2.meta_parallel` must be an empty dict if "
+                    "`TaskManifestV2.executable_parallel` is None. "
+                    f"Given: {meta_parallel}."
+                )
+
+            args_schema_parallel = values.get("args_schema_parallel")
+            if args_schema_parallel is not None:
+                raise ValueError(
+                    "`TaskManifestV2.args_schema_parallel` must be None if "
+                    "`TaskManifestV2.executable_parallel` is None. "
+                    f"Given: {args_schema_parallel}."
+                )
+
+        return values
+

 class ManifestV2(BaseModel):
     """

fractal_server/app/schemas/v2/project.py
@@ -2,13 +2,14 @@ from datetime import datetime
 from typing import Optional

 from pydantic import BaseModel
+from pydantic import Extra
 from pydantic import validator

 from .._validators import valstr
 from .._validators import valutc


-class ProjectCreateV2(BaseModel):
+class ProjectCreateV2(BaseModel, extra=Extra.forbid):

     name: str
     read_only: bool = False

fractal_server/app/schemas/v2/task.py
@@ -3,6 +3,7 @@ from typing import Literal
 from typing import Optional

 from pydantic import BaseModel
+from pydantic import Extra
 from pydantic import Field
 from pydantic import HttpUrl
 from pydantic import root_validator
@@ -11,9 +12,7 @@ from pydantic import validator
 from .._validators import valstr


-class TaskCreateV2(BaseModel):
-    class Config:
-        extra = "forbid"
+class TaskCreateV2(BaseModel, extra=Extra.forbid):

     name: str


fractal_server/app/schemas/v2/workflow.py
@@ -2,6 +2,7 @@ from datetime import datetime
 from typing import Optional

 from pydantic import BaseModel
+from pydantic import Extra
 from pydantic import validator

 from .._validators import valstr
@@ -12,7 +13,7 @@ from .workflowtask import WorkflowTaskImportV2
 from .workflowtask import WorkflowTaskReadV2


-class WorkflowCreateV2(BaseModel):
+class WorkflowCreateV2(BaseModel, extra=Extra.forbid):

     name: str


fractal_server/app/schemas/v2/workflowtask.py
@@ -3,6 +3,7 @@ from typing import Any
 from typing import Optional

 from pydantic import BaseModel
+from pydantic import Extra
 from pydantic import Field
 from pydantic import validator

@@ -35,7 +36,7 @@ class WorkflowTaskStatusTypeV2(str, Enum):
     FAILED = "failed"


-class WorkflowTaskCreateV2(BaseModel):
+class WorkflowTaskCreateV2(BaseModel, extra=Extra.forbid):

     meta_parallel: Optional[dict[str, Any]]
     meta_non_parallel: Optional[dict[str, Any]]

fractal_server/images/__init__.py
@@ -1,50 +1,2 @@
-from typing import Any
-from typing import Optional
-from typing import Union
-
-from pydantic import BaseModel
-from pydantic import Field
-from pydantic import validator
-
-
-class SingleImage(BaseModel):
-
-    path: str
-    origin: Optional[str] = None
-
-    attributes: dict[str, Any] = Field(default_factory=dict)
-    types: dict[str, bool] = Field(default_factory=dict)
-
-    @validator("attributes")
-    def validate_attributes(
-        cls, v: dict[str, Any]
-    ) -> dict[str, Union[int, float, str, bool]]:
-        for key, value in v.items():
-            if not isinstance(value, (int, float, str, bool)):
-                raise ValueError(
-                    f"SingleImage.attributes[{key}] must be a scalar "
-                    f"(int, float, str or bool). Given {value} ({type(value)})"
-                )
-        return v
-
-
-class Filters(BaseModel):
-    attributes: dict[str, Any] = Field(default_factory=dict)
-    types: dict[str, bool] = Field(default_factory=dict)
-
-    class Config:
-        extra = "forbid"
-
-    # Validators
-    @validator("attributes")
-    def validate_attributes(
-        cls, v: dict[str, Any]
-    ) -> dict[str, Union[int, float, str, bool, None]]:
-        for key, value in v.items():
-            if not isinstance(value, (int, float, str, bool, type(None))):
-                raise ValueError(
-                    f"Filters.attributes[{key}] must be a scalar "
-                    "(int, float, str, bool, or None). "
-                    f"Given {value} ({type(value)})"
-                )
-        return v
+from .models import Filters  # noqa: F401
+from .models import SingleImage  # noqa: F401

fractal_server/images/models.py
@@ -0,0 +1,50 @@
+from typing import Any
+from typing import Optional
+from typing import Union
+
+from pydantic import BaseModel
+from pydantic import Field
+from pydantic import validator
+
+
+class SingleImage(BaseModel):
+
+    path: str
+    origin: Optional[str] = None
+
+    attributes: dict[str, Any] = Field(default_factory=dict)
+    types: dict[str, bool] = Field(default_factory=dict)
+
+    @validator("attributes")
+    def validate_attributes(
+        cls, v: dict[str, Any]
+    ) -> dict[str, Union[int, float, str, bool]]:
+        for key, value in v.items():
+            if not isinstance(value, (int, float, str, bool)):
+                raise ValueError(
+                    f"SingleImage.attributes[{key}] must be a scalar "
+                    f"(int, float, str or bool). Given {value} ({type(value)})"
+                )
+        return v
+
+
+class Filters(BaseModel):
+    attributes: dict[str, Any] = Field(default_factory=dict)
+    types: dict[str, bool] = Field(default_factory=dict)
+
+    class Config:
+        extra = "forbid"
+
+    # Validators
+    @validator("attributes")
+    def validate_attributes(
+        cls, v: dict[str, Any]
+    ) -> dict[str, Union[int, float, str, bool, None]]:
+        for key, value in v.items():
+            if not isinstance(value, (int, float, str, bool, type(None))):
+                raise ValueError(
+                    f"Filters.attributes[{key}] must be a scalar "
+                    "(int, float, str, bool, or None). "
+                    f"Given {value} ({type(value)})"
+                )
+        return v
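
Usage sketch for the relocated models (the public import path is unchanged, thanks to the re-exports in `fractal_server/images/__init__.py`); the attribute values are made up:

from fractal_server.images import SingleImage

SingleImage(path="/zarr/img", attributes=dict(well="A01"))  # OK

try:
    # Non-scalar attribute values are rejected by the validator
    SingleImage(path="/zarr/img", attributes=dict(well=["A", "01"]))
except ValueError as e:
    print(e)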

fractal_server/images/tools.py
@@ -1,21 +1,24 @@
 from copy import copy
 from typing import Any
+from typing import Literal
 from typing import Optional
 from typing import Union

 from fractal_server.images import Filters
-from fractal_server.images import SingleImage
+
+
+ImageSearch = dict[Literal["image", "index"], Union[int, dict[str, Any]]]


 def find_image_by_path(
     *,
     images: list[dict[str, Any]],
     path: str,
-) -> Optional[dict[str, Union[int, dict[str, Any]]]]:
+) -> Optional[ImageSearch]:
     """
-    Return a copy of the image with a given path and its positional index.
+    Return a copy of the image with a given path, and its positional index.

-    Args:
+    Arguments:
         images: List of images.
         path: Path that the returned image must have.

@@ -30,10 +33,23 @@ def find_image_by_path(
         return dict(image=copy(images[ind]), index=ind)


-def match_filter(image: dict[str, Any], filters: Filters):
+# FIXME: what is filters
+def match_filter(image: dict[str, Any], filters: Filters) -> bool:
+    """
+    Find whether an image matches a filter set.
+
+    Arguments:
+        image: A single image.
+        filters: A set of filters.
+
+    Returns:
+        Whether the image matches the filter set.
+    """
+    # Verify match with types (using a False default)
     for key, value in filters.types.items():
         if image["types"].get(key, False) != value:
             return False
+    # Verify match with attributes (only for non-None filters)
     for key, value in filters.attributes.items():
         if value is None:
             continue
@@ -42,45 +58,28 @@ def match_filter(image: dict[str, Any], filters: Filters):
     return True


-def _filter_image_list(
+def filter_image_list(
     images: list[dict[str, Any]],
     filters: Filters,
 ) -> list[dict[str, Any]]:
+    """
+    Compute a sublist with images that match a filter set.

-    # When no filter is provided, return all images
-    if filters.attributes == {} and filters.types == {}:
-        return images
-
-    filtered_images = []
-    for this_image in images:
-        if match_filter(this_image, filters=filters):
-            filtered_images.append(copy(this_image))
-    return filtered_images
-
-
-def match_filter_SingleImage(image: SingleImage, filters: Filters):
-    for key, value in filters.types.items():
-        if image.types.get(key, False) != value:
-            return False
-    for key, value in filters.attributes.items():
-        if value is None:
-            continue
-        if image.attributes.get(key) != value:
-            return False
-    return True
-
+    Arguments:
+        images: A list of images.
+        filters: A set of filters.

-def _filter_image_list_SingleImage(
-    images: list[SingleImage],
-    filters: Filters,
-) -> list[SingleImage]:
+    Returns:
+        List of the `images` elements which match the filter set.
+    """

     # When no filter is provided, return all images
     if filters.attributes == {} and filters.types == {}:
         return images

-    filtered_images = []
-    for this_image in images:
-        if match_filter_SingleImage(this_image, filters=filters):
-            filtered_images.append(copy(this_image))
+    filtered_images = [
+        copy(this_image)
+        for this_image in images
+        if match_filter(this_image, filters=filters)
+    ]
     return filtered_images
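
A hedged usage sketch for the renamed, now-public `filter_image_list` (images are made up):

from fractal_server.images import Filters
from fractal_server.images.tools import filter_image_list

images = [
    dict(path="/zarr/a", attributes=dict(plate="p1"), types=dict(is_3D=True)),
    dict(path="/zarr/b", attributes=dict(plate="p2"), types=dict(is_3D=False)),
]
filters = Filters(attributes=dict(plate="p1"), types=dict(is_3D=True))
assert filter_image_list(images, filters) == [images[0]]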

fractal_server/migrations/env.py
@@ -58,7 +58,6 @@ def run_migrations_offline() -> None:
         target_metadata=target_metadata,
         literal_binds=True,
         dialect_opts={"paramstyle": "named"},
-        render_as_batch=True,
     )

     with context.begin_transaction():
@@ -69,7 +68,6 @@ def do_run_migrations(connection: Connection) -> None:
     context.configure(
         connection=connection,
         target_metadata=target_metadata,
-        render_as_batch=True,
    )

     with context.begin_transaction():
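
For context: `render_as_batch=True` is Alembic's batch mode, mainly needed so SQLite can emulate `ALTER TABLE` by copy-and-rename; without it, migrations emit plain `ALTER` statements. A minimal sketch (hypothetical table and column) of what batch mode looks like inside a migration script:

import sqlalchemy as sa
from alembic import op


def upgrade() -> None:
    # Batch mode recreates the table under the hood where needed (e.g. SQLite)
    with op.batch_alter_table("task") as batch_op:
        batch_op.add_column(sa.Column("some_flag", sa.Boolean(), nullable=True))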

fractal_server/migrations/versions/{56af171b0159_v2.py → d71e732236cd_v2.py}
@@ -1,8 +1,8 @@
 """v2

-Revision ID: 56af171b0159
+Revision ID: d71e732236cd
 Revises: 9fd26a2b0de4
-Create Date: 2024-03-22 11:09:02.458011
+Create Date: 2024-04-05 11:09:17.639183

 """
 import sqlalchemy as sa
@@ -11,7 +11,7 @@ from alembic import op


 # revision identifiers, used by Alembic.
-revision = "56af171b0159"
+revision = "d71e732236cd"
 down_revision = "9fd26a2b0de4"
 branch_labels = None
 depends_on = None
@@ -33,6 +33,7 @@ def upgrade() -> None:
         "taskv2",
         sa.Column("id", sa.Integer(), nullable=False),
         sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
+        sa.Column("type", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
         sa.Column(
             "command_non_parallel",
             sqlmodel.sql.sqltypes.AutoString(),
@@ -46,12 +47,18 @@ def upgrade() -> None:
         sa.Column(
             "source", sqlmodel.sql.sqltypes.AutoString(), nullable=False
         ),
-        sa.Column("meta", sa.JSON(), nullable=True),
+        sa.Column(
+            "meta_non_parallel", sa.JSON(), server_default="{}", nullable=False
+        ),
+        sa.Column(
+            "meta_parallel", sa.JSON(), server_default="{}", nullable=False
+        ),
         sa.Column("owner", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
         sa.Column(
             "version", sqlmodel.sql.sqltypes.AutoString(), nullable=True
         ),
-        sa.Column("args_schema", sa.JSON(), nullable=True),
+        sa.Column("args_schema_non_parallel", sa.JSON(), nullable=True),
+        sa.Column("args_schema_parallel", sa.JSON(), nullable=True),
         sa.Column(
             "args_schema_version",
             sqlmodel.sql.sqltypes.AutoString(),
@@ -177,8 +184,10 @@ def upgrade() -> None:
         sa.Column("id", sa.Integer(), nullable=False),
         sa.Column("workflow_id", sa.Integer(), nullable=False),
         sa.Column("order", sa.Integer(), nullable=True),
-        sa.Column("meta", sa.JSON(), nullable=True),
-        sa.Column("args", sa.JSON(), nullable=True),
+        sa.Column("meta_parallel", sa.JSON(), nullable=True),
+        sa.Column("meta_non_parallel", sa.JSON(), nullable=True),
+        sa.Column("args_parallel", sa.JSON(), nullable=True),
+        sa.Column("args_non_parallel", sa.JSON(), nullable=True),
         sa.Column(
             "input_filters",
             sa.JSON(),
@@ -186,6 +195,9 @@ def upgrade() -> None:
             nullable=False,
         ),
         sa.Column("is_legacy_task", sa.Boolean(), nullable=False),
+        sa.Column(
+            "task_type", sqlmodel.sql.sqltypes.AutoString(), nullable=False
+        ),
         sa.Column("task_id", sa.Integer(), nullable=True),
         sa.Column("task_legacy_id", sa.Integer(), nullable=True),
         sa.ForeignKeyConstraint(
@@ -202,11 +214,21 @@
         ),
         sa.PrimaryKeyConstraint("id"),
     )
+    op.add_column(
+        "task",
+        sa.Column(
+            "is_v2_compatible",
+            sa.Boolean(),
+            server_default=sa.text("false"),
+            nullable=False,
+        ),
+    )
     # ### end Alembic commands ###


 def downgrade() -> None:
     # ### commands auto generated by Alembic - please adjust! ###
+    op.drop_column("task", "is_v2_compatible")
     op.drop_table("workflowtaskv2")
     op.drop_table("jobv2")
     op.drop_table("workflowv2")

fractal_server/tasks/v2/background_operations.py
@@ -352,7 +352,6 @@ async def background_collect_pip(
             data.status = "OK"
             data.log = get_collection_log(venv_path)
             state.data = data.sanitised_dict()
-            db.add(state)
             db.merge(state)
             db.commit()


{fractal_server-2.0.0a0.dist-info → fractal_server-2.0.0a2.dist-info}/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: fractal-server
-Version: 2.0.0a0
+Version: 2.0.0a2
 Summary: Server component of the Fractal analytics platform
 Home-page: https://github.com/fractal-analytics-platform/fractal-server
 License: BSD-3-Clause