fractal-server 2.10.5__py3-none-any.whl → 2.11.0a2__py3-none-any.whl

This diff shows the changes between two publicly released versions of this package, as published to a supported registry. It is provided for informational purposes only.
Files changed (40)
  1. fractal_server/__init__.py +1 -1
  2. fractal_server/app/models/v2/dataset.py +9 -6
  3. fractal_server/app/models/v2/job.py +5 -0
  4. fractal_server/app/models/v2/workflowtask.py +5 -8
  5. fractal_server/app/routes/api/v1/dataset.py +2 -2
  6. fractal_server/app/routes/api/v2/_aux_functions.py +3 -10
  7. fractal_server/app/routes/api/v2/images.py +29 -6
  8. fractal_server/app/routes/api/v2/status.py +20 -20
  9. fractal_server/app/routes/api/v2/submit.py +5 -1
  10. fractal_server/app/routes/api/v2/workflowtask.py +3 -3
  11. fractal_server/app/runner/filenames.py +2 -4
  12. fractal_server/app/runner/v1/_common.py +4 -4
  13. fractal_server/app/runner/v1/handle_failed_job.py +4 -4
  14. fractal_server/app/runner/v2/__init__.py +11 -65
  15. fractal_server/app/runner/v2/_local/__init__.py +12 -17
  16. fractal_server/app/runner/v2/_local_experimental/__init__.py +11 -20
  17. fractal_server/app/runner/v2/_slurm_ssh/__init__.py +14 -16
  18. fractal_server/app/runner/v2/_slurm_sudo/__init__.py +12 -14
  19. fractal_server/app/runner/v2/handle_failed_job.py +31 -130
  20. fractal_server/app/runner/v2/merge_outputs.py +13 -16
  21. fractal_server/app/runner/v2/runner.py +63 -72
  22. fractal_server/app/runner/v2/task_interface.py +41 -2
  23. fractal_server/app/schemas/_filter_validators.py +47 -0
  24. fractal_server/app/schemas/_validators.py +13 -2
  25. fractal_server/app/schemas/v2/dataset.py +58 -12
  26. fractal_server/app/schemas/v2/dumps.py +6 -8
  27. fractal_server/app/schemas/v2/job.py +14 -0
  28. fractal_server/app/schemas/v2/task.py +9 -9
  29. fractal_server/app/schemas/v2/task_group.py +2 -2
  30. fractal_server/app/schemas/v2/workflowtask.py +42 -19
  31. fractal_server/data_migrations/2_11_0.py +67 -0
  32. fractal_server/images/__init__.py +0 -1
  33. fractal_server/images/models.py +12 -35
  34. fractal_server/images/tools.py +29 -13
  35. fractal_server/migrations/versions/db09233ad13a_split_filters_and_keep_old_columns.py +96 -0
  36. {fractal_server-2.10.5.dist-info → fractal_server-2.11.0a2.dist-info}/METADATA +1 -1
  37. {fractal_server-2.10.5.dist-info → fractal_server-2.11.0a2.dist-info}/RECORD +40 -37
  38. {fractal_server-2.10.5.dist-info → fractal_server-2.11.0a2.dist-info}/LICENSE +0 -0
  39. {fractal_server-2.10.5.dist-info → fractal_server-2.11.0a2.dist-info}/WHEEL +0 -0
  40. {fractal_server-2.10.5.dist-info → fractal_server-2.11.0a2.dist-info}/entry_points.txt +0 -0
fractal_server/app/schemas/v2/workflowtask.py
@@ -6,14 +6,16 @@ from typing import Union
 from pydantic import BaseModel
 from pydantic import Extra
 from pydantic import Field
+from pydantic import root_validator
 from pydantic import validator
 
-from .._validators import valdictkeys
+from .._filter_validators import validate_type_filters
+from .._validators import root_validate_dict_keys
+from .._validators import valdict_keys
 from .task import TaskExportV2
 from .task import TaskImportV2
 from .task import TaskImportV2Legacy
 from .task import TaskReadV2
-from fractal_server.images import Filters
 
 RESERVED_ARGUMENTS = {"zarr_dir", "zarr_url", "zarr_urls", "init_args"}
 
@@ -43,21 +45,28 @@ class WorkflowTaskCreateV2(BaseModel, extra=Extra.forbid):
     meta_parallel: Optional[dict[str, Any]]
     args_non_parallel: Optional[dict[str, Any]]
     args_parallel: Optional[dict[str, Any]]
-    input_filters: Filters = Field(default_factory=Filters)
+    type_filters: dict[str, bool] = Field(default_factory=dict)
 
     # Validators
+    _dict_keys = root_validator(pre=True, allow_reuse=True)(
+        root_validate_dict_keys
+    )
+    _type_filters = validator("type_filters", allow_reuse=True)(
+        validate_type_filters
+    )
+
     _meta_non_parallel = validator("meta_non_parallel", allow_reuse=True)(
-        valdictkeys("meta_non_parallel")
+        valdict_keys("meta_non_parallel")
     )
     _meta_parallel = validator("meta_parallel", allow_reuse=True)(
-        valdictkeys("meta_parallel")
+        valdict_keys("meta_parallel")
     )
 
     @validator("args_non_parallel")
     def validate_args_non_parallel(cls, value):
         if value is None:
             return
-        valdictkeys("args_non_parallel")(value)
+        valdict_keys("args_non_parallel")(value)
         args_keys = set(value.keys())
         intersect_keys = RESERVED_ARGUMENTS.intersection(args_keys)
         if intersect_keys:
@@ -71,7 +80,7 @@ class WorkflowTaskCreateV2(BaseModel, extra=Extra.forbid):
     def validate_args_parallel(cls, value):
         if value is None:
             return
-        valdictkeys("args_parallel")(value)
+        valdict_keys("args_parallel")(value)
         args_keys = set(value.keys())
         intersect_keys = RESERVED_ARGUMENTS.intersection(args_keys)
         if intersect_keys:
@@ -101,7 +110,7 @@ class WorkflowTaskReadV2(BaseModel):
     args_non_parallel: Optional[dict[str, Any]]
     args_parallel: Optional[dict[str, Any]]
 
-    input_filters: Filters
+    type_filters: dict[str, bool]
 
     task_type: str
     task_id: int
@@ -118,21 +127,28 @@ class WorkflowTaskUpdateV2(BaseModel, extra=Extra.forbid):
     meta_parallel: Optional[dict[str, Any]]
     args_non_parallel: Optional[dict[str, Any]]
     args_parallel: Optional[dict[str, Any]]
-    input_filters: Optional[Filters]
+    type_filters: Optional[dict[str, bool]]
 
     # Validators
+    _dict_keys = root_validator(pre=True, allow_reuse=True)(
+        root_validate_dict_keys
+    )
+    _type_filters = validator("type_filters", allow_reuse=True)(
+        validate_type_filters
+    )
+
     _meta_non_parallel = validator("meta_non_parallel", allow_reuse=True)(
-        valdictkeys("meta_non_parallel")
+        valdict_keys("meta_non_parallel")
     )
     _meta_parallel = validator("meta_parallel", allow_reuse=True)(
-        valdictkeys("meta_parallel")
+        valdict_keys("meta_parallel")
     )
 
     @validator("args_non_parallel")
     def validate_args_non_parallel(cls, value):
         if value is None:
             return
-        valdictkeys("args_non_parallel")(value)
+        valdict_keys("args_non_parallel")(value)
         args_keys = set(value.keys())
         intersect_keys = RESERVED_ARGUMENTS.intersection(args_keys)
         if intersect_keys:
@@ -146,7 +162,7 @@ class WorkflowTaskUpdateV2(BaseModel, extra=Extra.forbid):
     def validate_args_parallel(cls, value):
         if value is None:
             return
-        valdictkeys("args_parallel")(value)
+        valdict_keys("args_parallel")(value)
        args_keys = set(value.keys())
         intersect_keys = RESERVED_ARGUMENTS.intersection(args_keys)
         if intersect_keys:
@@ -164,21 +180,28 @@ class WorkflowTaskImportV2(BaseModel, extra=Extra.forbid):
     args_non_parallel: Optional[dict[str, Any]] = None
     args_parallel: Optional[dict[str, Any]] = None
 
-    input_filters: Optional[Filters] = None
+    type_filters: Optional[dict[str, bool]] = None
 
     task: Union[TaskImportV2, TaskImportV2Legacy]
 
+    _dict_keys = root_validator(pre=True, allow_reuse=True)(
+        root_validate_dict_keys
+    )
+    _type_filters = validator("type_filters", allow_reuse=True)(
+        validate_type_filters
+    )
+
     _meta_non_parallel = validator("meta_non_parallel", allow_reuse=True)(
-        valdictkeys("meta_non_parallel")
+        valdict_keys("meta_non_parallel")
     )
     _meta_parallel = validator("meta_parallel", allow_reuse=True)(
-        valdictkeys("meta_parallel")
+        valdict_keys("meta_parallel")
     )
     _args_non_parallel = validator("args_non_parallel", allow_reuse=True)(
-        valdictkeys("args_non_parallel")
+        valdict_keys("args_non_parallel")
     )
     _args_parallel = validator("args_parallel", allow_reuse=True)(
-        valdictkeys("args_parallel")
+        valdict_keys("args_parallel")
     )
 
 
@@ -188,6 +211,6 @@ class WorkflowTaskExportV2(BaseModel):
     meta_parallel: Optional[dict[str, Any]] = None
     args_non_parallel: Optional[dict[str, Any]] = None
     args_parallel: Optional[dict[str, Any]] = None
-    input_filters: Filters = Field(default_factory=Filters)
+    type_filters: dict[str, bool] = Field(default_factory=dict)
 
     task: TaskExportV2
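
For context, a minimal sketch (not part of the diff) of the payload change on these schemas: the `Filters`-typed `input_filters` field is gone, and callers pass a plain `{type-name: bool}` dict instead. Assuming the new validators accept this minimal payload:

from fractal_server.app.schemas.v2.workflowtask import WorkflowTaskCreateV2

# 2.10.x (removed): WorkflowTaskCreateV2(input_filters=Filters(types={"is_3D": True}))
# 2.11.0a2: a plain dict of type filters
wftask = WorkflowTaskCreateV2(type_filters={"is_3D": True})
assert wftask.type_filters == {"is_3D": True}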
fractal_server/data_migrations/2_11_0.py (new file)
@@ -0,0 +1,67 @@
+import logging
+
+from sqlalchemy.orm.attributes import flag_modified
+from sqlmodel import select
+
+from fractal_server.app.db import get_sync_db
+from fractal_server.app.models import DatasetV2
+from fractal_server.app.models import JobV2
+from fractal_server.app.models import WorkflowTaskV2
+
+logger = logging.getLogger("fix_db")
+logger.setLevel(logging.INFO)
+
+
+def fix_db():
+
+    logger.info("START execution of fix_db function")
+
+    with next(get_sync_db()) as db:
+
+        # DatasetV2.filters
+        # DatasetV2.history[].workflowtask.input_filters
+        stm = select(DatasetV2).order_by(DatasetV2.id)
+        datasets = db.execute(stm).scalars().all()
+        for ds in datasets:
+            ds.attribute_filters = ds.filters["attributes"]
+            ds.type_filters = ds.filters["types"]
+            ds.filters = None
+            for i, h in enumerate(ds.history):
+                ds.history[i]["workflowtask"]["type_filters"] = h[
+                    "workflowtask"
+                ]["input_filters"]["types"]
+            flag_modified(ds, "history")
+            db.add(ds)
+            logger.info(f"Fixed filters in DatasetV2[{ds.id}]")
+
+        # WorkflowTaskV2.input_filters
+        stm = select(WorkflowTaskV2).order_by(WorkflowTaskV2.id)
+        wftasks = db.execute(stm).scalars().all()
+        for wft in wftasks:
+            wft.type_filters = wft.input_filters["types"]
+            if wft.input_filters["attributes"]:
+                logger.warning(
+                    f"Removing WorkflowTaskV2[{wft.id}].input_filters"
+                    f"['attributes'] = {wft.input_filters['attributes']}"
+                )
+            wft.input_filters = None
+            flag_modified(wft, "input_filters")
+            db.add(wft)
+            logger.info(f"Fixed filters in WorkflowTaskV2[{wft.id}]")
+
+        # JOBS V2
+        stm = select(JobV2).order_by(JobV2.id)
+        jobs = db.execute(stm).scalars().all()
+        for job in jobs:
+            job.dataset_dump["type_filters"] = job.dataset_dump["filters"][
+                "types"
+            ]
+            job.dataset_dump["attribute_filters"] = job.dataset_dump[
+                "filters"
+            ]["attributes"]
+            job.dataset_dump.pop("filters")
+            flag_modified(job, "dataset_dump")
+            logger.info(f"Fixed filters in JobV2[{job.id}].datasetdump")
+
+        db.commit()
+        logger.info("Changes committed.")
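
In plain terms, `fix_db` splits each legacy `filters` JSON object into the two new columns. A toy illustration of the same transformation, with hypothetical values rather than real database rows:

# Hypothetical pre-2.11 value of DatasetV2.filters
old_filters = {"attributes": {"plate": "plate_one.zarr"}, "types": {"is_3D": True}}

type_filters = old_filters["types"]            # -> new DatasetV2.type_filters column
attribute_filters = old_filters["attributes"]  # -> new DatasetV2.attribute_filters column
# DatasetV2.filters is then set to None, as in the script above.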
fractal_server/images/__init__.py
@@ -1,4 +1,3 @@
-from .models import Filters  # noqa: F401
 from .models import SingleImage  # noqa: F401
 from .models import SingleImageTaskOutput  # noqa: F401
 from .models import SingleImageUpdate  # noqa: F401
fractal_server/images/models.py
@@ -3,15 +3,16 @@ from typing import Optional
 from typing import Union
 
 from pydantic import BaseModel
-from pydantic import Extra
 from pydantic import Field
 from pydantic import validator
 
-from fractal_server.app.schemas._validators import valdictkeys
+from fractal_server.app.schemas._validators import valdict_keys
 from fractal_server.urls import normalize_url
 
+AttributeFiltersType = dict[str, Optional[list[Any]]]
 
-class SingleImageBase(BaseModel):
+
+class _SingleImageBase(BaseModel):
     """
     Base for SingleImage and SingleImageTaskOutput.
 
@@ -30,9 +31,9 @@ class SingleImageBase(BaseModel):
 
     # Validators
     _attributes = validator("attributes", allow_reuse=True)(
-        valdictkeys("attributes")
+        valdict_keys("attributes")
     )
-    _types = validator("types", allow_reuse=True)(valdictkeys("types"))
+    _types = validator("types", allow_reuse=True)(valdict_keys("types"))
 
     @validator("zarr_url")
     def normalize_zarr_url(cls, v: str) -> str:
@@ -44,7 +45,7 @@ class SingleImageBase(BaseModel):
         return normalize_url(v)
 
 
-class SingleImageTaskOutput(SingleImageBase):
+class SingleImageTaskOutput(_SingleImageBase):
     """
     `SingleImageBase`, with scalar `attributes` values (`None` included).
     """
@@ -63,7 +64,7 @@ class SingleImageTaskOutput(SingleImageBase):
         return v
 
 
-class SingleImage(SingleImageBase):
+class SingleImage(_SingleImageBase):
     """
    `SingleImageBase`, with scalar `attributes` values (`None` excluded).
     """
@@ -83,8 +84,8 @@
 
 class SingleImageUpdate(BaseModel):
     zarr_url: str
-    attributes: Optional[dict[str, Any]]
-    types: Optional[dict[str, bool]]
+    attributes: Optional[dict[str, Any]] = None
+    types: Optional[dict[str, bool]] = None
 
     @validator("zarr_url")
     def normalize_zarr_url(cls, v: str) -> str:
@@ -96,7 +97,7 @@ class SingleImageUpdate(BaseModel):
     ) -> dict[str, Union[int, float, str, bool]]:
         if v is not None:
             # validate keys
-            valdictkeys("attributes")(v)
+            valdict_keys("attributes")(v)
             # validate values
             for key, value in v.items():
                 if not isinstance(value, (int, float, str, bool)):
@@ -107,28 +108,4 @@
                     )
         return v
 
-    _types = validator("types", allow_reuse=True)(valdictkeys("types"))
-
-
-class Filters(BaseModel, extra=Extra.forbid):
-    attributes: dict[str, Any] = Field(default_factory=dict)
-    types: dict[str, bool] = Field(default_factory=dict)
-
-    # Validators
-    _attributes = validator("attributes", allow_reuse=True)(
-        valdictkeys("attributes")
-    )
-    _types = validator("types", allow_reuse=True)(valdictkeys("types"))
-
-    @validator("attributes")
-    def validate_attributes(
-        cls, v: dict[str, Any]
-    ) -> dict[str, Union[int, float, str, bool, None]]:
-        for key, value in v.items():
-            if not isinstance(value, (int, float, str, bool, type(None))):
-                raise ValueError(
-                    f"Filters.attributes[{key}] must be a scalar "
-                    "(int, float, str, bool, or None). "
-                    f"Given {value} ({type(value)})"
-                )
-        return v
+    _types = validator("types", allow_reuse=True)(valdict_keys("types"))
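
The new `AttributeFiltersType` alias (`dict[str, Optional[list[Any]]]`) maps each attribute key to a list of accepted values, or `None` to disable that key; compare the scalar-valued `Filters.attributes` removed above. A hypothetical example value:

attribute_filters = {
    "plate": ["plate_one.zarr", "plate_two.zarr"],  # accept either plate
    "well": None,  # None: this key is skipped during filtering
}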
fractal_server/images/tools.py
@@ -4,8 +4,7 @@ from typing import Literal
 from typing import Optional
 from typing import Union
 
-from fractal_server.images import Filters
-
+from fractal_server.images.models import AttributeFiltersType
 
 ImageSearch = dict[Literal["image", "index"], Union[int, dict[str, Any]]]
 
@@ -33,52 +32,69 @@ def find_image_by_zarr_url(
     return dict(image=copy(images[ind]), index=ind)
 
 
-def match_filter(image: dict[str, Any], filters: Filters) -> bool:
+def match_filter(
+    *,
+    image: dict[str, Any],
+    type_filters: dict[str, bool],
+    attribute_filters: AttributeFiltersType,
+) -> bool:
     """
     Find whether an image matches a filter set.
 
     Arguments:
         image: A single image.
-        filters: A set of filters.
+        type_filters:
+        attribute_filters:
 
     Returns:
         Whether the image matches the filter set.
     """
+
     # Verify match with types (using a False default)
-    for key, value in filters.types.items():
+    for key, value in type_filters.items():
         if image["types"].get(key, False) != value:
             return False
-    # Verify match with attributes (only for non-None filters)
-    for key, value in filters.attributes.items():
-        if value is None:
+
+    # Verify match with attributes (only for not-None filters)
+    for key, values in attribute_filters.items():
+        if values is None:
             continue
-        if image["attributes"].get(key) != value:
+        if image["attributes"].get(key) not in values:
             return False
+
     return True
 
 
 def filter_image_list(
     images: list[dict[str, Any]],
-    filters: Filters,
+    type_filters: Optional[dict[str, bool]] = None,
+    attribute_filters: Optional[AttributeFiltersType] = None,
 ) -> list[dict[str, Any]]:
     """
     Compute a sublist with images that match a filter set.
 
     Arguments:
         images: A list of images.
-        filters: A set of filters.
+        type_filters:
+        attribute_filters:
 
     Returns:
         List of the `images` elements which match the filter set.
     """
 
     # When no filter is provided, return all images
-    if filters.attributes == {} and filters.types == {}:
+    if type_filters is None and attribute_filters is None:
         return images
+    actual_type_filters = type_filters or {}
+    actual_attribute_filters = attribute_filters or {}
 
     filtered_images = [
         copy(this_image)
         for this_image in images
-        if match_filter(this_image, filters=filters)
+        if match_filter(
+            image=this_image,
+            type_filters=actual_type_filters,
+            attribute_filters=actual_attribute_filters,
+        )
     ]
     return filtered_images
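
A short usage sketch of the new keyword-only API shown above (the image dicts are hypothetical):

from fractal_server.images.tools import filter_image_list, match_filter

images = [
    {
        "zarr_url": "/zarr/A/01/0",
        "types": {"is_3D": True},
        "attributes": {"plate": "p.zarr"},
    },
    {
        "zarr_url": "/zarr/A/02/0",
        "types": {"is_3D": False},
        "attributes": {"plate": "p.zarr"},
    },
]

# True: the type matches and the attribute value is in the accepted list
match_filter(
    image=images[0],
    type_filters={"is_3D": True},
    attribute_filters={"plate": ["p.zarr"]},
)

# Keeps only the first image; attribute_filters defaults to None (no filter)
filter_image_list(images, type_filters={"is_3D": True})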
fractal_server/migrations/versions/db09233ad13a_split_filters_and_keep_old_columns.py (new file)
@@ -0,0 +1,96 @@
+"""split filters and keep old columns
+
+Revision ID: db09233ad13a
+Revises: 316140ff7ee1
+Create Date: 2025-01-14 14:50:46.007222
+
+"""
+import sqlalchemy as sa
+from alembic import op
+from sqlalchemy.dialects import postgresql
+
+# revision identifiers, used by Alembic.
+revision = "db09233ad13a"
+down_revision = "316140ff7ee1"
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("datasetv2", schema=None) as batch_op:
+        batch_op.add_column(
+            sa.Column(
+                "type_filters", sa.JSON(), server_default="{}", nullable=False
+            )
+        )
+        batch_op.add_column(
+            sa.Column(
+                "attribute_filters",
+                sa.JSON(),
+                server_default="{}",
+                nullable=False,
+            )
+        )
+        batch_op.alter_column(
+            "filters",
+            existing_type=postgresql.JSON(astext_type=sa.Text()),
+            nullable=True,
+            server_default="null",
+        )
+
+    with op.batch_alter_table("jobv2", schema=None) as batch_op:
+        batch_op.add_column(
+            sa.Column(
+                "attribute_filters",
+                sa.JSON(),
+                server_default="{}",
+                nullable=False,
+            )
+        )
+
+    with op.batch_alter_table("workflowtaskv2", schema=None) as batch_op:
+        batch_op.add_column(
+            sa.Column(
+                "type_filters", sa.JSON(), server_default="{}", nullable=False
+            )
+        )
+        batch_op.alter_column(
+            "input_filters",
+            existing_type=postgresql.JSON(astext_type=sa.Text()),
+            nullable=True,
+            server_default="null",
+        )
+
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("workflowtaskv2", schema=None) as batch_op:
+        batch_op.alter_column(
+            "input_filters",
+            existing_type=postgresql.JSON(astext_type=sa.Text()),
+            nullable=False,
+            existing_server_default=sa.text(
+                '\'{"attributes": {}, "types": {}}\'::json'
+            ),
+        )
+        batch_op.drop_column("type_filters")
+
+    with op.batch_alter_table("jobv2", schema=None) as batch_op:
+        batch_op.drop_column("attribute_filters")
+
+    with op.batch_alter_table("datasetv2", schema=None) as batch_op:
+        batch_op.alter_column(
+            "filters",
+            existing_type=postgresql.JSON(astext_type=sa.Text()),
+            nullable=False,
+            existing_server_default=sa.text(
+                '\'{"attributes": {}, "types": {}}\'::json'
+            ),
+        )
+        batch_op.drop_column("attribute_filters")
+        batch_op.drop_column("type_filters")
+
+    # ### end Alembic commands ###
{fractal_server-2.10.5.dist-info → fractal_server-2.11.0a2.dist-info}/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: fractal-server
-Version: 2.10.5
+Version: 2.11.0a2
 Summary: Backend component of the Fractal analytics platform
 Home-page: https://github.com/fractal-analytics-platform/fractal-server
 License: BSD-3-Clause