fractal-server 1.4.6__py3-none-any.whl → 2.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fractal_server/__init__.py +1 -1
- fractal_server/app/db/__init__.py +0 -1
- fractal_server/app/models/__init__.py +6 -8
- fractal_server/app/models/linkuserproject.py +9 -0
- fractal_server/app/models/security.py +6 -0
- fractal_server/app/models/v1/__init__.py +12 -0
- fractal_server/app/models/{dataset.py → v1/dataset.py} +5 -5
- fractal_server/app/models/{job.py → v1/job.py} +5 -5
- fractal_server/app/models/{project.py → v1/project.py} +5 -5
- fractal_server/app/models/{state.py → v1/state.py} +2 -2
- fractal_server/app/models/{task.py → v1/task.py} +7 -2
- fractal_server/app/models/{workflow.py → v1/workflow.py} +5 -5
- fractal_server/app/models/v2/__init__.py +22 -0
- fractal_server/app/models/v2/collection_state.py +21 -0
- fractal_server/app/models/v2/dataset.py +54 -0
- fractal_server/app/models/v2/job.py +51 -0
- fractal_server/app/models/v2/project.py +30 -0
- fractal_server/app/models/v2/task.py +93 -0
- fractal_server/app/models/v2/workflow.py +35 -0
- fractal_server/app/models/v2/workflowtask.py +49 -0
- fractal_server/app/routes/admin/__init__.py +0 -0
- fractal_server/app/routes/{admin.py → admin/v1.py} +42 -42
- fractal_server/app/routes/admin/v2.py +309 -0
- fractal_server/app/routes/api/v1/__init__.py +7 -7
- fractal_server/app/routes/api/v1/_aux_functions.py +8 -8
- fractal_server/app/routes/api/v1/dataset.py +48 -41
- fractal_server/app/routes/api/v1/job.py +14 -14
- fractal_server/app/routes/api/v1/project.py +30 -27
- fractal_server/app/routes/api/v1/task.py +26 -16
- fractal_server/app/routes/api/v1/task_collection.py +28 -16
- fractal_server/app/routes/api/v1/workflow.py +28 -28
- fractal_server/app/routes/api/v1/workflowtask.py +11 -11
- fractal_server/app/routes/api/v2/__init__.py +34 -0
- fractal_server/app/routes/api/v2/_aux_functions.py +502 -0
- fractal_server/app/routes/api/v2/dataset.py +293 -0
- fractal_server/app/routes/api/v2/images.py +279 -0
- fractal_server/app/routes/api/v2/job.py +200 -0
- fractal_server/app/routes/api/v2/project.py +186 -0
- fractal_server/app/routes/api/v2/status.py +150 -0
- fractal_server/app/routes/api/v2/submit.py +210 -0
- fractal_server/app/routes/api/v2/task.py +222 -0
- fractal_server/app/routes/api/v2/task_collection.py +239 -0
- fractal_server/app/routes/api/v2/task_legacy.py +59 -0
- fractal_server/app/routes/api/v2/workflow.py +380 -0
- fractal_server/app/routes/api/v2/workflowtask.py +265 -0
- fractal_server/app/routes/aux/_job.py +2 -2
- fractal_server/app/runner/__init__.py +0 -379
- fractal_server/app/runner/async_wrap.py +27 -0
- fractal_server/app/runner/components.py +5 -0
- fractal_server/app/runner/exceptions.py +129 -0
- fractal_server/app/runner/executors/__init__.py +0 -0
- fractal_server/app/runner/executors/slurm/__init__.py +3 -0
- fractal_server/app/runner/{_slurm → executors/slurm}/_batching.py +1 -1
- fractal_server/app/runner/executors/slurm/_check_jobs_status.py +72 -0
- fractal_server/app/runner/{_slurm → executors/slurm}/_executor_wait_thread.py +3 -4
- fractal_server/app/runner/{_slurm → executors/slurm}/_slurm_config.py +3 -152
- fractal_server/app/runner/{_slurm → executors/slurm}/_subprocess_run_as_user.py +42 -1
- fractal_server/app/runner/{_slurm → executors/slurm}/executor.py +46 -27
- fractal_server/app/runner/filenames.py +6 -0
- fractal_server/app/runner/set_start_and_last_task_index.py +39 -0
- fractal_server/app/runner/task_files.py +103 -0
- fractal_server/app/runner/v1/__init__.py +366 -0
- fractal_server/app/runner/{_common.py → v1/_common.py} +56 -111
- fractal_server/app/runner/{_local → v1/_local}/__init__.py +5 -4
- fractal_server/app/runner/{_local → v1/_local}/_local_config.py +6 -7
- fractal_server/app/runner/{_local → v1/_local}/_submit_setup.py +1 -5
- fractal_server/app/runner/v1/_slurm/__init__.py +312 -0
- fractal_server/app/runner/{_slurm → v1/_slurm}/_submit_setup.py +5 -11
- fractal_server/app/runner/v1/_slurm/get_slurm_config.py +163 -0
- fractal_server/app/runner/v1/common.py +117 -0
- fractal_server/app/runner/{handle_failed_job.py → v1/handle_failed_job.py} +8 -8
- fractal_server/app/runner/v2/__init__.py +336 -0
- fractal_server/app/runner/v2/_local/__init__.py +162 -0
- fractal_server/app/runner/v2/_local/_local_config.py +118 -0
- fractal_server/app/runner/v2/_local/_submit_setup.py +52 -0
- fractal_server/app/runner/v2/_local/executor.py +100 -0
- fractal_server/app/runner/{_slurm → v2/_slurm}/__init__.py +38 -47
- fractal_server/app/runner/v2/_slurm/_submit_setup.py +82 -0
- fractal_server/app/runner/v2/_slurm/get_slurm_config.py +182 -0
- fractal_server/app/runner/v2/deduplicate_list.py +23 -0
- fractal_server/app/runner/v2/handle_failed_job.py +165 -0
- fractal_server/app/runner/v2/merge_outputs.py +38 -0
- fractal_server/app/runner/v2/runner.py +343 -0
- fractal_server/app/runner/v2/runner_functions.py +374 -0
- fractal_server/app/runner/v2/runner_functions_low_level.py +130 -0
- fractal_server/app/runner/v2/task_interface.py +62 -0
- fractal_server/app/runner/v2/v1_compat.py +31 -0
- fractal_server/app/schemas/__init__.py +1 -42
- fractal_server/app/schemas/_validators.py +28 -5
- fractal_server/app/schemas/v1/__init__.py +36 -0
- fractal_server/app/schemas/{applyworkflow.py → v1/applyworkflow.py} +18 -18
- fractal_server/app/schemas/{dataset.py → v1/dataset.py} +30 -30
- fractal_server/app/schemas/{dumps.py → v1/dumps.py} +8 -8
- fractal_server/app/schemas/{manifest.py → v1/manifest.py} +5 -5
- fractal_server/app/schemas/{project.py → v1/project.py} +9 -9
- fractal_server/app/schemas/{task.py → v1/task.py} +12 -12
- fractal_server/app/schemas/{task_collection.py → v1/task_collection.py} +7 -7
- fractal_server/app/schemas/{workflow.py → v1/workflow.py} +38 -38
- fractal_server/app/schemas/v2/__init__.py +37 -0
- fractal_server/app/schemas/v2/dataset.py +126 -0
- fractal_server/app/schemas/v2/dumps.py +87 -0
- fractal_server/app/schemas/v2/job.py +114 -0
- fractal_server/app/schemas/v2/manifest.py +159 -0
- fractal_server/app/schemas/v2/project.py +34 -0
- fractal_server/app/schemas/v2/status.py +16 -0
- fractal_server/app/schemas/v2/task.py +151 -0
- fractal_server/app/schemas/v2/task_collection.py +109 -0
- fractal_server/app/schemas/v2/workflow.py +79 -0
- fractal_server/app/schemas/v2/workflowtask.py +208 -0
- fractal_server/config.py +13 -10
- fractal_server/images/__init__.py +4 -0
- fractal_server/images/models.py +136 -0
- fractal_server/images/tools.py +84 -0
- fractal_server/main.py +11 -3
- fractal_server/migrations/env.py +0 -2
- fractal_server/migrations/versions/5bf02391cfef_v2.py +245 -0
- fractal_server/tasks/__init__.py +0 -5
- fractal_server/tasks/endpoint_operations.py +13 -19
- fractal_server/tasks/utils.py +35 -0
- fractal_server/tasks/{_TaskCollectPip.py → v1/_TaskCollectPip.py} +3 -3
- fractal_server/tasks/v1/__init__.py +0 -0
- fractal_server/tasks/{background_operations.py → v1/background_operations.py} +20 -52
- fractal_server/tasks/v1/get_collection_data.py +14 -0
- fractal_server/tasks/v2/_TaskCollectPip.py +103 -0
- fractal_server/tasks/v2/__init__.py +0 -0
- fractal_server/tasks/v2/background_operations.py +381 -0
- fractal_server/tasks/v2/get_collection_data.py +14 -0
- fractal_server/urls.py +13 -0
- {fractal_server-1.4.6.dist-info → fractal_server-2.0.0.dist-info}/METADATA +11 -12
- fractal_server-2.0.0.dist-info/RECORD +169 -0
- fractal_server/app/runner/_slurm/.gitignore +0 -2
- fractal_server/app/runner/common.py +0 -307
- fractal_server/app/schemas/json_schemas/manifest.json +0 -81
- fractal_server-1.4.6.dist-info/RECORD +0 -97
- /fractal_server/app/runner/{_slurm → executors/slurm}/remote.py +0 -0
- /fractal_server/app/runner/{_local → v1/_local}/executor.py +0 -0
- {fractal_server-1.4.6.dist-info → fractal_server-2.0.0.dist-info}/LICENSE +0 -0
- {fractal_server-1.4.6.dist-info → fractal_server-2.0.0.dist-info}/WHEEL +0 -0
- {fractal_server-1.4.6.dist-info → fractal_server-2.0.0.dist-info}/entry_points.txt +0 -0
fractal_server/app/schemas/v2/workflowtask.py
ADDED
@@ -0,0 +1,208 @@
+from enum import Enum
+from typing import Any
+from typing import Optional
+
+from pydantic import BaseModel
+from pydantic import Extra
+from pydantic import Field
+from pydantic import root_validator
+from pydantic import validator
+
+from .._validators import valdictkeys
+from .._validators import valint
+from ..v1.task import TaskExportV1
+from ..v1.task import TaskImportV1
+from .task import TaskExportV2
+from .task import TaskImportV2
+from .task import TaskLegacyReadV2
+from .task import TaskReadV2
+from fractal_server.images import Filters
+
+RESERVED_ARGUMENTS = {"zarr_dir", "zarr_url", "zarr_urls", "init_args"}
+
+
+class WorkflowTaskStatusTypeV2(str, Enum):
+    """
+    Define the available values for the status of a `WorkflowTask`.
+
+    This model is used within the `Dataset.history` attribute, which is
+    constructed in the runner and then used in the API (e.g. in the
+    `api/v2/project/{project_id}/dataset/{dataset_id}/status` endpoint).
+
+    Attributes:
+        SUBMITTED: The `WorkflowTask` is part of a running job.
+        DONE: The most-recent execution of this `WorkflowTask` was successful.
+        FAILED: The most-recent execution of this `WorkflowTask` failed.
+    """
+
+    SUBMITTED = "submitted"
+    DONE = "done"
+    FAILED = "failed"
+
+
+class WorkflowTaskCreateV2(BaseModel, extra=Extra.forbid):
+
+    meta_non_parallel: Optional[dict[str, Any]]
+    meta_parallel: Optional[dict[str, Any]]
+    args_non_parallel: Optional[dict[str, Any]]
+    args_parallel: Optional[dict[str, Any]]
+    order: Optional[int]
+    input_filters: Filters = Field(default_factory=Filters)
+
+    is_legacy_task: bool = False
+
+    # Validators
+    _meta_non_parallel = validator("meta_non_parallel", allow_reuse=True)(
+        valdictkeys("meta_non_parallel")
+    )
+    _meta_parallel = validator("meta_parallel", allow_reuse=True)(
+        valdictkeys("meta_parallel")
+    )
+    _order = validator("order", allow_reuse=True)(valint("order", min_val=0))
+
+    @validator("args_non_parallel")
+    def validate_args_non_parallel(cls, value):
+        if value is None:
+            return
+        valdictkeys("args_non_parallel")(value)
+        args_keys = set(value.keys())
+        intersect_keys = RESERVED_ARGUMENTS.intersection(args_keys)
+        if intersect_keys:
+            raise ValueError(
+                "`args` contains the following forbidden keys: "
+                f"{intersect_keys}"
+            )
+        return value
+
+    @validator("args_parallel")
+    def validate_args_parallel(cls, value):
+        if value is None:
+            return
+        valdictkeys("args_parallel")(value)
+        args_keys = set(value.keys())
+        intersect_keys = RESERVED_ARGUMENTS.intersection(args_keys)
+        if intersect_keys:
+            raise ValueError(
+                "`args` contains the following forbidden keys: "
+                f"{intersect_keys}"
+            )
+        return value
+
+    @root_validator
+    def validate_legacy_task(cls, values):
+        if values["is_legacy_task"] and (
+            values.get("meta_non_parallel") is not None
+            or values.get("args_non_parallel") is not None
+        ):
+            raise ValueError(
+                "If Task is legacy, 'args_non_parallel' and 'meta_non_parallel"
+                "must be None"
+            )
+        return values
+
+
+class WorkflowTaskReadV2(BaseModel):
+
+    id: int
+
+    workflow_id: int
+    order: Optional[int]
+    meta_non_parallel: Optional[dict[str, Any]]
+    meta_parallel: Optional[dict[str, Any]]
+
+    args_non_parallel: Optional[dict[str, Any]]
+    args_parallel: Optional[dict[str, Any]]
+
+    input_filters: Filters
+
+    is_legacy_task: bool
+    task_type: str
+    task_id: Optional[int]
+    task: Optional[TaskReadV2]
+    task_legacy_id: Optional[int]
+    task_legacy: Optional[TaskLegacyReadV2]
+
+
+class WorkflowTaskUpdateV2(BaseModel):
+
+    meta_non_parallel: Optional[dict[str, Any]]
+    meta_parallel: Optional[dict[str, Any]]
+    args_non_parallel: Optional[dict[str, Any]]
+    args_parallel: Optional[dict[str, Any]]
+    input_filters: Optional[Filters]
+
+    # Validators
+    _meta_non_parallel = validator("meta_non_parallel", allow_reuse=True)(
+        valdictkeys("meta_non_parallel")
+    )
+    _meta_parallel = validator("meta_parallel", allow_reuse=True)(
+        valdictkeys("meta_parallel")
+    )
+
+    @validator("args_non_parallel")
+    def validate_args_non_parallel(cls, value):
+        if value is None:
+            return
+        valdictkeys("args_non_parallel")(value)
+        args_keys = set(value.keys())
+        intersect_keys = RESERVED_ARGUMENTS.intersection(args_keys)
+        if intersect_keys:
+            raise ValueError(
+                "`args` contains the following forbidden keys: "
+                f"{intersect_keys}"
+            )
+        return value
+
+    @validator("args_parallel")
+    def validate_args_parallel(cls, value):
+        if value is None:
+            return
+        valdictkeys("args_parallel")(value)
+        args_keys = set(value.keys())
+        intersect_keys = RESERVED_ARGUMENTS.intersection(args_keys)
+        if intersect_keys:
+            raise ValueError(
+                "`args` contains the following forbidden keys: "
+                f"{intersect_keys}"
+            )
+        return value
+
+
+class WorkflowTaskImportV2(BaseModel):
+
+    meta_non_parallel: Optional[dict[str, Any]] = None
+    meta_parallel: Optional[dict[str, Any]] = None
+    args_non_parallel: Optional[dict[str, Any]] = None
+    args_parallel: Optional[dict[str, Any]] = None
+
+    input_filters: Optional[Filters] = None
+
+    is_legacy_task: bool = False
+    task: Optional[TaskImportV2] = None
+    task_legacy: Optional[TaskImportV1] = None
+
+    _meta_non_parallel = validator("meta_non_parallel", allow_reuse=True)(
+        valdictkeys("meta_non_parallel")
+    )
+    _meta_parallel = validator("meta_parallel", allow_reuse=True)(
+        valdictkeys("meta_parallel")
+    )
+    _args_non_parallel = validator("args_non_parallel", allow_reuse=True)(
+        valdictkeys("args_non_parallel")
+    )
+    _args_parallel = validator("args_parallel", allow_reuse=True)(
+        valdictkeys("args_parallel")
+    )
+
+
+class WorkflowTaskExportV2(BaseModel):
+
+    meta_non_parallel: Optional[dict[str, Any]] = None
+    meta_parallel: Optional[dict[str, Any]] = None
+    args_non_parallel: Optional[dict[str, Any]] = None
+    args_parallel: Optional[dict[str, Any]] = None
+    input_filters: Filters = Field(default_factory=Filters)
+
+    is_legacy_task: bool = False
+    task: Optional[TaskExportV2]
+    task_legacy: Optional[TaskExportV1]
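
Note: the `RESERVED_ARGUMENTS` set above means user-supplied `args_parallel` / `args_non_parallel` dictionaries cannot override arguments that the V2 runner injects itself. A minimal sketch of the intended behaviour (assumes fractal-server 2.0.0 with pydantic v1; the payload values are invented):

# Sketch: WorkflowTaskCreateV2 accepts ordinary task arguments but rejects
# reserved keys such as "zarr_url" (argument names below are made up).
from fractal_server.app.schemas.v2.workflowtask import WorkflowTaskCreateV2

ok = WorkflowTaskCreateV2(args_parallel={"threshold": 10})
print(ok.args_parallel)  # {'threshold': 10}

try:
    WorkflowTaskCreateV2(args_parallel={"zarr_url": "/tmp/plate.zarr"})
except ValueError as err:  # pydantic v1 ValidationError subclasses ValueError
    print(err)  # error message mentions the forbidden key 'zarr_url'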
fractal_server/config.py
CHANGED
@@ -347,6 +347,14 @@ class Settings(BaseSettings):
     with the same path and query parameters.
     """

+    FRACTAL_RUNNER_TASKS_INCLUDE_IMAGE: str = (
+        "Copy OME-Zarr structure;Convert Metadata Components from 2D to 3D"
+    )
+    """
+    `;`-separated list of names for task that require the `metadata["image"]`
+    attribute in their input-arguments JSON file.
+    """
+
     ###########################################################################
     # BUSINESS LOGIC
     ###########################################################################
@@ -382,12 +390,11 @@

         info = f"FRACTAL_RUNNER_BACKEND={self.FRACTAL_RUNNER_BACKEND}"
         if self.FRACTAL_RUNNER_BACKEND == "slurm":
-
-
-
-
-
-            )
+
+            from fractal_server.app.runner.executors.slurm._slurm_config import (  # noqa: E501
+                load_slurm_config_file,
+            )
+
             if not self.FRACTAL_SLURM_CONFIG_FILE:
                 raise FractalConfigurationError(
                     f"Must set FRACTAL_SLURM_CONFIG_FILE when {info}"
@@ -399,10 +406,6 @@
                     f"{self.FRACTAL_SLURM_CONFIG_FILE} not found."
                 )

-            from fractal_server.app.runner._slurm._slurm_config import (
-                load_slurm_config_file,
-            )
-
             load_slurm_config_file(self.FRACTAL_SLURM_CONFIG_FILE)
             if not shutil.which("sbatch"):
                 raise FractalConfigurationError(
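
Note: the new `FRACTAL_RUNNER_TASKS_INCLUDE_IMAGE` value is a plain `;`-separated string, so splitting it yields the task names. A minimal sketch (the default string is copied from the diff; the parsing below is illustrative, not necessarily the exact helper used internally):

# Split the `;`-separated setting into individual task names (illustrative).
FRACTAL_RUNNER_TASKS_INCLUDE_IMAGE = (
    "Copy OME-Zarr structure;Convert Metadata Components from 2D to 3D"
)
task_names = [
    name for name in FRACTAL_RUNNER_TASKS_INCLUDE_IMAGE.split(";") if name
]
print(task_names)
# ['Copy OME-Zarr structure', 'Convert Metadata Components from 2D to 3D']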
fractal_server/images/models.py
ADDED
@@ -0,0 +1,136 @@
+from typing import Any
+from typing import Optional
+from typing import Union
+
+from pydantic import BaseModel
+from pydantic import Field
+from pydantic import validator
+
+from fractal_server.app.schemas._validators import valdictkeys
+from fractal_server.urls import normalize_url
+
+
+class SingleImageBase(BaseModel):
+    """
+    Base for SingleImage and SingleImageTaskOutput.
+
+    Attributes:
+        zarr_url:
+        origin:
+        attributes:
+        types:
+    """
+
+    zarr_url: str
+    origin: Optional[str] = None
+
+    attributes: dict[str, Any] = Field(default_factory=dict)
+    types: dict[str, bool] = Field(default_factory=dict)
+
+    # Validators
+    _attributes = validator("attributes", allow_reuse=True)(
+        valdictkeys("attributes")
+    )
+    _types = validator("types", allow_reuse=True)(valdictkeys("types"))
+
+    @validator("zarr_url")
+    def normalize_zarr_url(cls, v: str) -> str:
+        return normalize_url(v)
+
+    @validator("origin")
+    def normalize_orig(cls, v: Optional[str]) -> Optional[str]:
+        if v is not None:
+            return normalize_url(v)
+
+
+class SingleImageTaskOutput(SingleImageBase):
+    """
+    `SingleImageBase`, with scalar `attributes` values (`None` included).
+    """
+
+    @validator("attributes")
+    def validate_attributes(
+        cls, v: dict[str, Any]
+    ) -> dict[str, Union[int, float, str, bool, None]]:
+        for key, value in v.items():
+            if not isinstance(value, (int, float, str, bool, type(None))):
+                raise ValueError(
+                    f"SingleImageTaskOutput.attributes[{key}] must be a "
+                    "scalar (int, float, str or bool). "
+                    f"Given {value} ({type(value)})"
+                )
+        return v
+
+
+class SingleImage(SingleImageBase):
+    """
+    `SingleImageBase`, with scalar `attributes` values (`None` excluded).
+    """
+
+    @validator("attributes")
+    def validate_attributes(
+        cls, v: dict[str, Any]
+    ) -> dict[str, Union[int, float, str, bool]]:
+        for key, value in v.items():
+            if not isinstance(value, (int, float, str, bool)):
+                raise ValueError(
+                    f"SingleImage.attributes[{key}] must be a scalar "
+                    f"(int, float, str or bool). Given {value} ({type(value)})"
+                )
+        return v
+
+
+class SingleImageUpdate(BaseModel):
+    zarr_url: str
+    attributes: Optional[dict[str, Any]]
+    types: Optional[dict[str, bool]]
+
+    @validator("zarr_url")
+    def normalize_zarr_url(cls, v: str) -> str:
+        return normalize_url(v)
+
+    @validator("attributes")
+    def validate_attributes(
+        cls, v: dict[str, Any]
+    ) -> dict[str, Union[int, float, str, bool]]:
+        if v is not None:
+            # validate keys
+            valdictkeys("attributes")(v)
+            # validate values
+            for key, value in v.items():
+                if not isinstance(value, (int, float, str, bool)):
+                    raise ValueError(
+                        f"SingleImageUpdate.attributes[{key}] must be a scalar"
+                        " (int, float, str or bool). "
+                        f"Given {value} ({type(value)})"
+                    )
+        return v
+
+    _types = validator("types", allow_reuse=True)(valdictkeys("types"))
+
+
+class Filters(BaseModel):
+    attributes: dict[str, Any] = Field(default_factory=dict)
+    types: dict[str, bool] = Field(default_factory=dict)
+
+    class Config:
+        extra = "forbid"
+
+    # Validators
+    _attributes = validator("attributes", allow_reuse=True)(
+        valdictkeys("attributes")
+    )
+    _types = validator("types", allow_reuse=True)(valdictkeys("types"))
+
+    @validator("attributes")
+    def validate_attributes(
+        cls, v: dict[str, Any]
+    ) -> dict[str, Union[int, float, str, bool, None]]:
+        for key, value in v.items():
+            if not isinstance(value, (int, float, str, bool, type(None))):
+                raise ValueError(
+                    f"Filters.attributes[{key}] must be a scalar "
+                    "(int, float, str, bool, or None). "
+                    f"Given {value} ({type(value)})"
+                )
+        return v
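
Note: a minimal usage sketch for the new image models (zarr paths and attribute names are invented; assumes fractal-server 2.0.0 with pydantic v1):

# SingleImage accepts scalar attribute values and normalizes zarr_url via
# normalize_url (defined in fractal_server/urls.py, not shown in this hunk).
from fractal_server.images.models import SingleImage

img = SingleImage(
    zarr_url="/data/plate.zarr/B/03/0",
    attributes={"well": "B03", "scale": 0.5},
    types={"is_3D": False},
)

try:
    SingleImage(zarr_url="/data/plate.zarr/B/03/0", attributes={"roi": [0, 1]})
except ValueError as err:  # non-scalar attribute values are rejected
    print(err)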
fractal_server/images/tools.py
ADDED
@@ -0,0 +1,84 @@
+from copy import copy
+from typing import Any
+from typing import Literal
+from typing import Optional
+from typing import Union
+
+from fractal_server.images import Filters
+
+
+ImageSearch = dict[Literal["image", "index"], Union[int, dict[str, Any]]]
+
+
+def find_image_by_zarr_url(
+    *,
+    images: list[dict[str, Any]],
+    zarr_url: str,
+) -> Optional[ImageSearch]:
+    """
+    Return a copy of the image with a given zarr_url, and its positional index.
+
+    Arguments:
+        images: List of images.
+        zarr_url: Path that the returned image must have.
+
+    Returns:
+        The first image from `images` which has zarr_url equal to `zarr_url`.
+    """
+    image_urls = [img["zarr_url"] for img in images]
+    try:
+        ind = image_urls.index(zarr_url)
+    except ValueError:
+        return None
+    return dict(image=copy(images[ind]), index=ind)
+
+
+def match_filter(image: dict[str, Any], filters: Filters) -> bool:
+    """
+    Find whether an image matches a filter set.
+
+    Arguments:
+        image: A single image.
+        filters: A set of filters.
+
+    Returns:
+        Whether the image matches the filter set.
+    """
+    # Verify match with types (using a False default)
+    for key, value in filters.types.items():
+        if image["types"].get(key, False) != value:
+            return False
+    # Verify match with attributes (only for non-None filters)
+    for key, value in filters.attributes.items():
+        if value is None:
+            continue
+        if image["attributes"].get(key) != value:
+            return False
+    return True
+
+
+def filter_image_list(
+    images: list[dict[str, Any]],
+    filters: Filters,
+) -> list[dict[str, Any]]:
+    """
+    Compute a sublist with images that match a filter set.
+
+    Arguments:
+        images: A list of images.
+        filters: A set of filters.
+
+    Returns:
+        List of the `images` elements which match the filter set.
+    """
+
+    # When no filter is provided, return all images
+    if filters.attributes == {} and filters.types == {}:
+        return images
+
+    filtered_images = [
+        copy(this_image)
+        for this_image in images
+        if match_filter(this_image, filters=filters)
+    ]
+    return filtered_images
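
Note: a short usage sketch for the helpers above (the image dictionaries and filter keys are invented for illustration):

from fractal_server.images import Filters
from fractal_server.images.tools import filter_image_list, find_image_by_zarr_url

images = [
    dict(
        zarr_url="/data/plate.zarr/B/03/0",
        attributes={"well": "B03"},
        types={"is_3D": True},
    ),
    dict(
        zarr_url="/data/plate.zarr/B/03/0_mip",
        attributes={"well": "B03"},
        types={"is_3D": False},
    ),
]

# Keep only non-3D images; empty Filters() would return the list unchanged
selected = filter_image_list(images, filters=Filters(types={"is_3D": False}))
assert [img["zarr_url"] for img in selected] == ["/data/plate.zarr/B/03/0_mip"]

# Look up an image (and its positional index) by zarr_url; None if absent
match = find_image_by_zarr_url(images=images, zarr_url="/data/plate.zarr/B/03/0")
assert match is not None and match["index"] == 0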
fractal_server/main.py
CHANGED
@@ -32,13 +32,21 @@ def collect_routers(app: FastAPI) -> None:
     """
     from .app.routes.api import router_api
     from .app.routes.api.v1 import router_api_v1
-    from .app.routes.
+    from .app.routes.api.v2 import router_api_v2
+    from .app.routes.admin.v1 import router_admin_v1
+    from .app.routes.admin.v2 import router_admin_v2
     from .app.routes.auth import router_auth

     app.include_router(router_api, prefix="/api")
     app.include_router(router_api_v1, prefix="/api/v1")
-    app.include_router(
-    app.include_router(
+    app.include_router(router_api_v2, prefix="/api/v2")
+    app.include_router(
+        router_admin_v1, prefix="/admin/v1", tags=["V1 Admin area"]
+    )
+    app.include_router(
+        router_admin_v2, prefix="/admin/v2", tags=["V2 Admin area"]
+    )
+    app.include_router(router_auth, prefix="/auth", tags=["Authentication"])


 def check_settings() -> None:
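
Note: client-side, this change means the V2 API and the V2 admin area live under their own prefixes next to the existing V1 ones. A rough sketch (server address, endpoint paths, and authentication handling are illustrative only):

import httpx

BASE = "http://localhost:8000"
client = httpx.Client(base_url=BASE)  # a real deployment would add auth headers

client.get("/api/v1/project/")    # V1 API, unchanged prefix
client.get("/api/v2/project/")    # V2 API, new prefix
client.get("/admin/v2/project/")  # V2 admin area, new prefix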
fractal_server/migrations/env.py
CHANGED
@@ -58,7 +58,6 @@ def run_migrations_offline() -> None:
         target_metadata=target_metadata,
         literal_binds=True,
         dialect_opts={"paramstyle": "named"},
-        render_as_batch=True,
     )

     with context.begin_transaction():
@@ -69,7 +68,6 @@ def do_run_migrations(connection: Connection) -> None:
     context.configure(
         connection=connection,
         target_metadata=target_metadata,
-        render_as_batch=True,
     )

     with context.begin_transaction():