fractal-server 1.4.9__py3-none-any.whl → 2.0.0a0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fractal_server/__init__.py +1 -1
- fractal_server/app/models/__init__.py +4 -7
- fractal_server/app/models/linkuserproject.py +9 -0
- fractal_server/app/models/security.py +6 -0
- fractal_server/app/models/state.py +1 -1
- fractal_server/app/models/v1/__init__.py +10 -0
- fractal_server/app/models/{dataset.py → v1/dataset.py} +5 -5
- fractal_server/app/models/{job.py → v1/job.py} +5 -5
- fractal_server/app/models/{project.py → v1/project.py} +5 -5
- fractal_server/app/models/{task.py → v1/task.py} +7 -2
- fractal_server/app/models/{workflow.py → v1/workflow.py} +5 -5
- fractal_server/app/models/v2/__init__.py +20 -0
- fractal_server/app/models/v2/dataset.py +55 -0
- fractal_server/app/models/v2/job.py +51 -0
- fractal_server/app/models/v2/project.py +31 -0
- fractal_server/app/models/v2/task.py +93 -0
- fractal_server/app/models/v2/workflow.py +43 -0
- fractal_server/app/models/v2/workflowtask.py +90 -0
- fractal_server/app/routes/{admin.py → admin/v1.py} +42 -42
- fractal_server/app/routes/admin/v2.py +275 -0
- fractal_server/app/routes/api/v1/__init__.py +7 -7
- fractal_server/app/routes/api/v1/_aux_functions.py +2 -2
- fractal_server/app/routes/api/v1/dataset.py +44 -37
- fractal_server/app/routes/api/v1/job.py +12 -12
- fractal_server/app/routes/api/v1/project.py +23 -21
- fractal_server/app/routes/api/v1/task.py +24 -14
- fractal_server/app/routes/api/v1/task_collection.py +16 -14
- fractal_server/app/routes/api/v1/workflow.py +24 -24
- fractal_server/app/routes/api/v1/workflowtask.py +10 -10
- fractal_server/app/routes/api/v2/__init__.py +28 -0
- fractal_server/app/routes/api/v2/_aux_functions.py +497 -0
- fractal_server/app/routes/api/v2/apply.py +220 -0
- fractal_server/app/routes/api/v2/dataset.py +310 -0
- fractal_server/app/routes/api/v2/images.py +212 -0
- fractal_server/app/routes/api/v2/job.py +200 -0
- fractal_server/app/routes/api/v2/project.py +205 -0
- fractal_server/app/routes/api/v2/task.py +222 -0
- fractal_server/app/routes/api/v2/task_collection.py +229 -0
- fractal_server/app/routes/api/v2/workflow.py +398 -0
- fractal_server/app/routes/api/v2/workflowtask.py +269 -0
- fractal_server/app/routes/aux/_job.py +1 -1
- fractal_server/app/runner/async_wrap.py +27 -0
- fractal_server/app/runner/exceptions.py +129 -0
- fractal_server/app/runner/executors/local/__init__.py +3 -0
- fractal_server/app/runner/{_local → executors/local}/executor.py +2 -2
- fractal_server/app/runner/executors/slurm/__init__.py +3 -0
- fractal_server/app/runner/{_slurm → executors/slurm}/_batching.py +1 -1
- fractal_server/app/runner/executors/slurm/_check_jobs_status.py +72 -0
- fractal_server/app/runner/{_slurm → executors/slurm}/_executor_wait_thread.py +3 -4
- fractal_server/app/runner/{_slurm → executors/slurm}/_slurm_config.py +3 -152
- fractal_server/app/runner/{_slurm → executors/slurm}/_subprocess_run_as_user.py +1 -1
- fractal_server/app/runner/{_slurm → executors/slurm}/executor.py +9 -9
- fractal_server/app/runner/filenames.py +6 -0
- fractal_server/app/runner/set_start_and_last_task_index.py +39 -0
- fractal_server/app/runner/task_files.py +105 -0
- fractal_server/app/runner/{__init__.py → v1/__init__.py} +36 -49
- fractal_server/app/runner/{_common.py → v1/_common.py} +13 -120
- fractal_server/app/runner/{_local → v1/_local}/__init__.py +6 -6
- fractal_server/app/runner/{_local → v1/_local}/_local_config.py +6 -7
- fractal_server/app/runner/{_local → v1/_local}/_submit_setup.py +1 -5
- fractal_server/app/runner/v1/_slurm/__init__.py +310 -0
- fractal_server/app/runner/{_slurm → v1/_slurm}/_submit_setup.py +3 -9
- fractal_server/app/runner/v1/_slurm/get_slurm_config.py +163 -0
- fractal_server/app/runner/v1/common.py +117 -0
- fractal_server/app/runner/{handle_failed_job.py → v1/handle_failed_job.py} +8 -8
- fractal_server/app/runner/v2/__init__.py +337 -0
- fractal_server/app/runner/v2/_local/__init__.py +169 -0
- fractal_server/app/runner/v2/_local/_local_config.py +118 -0
- fractal_server/app/runner/v2/_local/_submit_setup.py +52 -0
- fractal_server/app/runner/v2/_slurm/__init__.py +157 -0
- fractal_server/app/runner/v2/_slurm/_submit_setup.py +83 -0
- fractal_server/app/runner/v2/_slurm/get_slurm_config.py +179 -0
- fractal_server/app/runner/v2/components.py +5 -0
- fractal_server/app/runner/v2/deduplicate_list.py +24 -0
- fractal_server/app/runner/v2/handle_failed_job.py +156 -0
- fractal_server/app/runner/v2/merge_outputs.py +41 -0
- fractal_server/app/runner/v2/runner.py +264 -0
- fractal_server/app/runner/v2/runner_functions.py +339 -0
- fractal_server/app/runner/v2/runner_functions_low_level.py +134 -0
- fractal_server/app/runner/v2/task_interface.py +43 -0
- fractal_server/app/runner/v2/v1_compat.py +21 -0
- fractal_server/app/schemas/__init__.py +4 -42
- fractal_server/app/schemas/v1/__init__.py +42 -0
- fractal_server/app/schemas/{applyworkflow.py → v1/applyworkflow.py} +18 -18
- fractal_server/app/schemas/{dataset.py → v1/dataset.py} +30 -30
- fractal_server/app/schemas/{dumps.py → v1/dumps.py} +8 -8
- fractal_server/app/schemas/{manifest.py → v1/manifest.py} +5 -5
- fractal_server/app/schemas/{project.py → v1/project.py} +9 -9
- fractal_server/app/schemas/{task.py → v1/task.py} +12 -12
- fractal_server/app/schemas/{task_collection.py → v1/task_collection.py} +7 -7
- fractal_server/app/schemas/{workflow.py → v1/workflow.py} +38 -38
- fractal_server/app/schemas/v2/__init__.py +34 -0
- fractal_server/app/schemas/v2/dataset.py +88 -0
- fractal_server/app/schemas/v2/dumps.py +87 -0
- fractal_server/app/schemas/v2/job.py +113 -0
- fractal_server/app/schemas/v2/manifest.py +109 -0
- fractal_server/app/schemas/v2/project.py +36 -0
- fractal_server/app/schemas/v2/task.py +121 -0
- fractal_server/app/schemas/v2/task_collection.py +105 -0
- fractal_server/app/schemas/v2/workflow.py +78 -0
- fractal_server/app/schemas/v2/workflowtask.py +118 -0
- fractal_server/config.py +5 -10
- fractal_server/images/__init__.py +50 -0
- fractal_server/images/tools.py +86 -0
- fractal_server/main.py +11 -3
- fractal_server/migrations/versions/4b35c5cefbe3_tmp_is_v2_compatible.py +39 -0
- fractal_server/migrations/versions/56af171b0159_v2.py +217 -0
- fractal_server/migrations/versions/876f28db9d4e_tmp_split_task_and_wftask_meta.py +68 -0
- fractal_server/migrations/versions/974c802f0dd0_tmp_workflowtaskv2_type_in_db.py +37 -0
- fractal_server/migrations/versions/9cd305cd6023_tmp_workflowtaskv2.py +40 -0
- fractal_server/migrations/versions/a6231ed6273c_tmp_args_schemas_in_taskv2.py +42 -0
- fractal_server/migrations/versions/b9e9eed9d442_tmp_taskv2_type.py +37 -0
- fractal_server/migrations/versions/e3e639454d4b_tmp_make_task_meta_non_optional.py +50 -0
- fractal_server/tasks/__init__.py +0 -5
- fractal_server/tasks/endpoint_operations.py +13 -19
- fractal_server/tasks/utils.py +35 -0
- fractal_server/tasks/{_TaskCollectPip.py → v1/_TaskCollectPip.py} +3 -3
- fractal_server/tasks/{background_operations.py → v1/background_operations.py} +18 -50
- fractal_server/tasks/v1/get_collection_data.py +14 -0
- fractal_server/tasks/v2/_TaskCollectPip.py +103 -0
- fractal_server/tasks/v2/background_operations.py +382 -0
- fractal_server/tasks/v2/get_collection_data.py +14 -0
- {fractal_server-1.4.9.dist-info → fractal_server-2.0.0a0.dist-info}/METADATA +3 -4
- fractal_server-2.0.0a0.dist-info/RECORD +166 -0
- fractal_server/app/runner/_slurm/.gitignore +0 -2
- fractal_server/app/runner/_slurm/__init__.py +0 -150
- fractal_server/app/runner/common.py +0 -311
- fractal_server-1.4.9.dist-info/RECORD +0 -97
- /fractal_server/app/runner/{_slurm → executors/slurm}/remote.py +0 -0
- {fractal_server-1.4.9.dist-info → fractal_server-2.0.0a0.dist-info}/LICENSE +0 -0
- {fractal_server-1.4.9.dist-info → fractal_server-2.0.0a0.dist-info}/WHEEL +0 -0
- {fractal_server-1.4.9.dist-info → fractal_server-2.0.0a0.dist-info}/entry_points.txt +0 -0
@@ -0,0 +1,78 @@
|
|
1
|
+
from datetime import datetime
|
2
|
+
from typing import Optional
|
3
|
+
|
4
|
+
from pydantic import BaseModel
|
5
|
+
from pydantic import validator
|
6
|
+
|
7
|
+
from .._validators import valstr
|
8
|
+
from .._validators import valutc
|
9
|
+
from ..v1.project import ProjectReadV1
|
10
|
+
from .workflowtask import WorkflowTaskExportV2
|
11
|
+
from .workflowtask import WorkflowTaskImportV2
|
12
|
+
from .workflowtask import WorkflowTaskReadV2
|
13
|
+
|
14
|
+
|
15
|
+
class WorkflowCreateV2(BaseModel):
    """
    Request schema for creating a V2 `Workflow`.

    Attributes:
        name: Workflow name; validated via `valstr` (see `.._validators`).
    """

    name: str

    # Validators
    _name = validator("name", allow_reuse=True)(valstr("name"))
|
21
|
+
|
22
|
+
|
23
|
+
class WorkflowReadV2(BaseModel):
    """
    Response schema for a V2 `Workflow`.

    NOTE(review): `project` is typed with the V1 schema (`ProjectReadV1`)
    even though this is a V2 workflow — confirm this is intentional.

    Attributes:
        id: Workflow primary key.
        name: Workflow name.
        project_id: ID of the owning project.
        task_list: Ordered list of the workflow's tasks.
        project: The owning project.
        timestamp_created: Creation timestamp; normalized via `valutc`.
    """

    id: int
    name: str
    project_id: int
    task_list: list[WorkflowTaskReadV2]
    project: ProjectReadV1
    timestamp_created: datetime

    _timestamp_created = validator("timestamp_created", allow_reuse=True)(
        valutc("timestamp_created")
    )
|
35
|
+
|
36
|
+
|
37
|
+
class WorkflowUpdateV2(BaseModel):
    """
    Request schema for updating a V2 `Workflow`.

    Attributes:
        name: New workflow name (optional).
        reordered_workflowtask_ids: Complete list of `WorkflowTask` IDs in
            the desired new order (optional).
    """

    name: Optional[str]
    reordered_workflowtask_ids: Optional[list[int]]

    # Validators
    _name = validator("name", allow_reuse=True)(valstr("name"))

    @validator("reordered_workflowtask_ids")
    def check_positive_and_unique(cls, value):
        """
        Reject negative or repeated IDs.

        Pydantic also calls this validator when the client explicitly sends
        `null` for this Optional field; guard against `None` before
        iterating (previously this raised a `TypeError` instead of being
        accepted as "no reordering requested").
        """
        if value is None:
            return value
        if any(i < 0 for i in value):
            raise ValueError("Negative `id` in `reordered_workflowtask_ids`")
        if len(value) != len(set(value)):
            raise ValueError("`reordered_workflowtask_ids` has repetitions")
        return value
|
52
|
+
|
53
|
+
|
54
|
+
class WorkflowImportV2(BaseModel):
    """
    Class for `Workflow` import.

    Attributes:
        name: Name of the workflow being imported.
        task_list: The workflow's tasks, as import schemas.
    """

    name: str
    task_list: list[WorkflowTaskImportV2]

    # Validators
    _name = validator("name", allow_reuse=True)(valstr("name"))
|
67
|
+
|
68
|
+
|
69
|
+
class WorkflowExportV2(BaseModel):
    """
    Class for `Workflow` export.

    Attributes:
        name: Name of the exported workflow.
        task_list: The workflow's tasks, as export schemas.
    """

    name: str
    task_list: list[WorkflowTaskExportV2]
|
@@ -0,0 +1,118 @@
|
|
1
|
+
from enum import Enum
|
2
|
+
from typing import Any
|
3
|
+
from typing import Optional
|
4
|
+
|
5
|
+
from pydantic import BaseModel
|
6
|
+
from pydantic import Field
|
7
|
+
from pydantic import validator
|
8
|
+
|
9
|
+
from .._validators import valint
|
10
|
+
from ..v1.task import TaskExportV1
|
11
|
+
from ..v1.task import TaskImportV1
|
12
|
+
from ..v1.task import TaskReadV1
|
13
|
+
from .task import TaskExportV2
|
14
|
+
from .task import TaskImportV2
|
15
|
+
from .task import TaskReadV2
|
16
|
+
from fractal_server.images import Filters
|
17
|
+
|
18
|
+
|
19
|
+
class WorkflowTaskStatusTypeV2(str, Enum):
    """
    Define the available values for the status of a `WorkflowTask`.

    This model is used within the `Dataset.history` attribute, which is
    constructed in the runner and then used in the API (e.g. in the
    `api/v2/project/{project_id}/dataset/{dataset_id}/status` endpoint).

    Subclassing `str` makes members JSON-serializable as plain strings.

    Attributes:
        SUBMITTED: The `WorkflowTask` is part of a running job.
        DONE: The most-recent execution of this `WorkflowTask` was successful.
        FAILED: The most-recent execution of this `WorkflowTask` failed.
    """

    SUBMITTED = "submitted"
    DONE = "done"
    FAILED = "failed"
|
36
|
+
|
37
|
+
|
38
|
+
class WorkflowTaskCreateV2(BaseModel):
    """
    Request schema for inserting a `WorkflowTask` into a V2 `Workflow`.

    Attributes:
        meta_parallel: Meta overrides for the parallel part of the task.
        meta_non_parallel: Meta overrides for the non-parallel part.
        args_non_parallel: Argument overrides for the non-parallel part.
        args_parallel: Argument overrides for the parallel part.
        order: Optional insertion position (must be >= 0).
        input_filters: Image filters applied before running the task.
        is_legacy_task: Whether this entry wraps a legacy (V1) task.
    """

    meta_parallel: Optional[dict[str, Any]]
    meta_non_parallel: Optional[dict[str, Any]]
    args_non_parallel: Optional[dict[str, Any]]
    args_parallel: Optional[dict[str, Any]]
    order: Optional[int]
    input_filters: Filters = Field(default_factory=Filters)

    is_legacy_task: bool = False

    # Validators

    _order = validator("order", allow_reuse=True)(valint("order", min_val=0))

    @validator("is_legacy_task")
    def check_legacy_task_args(cls, v, values):
        # Resolves the previous FIXME: legacy (V1) tasks have no
        # parallel/non-parallel split, so `args_non_parallel` must be unset.
        # (`is_legacy_task` is declared after `args_non_parallel`, so the
        # latter is available in `values` here.)
        if v and values.get("args_non_parallel") is not None:
            raise ValueError(
                "If `is_legacy_task` is set, `args_non_parallel` must be None"
            )
        return v
|
53
|
+
|
54
|
+
|
55
|
+
class WorkflowTaskReadV2(BaseModel):
    """
    Response schema for a V2 `WorkflowTask`.

    Depending on `is_legacy_task`, either the `task_id`/`task` pair or the
    `task_legacy_id`/`task_legacy` pair is expected to be populated
    (NOTE(review): not enforced by this schema — confirm the DB layer
    guarantees it).
    """

    id: int

    workflow_id: int
    order: Optional[int]
    meta_parallel: Optional[dict[str, Any]]
    meta_non_parallel: Optional[dict[str, Any]]

    args_non_parallel: Optional[dict[str, Any]]
    args_parallel: Optional[dict[str, Any]]

    input_filters: Filters

    is_legacy_task: bool
    task_type: str
    task_id: Optional[int]
    task: Optional[TaskReadV2]
    task_legacy_id: Optional[int]
    task_legacy: Optional[TaskReadV1]
|
75
|
+
|
76
|
+
|
77
|
+
class WorkflowTaskUpdateV2(BaseModel):
    """
    Request schema for updating a V2 `WorkflowTask`.

    All fields are optional; only the provided ones are updated.

    Attributes:
        meta_parallel: Meta overrides for the parallel part of the task.
        meta_non_parallel: Meta overrides for the non-parallel part.
        args_non_parallel: Argument overrides for the non-parallel part.
        args_parallel: Argument overrides for the parallel part.
        input_filters: New image filters for this task.
    """

    meta_parallel: Optional[dict[str, Any]]
    meta_non_parallel: Optional[dict[str, Any]]
    args_non_parallel: Optional[dict[str, Any]]
    args_parallel: Optional[dict[str, Any]]
    input_filters: Optional[Filters]

    # Validators

    @validator("meta_parallel", "meta_non_parallel")
    def check_no_parallelisation_level(cls, m):
        # These fields are Optional: pydantic also calls this validator
        # when the client explicitly sends `null`, so guard before the
        # membership test (previously `"..." in None` raised a TypeError).
        if m is not None and "parallelization_level" in m:
            raise ValueError(
                "Overriding task parallelization level currently not allowed"
            )
        return m
|
94
|
+
|
95
|
+
|
96
|
+
class WorkflowTaskImportV2(BaseModel):
    """
    Import schema for a V2 `WorkflowTask`.

    Either `task` or `task_legacy` is expected, according to
    `is_legacy_task` (not enforced by this schema).
    """

    meta_parallel: Optional[dict[str, Any]] = None
    meta_non_parallel: Optional[dict[str, Any]] = None
    args: Optional[dict[str, Any]] = None  # FIXME

    input_filters: Optional[Filters] = None

    is_legacy_task: bool = False
    task: Optional[TaskImportV2] = None
    task_legacy: Optional[TaskImportV1] = None
|
107
|
+
|
108
|
+
|
109
|
+
class WorkflowTaskExportV2(BaseModel):
    """
    Export schema for a V2 `WorkflowTask`.

    Either `task` or `task_legacy` is expected, according to
    `is_legacy_task` (not enforced by this schema).
    """

    meta_parallel: Optional[dict[str, Any]] = None
    meta_non_parallel: Optional[dict[str, Any]] = None
    args: Optional[dict[str, Any]] = None  # FIXME
    input_filters: Filters = Field(default_factory=Filters)

    is_legacy_task: bool = False
    task: Optional[TaskExportV2]
    task_legacy: Optional[TaskExportV1]
|
fractal_server/config.py
CHANGED
@@ -390,12 +390,11 @@ class Settings(BaseSettings):
|
|
390
390
|
|
391
391
|
info = f"FRACTAL_RUNNER_BACKEND={self.FRACTAL_RUNNER_BACKEND}"
|
392
392
|
if self.FRACTAL_RUNNER_BACKEND == "slurm":
|
393
|
-
|
394
|
-
|
395
|
-
|
396
|
-
|
397
|
-
|
398
|
-
)
|
393
|
+
|
394
|
+
from fractal_server.app.runner.executors.slurm._slurm_config import ( # noqa: E501
|
395
|
+
load_slurm_config_file,
|
396
|
+
)
|
397
|
+
|
399
398
|
if not self.FRACTAL_SLURM_CONFIG_FILE:
|
400
399
|
raise FractalConfigurationError(
|
401
400
|
f"Must set FRACTAL_SLURM_CONFIG_FILE when {info}"
|
@@ -407,10 +406,6 @@ class Settings(BaseSettings):
|
|
407
406
|
f"{self.FRACTAL_SLURM_CONFIG_FILE} not found."
|
408
407
|
)
|
409
408
|
|
410
|
-
from fractal_server.app.runner._slurm._slurm_config import (
|
411
|
-
load_slurm_config_file,
|
412
|
-
)
|
413
|
-
|
414
409
|
load_slurm_config_file(self.FRACTAL_SLURM_CONFIG_FILE)
|
415
410
|
if not shutil.which("sbatch"):
|
416
411
|
raise FractalConfigurationError(
|
@@ -0,0 +1,50 @@
|
|
1
|
+
from typing import Any
|
2
|
+
from typing import Optional
|
3
|
+
from typing import Union
|
4
|
+
|
5
|
+
from pydantic import BaseModel
|
6
|
+
from pydantic import Field
|
7
|
+
from pydantic import validator
|
8
|
+
|
9
|
+
|
10
|
+
class SingleImage(BaseModel):
    """
    A single image entry of a V2 image list.

    Attributes:
        path: Identifier/path of the image.
        origin: Path of the image this one originates from, if any.
        attributes: Scalar metadata (int, float, str or bool values).
        types: Boolean type flags.
    """

    path: str
    origin: Optional[str] = None

    attributes: dict[str, Any] = Field(default_factory=dict)
    types: dict[str, bool] = Field(default_factory=dict)

    @validator("attributes")
    def validate_attributes(
        cls, v: dict[str, Any]
    ) -> dict[str, Union[int, float, str, bool]]:
        # Accept only scalar attribute values; containers are rejected.
        for key, value in v.items():
            if not isinstance(value, (int, float, str, bool)):
                raise ValueError(
                    f"SingleImage.attributes[{key}] must be a scalar "
                    f"(int, float, str or bool). Given {value} ({type(value)})"
                )
        return v
|
29
|
+
|
30
|
+
|
31
|
+
class Filters(BaseModel):
    """
    Attribute and type filters for V2 image lists.

    Unlike `SingleImage.attributes`, a filter attribute may also be `None`
    (the filtering helpers in `images/tools.py` skip `None`-valued
    attribute filters).

    Attributes:
        attributes: Required attribute values (scalar or `None`).
        types: Required boolean type flags.
    """

    attributes: dict[str, Any] = Field(default_factory=dict)
    types: dict[str, bool] = Field(default_factory=dict)

    class Config:
        # Reject unknown fields in incoming payloads.
        extra = "forbid"

    # Validators
    @validator("attributes")
    def validate_attributes(
        cls, v: dict[str, Any]
    ) -> dict[str, Union[int, float, str, bool, None]]:
        # Accept only scalar-or-None attribute values; containers rejected.
        for key, value in v.items():
            if not isinstance(value, (int, float, str, bool, type(None))):
                raise ValueError(
                    f"Filters.attributes[{key}] must be a scalar "
                    "(int, float, str, bool, or None). "
                    f"Given {value} ({type(value)})"
                )
        return v
|
@@ -0,0 +1,86 @@
|
|
1
|
+
from copy import copy
|
2
|
+
from typing import Any
|
3
|
+
from typing import Optional
|
4
|
+
from typing import Union
|
5
|
+
|
6
|
+
from fractal_server.images import Filters
|
7
|
+
from fractal_server.images import SingleImage
|
8
|
+
|
9
|
+
|
10
|
+
def find_image_by_path(
    *,
    images: list[dict[str, Any]],
    path: str,
) -> Optional[dict[str, Union[int, dict[str, Any]]]]:
    """
    Return a copy of the image with a given path and its positional index.

    Args:
        images: List of images.
        path: Path that the returned image must have.

    Returns:
        `None` when no image matches; otherwise a dict holding a shallow
        copy of the first matching image (`"image"`) and its position in
        `images` (`"index"`).
    """
    # Collect every path up front (note: raises KeyError if any entry
    # lacks a "path" key, matching existing behavior).
    paths = [img["path"] for img in images]
    if path not in paths:
        return None
    index = paths.index(path)
    return {"image": copy(images[index]), "index": index}
|
31
|
+
|
32
|
+
|
33
|
+
def match_filter(image: dict[str, Any], filters: "Filters") -> bool:
    """
    Check whether an image satisfies both type and attribute filters.

    Args:
        image: Image dictionary, with optional `types` and `attributes`
            sub-dictionaries (previously a missing sub-dictionary raised
            `KeyError`; it is now treated as empty).
        filters: Filters to apply.

    Returns:
        `True` if the image satisfies every filter entry, `False` otherwise.
    """
    # A type missing from the image counts as `False`.
    image_types = image.get("types", {})
    for key, required in filters.types.items():
        if image_types.get(key, False) != required:
            return False
    # A `None`-valued attribute filter means "no constraint on this key".
    image_attributes = image.get("attributes", {})
    for key, required in filters.attributes.items():
        if required is None:
            continue
        if image_attributes.get(key) != required:
            return False
    return True
|
43
|
+
|
44
|
+
|
45
|
+
def _filter_image_list(
    images: list[dict[str, Any]],
    filters: Filters,
) -> list[dict[str, Any]]:
    """
    Select the images that satisfy `filters`.

    When no filter at all is set, the input list itself is returned
    (no copy); otherwise a new list of shallow copies of the matching
    images is built.
    """
    if not filters.attributes and not filters.types:
        return images
    return [
        copy(img) for img in images if match_filter(img, filters=filters)
    ]
|
59
|
+
|
60
|
+
|
61
|
+
def match_filter_SingleImage(image: SingleImage, filters: Filters):
    """
    Return whether `image` satisfies every entry of `filters`.

    Mirrors `match_filter`, but reads `types`/`attributes` off a
    `SingleImage` model instead of a plain dictionary.
    """
    # Type filters: a flag missing from the image counts as `False`.
    types_ok = all(
        image.types.get(key, False) == required
        for key, required in filters.types.items()
    )
    if not types_ok:
        return False
    # Attribute filters: a `None` value imposes no constraint.
    for key, required in filters.attributes.items():
        if required is not None and image.attributes.get(key) != required:
            return False
    return True
|
71
|
+
|
72
|
+
|
73
|
+
def _filter_image_list_SingleImage(
    images: list[SingleImage],
    filters: Filters,
) -> list[SingleImage]:
    """
    Select the `SingleImage`s that satisfy `filters`.

    When no filter at all is set, the input list itself is returned
    (no copy); otherwise a new list of shallow copies of the matching
    images is built.
    """
    if filters.attributes == {} and filters.types == {}:
        return images
    return [
        copy(img)
        for img in images
        if match_filter_SingleImage(img, filters=filters)
    ]
|
fractal_server/main.py
CHANGED
@@ -32,13 +32,21 @@ def collect_routers(app: FastAPI) -> None:
|
|
32
32
|
"""
|
33
33
|
from .app.routes.api import router_api
|
34
34
|
from .app.routes.api.v1 import router_api_v1
|
35
|
-
from .app.routes.
|
35
|
+
from .app.routes.api.v2 import router_api_v2
|
36
|
+
from .app.routes.admin.v1 import router_admin_v1
|
37
|
+
from .app.routes.admin.v2 import router_admin_v2
|
36
38
|
from .app.routes.auth import router_auth
|
37
39
|
|
38
40
|
app.include_router(router_api, prefix="/api")
|
39
41
|
app.include_router(router_api_v1, prefix="/api/v1")
|
40
|
-
app.include_router(
|
41
|
-
app.include_router(
|
42
|
+
app.include_router(router_api_v2, prefix="/api/v2")
|
43
|
+
app.include_router(
|
44
|
+
router_admin_v1, prefix="/admin/v1", tags=["V1 Admin area"]
|
45
|
+
)
|
46
|
+
app.include_router(
|
47
|
+
router_admin_v2, prefix="/admin/v2", tags=["V2 Admin area"]
|
48
|
+
)
|
49
|
+
app.include_router(router_auth, prefix="/auth", tags=["Authentication"])
|
42
50
|
|
43
51
|
|
44
52
|
def check_settings() -> None:
|
@@ -0,0 +1,39 @@
|
|
1
|
+
"""TMP - is_v2_compatible
|
2
|
+
|
3
|
+
Revision ID: 4b35c5cefbe3
|
4
|
+
Revises: 876f28db9d4e
|
5
|
+
Create Date: 2024-03-28 15:26:33.436724
|
6
|
+
|
7
|
+
"""
|
8
|
+
import sqlalchemy as sa
|
9
|
+
from alembic import op
|
10
|
+
|
11
|
+
|
12
|
+
# revision identifiers, used by Alembic.
|
13
|
+
revision = "4b35c5cefbe3"
|
14
|
+
down_revision = "876f28db9d4e"
|
15
|
+
branch_labels = None
|
16
|
+
depends_on = None
|
17
|
+
|
18
|
+
|
19
|
+
def upgrade() -> None:
    """Add the (temporary) `task.is_v2_compatible` boolean column."""
    # ### commands auto generated by Alembic - please adjust! ###
    # batch_alter_table keeps this portable to SQLite (no in-place ALTER).
    with op.batch_alter_table("task", schema=None) as batch_op:
        batch_op.add_column(
            sa.Column(
                "is_v2_compatible",
                sa.Boolean(),
                # Server-side default so existing rows satisfy NOT NULL.
                server_default=sa.text("false"),
                nullable=False,
            )
        )

    # ### end Alembic commands ###
|
32
|
+
|
33
|
+
|
34
|
+
def downgrade() -> None:
    """Drop the `task.is_v2_compatible` column (data in it is lost)."""
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table("task", schema=None) as batch_op:
        batch_op.drop_column("is_v2_compatible")

    # ### end Alembic commands ###
|
@@ -0,0 +1,217 @@
|
|
1
|
+
"""v2
|
2
|
+
|
3
|
+
Revision ID: 56af171b0159
|
4
|
+
Revises: 9fd26a2b0de4
|
5
|
+
Create Date: 2024-03-22 11:09:02.458011
|
6
|
+
|
7
|
+
"""
|
8
|
+
import sqlalchemy as sa
|
9
|
+
import sqlmodel
|
10
|
+
from alembic import op
|
11
|
+
|
12
|
+
|
13
|
+
# revision identifiers, used by Alembic.
|
14
|
+
revision = "56af171b0159"
|
15
|
+
down_revision = "9fd26a2b0de4"
|
16
|
+
branch_labels = None
|
17
|
+
depends_on = None
|
18
|
+
|
19
|
+
|
20
|
+
def upgrade() -> None:
    """
    Create all V2 tables.

    Tables are created parents-first so foreign keys resolve:
    projectv2 and taskv2, then datasetv2/linkuserprojectv2/workflowv2,
    then jobv2 and workflowtaskv2.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # V2 projects.
    op.create_table(
        "projectv2",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
        sa.Column("read_only", sa.Boolean(), nullable=False),
        sa.Column(
            "timestamp_created", sa.DateTime(timezone=True), nullable=False
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    # V2 tasks; `command_non_parallel`/`command_parallel` replace the
    # single V1 command.
    op.create_table(
        "taskv2",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
        sa.Column(
            "command_non_parallel",
            sqlmodel.sql.sqltypes.AutoString(),
            nullable=True,
        ),
        sa.Column(
            "command_parallel",
            sqlmodel.sql.sqltypes.AutoString(),
            nullable=True,
        ),
        sa.Column(
            "source", sqlmodel.sql.sqltypes.AutoString(), nullable=False
        ),
        sa.Column("meta", sa.JSON(), nullable=True),
        sa.Column("owner", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
        sa.Column(
            "version", sqlmodel.sql.sqltypes.AutoString(), nullable=True
        ),
        sa.Column("args_schema", sa.JSON(), nullable=True),
        sa.Column(
            "args_schema_version",
            sqlmodel.sql.sqltypes.AutoString(),
            nullable=True,
        ),
        sa.Column(
            "docs_info", sqlmodel.sql.sqltypes.AutoString(), nullable=True
        ),
        sa.Column(
            "docs_link", sqlmodel.sql.sqltypes.AutoString(), nullable=True
        ),
        sa.Column("input_types", sa.JSON(), nullable=True),
        sa.Column("output_types", sa.JSON(), nullable=True),
        sa.PrimaryKeyConstraint("id"),
        # Tasks are globally identified by their source.
        sa.UniqueConstraint("source"),
    )
    # V2 datasets; `images`/`filters` are the new image-list columns
    # (JSON, with server-side defaults so inserts may omit them).
    op.create_table(
        "datasetv2",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
        sa.Column("project_id", sa.Integer(), nullable=False),
        sa.Column("history", sa.JSON(), server_default="[]", nullable=False),
        sa.Column("read_only", sa.Boolean(), nullable=False),
        sa.Column(
            "timestamp_created", sa.DateTime(timezone=True), nullable=False
        ),
        sa.Column(
            "zarr_dir", sqlmodel.sql.sqltypes.AutoString(), nullable=False
        ),
        sa.Column("images", sa.JSON(), server_default="[]", nullable=False),
        sa.Column(
            "filters",
            sa.JSON(),
            server_default='{"attributes": {}, "types": {}}',
            nullable=False,
        ),
        sa.ForeignKeyConstraint(
            ["project_id"],
            ["projectv2.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    # Many-to-many link between users and V2 projects.
    op.create_table(
        "linkuserprojectv2",
        sa.Column("project_id", sa.Integer(), nullable=False),
        sa.Column("user_id", sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(
            ["project_id"],
            ["projectv2.id"],
        ),
        sa.ForeignKeyConstraint(
            ["user_id"],
            ["user_oauth.id"],
        ),
        sa.PrimaryKeyConstraint("project_id", "user_id"),
    )
    # V2 workflows.
    op.create_table(
        "workflowv2",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
        sa.Column("project_id", sa.Integer(), nullable=False),
        sa.Column(
            "timestamp_created", sa.DateTime(timezone=True), nullable=False
        ),
        sa.ForeignKeyConstraint(
            ["project_id"],
            ["projectv2.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    # V2 jobs; FK columns are nullable so a job record can survive
    # deletion of its project/workflow/dataset (the `*_dump` JSON columns
    # preserve a snapshot).
    op.create_table(
        "jobv2",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("project_id", sa.Integer(), nullable=True),
        sa.Column("workflow_id", sa.Integer(), nullable=True),
        sa.Column("dataset_id", sa.Integer(), nullable=True),
        sa.Column(
            "user_email", sqlmodel.sql.sqltypes.AutoString(), nullable=False
        ),
        sa.Column(
            "slurm_account", sqlmodel.sql.sqltypes.AutoString(), nullable=True
        ),
        sa.Column("dataset_dump", sa.JSON(), nullable=False),
        sa.Column("workflow_dump", sa.JSON(), nullable=False),
        sa.Column("project_dump", sa.JSON(), nullable=False),
        sa.Column(
            "worker_init", sqlmodel.sql.sqltypes.AutoString(), nullable=True
        ),
        sa.Column(
            "working_dir", sqlmodel.sql.sqltypes.AutoString(), nullable=True
        ),
        sa.Column(
            "working_dir_user",
            sqlmodel.sql.sqltypes.AutoString(),
            nullable=True,
        ),
        sa.Column("first_task_index", sa.Integer(), nullable=False),
        sa.Column("last_task_index", sa.Integer(), nullable=False),
        sa.Column(
            "start_timestamp", sa.DateTime(timezone=True), nullable=False
        ),
        sa.Column("end_timestamp", sa.DateTime(timezone=True), nullable=True),
        sa.Column(
            "status", sqlmodel.sql.sqltypes.AutoString(), nullable=False
        ),
        sa.Column("log", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
        sa.ForeignKeyConstraint(
            ["dataset_id"],
            ["datasetv2.id"],
        ),
        sa.ForeignKeyConstraint(
            ["project_id"],
            ["projectv2.id"],
        ),
        sa.ForeignKeyConstraint(
            ["workflow_id"],
            ["workflowv2.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    # V2 workflow tasks; `task_legacy_id` references the V1 `task` table,
    # so a V2 workflow can wrap legacy tasks (see `is_legacy_task`).
    op.create_table(
        "workflowtaskv2",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("workflow_id", sa.Integer(), nullable=False),
        sa.Column("order", sa.Integer(), nullable=True),
        sa.Column("meta", sa.JSON(), nullable=True),
        sa.Column("args", sa.JSON(), nullable=True),
        sa.Column(
            "input_filters",
            sa.JSON(),
            server_default='{"attributes": {}, "types": {}}',
            nullable=False,
        ),
        sa.Column("is_legacy_task", sa.Boolean(), nullable=False),
        sa.Column("task_id", sa.Integer(), nullable=True),
        sa.Column("task_legacy_id", sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(
            ["task_id"],
            ["taskv2.id"],
        ),
        sa.ForeignKeyConstraint(
            ["task_legacy_id"],
            ["task.id"],
        ),
        sa.ForeignKeyConstraint(
            ["workflow_id"],
            ["workflowv2.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    # ### end Alembic commands ###
|
206
|
+
|
207
|
+
|
208
|
+
def downgrade() -> None:
    """Drop all V2 tables, children before parents (FK-safe order)."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table("workflowtaskv2")
    op.drop_table("jobv2")
    op.drop_table("workflowv2")
    op.drop_table("linkuserprojectv2")
    op.drop_table("datasetv2")
    op.drop_table("taskv2")
    op.drop_table("projectv2")
    # ### end Alembic commands ###
|