fractal-server 1.4.6__py3-none-any.whl → 2.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fractal_server/__init__.py +1 -1
- fractal_server/app/db/__init__.py +0 -1
- fractal_server/app/models/__init__.py +6 -8
- fractal_server/app/models/linkuserproject.py +9 -0
- fractal_server/app/models/security.py +6 -0
- fractal_server/app/models/v1/__init__.py +12 -0
- fractal_server/app/models/{dataset.py → v1/dataset.py} +5 -5
- fractal_server/app/models/{job.py → v1/job.py} +5 -5
- fractal_server/app/models/{project.py → v1/project.py} +5 -5
- fractal_server/app/models/{state.py → v1/state.py} +2 -2
- fractal_server/app/models/{task.py → v1/task.py} +7 -2
- fractal_server/app/models/{workflow.py → v1/workflow.py} +5 -5
- fractal_server/app/models/v2/__init__.py +22 -0
- fractal_server/app/models/v2/collection_state.py +21 -0
- fractal_server/app/models/v2/dataset.py +54 -0
- fractal_server/app/models/v2/job.py +51 -0
- fractal_server/app/models/v2/project.py +30 -0
- fractal_server/app/models/v2/task.py +93 -0
- fractal_server/app/models/v2/workflow.py +35 -0
- fractal_server/app/models/v2/workflowtask.py +49 -0
- fractal_server/app/routes/admin/__init__.py +0 -0
- fractal_server/app/routes/{admin.py → admin/v1.py} +42 -42
- fractal_server/app/routes/admin/v2.py +309 -0
- fractal_server/app/routes/api/v1/__init__.py +7 -7
- fractal_server/app/routes/api/v1/_aux_functions.py +8 -8
- fractal_server/app/routes/api/v1/dataset.py +48 -41
- fractal_server/app/routes/api/v1/job.py +14 -14
- fractal_server/app/routes/api/v1/project.py +30 -27
- fractal_server/app/routes/api/v1/task.py +26 -16
- fractal_server/app/routes/api/v1/task_collection.py +28 -16
- fractal_server/app/routes/api/v1/workflow.py +28 -28
- fractal_server/app/routes/api/v1/workflowtask.py +11 -11
- fractal_server/app/routes/api/v2/__init__.py +34 -0
- fractal_server/app/routes/api/v2/_aux_functions.py +502 -0
- fractal_server/app/routes/api/v2/dataset.py +293 -0
- fractal_server/app/routes/api/v2/images.py +279 -0
- fractal_server/app/routes/api/v2/job.py +200 -0
- fractal_server/app/routes/api/v2/project.py +186 -0
- fractal_server/app/routes/api/v2/status.py +150 -0
- fractal_server/app/routes/api/v2/submit.py +210 -0
- fractal_server/app/routes/api/v2/task.py +222 -0
- fractal_server/app/routes/api/v2/task_collection.py +239 -0
- fractal_server/app/routes/api/v2/task_legacy.py +59 -0
- fractal_server/app/routes/api/v2/workflow.py +380 -0
- fractal_server/app/routes/api/v2/workflowtask.py +265 -0
- fractal_server/app/routes/aux/_job.py +2 -2
- fractal_server/app/runner/__init__.py +0 -379
- fractal_server/app/runner/async_wrap.py +27 -0
- fractal_server/app/runner/components.py +5 -0
- fractal_server/app/runner/exceptions.py +129 -0
- fractal_server/app/runner/executors/__init__.py +0 -0
- fractal_server/app/runner/executors/slurm/__init__.py +3 -0
- fractal_server/app/runner/{_slurm → executors/slurm}/_batching.py +1 -1
- fractal_server/app/runner/executors/slurm/_check_jobs_status.py +72 -0
- fractal_server/app/runner/{_slurm → executors/slurm}/_executor_wait_thread.py +3 -4
- fractal_server/app/runner/{_slurm → executors/slurm}/_slurm_config.py +3 -152
- fractal_server/app/runner/{_slurm → executors/slurm}/_subprocess_run_as_user.py +42 -1
- fractal_server/app/runner/{_slurm → executors/slurm}/executor.py +46 -27
- fractal_server/app/runner/filenames.py +6 -0
- fractal_server/app/runner/set_start_and_last_task_index.py +39 -0
- fractal_server/app/runner/task_files.py +103 -0
- fractal_server/app/runner/v1/__init__.py +366 -0
- fractal_server/app/runner/{_common.py → v1/_common.py} +56 -111
- fractal_server/app/runner/{_local → v1/_local}/__init__.py +5 -4
- fractal_server/app/runner/{_local → v1/_local}/_local_config.py +6 -7
- fractal_server/app/runner/{_local → v1/_local}/_submit_setup.py +1 -5
- fractal_server/app/runner/v1/_slurm/__init__.py +312 -0
- fractal_server/app/runner/{_slurm → v1/_slurm}/_submit_setup.py +5 -11
- fractal_server/app/runner/v1/_slurm/get_slurm_config.py +163 -0
- fractal_server/app/runner/v1/common.py +117 -0
- fractal_server/app/runner/{handle_failed_job.py → v1/handle_failed_job.py} +8 -8
- fractal_server/app/runner/v2/__init__.py +336 -0
- fractal_server/app/runner/v2/_local/__init__.py +162 -0
- fractal_server/app/runner/v2/_local/_local_config.py +118 -0
- fractal_server/app/runner/v2/_local/_submit_setup.py +52 -0
- fractal_server/app/runner/v2/_local/executor.py +100 -0
- fractal_server/app/runner/{_slurm → v2/_slurm}/__init__.py +38 -47
- fractal_server/app/runner/v2/_slurm/_submit_setup.py +82 -0
- fractal_server/app/runner/v2/_slurm/get_slurm_config.py +182 -0
- fractal_server/app/runner/v2/deduplicate_list.py +23 -0
- fractal_server/app/runner/v2/handle_failed_job.py +165 -0
- fractal_server/app/runner/v2/merge_outputs.py +38 -0
- fractal_server/app/runner/v2/runner.py +343 -0
- fractal_server/app/runner/v2/runner_functions.py +374 -0
- fractal_server/app/runner/v2/runner_functions_low_level.py +130 -0
- fractal_server/app/runner/v2/task_interface.py +62 -0
- fractal_server/app/runner/v2/v1_compat.py +31 -0
- fractal_server/app/schemas/__init__.py +1 -42
- fractal_server/app/schemas/_validators.py +28 -5
- fractal_server/app/schemas/v1/__init__.py +36 -0
- fractal_server/app/schemas/{applyworkflow.py → v1/applyworkflow.py} +18 -18
- fractal_server/app/schemas/{dataset.py → v1/dataset.py} +30 -30
- fractal_server/app/schemas/{dumps.py → v1/dumps.py} +8 -8
- fractal_server/app/schemas/{manifest.py → v1/manifest.py} +5 -5
- fractal_server/app/schemas/{project.py → v1/project.py} +9 -9
- fractal_server/app/schemas/{task.py → v1/task.py} +12 -12
- fractal_server/app/schemas/{task_collection.py → v1/task_collection.py} +7 -7
- fractal_server/app/schemas/{workflow.py → v1/workflow.py} +38 -38
- fractal_server/app/schemas/v2/__init__.py +37 -0
- fractal_server/app/schemas/v2/dataset.py +126 -0
- fractal_server/app/schemas/v2/dumps.py +87 -0
- fractal_server/app/schemas/v2/job.py +114 -0
- fractal_server/app/schemas/v2/manifest.py +159 -0
- fractal_server/app/schemas/v2/project.py +34 -0
- fractal_server/app/schemas/v2/status.py +16 -0
- fractal_server/app/schemas/v2/task.py +151 -0
- fractal_server/app/schemas/v2/task_collection.py +109 -0
- fractal_server/app/schemas/v2/workflow.py +79 -0
- fractal_server/app/schemas/v2/workflowtask.py +208 -0
- fractal_server/config.py +13 -10
- fractal_server/images/__init__.py +4 -0
- fractal_server/images/models.py +136 -0
- fractal_server/images/tools.py +84 -0
- fractal_server/main.py +11 -3
- fractal_server/migrations/env.py +0 -2
- fractal_server/migrations/versions/5bf02391cfef_v2.py +245 -0
- fractal_server/tasks/__init__.py +0 -5
- fractal_server/tasks/endpoint_operations.py +13 -19
- fractal_server/tasks/utils.py +35 -0
- fractal_server/tasks/{_TaskCollectPip.py → v1/_TaskCollectPip.py} +3 -3
- fractal_server/tasks/v1/__init__.py +0 -0
- fractal_server/tasks/{background_operations.py → v1/background_operations.py} +20 -52
- fractal_server/tasks/v1/get_collection_data.py +14 -0
- fractal_server/tasks/v2/_TaskCollectPip.py +103 -0
- fractal_server/tasks/v2/__init__.py +0 -0
- fractal_server/tasks/v2/background_operations.py +381 -0
- fractal_server/tasks/v2/get_collection_data.py +14 -0
- fractal_server/urls.py +13 -0
- {fractal_server-1.4.6.dist-info → fractal_server-2.0.0.dist-info}/METADATA +11 -12
- fractal_server-2.0.0.dist-info/RECORD +169 -0
- fractal_server/app/runner/_slurm/.gitignore +0 -2
- fractal_server/app/runner/common.py +0 -307
- fractal_server/app/schemas/json_schemas/manifest.json +0 -81
- fractal_server-1.4.6.dist-info/RECORD +0 -97
- /fractal_server/app/runner/{_slurm → executors/slurm}/remote.py +0 -0
- /fractal_server/app/runner/{_local → v1/_local}/executor.py +0 -0
- {fractal_server-1.4.6.dist-info → fractal_server-2.0.0.dist-info}/LICENSE +0 -0
- {fractal_server-1.4.6.dist-info → fractal_server-2.0.0.dist-info}/WHEEL +0 -0
- {fractal_server-1.4.6.dist-info → fractal_server-2.0.0.dist-info}/entry_points.txt +0 -0
@@ -0,0 +1,293 @@
|
|
1
|
+
from typing import Optional
|
2
|
+
|
3
|
+
from fastapi import APIRouter
|
4
|
+
from fastapi import Depends
|
5
|
+
from fastapi import HTTPException
|
6
|
+
from fastapi import Response
|
7
|
+
from fastapi import status
|
8
|
+
from sqlmodel import select
|
9
|
+
|
10
|
+
from ....db import AsyncSession
|
11
|
+
from ....db import get_async_db
|
12
|
+
from ....models.v2 import DatasetV2
|
13
|
+
from ....models.v2 import JobV2
|
14
|
+
from ....models.v2 import ProjectV2
|
15
|
+
from ....schemas.v2 import DatasetCreateV2
|
16
|
+
from ....schemas.v2 import DatasetReadV2
|
17
|
+
from ....schemas.v2 import DatasetUpdateV2
|
18
|
+
from ....schemas.v2.dataset import DatasetExportV2
|
19
|
+
from ....schemas.v2.dataset import DatasetImportV2
|
20
|
+
from ....security import current_active_user
|
21
|
+
from ....security import User
|
22
|
+
from ._aux_functions import _get_dataset_check_owner
|
23
|
+
from ._aux_functions import _get_project_check_owner
|
24
|
+
from ._aux_functions import _get_submitted_jobs_statement
|
25
|
+
|
26
|
+
router = APIRouter()
|
27
|
+
|
28
|
+
|
29
|
+
@router.post(
    "/project/{project_id}/dataset/",
    response_model=DatasetReadV2,
    status_code=status.HTTP_201_CREATED,
)
async def create_dataset(
    project_id: int,
    dataset: DatasetCreateV2,
    user: User = Depends(current_active_user),
    db: AsyncSession = Depends(get_async_db),
) -> Optional[DatasetReadV2]:
    """
    Add new dataset to current project
    """
    # Access control: raises if the user does not own the project.
    await _get_project_check_owner(
        project_id=project_id, user_id=user.id, db=db
    )

    # Persist the new dataset and return the refreshed DB row.
    new_dataset = DatasetV2(project_id=project_id, **dataset.dict())
    db.add(new_dataset)
    await db.commit()
    await db.refresh(new_dataset)
    await db.close()
    return new_dataset
|
53
|
+
|
54
|
+
|
55
|
+
@router.get(
    "/project/{project_id}/dataset/",
    response_model=list[DatasetReadV2],
)
async def read_dataset_list(
    project_id: int,
    history: bool = True,
    user: User = Depends(current_active_user),
    db: AsyncSession = Depends(get_async_db),
) -> Optional[list[DatasetReadV2]]:
    """
    Get dataset list for given project
    """
    # Access control: raises if the user does not own the project.
    project = await _get_project_check_owner(
        project_id=project_id, user_id=user.id, db=db
    )

    # Query datasets directly via select/where rather than refreshing all
    # elements of `project.dataset_list`, which scales much better — see
    # https://github.com/fractal-analytics-platform/fractal-server/pull/1082#issuecomment-1856676097.
    query = select(DatasetV2).where(DatasetV2.project_id == project.id)
    result = await db.execute(query)
    datasets = result.scalars().all()
    await db.close()

    # Optionally strip the (potentially large) history from the response.
    if not history:
        for dataset in datasets:
            dataset.history = []
    return datasets
|
84
|
+
|
85
|
+
|
86
|
+
@router.get(
    "/project/{project_id}/dataset/{dataset_id}/",
    response_model=DatasetReadV2,
)
async def read_dataset(
    project_id: int,
    dataset_id: int,
    user: User = Depends(current_active_user),
    db: AsyncSession = Depends(get_async_db),
) -> Optional[DatasetReadV2]:
    """
    Get info on a dataset associated to the current project
    """
    # Ownership check and dataset lookup in a single helper call.
    checked = await _get_dataset_check_owner(
        project_id=project_id,
        dataset_id=dataset_id,
        user_id=user.id,
        db=db,
    )
    await db.close()
    return checked["dataset"]
|
108
|
+
|
109
|
+
|
110
|
+
@router.patch(
    "/project/{project_id}/dataset/{dataset_id}/",
    response_model=DatasetReadV2,
)
async def update_dataset(
    project_id: int,
    dataset_id: int,
    dataset_update: DatasetUpdateV2,
    user: User = Depends(current_active_user),
    db: AsyncSession = Depends(get_async_db),
) -> Optional[DatasetReadV2]:
    """
    Edit a dataset associated to the current project
    """
    checked = await _get_dataset_check_owner(
        project_id=project_id,
        dataset_id=dataset_id,
        user_id=user.id,
        db=db,
    )
    db_dataset = checked["dataset"]

    # `zarr_dir` may only change while the image list is empty.
    if dataset_update.zarr_dir is not None and len(db_dataset.images) > 0:
        raise HTTPException(
            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
            detail=(
                "Cannot modify `zarr_dir` because the dataset has a non-empty "
                "image list."
            ),
        )

    # Apply only the fields explicitly provided by the client.
    update_data = dataset_update.dict(exclude_unset=True)
    for attribute, new_value in update_data.items():
        setattr(db_dataset, attribute, new_value)

    await db.commit()
    await db.refresh(db_dataset)
    await db.close()
    return db_dataset
|
149
|
+
|
150
|
+
|
151
|
+
@router.delete(
    "/project/{project_id}/dataset/{dataset_id}/",
    status_code=204,
)
async def delete_dataset(
    project_id: int,
    dataset_id: int,
    user: User = Depends(current_active_user),
    db: AsyncSession = Depends(get_async_db),
) -> Response:
    """
    Delete a dataset associated to the current project
    """
    checked = await _get_dataset_check_owner(
        project_id=project_id,
        dataset_id=dataset_id,
        user_id=user.id,
        db=db,
    )
    dataset = checked["dataset"]

    # Refuse deletion while submitted jobs still reference this dataset.
    stm = _get_submitted_jobs_statement().where(JobV2.dataset_id == dataset_id)
    res = await db.execute(stm)
    active_jobs = res.scalars().all()
    if active_jobs:
        job_ids = str([job.id for job in active_jobs])[1:-1]
        raise HTTPException(
            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
            detail=(
                f"Cannot delete dataset {dataset.id} because it "
                f"is linked to active job(s) {job_ids}."
            ),
        )

    # Manual cascade: detach every job (in any state) from this dataset
    # by nulling its foreign key.
    res = await db.execute(select(JobV2).where(JobV2.dataset_id == dataset_id))
    for job in res.scalars().all():
        job.dataset_id = None

    # Delete the dataset itself.
    await db.delete(dataset)
    await db.commit()

    return Response(status_code=status.HTTP_204_NO_CONTENT)
|
200
|
+
|
201
|
+
|
202
|
+
@router.get("/dataset/", response_model=list[DatasetReadV2])
async def get_user_datasets(
    history: bool = True,
    user: User = Depends(current_active_user),
    db: AsyncSession = Depends(get_async_db),
) -> list[DatasetReadV2]:
    """
    Returns all the datasets of the current user
    """
    # Select datasets whose parent project includes the current user.
    stm = (
        select(DatasetV2)
        .join(ProjectV2)
        .where(ProjectV2.user_list.any(User.id == user.id))
    )
    res = await db.execute(stm)
    datasets = res.scalars().all()
    await db.close()

    # Optionally strip the (potentially large) history from the response.
    if not history:
        for dataset in datasets:
            dataset.history = []
    return datasets
|
223
|
+
|
224
|
+
|
225
|
+
@router.get(
    "/project/{project_id}/dataset/{dataset_id}/export/",
    response_model=DatasetExportV2,
)
async def export_dataset(
    project_id: int,
    dataset_id: int,
    user: User = Depends(current_active_user),
    db: AsyncSession = Depends(get_async_db),
) -> Optional[DatasetExportV2]:
    """
    Export an existing dataset
    """
    # Ownership check and dataset lookup in a single helper call.
    checked = await _get_dataset_check_owner(
        project_id=project_id,
        dataset_id=dataset_id,
        user_id=user.id,
        db=db,
    )
    await db.close()
    # The response model (DatasetExportV2) selects the exportable fields.
    return checked["dataset"]
|
249
|
+
|
250
|
+
|
251
|
+
@router.post(
    "/project/{project_id}/dataset/import/",
    response_model=DatasetReadV2,
    status_code=status.HTTP_201_CREATED,
)
async def import_dataset(
    project_id: int,
    dataset: DatasetImportV2,
    user: User = Depends(current_active_user),
    db: AsyncSession = Depends(get_async_db),
) -> Optional[DatasetReadV2]:
    """
    Import an existing dataset into a project
    """
    # Preliminary check: the user must own the target project.
    await _get_project_check_owner(
        project_id=project_id,
        user_id=user.id,
        db=db,
    )

    # Every image must live below the dataset's zarr_dir.
    # NOTE(review): this is a plain string-prefix check, so e.g.
    # zarr_dir="/a/b" also accepts "/a/bc/..." — confirm whether a
    # path-separator-aware check is intended.
    for image in dataset.images:
        if not image.zarr_url.startswith(dataset.zarr_dir):
            raise HTTPException(
                status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
                detail=(
                    f"Cannot import dataset: zarr_url {image.zarr_url} is not "
                    f"relative to zarr_dir={dataset.zarr_dir}."
                ),
            )

    # Persist the imported dataset and return the refreshed DB row.
    db_dataset = DatasetV2(
        project_id=project_id,
        **dataset.dict(exclude_none=True),
    )
    db.add(db_dataset)
    await db.commit()
    await db.refresh(db_dataset)
    await db.close()
    return db_dataset
|
@@ -0,0 +1,279 @@
|
|
1
|
+
from typing import Any
|
2
|
+
from typing import Optional
|
3
|
+
|
4
|
+
from fastapi import APIRouter
|
5
|
+
from fastapi import Depends
|
6
|
+
from fastapi import HTTPException
|
7
|
+
from fastapi import Response
|
8
|
+
from fastapi import status
|
9
|
+
from pydantic import BaseModel
|
10
|
+
from pydantic import Field
|
11
|
+
from sqlalchemy.orm.attributes import flag_modified
|
12
|
+
|
13
|
+
from ._aux_functions import _get_dataset_check_owner
|
14
|
+
from fractal_server.app.db import AsyncSession
|
15
|
+
from fractal_server.app.db import get_async_db
|
16
|
+
from fractal_server.app.security import current_active_user
|
17
|
+
from fractal_server.app.security import User
|
18
|
+
from fractal_server.images import Filters
|
19
|
+
from fractal_server.images import SingleImage
|
20
|
+
from fractal_server.images import SingleImageUpdate
|
21
|
+
from fractal_server.images.tools import find_image_by_zarr_url
|
22
|
+
from fractal_server.images.tools import match_filter
|
23
|
+
|
24
|
+
router = APIRouter()
|
25
|
+
|
26
|
+
|
27
|
+
class ImagePage(BaseModel):
    """
    Response model for a paginated image-list query.
    """

    # Number of images matching the query, before pagination.
    total_count: int
    # Number of images per page (set to total_count when no page size
    # was requested).
    page_size: int
    # 1-based index of the returned page.
    current_page: int

    # Attribute keys found across the (dataset-filtered) images, each
    # mapped to the de-duplicated list of observed values.
    attributes: dict[str, list[Any]]
    # Type keys found across the (dataset-filtered) images.
    types: list[str]

    # The images belonging to the current page.
    images: list[SingleImage]
|
37
|
+
|
38
|
+
|
39
|
+
class ImageQuery(BaseModel):
    """
    Request body for an image-list query.
    """

    # Exact zarr_url to look up; None means "no zarr_url constraint".
    zarr_url: Optional[str]
    # Attribute/type filters applied on top of the zarr_url match.
    filters: Filters = Field(default_factory=Filters)
|
42
|
+
|
43
|
+
|
44
|
+
@router.post(
    "/project/{project_id}/dataset/{dataset_id}/images/",
    status_code=status.HTTP_201_CREATED,
)
async def post_new_image(
    project_id: int,
    dataset_id: int,
    new_image: SingleImage,
    user: User = Depends(current_active_user),
    db: AsyncSession = Depends(get_async_db),
) -> Response:
    """
    Append a new image to a dataset's image list.

    Raises:
        HTTPException(422): if the zarr_url is not below the dataset's
            zarr_dir, coincides with it, or is already present.
    """
    output = await _get_dataset_check_owner(
        project_id=project_id, dataset_id=dataset_id, user_id=user.id, db=db
    )
    dataset = output["dataset"]

    # NOTE(review): plain string-prefix check — zarr_dir="/a/b" also
    # accepts "/a/bc/..."; confirm whether that is intended.
    if not new_image.zarr_url.startswith(dataset.zarr_dir):
        raise HTTPException(
            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
            detail=(
                "Cannot create image with zarr_url which is not relative to "
                f"{dataset.zarr_dir}."
            ),
        )
    elif new_image.zarr_url == dataset.zarr_dir:
        raise HTTPException(
            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
            detail=(
                "`SingleImage.zarr_url` cannot be equal to `Dataset.zarr_dir`:"
                f" {dataset.zarr_dir}"
            ),
        )

    if new_image.zarr_url in dataset.image_zarr_urls:
        # FIX: the original had a trailing comma after the f-string inside
        # `detail=(...)`, which made `detail` a 1-tuple instead of a string.
        raise HTTPException(
            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
            detail=(
                f"Image with zarr_url '{new_image.zarr_url}' "
                f"already in DatasetV2 {dataset_id}"
            ),
        )

    # `images` is a JSON column: append and flag it as modified so that
    # SQLAlchemy detects the in-place mutation.
    dataset.images.append(new_image.dict())
    flag_modified(dataset, "images")

    await db.commit()

    return Response(status_code=status.HTTP_201_CREATED)
|
93
|
+
|
94
|
+
|
95
|
+
@router.post(
    "/project/{project_id}/dataset/{dataset_id}/images/query/",
    response_model=ImagePage,
    status_code=status.HTTP_200_OK,
)
async def query_dataset_images(
    project_id: int,
    dataset_id: int,
    use_dataset_filters: bool = False,  # query param
    page: int = 1,  # query param
    page_size: Optional[int] = None,  # query param
    query: Optional[ImageQuery] = None,  # body
    user: User = Depends(current_active_user),
    db: AsyncSession = Depends(get_async_db),
) -> ImagePage:
    """
    Query the image list of a dataset, with optional filters and pagination.
    """
    if page < 1:
        raise HTTPException(
            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
            detail=f"Invalid pagination parameter: page={page} < 1",
        )

    checked = await _get_dataset_check_owner(
        project_id=project_id, dataset_id=dataset_id, user_id=user.id, db=db
    )
    dataset = checked["dataset"]
    images = dataset.images

    # Optionally pre-filter with the dataset's own stored filters.
    if use_dataset_filters is True:
        dataset_filters = Filters(**dataset.filters)
        images = [img for img in images if match_filter(img, dataset_filters)]

    # Aggregate, over the (dataset-filtered) images, all attribute values
    # per key and all type keys. Note: computed BEFORE the query-body
    # filters below are applied.
    collected: dict[str, list[Any]] = {}
    for img in images:
        for key, value in img["attributes"].items():
            if key not in collected:
                collected[key] = []
            collected[key].append(value)
    attributes = {key: list(set(values)) for key, values in collected.items()}

    type_keys = set()
    for img in images:
        type_keys.update(img["types"].keys())
    types = list(type_keys)

    if query is not None:

        # Exact zarr_url lookup: keep at most the first matching image.
        if query.zarr_url is not None:
            matching = [
                img for img in images if img["zarr_url"] == query.zarr_url
            ]
            images = matching[:1]

        # Attribute/type filters from the query body.
        if query.filters.attributes or query.filters.types:
            query_filters = Filters(**query.filters.dict())
            images = [
                img for img in images if match_filter(img, query_filters)
            ]

    total_count = len(images)

    # Pagination: a missing page_size means "everything on one page".
    if page_size is None:
        page_size = total_count
    elif page_size <= 0:
        raise HTTPException(
            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
            detail=(
                f"Invalid pagination parameter: page_size={page_size} <= 0"
            ),
        )

    if total_count == 0:
        page = 1
    else:
        # Clamp `page` to the last non-empty page, then slice.
        last_page = (total_count // page_size) + (total_count % page_size > 0)
        page = min(page, last_page)
        offset = (page - 1) * page_size
        images = images[offset : offset + page_size]  # noqa E203

    return ImagePage(
        total_count=total_count,
        current_page=page,
        page_size=page_size,
        attributes=attributes,
        types=types,
        images=images,
    )
|
197
|
+
|
198
|
+
|
199
|
+
@router.delete(
    "/project/{project_id}/dataset/{dataset_id}/images/",
    status_code=status.HTTP_204_NO_CONTENT,
)
async def delete_dataset_images(
    project_id: int,
    dataset_id: int,
    zarr_url: str,
    user: User = Depends(current_active_user),
    db: AsyncSession = Depends(get_async_db),
) -> Response:
    """
    Remove the image with the given zarr_url from a dataset.
    """
    checked = await _get_dataset_check_owner(
        project_id=project_id, dataset_id=dataset_id, user_id=user.id, db=db
    )
    dataset = checked["dataset"]

    # Locate the image to remove (first match on zarr_url).
    target = None
    for image in dataset.images:
        if image["zarr_url"] == zarr_url:
            target = image
            break
    if target is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=(
                f"No image with zarr_url '{zarr_url}' in "
                f"DatasetV2 {dataset_id}."
            ),
        )

    # `images` is a JSON column: mutate in place and flag it as modified
    # so that SQLAlchemy detects the change.
    dataset.images.remove(target)
    flag_modified(dataset, "images")

    await db.commit()

    return Response(status_code=status.HTTP_204_NO_CONTENT)
|
235
|
+
|
236
|
+
|
237
|
+
@router.patch(
    "/project/{project_id}/dataset/{dataset_id}/images/",
    response_model=SingleImage,
    status_code=status.HTTP_200_OK,
)
async def patch_dataset_image(
    project_id: int,
    dataset_id: int,
    image_update: SingleImageUpdate,
    user: User = Depends(current_active_user),
    db: AsyncSession = Depends(get_async_db),
):
    """
    Update attributes/types of a single image, identified by its zarr_url.
    """
    checked = await _get_dataset_check_owner(
        project_id=project_id,
        dataset_id=dataset_id,
        user_id=user.id,
        db=db,
    )
    db_dataset = checked["dataset"]

    match = find_image_by_zarr_url(
        images=db_dataset.images, zarr_url=image_update.zarr_url
    )
    if match is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=(
                f"No image with zarr_url '{image_update.zarr_url}' in "
                f"DatasetV2 {dataset_id}."
            ),
        )
    index = match["index"]

    # Apply every explicitly-set field except the identifying zarr_url.
    changes = image_update.dict(exclude_none=True, exclude={"zarr_url"})
    for field_name, field_value in changes.items():
        db_dataset.images[index][field_name] = field_value

    # `images` is a JSON column: flag the in-place mutation for SQLAlchemy.
    flag_modified(db_dataset, "images")

    await db.commit()
    await db.close()
    return db_dataset.images[index]
|