fractal-server 2.17.2__py3-none-any.whl → 2.18.0__py3-none-any.whl
This diff compares two publicly released versions of the package, as published to a supported registry. It is provided for informational purposes only and reflects the changes between these versions exactly as they appear in the public registry.
- fractal_server/__init__.py +1 -1
- fractal_server/__main__.py +2 -1
- fractal_server/app/models/linkuserproject.py +40 -0
- fractal_server/app/models/security.py +7 -5
- fractal_server/app/models/v2/job.py +13 -2
- fractal_server/app/models/v2/resource.py +13 -0
- fractal_server/app/routes/admin/v2/__init__.py +11 -11
- fractal_server/app/routes/admin/v2/accounting.py +2 -2
- fractal_server/app/routes/admin/v2/job.py +34 -23
- fractal_server/app/routes/admin/v2/sharing.py +103 -0
- fractal_server/app/routes/admin/v2/task.py +9 -8
- fractal_server/app/routes/admin/v2/task_group.py +94 -16
- fractal_server/app/routes/admin/v2/task_group_lifecycle.py +20 -20
- fractal_server/app/routes/api/__init__.py +0 -9
- fractal_server/app/routes/api/v2/__init__.py +47 -47
- fractal_server/app/routes/api/v2/_aux_functions.py +65 -64
- fractal_server/app/routes/api/v2/_aux_functions_history.py +8 -3
- fractal_server/app/routes/api/v2/_aux_functions_sharing.py +97 -0
- fractal_server/app/routes/api/v2/_aux_functions_task_lifecycle.py +4 -4
- fractal_server/app/routes/api/v2/_aux_functions_tasks.py +2 -2
- fractal_server/app/routes/api/v2/dataset.py +89 -77
- fractal_server/app/routes/api/v2/history.py +28 -16
- fractal_server/app/routes/api/v2/images.py +22 -8
- fractal_server/app/routes/api/v2/job.py +40 -24
- fractal_server/app/routes/api/v2/pre_submission_checks.py +13 -6
- fractal_server/app/routes/api/v2/project.py +48 -25
- fractal_server/app/routes/api/v2/sharing.py +311 -0
- fractal_server/app/routes/api/v2/status_legacy.py +22 -33
- fractal_server/app/routes/api/v2/submit.py +76 -71
- fractal_server/app/routes/api/v2/task.py +15 -17
- fractal_server/app/routes/api/v2/task_collection.py +18 -18
- fractal_server/app/routes/api/v2/task_collection_custom.py +11 -13
- fractal_server/app/routes/api/v2/task_collection_pixi.py +9 -9
- fractal_server/app/routes/api/v2/task_group.py +18 -18
- fractal_server/app/routes/api/v2/task_group_lifecycle.py +26 -26
- fractal_server/app/routes/api/v2/task_version_update.py +12 -9
- fractal_server/app/routes/api/v2/workflow.py +41 -29
- fractal_server/app/routes/api/v2/workflow_import.py +25 -23
- fractal_server/app/routes/api/v2/workflowtask.py +25 -17
- fractal_server/app/routes/auth/_aux_auth.py +100 -0
- fractal_server/app/routes/auth/current_user.py +0 -63
- fractal_server/app/routes/auth/group.py +1 -30
- fractal_server/app/routes/auth/router.py +2 -0
- fractal_server/app/routes/auth/users.py +9 -0
- fractal_server/app/routes/auth/viewer_paths.py +43 -0
- fractal_server/app/schemas/user.py +29 -12
- fractal_server/app/schemas/user_group.py +0 -15
- fractal_server/app/schemas/v2/__init__.py +55 -48
- fractal_server/app/schemas/v2/dataset.py +35 -13
- fractal_server/app/schemas/v2/dumps.py +9 -9
- fractal_server/app/schemas/v2/job.py +11 -11
- fractal_server/app/schemas/v2/project.py +3 -3
- fractal_server/app/schemas/v2/resource.py +13 -4
- fractal_server/app/schemas/v2/sharing.py +99 -0
- fractal_server/app/schemas/v2/status_legacy.py +3 -3
- fractal_server/app/schemas/v2/task.py +6 -6
- fractal_server/app/schemas/v2/task_collection.py +4 -4
- fractal_server/app/schemas/v2/task_group.py +16 -16
- fractal_server/app/schemas/v2/workflow.py +16 -16
- fractal_server/app/schemas/v2/workflowtask.py +14 -14
- fractal_server/app/security/__init__.py +1 -1
- fractal_server/app/shutdown.py +6 -6
- fractal_server/config/__init__.py +0 -6
- fractal_server/config/_data.py +0 -79
- fractal_server/config/_main.py +6 -1
- fractal_server/data_migrations/2_18_0.py +30 -0
- fractal_server/images/models.py +1 -2
- fractal_server/main.py +72 -11
- fractal_server/migrations/versions/7910eed4cf97_user_project_dirs_and_usergroup_viewer_.py +60 -0
- fractal_server/migrations/versions/88270f589c9b_add_prevent_new_submissions.py +39 -0
- fractal_server/migrations/versions/bc0e8b3327a7_project_sharing.py +72 -0
- fractal_server/migrations/versions/f0702066b007_one_submitted_job_per_dataset.py +40 -0
- fractal_server/runner/config/_slurm.py +2 -0
- fractal_server/runner/executors/slurm_common/_batching.py +4 -10
- fractal_server/runner/executors/slurm_common/slurm_config.py +1 -0
- fractal_server/runner/executors/slurm_ssh/runner.py +1 -1
- fractal_server/runner/executors/slurm_sudo/runner.py +1 -1
- fractal_server/runner/v2/_local.py +4 -3
- fractal_server/runner/v2/_slurm_ssh.py +4 -3
- fractal_server/runner/v2/_slurm_sudo.py +4 -3
- fractal_server/runner/v2/runner.py +36 -17
- fractal_server/runner/v2/runner_functions.py +11 -14
- fractal_server/runner/v2/submit_workflow.py +22 -9
- fractal_server/tasks/v2/local/_utils.py +2 -2
- fractal_server/tasks/v2/local/collect.py +5 -6
- fractal_server/tasks/v2/local/collect_pixi.py +5 -6
- fractal_server/tasks/v2/local/deactivate.py +7 -7
- fractal_server/tasks/v2/local/deactivate_pixi.py +3 -3
- fractal_server/tasks/v2/local/delete.py +5 -5
- fractal_server/tasks/v2/local/reactivate.py +5 -5
- fractal_server/tasks/v2/local/reactivate_pixi.py +5 -5
- fractal_server/tasks/v2/ssh/collect.py +5 -5
- fractal_server/tasks/v2/ssh/collect_pixi.py +5 -5
- fractal_server/tasks/v2/ssh/deactivate.py +7 -7
- fractal_server/tasks/v2/ssh/deactivate_pixi.py +2 -2
- fractal_server/tasks/v2/ssh/delete.py +5 -5
- fractal_server/tasks/v2/ssh/reactivate.py +5 -5
- fractal_server/tasks/v2/ssh/reactivate_pixi.py +5 -5
- fractal_server/tasks/v2/utils_background.py +7 -7
- fractal_server/tasks/v2/utils_database.py +5 -5
- fractal_server/types/__init__.py +22 -0
- fractal_server/types/validators/__init__.py +3 -0
- fractal_server/types/validators/_common_validators.py +32 -0
- {fractal_server-2.17.2.dist-info → fractal_server-2.18.0.dist-info}/METADATA +3 -2
- {fractal_server-2.17.2.dist-info → fractal_server-2.18.0.dist-info}/RECORD +108 -98
- {fractal_server-2.17.2.dist-info → fractal_server-2.18.0.dist-info}/WHEEL +0 -0
- {fractal_server-2.17.2.dist-info → fractal_server-2.18.0.dist-info}/entry_points.txt +0 -0
- {fractal_server-2.17.2.dist-info → fractal_server-2.18.0.dist-info}/licenses/LICENSE +0 -0

fractal_server/app/routes/api/v2/dataset.py

@@ -1,3 +1,6 @@
+import os
+from pathlib import Path
+
 from fastapi import APIRouter
 from fastapi import Depends
 from fastapi import HTTPException
@@ -11,16 +14,17 @@ from fractal_server.app.models import UserOAuth
 from fractal_server.app.models.v2 import DatasetV2
 from fractal_server.app.models.v2 import JobV2
 from fractal_server.app.routes.auth import current_user_act_ver_prof
-from fractal_server.app.schemas.v2 import
-from fractal_server.app.schemas.v2 import
-from fractal_server.app.schemas.v2 import
-from fractal_server.app.schemas.v2.dataset import
-from fractal_server.app.schemas.v2.dataset import
+from fractal_server.app.schemas.v2 import DatasetCreate
+from fractal_server.app.schemas.v2 import DatasetRead
+from fractal_server.app.schemas.v2 import DatasetUpdate
+from fractal_server.app.schemas.v2.dataset import DatasetExport
+from fractal_server.app.schemas.v2.dataset import DatasetImport
+from fractal_server.app.schemas.v2.sharing import ProjectPermissions
 from fractal_server.string_tools import sanitize_string
 from fractal_server.urls import normalize_url

-from ._aux_functions import
-from ._aux_functions import
+from ._aux_functions import _get_dataset_check_access
+from ._aux_functions import _get_project_check_access
 from ._aux_functions import _get_submitted_jobs_statement

 router = APIRouter()
@@ -28,66 +32,82 @@ router = APIRouter()

 @router.post(
     "/project/{project_id}/dataset/",
-    response_model=
+    response_model=DatasetRead,
     status_code=status.HTTP_201_CREATED,
 )
 async def create_dataset(
     project_id: int,
-    dataset:
+    dataset: DatasetCreate,
     user: UserOAuth = Depends(current_user_act_ver_prof),
     db: AsyncSession = Depends(get_async_db),
-) ->
+) -> DatasetRead | None:
     """
     Add new dataset to current project
     """
-    project = await
-        project_id=project_id,
+    project = await _get_project_check_access(
+        project_id=project_id,
+        user_id=user.id,
+        required_permissions=ProjectPermissions.WRITE,
+        db=db,
     )

-
-
-
-
-
-
-
-
-
-
-
-
+    db_dataset = DatasetV2(
+        project_id=project_id,
+        zarr_dir="__PLACEHOLDER__",
+        **dataset.model_dump(exclude={"project_dir", "zarr_subfolder"}),
+    )
+    db.add(db_dataset)
+    await db.commit()
+    await db.refresh(db_dataset)
+
+    if dataset.project_dir is None:
+        project_dir = user.project_dirs[0]
+    else:
+        if dataset.project_dir not in user.project_dirs:
+            await db.delete(db_dataset)
+            await db.commit()
+            raise HTTPException(
+                status_code=status.HTTP_403_FORBIDDEN,
+                detail=f"You are not allowed to use {dataset.project_dir=}.",
+            )
+        project_dir = dataset.project_dir
+
+    if dataset.zarr_subfolder is None:
+        zarr_subfolder = (
+            f"fractal/{project_id}_{sanitize_string(project.name)}/"
             f"{db_dataset.id}_{sanitize_string(db_dataset.name)}"
         )
-        normalized_path = normalize_url(path)
-        db_dataset.zarr_dir = normalized_path
-
-        db.add(db_dataset)
-        await db.commit()
-        await db.refresh(db_dataset)
     else:
-
-
-
-
+        zarr_subfolder = dataset.zarr_subfolder
+
+    zarr_dir = os.path.join(project_dir, zarr_subfolder)
+    db_dataset.zarr_dir = normalize_url(zarr_dir)
+
+    db.add(db_dataset)
+    await db.commit()
+    await db.refresh(db_dataset)

     return db_dataset


 @router.get(
     "/project/{project_id}/dataset/",
-    response_model=list[
+    response_model=list[DatasetRead],
 )
 async def read_dataset_list(
     project_id: int,
     user: UserOAuth = Depends(current_user_act_ver_prof),
     db: AsyncSession = Depends(get_async_db),
-) -> list[
+) -> list[DatasetRead] | None:
     """
     Get dataset list for given project
     """
     # Access control
-    project = await
-        project_id=project_id,
+    project = await _get_project_check_access(
+        project_id=project_id,
+        user_id=user.id,
+        required_permissions=ProjectPermissions.READ,
+        db=db,
     )
     # Find datasets of the current project. Note: this select/where approach
     # has much better scaling than refreshing all elements of
@@ -96,72 +116,62 @@ async def read_dataset_list(
     stm = select(DatasetV2).where(DatasetV2.project_id == project.id)
     res = await db.execute(stm)
     dataset_list = res.scalars().all()
-    await db.close()
     return dataset_list


 @router.get(
     "/project/{project_id}/dataset/{dataset_id}/",
-    response_model=
+    response_model=DatasetRead,
 )
 async def read_dataset(
     project_id: int,
     dataset_id: int,
     user: UserOAuth = Depends(current_user_act_ver_prof),
     db: AsyncSession = Depends(get_async_db),
-) ->
+) -> DatasetRead | None:
     """
     Get info on a dataset associated to the current project
     """
-    output = await
+    output = await _get_dataset_check_access(
         project_id=project_id,
         dataset_id=dataset_id,
         user_id=user.id,
+        required_permissions=ProjectPermissions.READ,
         db=db,
     )
     dataset = output["dataset"]
-    await db.close()
     return dataset


 @router.patch(
     "/project/{project_id}/dataset/{dataset_id}/",
-    response_model=
+    response_model=DatasetRead,
 )
 async def update_dataset(
     project_id: int,
     dataset_id: int,
-    dataset_update:
+    dataset_update: DatasetUpdate,
     user: UserOAuth = Depends(current_user_act_ver_prof),
     db: AsyncSession = Depends(get_async_db),
-) ->
+) -> DatasetRead | None:
     """
     Edit a dataset associated to the current project
     """

-    output = await
+    output = await _get_dataset_check_access(
         project_id=project_id,
         dataset_id=dataset_id,
         user_id=user.id,
+        required_permissions=ProjectPermissions.WRITE,
         db=db,
     )
     db_dataset = output["dataset"]

-    if (dataset_update.zarr_dir is not None) and (len(db_dataset.images) != 0):
-        raise HTTPException(
-            status_code=status.HTTP_422_UNPROCESSABLE_CONTENT,
-            detail=(
-                "Cannot modify `zarr_dir` because the dataset has a non-empty "
-                "image list."
-            ),
-        )
-
     for key, value in dataset_update.model_dump(exclude_unset=True).items():
         setattr(db_dataset, key, value)

     await db.commit()
     await db.refresh(db_dataset)
-    await db.close()
     return db_dataset


@@ -178,10 +188,11 @@ async def delete_dataset(
     """
     Delete a dataset associated to the current project
     """
-    output = await
+    output = await _get_dataset_check_access(
         project_id=project_id,
         dataset_id=dataset_id,
         user_id=user.id,
+        required_permissions=ProjectPermissions.WRITE,
         db=db,
     )
     dataset = output["dataset"]
@@ -210,25 +221,24 @@ async def delete_dataset(

 @router.get(
     "/project/{project_id}/dataset/{dataset_id}/export/",
-    response_model=
+    response_model=DatasetExport,
 )
 async def export_dataset(
     project_id: int,
     dataset_id: int,
     user: UserOAuth = Depends(current_user_act_ver_prof),
     db: AsyncSession = Depends(get_async_db),
-) ->
+) -> DatasetExport | None:
     """
     Export an existing dataset
     """
-    dict_dataset_project = await
+    dict_dataset_project = await _get_dataset_check_access(
         project_id=project_id,
         dataset_id=dataset_id,
         user_id=user.id,
+        required_permissions=ProjectPermissions.READ,
         db=db,
     )
-    await db.close()
-
     dataset = dict_dataset_project["dataset"]

     return dataset
@@ -236,35 +246,38 @@ async def export_dataset(

 @router.post(
     "/project/{project_id}/dataset/import/",
-    response_model=
+    response_model=DatasetRead,
     status_code=status.HTTP_201_CREATED,
 )
 async def import_dataset(
     project_id: int,
-    dataset:
+    dataset: DatasetImport,
     user: UserOAuth = Depends(current_user_act_ver_prof),
     db: AsyncSession = Depends(get_async_db),
-) ->
+) -> DatasetRead | None:
     """
     Import an existing dataset into a project
     """

     # Preliminary checks
-    await
+    await _get_project_check_access(
         project_id=project_id,
         user_id=user.id,
+        required_permissions=ProjectPermissions.WRITE,
         db=db,
     )

-
-
-
-
-
-
-
-
-
+    if not any(
+        Path(dataset.zarr_dir).is_relative_to(project_dir)
+        for project_dir in user.project_dirs
+    ):
+        raise HTTPException(
+            status_code=status.HTTP_422_UNPROCESSABLE_CONTENT,
+            detail=(
+                f"{dataset.zarr_dir=} is not relative to any of user's project "
+                "dirs."
+            ),
+        )

     # Create new Dataset
     db_dataset = DatasetV2(
@@ -274,6 +287,5 @@ async def import_dataset(
     db.add(db_dataset)
     await db.commit()
     await db.refresh(db_dataset)
-    await db.close()

     return db_dataset
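The `create_dataset` hunk above replaces the client-supplied `zarr_dir` with a server-side composition: one of the user's `project_dirs` is joined with a `zarr_subfolder` (defaulting to a `fractal/<project>/<dataset>` layout) and the result is normalized. A minimal sketch of that composition, using hypothetical directory and name values and a stand-in for `sanitize_string` (the route additionally passes the joined path through `fractal_server.urls.normalize_url`):

```python
import os


def sanitize(s: str) -> str:
    # Stand-in for fractal_server.string_tools.sanitize_string (illustration only).
    return "".join(c if c.isalnum() else "_" for c in s)


# Hypothetical inputs mirroring the logic added in create_dataset above.
project_dir = "/data/active"  # one of user.project_dirs
project_id, project_name = 7, "My Project"
dataset_id, dataset_name = 42, "plate-1"

# Default zarr_subfolder when the client does not provide one.
zarr_subfolder = (
    f"fractal/{project_id}_{sanitize(project_name)}/"
    f"{dataset_id}_{sanitize(dataset_name)}"
)

zarr_dir = os.path.join(project_dir, zarr_subfolder)
print(zarr_dir)  # /data/active/fractal/7_My_Project/42_plate_1
```

The `import_dataset` hunk adds the mirror-image check for imports: the supplied `zarr_dir` must be relative to at least one of the user's `project_dirs`, enforced with `Path.is_relative_to`.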

fractal_server/app/routes/api/v2/history.py

@@ -24,6 +24,7 @@ from fractal_server.app.schemas.v2 import HistoryUnitRead
 from fractal_server.app.schemas.v2 import HistoryUnitStatus
 from fractal_server.app.schemas.v2 import HistoryUnitStatusWithUnset
 from fractal_server.app.schemas.v2 import ImageLogsRequest
+from fractal_server.app.schemas.v2.sharing import ProjectPermissions
 from fractal_server.images.status_tools import IMAGE_STATUS_KEY
 from fractal_server.images.status_tools import enrich_images_unsorted_async
 from fractal_server.images.tools import aggregate_attributes
@@ -31,13 +32,13 @@ from fractal_server.images.tools import aggregate_types
 from fractal_server.images.tools import filter_image_list
 from fractal_server.logger import set_logger

-from ._aux_functions import
-from ._aux_functions import
-from ._aux_functions import
+from ._aux_functions import _get_dataset_check_access
+from ._aux_functions import _get_submitted_jobs_statement
+from ._aux_functions import _get_workflow_check_access
 from ._aux_functions_history import _verify_workflow_and_dataset_access
 from ._aux_functions_history import get_history_run_or_404
 from ._aux_functions_history import get_history_unit_or_404
-from ._aux_functions_history import
+from ._aux_functions_history import get_wftask_check_access
 from ._aux_functions_history import read_log_file
 from .images import ImagePage
 from .images import ImageQuery
@@ -74,24 +75,28 @@ async def get_workflow_tasks_statuses(
     db: AsyncSession = Depends(get_async_db),
 ) -> JSONResponse:
     # Access control
-    workflow = await
+    workflow = await _get_workflow_check_access(
         project_id=project_id,
         workflow_id=workflow_id,
         user_id=user.id,
+        required_permissions=ProjectPermissions.READ,
         db=db,
     )
-    await
+    await _get_dataset_check_access(
         project_id=project_id,
         dataset_id=dataset_id,
         user_id=user.id,
+        required_permissions=ProjectPermissions.READ,
         db=db,
     )

-
-
-        dataset_id
-        workflow_id
+    res = await db.execute(
+        _get_submitted_jobs_statement()
+        .where(JobV2.dataset_id == dataset_id)
+        .where(JobV2.workflow_id == workflow_id)
     )
+    running_job = res.scalars().one_or_none()
+
     if running_job is not None:
         running_wftasks = workflow.task_list[
             running_job.first_task_index : running_job.last_task_index + 1
@@ -185,11 +190,12 @@ async def get_history_run_list(
     db: AsyncSession = Depends(get_async_db),
 ) -> list[HistoryRunReadAggregated]:
     # Access control
-    await
+    await get_wftask_check_access(
         project_id=project_id,
         dataset_id=dataset_id,
         workflowtask_id=workflowtask_id,
         user_id=user.id,
+        required_permissions=ProjectPermissions.READ,
         db=db,
     )

@@ -278,11 +284,12 @@ async def get_history_run_units(
     pagination: PaginationRequest = Depends(get_pagination_params),
 ) -> PaginationResponse[HistoryUnitRead]:
     # Access control
-    await
+    await get_wftask_check_access(
         project_id=project_id,
         dataset_id=dataset_id,
         workflowtask_id=workflowtask_id,
         user_id=user.id,
+        required_permissions=ProjectPermissions.READ,
         db=db,
     )

@@ -337,11 +344,12 @@ async def get_history_images(
     pagination: PaginationRequest = Depends(get_pagination_params),
 ) -> ImagePage:
     # Access control and object retrieval
-    wftask = await
+    wftask = await get_wftask_check_access(
         project_id=project_id,
         dataset_id=dataset_id,
         workflowtask_id=workflowtask_id,
         user_id=user.id,
+        required_permissions=ProjectPermissions.READ,
         db=db,
     )
     res = await _verify_workflow_and_dataset_access(
@@ -349,6 +357,7 @@ async def get_history_images(
         workflow_id=wftask.workflow_id,
         dataset_id=dataset_id,
         user_id=user.id,
+        required_permissions=ProjectPermissions.READ,
         db=db,
     )
     dataset = res["dataset"]
@@ -418,11 +427,12 @@ async def get_image_log(
     db: AsyncSession = Depends(get_async_db),
 ) -> JSONResponse:
     # Access control
-    wftask = await
+    wftask = await get_wftask_check_access(
         project_id=project_id,
         dataset_id=request_data.dataset_id,
         workflowtask_id=request_data.workflowtask_id,
         user_id=user.id,
+        required_permissions=ProjectPermissions.READ,
         db=db,
     )

@@ -475,11 +485,12 @@ async def get_history_unit_log(
     db: AsyncSession = Depends(get_async_db),
 ) -> JSONResponse:
     # Access control
-    wftask = await
+    wftask = await get_wftask_check_access(
         project_id=project_id,
         dataset_id=dataset_id,
         workflowtask_id=workflowtask_id,
         user_id=user.id,
+        required_permissions=ProjectPermissions.READ,
         db=db,
     )

@@ -530,10 +541,11 @@ async def get_dataset_history(
     timestamp.
     """
     # Access control
-    await
+    await _get_dataset_check_access(
         project_id=project_id,
         dataset_id=dataset_id,
         user_id=user.id,
+        required_permissions=ProjectPermissions.READ,
         db=db,
     )

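Across the history endpoints above, and the images endpoints below, every access-check helper now receives a `required_permissions` argument built on the new `ProjectPermissions` enum from `fractal_server.app.schemas.v2.sharing`: READ for query endpoints, WRITE for endpoints that mutate data. A rough sketch of the kind of gate such a helper can apply (the enum's string values and the comparison rule are assumptions, not the package's actual implementation):

```python
from enum import Enum

from fastapi import HTTPException, status


class ProjectPermissions(str, Enum):
    # READ and WRITE appear in the diff; the string values here are assumed.
    READ = "read"
    WRITE = "write"


def check_permissions(
    granted: ProjectPermissions,
    required: ProjectPermissions,
) -> None:
    # Toy rule (assumption): WRITE implies READ; otherwise the levels must match.
    if granted == required or granted == ProjectPermissions.WRITE:
        return
    raise HTTPException(
        status_code=status.HTTP_403_FORBIDDEN,
        detail=f"Project permission {required.value!r} is required.",
    )
```

In the routes themselves this check is delegated to helpers such as `_get_dataset_check_access` and `get_wftask_check_access`, which also resolve and return the requested objects.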

fractal_server/app/routes/api/v2/images.py

@@ -16,6 +16,7 @@ from fractal_server.app.routes.auth import current_user_act_ver_prof
 from fractal_server.app.routes.pagination import PaginationRequest
 from fractal_server.app.routes.pagination import PaginationResponse
 from fractal_server.app.routes.pagination import get_pagination_params
+from fractal_server.app.schemas.v2.sharing import ProjectPermissions
 from fractal_server.images import SingleImage
 from fractal_server.images import SingleImageUpdate
 from fractal_server.images.tools import aggregate_attributes
@@ -26,7 +27,7 @@ from fractal_server.types import AttributeFilters
 from fractal_server.types import ImageAttributeValue
 from fractal_server.types import TypeFilters

-from ._aux_functions import
+from ._aux_functions import _get_dataset_check_access

 router = APIRouter()

@@ -64,8 +65,12 @@ async def post_new_image(
     user: UserOAuth = Depends(current_user_act_ver_prof),
     db: AsyncSession = Depends(get_async_db),
 ) -> Response:
-    output = await
-        project_id=project_id,
+    output = await _get_dataset_check_access(
+        project_id=project_id,
+        dataset_id=dataset_id,
+        user_id=user.id,
+        required_permissions=ProjectPermissions.WRITE,
+        db=db,
     )
     dataset = output["dataset"]

@@ -119,8 +124,12 @@ async def query_dataset_images(
     page = pagination.page
     page_size = pagination.page_size

-    output = await
-        project_id=project_id,
+    output = await _get_dataset_check_access(
+        project_id=project_id,
+        dataset_id=dataset_id,
+        user_id=user.id,
+        required_permissions=ProjectPermissions.READ,
+        db=db,
     )
     dataset = output["dataset"]
     images = dataset.images
@@ -187,8 +196,12 @@ async def delete_dataset_images(
     user: UserOAuth = Depends(current_user_act_ver_prof),
     db: AsyncSession = Depends(get_async_db),
 ) -> Response:
-    output = await
-        project_id=project_id,
+    output = await _get_dataset_check_access(
+        project_id=project_id,
+        dataset_id=dataset_id,
+        user_id=user.id,
+        required_permissions=ProjectPermissions.WRITE,
+        db=db,
     )
     dataset = output["dataset"]

@@ -231,10 +244,11 @@ async def patch_dataset_image(
     user: UserOAuth = Depends(current_user_act_ver_prof),
     db: AsyncSession = Depends(get_async_db),
 ):
-    output = await
+    output = await _get_dataset_check_access(
         project_id=project_id,
         dataset_id=dataset_id,
         user_id=user.id,
+        required_permissions=ProjectPermissions.WRITE,
         db=db,
     )
     db_dataset = output["dataset"]