fractal-server 2.17.2__py3-none-any.whl → 2.18.0a0__py3-none-any.whl
This diff compares two publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the packages as they appear in their respective public registries.
- fractal_server/__init__.py +1 -1
- fractal_server/app/models/linkuserproject.py +40 -0
- fractal_server/app/routes/admin/v2/__init__.py +2 -0
- fractal_server/app/routes/admin/v2/job.py +17 -6
- fractal_server/app/routes/admin/v2/sharing.py +103 -0
- fractal_server/app/routes/admin/v2/task.py +1 -0
- fractal_server/app/routes/api/v2/__init__.py +2 -0
- fractal_server/app/routes/api/v2/_aux_functions.py +43 -17
- fractal_server/app/routes/api/v2/_aux_functions_history.py +8 -3
- fractal_server/app/routes/api/v2/_aux_functions_sharing.py +97 -0
- fractal_server/app/routes/api/v2/dataset.py +23 -17
- fractal_server/app/routes/api/v2/history.py +21 -11
- fractal_server/app/routes/api/v2/images.py +22 -8
- fractal_server/app/routes/api/v2/job.py +28 -12
- fractal_server/app/routes/api/v2/pre_submission_checks.py +13 -6
- fractal_server/app/routes/api/v2/project.py +37 -14
- fractal_server/app/routes/api/v2/sharing.py +312 -0
- fractal_server/app/routes/api/v2/status_legacy.py +7 -4
- fractal_server/app/routes/api/v2/submit.py +11 -5
- fractal_server/app/routes/api/v2/task_version_update.py +7 -4
- fractal_server/app/routes/api/v2/workflow.py +23 -11
- fractal_server/app/routes/api/v2/workflow_import.py +14 -12
- fractal_server/app/routes/api/v2/workflowtask.py +41 -7
- fractal_server/app/schemas/v2/__init__.py +7 -0
- fractal_server/app/schemas/v2/sharing.py +99 -0
- fractal_server/migrations/versions/bc0e8b3327a7_project_sharing.py +72 -0
- fractal_server/runner/executors/slurm_common/_batching.py +4 -10
- fractal_server/runner/executors/slurm_ssh/runner.py +1 -1
- fractal_server/runner/executors/slurm_sudo/runner.py +1 -1
- {fractal_server-2.17.2.dist-info → fractal_server-2.18.0a0.dist-info}/METADATA +3 -2
- {fractal_server-2.17.2.dist-info → fractal_server-2.18.0a0.dist-info}/RECORD +34 -29
- {fractal_server-2.17.2.dist-info → fractal_server-2.18.0a0.dist-info}/WHEEL +0 -0
- {fractal_server-2.17.2.dist-info → fractal_server-2.18.0a0.dist-info}/entry_points.txt +0 -0
- {fractal_server-2.17.2.dist-info → fractal_server-2.18.0a0.dist-info}/licenses/LICENSE +0 -0
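
The rendered hunks below cover the v2 API route modules and show the core of this pre-release: project sharing. The per-endpoint access-control helpers are renamed to `*_check_access` and now take a `required_permissions` argument drawn from the new `ProjectPermissions` enum added in `fractal_server/app/schemas/v2/sharing.py` (+99 lines, not rendered here). Only the three member names used at the call sites are visible in this diff; the sketch below is an assumption about how the enum might be defined.

```python
# Hedged sketch only: the real definition lives in fractal_server/app/schemas/v2/sharing.py,
# which this diff adds but does not render. Member names come from the call sites below;
# the base class and the string values are assumptions.
from enum import Enum


class ProjectPermissions(str, Enum):
    READ = "read"        # used by GET-style endpoints (datasets, jobs, history, images)
    WRITE = "write"      # used by create/update/delete endpoints
    EXECUTE = "execute"  # used by job stop and project deletion
```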
fractal_server/app/routes/api/v2/dataset.py

```diff
@@ -16,11 +16,12 @@ from fractal_server.app.schemas.v2 import DatasetReadV2
 from fractal_server.app.schemas.v2 import DatasetUpdateV2
 from fractal_server.app.schemas.v2.dataset import DatasetExportV2
 from fractal_server.app.schemas.v2.dataset import DatasetImportV2
+from fractal_server.app.schemas.v2.sharing import ProjectPermissions
 from fractal_server.string_tools import sanitize_string
 from fractal_server.urls import normalize_url

-from ._aux_functions import …
-from ._aux_functions import …
+from ._aux_functions import _get_dataset_check_access
+from ._aux_functions import _get_project_check_access
 from ._aux_functions import _get_submitted_jobs_statement

 router = APIRouter()
@@ -40,8 +41,11 @@ async def create_dataset(
     """
     Add new dataset to current project
     """
-    project = await …
-        project_id=project_id, …
+    project = await _get_project_check_access(
+        project_id=project_id,
+        user_id=user.id,
+        required_permissions=ProjectPermissions.WRITE,
+        db=db,
     )

     if dataset.zarr_dir is None:
@@ -86,8 +90,11 @@ async def read_dataset_list(
     Get dataset list for given project
     """
     # Access control
-    project = await …
-        project_id=project_id, …
+    project = await _get_project_check_access(
+        project_id=project_id,
+        user_id=user.id,
+        required_permissions=ProjectPermissions.READ,
+        db=db,
     )
     # Find datasets of the current project. Note: this select/where approach
     # has much better scaling than refreshing all elements of
@@ -96,7 +103,6 @@ async def read_dataset_list(
     stm = select(DatasetV2).where(DatasetV2.project_id == project.id)
     res = await db.execute(stm)
     dataset_list = res.scalars().all()
-    await db.close()
     return dataset_list


@@ -113,14 +119,14 @@ async def read_dataset(
     """
     Get info on a dataset associated to the current project
     """
-    output = await …
+    output = await _get_dataset_check_access(
         project_id=project_id,
         dataset_id=dataset_id,
         user_id=user.id,
+        required_permissions=ProjectPermissions.READ,
         db=db,
     )
     dataset = output["dataset"]
-    await db.close()
     return dataset


@@ -139,10 +145,11 @@ async def update_dataset(
     Edit a dataset associated to the current project
     """

-    output = await …
+    output = await _get_dataset_check_access(
         project_id=project_id,
         dataset_id=dataset_id,
         user_id=user.id,
+        required_permissions=ProjectPermissions.WRITE,
         db=db,
     )
     db_dataset = output["dataset"]
@@ -161,7 +168,6 @@ async def update_dataset(

     await db.commit()
     await db.refresh(db_dataset)
-    await db.close()
     return db_dataset


@@ -178,10 +184,11 @@ async def delete_dataset(
     """
     Delete a dataset associated to the current project
     """
-    output = await …
+    output = await _get_dataset_check_access(
         project_id=project_id,
         dataset_id=dataset_id,
         user_id=user.id,
+        required_permissions=ProjectPermissions.WRITE,
         db=db,
     )
     dataset = output["dataset"]
@@ -221,14 +228,13 @@ async def export_dataset(
     """
     Export an existing dataset
     """
-    dict_dataset_project = await …
+    dict_dataset_project = await _get_dataset_check_access(
         project_id=project_id,
         dataset_id=dataset_id,
         user_id=user.id,
+        required_permissions=ProjectPermissions.READ,
         db=db,
     )
-    await db.close()
-
     dataset = dict_dataset_project["dataset"]

     return dataset
@@ -250,9 +256,10 @@ async def import_dataset(
     """

     # Preliminary checks
-    await …
+    await _get_project_check_access(
         project_id=project_id,
         user_id=user.id,
+        required_permissions=ProjectPermissions.WRITE,
         db=db,
     )

@@ -274,6 +281,5 @@ async def import_dataset(
     db.add(db_dataset)
     await db.commit()
     await db.refresh(db_dataset)
-    await db.close()

     return db_dataset
```
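
Throughout dataset.py the former owner-only checks are replaced by `_get_project_check_access` and `_get_dataset_check_access`, which also take the caller's required permission. Their implementations live in `_aux_functions.py` (+43/-17, not rendered); judging only from the call sites, they accept keyword arguments and either return the requested object (or a dict holding it under keys such as `"dataset"`) or raise. A possible shape, with the import paths, permission ordering, lookup logic and error details all being assumptions rather than the actual implementation:

```python
# Hypothetical sketch of the renamed access helper, inferred only from its call sites in this diff.
from fastapi import HTTPException, status

from fractal_server.app.models import LinkUserProjectV2  # assumed import path
from fractal_server.app.models.v2 import ProjectV2  # assumed import path

# Assumed ordering of permission levels (read < write < execute).
_PERMISSION_ORDER = {"read": 0, "write": 1, "execute": 2}


async def _get_project_check_access(
    *,
    project_id: int,
    user_id: int,
    required_permissions,
    db,
):
    """Return the project if the user's link row grants at least `required_permissions`."""
    link = await db.get(LinkUserProjectV2, (project_id, user_id))
    if link is None or not link.is_verified:
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail="Not allowed on this project.",
        )
    if _PERMISSION_ORDER[link.permissions] < _PERMISSION_ORDER[required_permissions]:
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail="Insufficient permissions.",
        )
    project = await db.get(ProjectV2, project_id)
    if project is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"Project {project_id} not found.",
        )
    return project
```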
fractal_server/app/routes/api/v2/history.py

```diff
@@ -24,6 +24,7 @@ from fractal_server.app.schemas.v2 import HistoryUnitRead
 from fractal_server.app.schemas.v2 import HistoryUnitStatus
 from fractal_server.app.schemas.v2 import HistoryUnitStatusWithUnset
 from fractal_server.app.schemas.v2 import ImageLogsRequest
+from fractal_server.app.schemas.v2.sharing import ProjectPermissions
 from fractal_server.images.status_tools import IMAGE_STATUS_KEY
 from fractal_server.images.status_tools import enrich_images_unsorted_async
 from fractal_server.images.tools import aggregate_attributes
@@ -31,13 +32,13 @@ from fractal_server.images.tools import aggregate_types
 from fractal_server.images.tools import filter_image_list
 from fractal_server.logger import set_logger

-from ._aux_functions import …
+from ._aux_functions import _get_dataset_check_access
 from ._aux_functions import _get_submitted_job_or_none
-from ._aux_functions import …
+from ._aux_functions import _get_workflow_check_access
 from ._aux_functions_history import _verify_workflow_and_dataset_access
 from ._aux_functions_history import get_history_run_or_404
 from ._aux_functions_history import get_history_unit_or_404
-from ._aux_functions_history import …
+from ._aux_functions_history import get_wftask_check_access
 from ._aux_functions_history import read_log_file
 from .images import ImagePage
 from .images import ImageQuery
@@ -74,16 +75,18 @@ async def get_workflow_tasks_statuses(
     db: AsyncSession = Depends(get_async_db),
 ) -> JSONResponse:
     # Access control
-    workflow = await …
+    workflow = await _get_workflow_check_access(
         project_id=project_id,
         workflow_id=workflow_id,
         user_id=user.id,
+        required_permissions=ProjectPermissions.READ,
         db=db,
     )
-    await …
+    await _get_dataset_check_access(
         project_id=project_id,
         dataset_id=dataset_id,
         user_id=user.id,
+        required_permissions=ProjectPermissions.READ,
         db=db,
     )

@@ -185,11 +188,12 @@ async def get_history_run_list(
     db: AsyncSession = Depends(get_async_db),
 ) -> list[HistoryRunReadAggregated]:
     # Access control
-    await …
+    await get_wftask_check_access(
         project_id=project_id,
         dataset_id=dataset_id,
         workflowtask_id=workflowtask_id,
         user_id=user.id,
+        required_permissions=ProjectPermissions.READ,
         db=db,
     )

@@ -278,11 +282,12 @@ async def get_history_run_units(
     pagination: PaginationRequest = Depends(get_pagination_params),
 ) -> PaginationResponse[HistoryUnitRead]:
     # Access control
-    await …
+    await get_wftask_check_access(
         project_id=project_id,
         dataset_id=dataset_id,
         workflowtask_id=workflowtask_id,
         user_id=user.id,
+        required_permissions=ProjectPermissions.READ,
         db=db,
     )

@@ -337,11 +342,12 @@ async def get_history_images(
     pagination: PaginationRequest = Depends(get_pagination_params),
 ) -> ImagePage:
     # Access control and object retrieval
-    wftask = await …
+    wftask = await get_wftask_check_access(
         project_id=project_id,
         dataset_id=dataset_id,
         workflowtask_id=workflowtask_id,
         user_id=user.id,
+        required_permissions=ProjectPermissions.READ,
         db=db,
     )
     res = await _verify_workflow_and_dataset_access(
@@ -349,6 +355,7 @@ async def get_history_images(
         workflow_id=wftask.workflow_id,
         dataset_id=dataset_id,
         user_id=user.id,
+        required_permissions=ProjectPermissions.READ,
         db=db,
     )
     dataset = res["dataset"]
@@ -418,11 +425,12 @@ async def get_image_log(
     db: AsyncSession = Depends(get_async_db),
 ) -> JSONResponse:
     # Access control
-    wftask = await …
+    wftask = await get_wftask_check_access(
         project_id=project_id,
         dataset_id=request_data.dataset_id,
         workflowtask_id=request_data.workflowtask_id,
         user_id=user.id,
+        required_permissions=ProjectPermissions.READ,
         db=db,
     )

@@ -475,11 +483,12 @@ async def get_history_unit_log(
     db: AsyncSession = Depends(get_async_db),
 ) -> JSONResponse:
     # Access control
-    wftask = await …
+    wftask = await get_wftask_check_access(
         project_id=project_id,
         dataset_id=dataset_id,
         workflowtask_id=workflowtask_id,
         user_id=user.id,
+        required_permissions=ProjectPermissions.READ,
         db=db,
     )

@@ -530,10 +539,11 @@ async def get_dataset_history(
     timestamp.
     """
     # Access control
-    await …
+    await _get_dataset_check_access(
         project_id=project_id,
         dataset_id=dataset_id,
         user_id=user.id,
+        required_permissions=ProjectPermissions.READ,
         db=db,
     )

```
fractal_server/app/routes/api/v2/images.py

```diff
@@ -16,6 +16,7 @@ from fractal_server.app.routes.auth import current_user_act_ver_prof
 from fractal_server.app.routes.pagination import PaginationRequest
 from fractal_server.app.routes.pagination import PaginationResponse
 from fractal_server.app.routes.pagination import get_pagination_params
+from fractal_server.app.schemas.v2.sharing import ProjectPermissions
 from fractal_server.images import SingleImage
 from fractal_server.images import SingleImageUpdate
 from fractal_server.images.tools import aggregate_attributes
@@ -26,7 +27,7 @@ from fractal_server.types import AttributeFilters
 from fractal_server.types import ImageAttributeValue
 from fractal_server.types import TypeFilters

-from ._aux_functions import …
+from ._aux_functions import _get_dataset_check_access

 router = APIRouter()

@@ -64,8 +65,12 @@ async def post_new_image(
     user: UserOAuth = Depends(current_user_act_ver_prof),
     db: AsyncSession = Depends(get_async_db),
 ) -> Response:
-    output = await …
-        project_id=project_id, …
+    output = await _get_dataset_check_access(
+        project_id=project_id,
+        dataset_id=dataset_id,
+        user_id=user.id,
+        required_permissions=ProjectPermissions.WRITE,
+        db=db,
     )
     dataset = output["dataset"]

@@ -119,8 +124,12 @@ async def query_dataset_images(
     page = pagination.page
     page_size = pagination.page_size

-    output = await …
-        project_id=project_id, …
+    output = await _get_dataset_check_access(
+        project_id=project_id,
+        dataset_id=dataset_id,
+        user_id=user.id,
+        required_permissions=ProjectPermissions.READ,
+        db=db,
     )
     dataset = output["dataset"]
     images = dataset.images
@@ -187,8 +196,12 @@ async def delete_dataset_images(
     user: UserOAuth = Depends(current_user_act_ver_prof),
     db: AsyncSession = Depends(get_async_db),
 ) -> Response:
-    output = await …
-        project_id=project_id, …
+    output = await _get_dataset_check_access(
+        project_id=project_id,
+        dataset_id=dataset_id,
+        user_id=user.id,
+        required_permissions=ProjectPermissions.WRITE,
+        db=db,
     )
     dataset = output["dataset"]

@@ -231,10 +244,11 @@ async def patch_dataset_image(
     user: UserOAuth = Depends(current_user_act_ver_prof),
     db: AsyncSession = Depends(get_async_db),
 ):
-    output = await …
+    output = await _get_dataset_check_access(
         project_id=project_id,
         dataset_id=dataset_id,
         user_id=user.id,
+        required_permissions=ProjectPermissions.WRITE,
         db=db,
     )
     db_dataset = output["dataset"]
```
fractal_server/app/routes/api/v2/job.py

```diff
@@ -20,12 +20,13 @@ from fractal_server.app.routes.aux._job import _write_shutdown_file
 from fractal_server.app.routes.aux._runner import _check_shutdown_is_supported
 from fractal_server.app.schemas.v2 import JobReadV2
 from fractal_server.app.schemas.v2 import JobStatusTypeV2
+from fractal_server.app.schemas.v2.sharing import ProjectPermissions
 from fractal_server.runner.filenames import WORKFLOW_LOG_FILENAME
 from fractal_server.zip_tools import _zip_folder_to_byte_stream_iterator

-from ._aux_functions import …
-from ._aux_functions import …
-from ._aux_functions import …
+from ._aux_functions import _get_job_check_access
+from ._aux_functions import _get_project_check_access
+from ._aux_functions import _get_workflow_check_access


 # https://docs.python.org/3/library/asyncio-task.html#asyncio.to_thread
@@ -53,6 +54,7 @@ async def get_user_jobs(
             LinkUserProjectV2, LinkUserProjectV2.project_id == JobV2.project_id
         )
         .where(LinkUserProjectV2.user_id == user.id)
+        .where(LinkUserProjectV2.is_owner.is_(True))
     )
     res = await db.execute(stm)
     job_list = res.scalars().all()
@@ -77,8 +79,12 @@ async def get_workflow_jobs(
     """
     Returns all the jobs related to a specific workflow
     """
-    await …
-        project_id=project_id, …
+    await _get_workflow_check_access(
+        project_id=project_id,
+        workflow_id=workflow_id,
+        user_id=user.id,
+        required_permissions=ProjectPermissions.READ,
+        db=db,
     )
     stm = select(JobV2).where(JobV2.workflow_id == workflow_id)
     res = await db.execute(stm)
@@ -94,8 +100,12 @@ async def get_latest_job(
     user: UserOAuth = Depends(current_user_act_ver_prof),
     db: AsyncSession = Depends(get_async_db),
 ) -> JobReadV2:
-    await …
-        project_id=project_id, …
+    await _get_workflow_check_access(
+        project_id=project_id,
+        workflow_id=workflow_id,
+        user_id=user.id,
+        required_permissions=ProjectPermissions.READ,
+        db=db,
     )
     stm = (
         select(JobV2)
@@ -130,10 +140,11 @@ async def read_job(
     Return info on an existing job
     """

-    output = await …
+    output = await _get_job_check_access(
         project_id=project_id,
         job_id=job_id,
         user_id=user.id,
+        required_permissions=ProjectPermissions.READ,
         db=db,
     )
     job = output["job"]
@@ -162,10 +173,11 @@ async def download_job_logs(
     """
     Download zipped job folder
     """
-    output = await …
+    output = await _get_job_check_access(
         project_id=project_id,
         job_id=job_id,
         user_id=user.id,
+        required_permissions=ProjectPermissions.READ,
         db=db,
     )
     job = output["job"]
@@ -193,8 +205,11 @@ async def get_job_list(
     """
     Get job list for given project
     """
-    project = await …
-        project_id=project_id, …
+    project = await _get_project_check_access(
+        project_id=project_id,
+        user_id=user.id,
+        required_permissions=ProjectPermissions.READ,
+        db=db,
     )

     stm = select(JobV2).where(JobV2.project_id == project.id)
@@ -225,10 +240,11 @@ async def stop_job(
     _check_shutdown_is_supported()

     # Get job from DB
-    output = await …
+    output = await _get_job_check_access(
         project_id=project_id,
         job_id=job_id,
         user_id=user.id,
+        required_permissions=ProjectPermissions.EXECUTE,
         db=db,
     )
     job = output["job"]
```
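
Two behavioural points in job.py go beyond the mechanical rename: `get_user_jobs` now restricts the listing to projects whose link row has `is_owner=True`, and `stop_job` is the only job endpoint that demands more than read access. A compact restatement of the permission levels assigned above, purely illustrative and copied from the `required_permissions` arguments in the hunks:

```python
# Permission levels required by the job endpoints after this change
# (function names as in the diff; values mirror the ProjectPermissions members used above).
JOB_ENDPOINT_PERMISSIONS = {
    "get_workflow_jobs": "read",
    "get_latest_job": "read",
    "read_job": "read",
    "download_job_logs": "read",
    "get_job_list": "read",
    "stop_job": "execute",  # stopping a job now requires EXECUTE
}
# get_user_jobs takes no permission argument; it filters on LinkUserProjectV2.is_owner instead.
```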
fractal_server/app/routes/api/v2/pre_submission_checks.py

```diff
@@ -11,14 +11,15 @@ from fractal_server.app.models import UserOAuth
 from fractal_server.app.routes.auth import current_user_act_ver_prof
 from fractal_server.app.schemas.v2 import HistoryUnitStatus
 from fractal_server.app.schemas.v2 import TaskType
+from fractal_server.app.schemas.v2.sharing import ProjectPermissions
 from fractal_server.images.status_tools import IMAGE_STATUS_KEY
 from fractal_server.images.status_tools import enrich_images_unsorted_async
 from fractal_server.images.tools import aggregate_types
 from fractal_server.images.tools import filter_image_list
 from fractal_server.types import AttributeFilters

-from ._aux_functions import …
-from ._aux_functions import …
+from ._aux_functions import _get_dataset_check_access
+from ._aux_functions import _get_workflow_task_check_access
 from .images import ImageQuery

 router = APIRouter()
@@ -37,8 +38,12 @@ async def verify_unique_types(
     db: AsyncSession = Depends(get_async_db),
 ) -> list[str]:
     # Get dataset
-    output = await …
-        project_id=project_id, …
+    output = await _get_dataset_check_access(
+        project_id=project_id,
+        dataset_id=dataset_id,
+        user_id=user.id,
+        required_permissions=ProjectPermissions.READ,
+        db=db,
     )
     dataset = output["dataset"]

@@ -97,11 +102,12 @@ async def check_non_processed_images(
     user: UserOAuth = Depends(current_user_act_ver_prof),
     db: AsyncSession = Depends(get_async_db),
 ) -> JSONResponse:
-    db_workflow_task, db_workflow = await …
+    db_workflow_task, db_workflow = await _get_workflow_task_check_access(
         project_id=project_id,
         workflow_task_id=workflowtask_id,
         workflow_id=workflow_id,
         user_id=user.id,
+        required_permissions=ProjectPermissions.READ,
         db=db,
     )

@@ -121,10 +127,11 @@ async def check_non_processed_images(
         # Skip check if previous task is converter
         return JSONResponse(status_code=200, content=[])

-    res = await …
+    res = await _get_dataset_check_access(
         project_id=project_id,
         dataset_id=dataset_id,
         user_id=user.id,
+        required_permissions=ProjectPermissions.READ,
         db=db,
     )
     dataset = res["dataset"]
```
fractal_server/app/routes/api/v2/project.py

```diff
@@ -16,20 +16,22 @@ from fractal_server.app.routes.aux.validate_user_profile import (
     validate_user_profile,
 )
 from fractal_server.app.schemas.v2 import ProjectCreateV2
+from fractal_server.app.schemas.v2 import ProjectPermissions
 from fractal_server.app.schemas.v2 import ProjectReadV2
 from fractal_server.app.schemas.v2 import ProjectUpdateV2
-from fractal_server.logger import reset_logger_handlers
 from fractal_server.logger import set_logger

 from ._aux_functions import _check_project_exists
-from ._aux_functions import …
+from ._aux_functions import _get_project_check_access
 from ._aux_functions import _get_submitted_jobs_statement

+logger = set_logger(__name__)
 router = APIRouter()


 @router.get("/project/", response_model=list[ProjectReadV2])
 async def get_list_project(
+    is_owner: bool = True,
     user: UserOAuth = Depends(current_user_act_ver_prof),
     db: AsyncSession = Depends(get_async_db),
 ) -> list[ProjectV2]:
@@ -40,6 +42,8 @@ async def get_list_project(
         select(ProjectV2)
         .join(LinkUserProjectV2, LinkUserProjectV2.project_id == ProjectV2.id)
         .where(LinkUserProjectV2.user_id == user.id)
+        .where(LinkUserProjectV2.is_owner == is_owner)
+        .where(LinkUserProjectV2.is_verified.is_(True))
     )
     res = await db.execute(stm)
     project_list = res.scalars().all()
@@ -73,7 +77,13 @@ async def create_project(
     db.add(db_project)
     await db.flush()

-    link = LinkUserProjectV2(…
+    link = LinkUserProjectV2(
+        project_id=db_project.id,
+        user_id=user.id,
+        is_owner=True,
+        is_verified=True,
+        permissions=ProjectPermissions.EXECUTE,
+    )
     db.add(link)

     await db.commit()
@@ -91,8 +101,11 @@ async def read_project(
     """
     Return info on an existing project
     """
-    project = await …
-        project_id=project_id, …
+    project = await _get_project_check_access(
+        project_id=project_id,
+        user_id=user.id,
+        required_permissions=ProjectPermissions.READ,
+        db=db,
     )
     await db.close()
     return project
@@ -105,8 +118,11 @@ async def update_project(
     user: UserOAuth = Depends(current_user_act_ver_prof),
     db: AsyncSession = Depends(get_async_db),
 ):
-    project = await …
-        project_id=project_id, …
+    project = await _get_project_check_access(
+        project_id=project_id,
+        user_id=user.id,
+        required_permissions=ProjectPermissions.WRITE,
+        db=db,
     )

     # Check that there is no project with the same user and name
@@ -134,10 +150,18 @@ async def delete_project(
     Delete project
     """

-    project = await …
-        project_id=project_id, …
+    project = await _get_project_check_access(
+        project_id=project_id,
+        user_id=user.id,
+        required_permissions=ProjectPermissions.EXECUTE,
+        db=db,
     )
-
+    link_user_project = await db.get(LinkUserProjectV2, (project_id, user.id))
+    if not link_user_project.is_owner:
+        raise HTTPException(
+            status_code=status.HTTP_403_FORBIDDEN,
+            detail="Only the owner can delete a Project.",
+        )

     # Fail if there exist jobs that are submitted and in relation with the
     # current project.
@@ -154,13 +178,12 @@ async def delete_project(
             ),
         )

-    logger.…
+    logger.debug(f"Add project {project.id} to deletion.")
     await db.delete(project)

-    logger.…
+    logger.debug("Commit changes to db")
     await db.commit()

-    logger.…
-    reset_logger_handlers(logger)
+    logger.debug("Everything has been deleted correctly.")

     return Response(status_code=status.HTTP_204_NO_CONTENT)
```
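
The `create_project` and `delete_project` hunks show the new shape of the user-project link: it now records ownership, a verification flag, and a permission level, and `db.get(LinkUserProjectV2, (project_id, user.id))` implies a composite primary key in that order. The model itself is extended in `fractal_server/app/models/linkuserproject.py` (+40 lines) together with the `bc0e8b3327a7_project_sharing` migration, neither of which is rendered here; the sketch below is an assumption based only on the fields used at the call sites.

```python
# Hedged sketch only: field names come from the call sites in this diff; column types,
# defaults, foreign-key targets and table name are assumptions.
from sqlmodel import Field, SQLModel


class LinkUserProjectV2(SQLModel, table=True):
    """User-to-project link, extended with sharing metadata in 2.18.0a0."""

    project_id: int = Field(foreign_key="projectv2.id", primary_key=True)
    user_id: int = Field(foreign_key="user_oauth.id", primary_key=True)
    is_owner: bool = False     # True for the user who created the project
    is_verified: bool = False  # owner-created links are verified immediately (see create_project)
    permissions: str = "read"  # one of ProjectPermissions: "read" / "write" / "execute"
```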