fractal-server 2.18.0a3__py3-none-any.whl → 2.18.0a5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (80)
  1. fractal_server/__init__.py +1 -1
  2. fractal_server/app/models/v2/job.py +13 -2
  3. fractal_server/app/models/v2/resource.py +13 -0
  4. fractal_server/app/routes/admin/v2/__init__.py +10 -12
  5. fractal_server/app/routes/admin/v2/job.py +15 -15
  6. fractal_server/app/routes/admin/v2/task.py +7 -7
  7. fractal_server/app/routes/admin/v2/task_group.py +11 -11
  8. fractal_server/app/routes/admin/v2/task_group_lifecycle.py +20 -20
  9. fractal_server/app/routes/api/v2/__init__.py +47 -49
  10. fractal_server/app/routes/api/v2/_aux_functions.py +22 -47
  11. fractal_server/app/routes/api/v2/_aux_functions_task_lifecycle.py +4 -4
  12. fractal_server/app/routes/api/v2/_aux_functions_tasks.py +2 -2
  13. fractal_server/app/routes/api/v2/dataset.py +63 -73
  14. fractal_server/app/routes/api/v2/history.py +7 -5
  15. fractal_server/app/routes/api/v2/job.py +12 -12
  16. fractal_server/app/routes/api/v2/project.py +11 -11
  17. fractal_server/app/routes/api/v2/status_legacy.py +15 -29
  18. fractal_server/app/routes/api/v2/submit.py +65 -66
  19. fractal_server/app/routes/api/v2/task.py +15 -17
  20. fractal_server/app/routes/api/v2/task_collection.py +18 -18
  21. fractal_server/app/routes/api/v2/task_collection_custom.py +11 -13
  22. fractal_server/app/routes/api/v2/task_collection_pixi.py +9 -9
  23. fractal_server/app/routes/api/v2/task_group.py +18 -18
  24. fractal_server/app/routes/api/v2/task_group_lifecycle.py +26 -26
  25. fractal_server/app/routes/api/v2/task_version_update.py +5 -5
  26. fractal_server/app/routes/api/v2/workflow.py +18 -18
  27. fractal_server/app/routes/api/v2/workflow_import.py +11 -11
  28. fractal_server/app/routes/api/v2/workflowtask.py +10 -10
  29. fractal_server/app/routes/auth/_aux_auth.py +99 -0
  30. fractal_server/app/routes/auth/users.py +9 -0
  31. fractal_server/app/schemas/user.py +1 -1
  32. fractal_server/app/schemas/v2/__init__.py +48 -48
  33. fractal_server/app/schemas/v2/dataset.py +25 -13
  34. fractal_server/app/schemas/v2/dumps.py +9 -9
  35. fractal_server/app/schemas/v2/job.py +11 -11
  36. fractal_server/app/schemas/v2/project.py +3 -3
  37. fractal_server/app/schemas/v2/resource.py +13 -4
  38. fractal_server/app/schemas/v2/status_legacy.py +3 -3
  39. fractal_server/app/schemas/v2/task.py +6 -6
  40. fractal_server/app/schemas/v2/task_collection.py +4 -4
  41. fractal_server/app/schemas/v2/task_group.py +16 -16
  42. fractal_server/app/schemas/v2/workflow.py +16 -16
  43. fractal_server/app/schemas/v2/workflowtask.py +14 -14
  44. fractal_server/app/shutdown.py +6 -6
  45. fractal_server/config/_main.py +1 -1
  46. fractal_server/data_migrations/{2_18_1.py → 2_18_0.py} +2 -1
  47. fractal_server/main.py +8 -12
  48. fractal_server/migrations/versions/88270f589c9b_add_prevent_new_submissions.py +39 -0
  49. fractal_server/migrations/versions/f0702066b007_one_submitted_job_per_dataset.py +40 -0
  50. fractal_server/runner/v2/_local.py +3 -2
  51. fractal_server/runner/v2/_slurm_ssh.py +3 -2
  52. fractal_server/runner/v2/_slurm_sudo.py +3 -2
  53. fractal_server/runner/v2/runner.py +36 -17
  54. fractal_server/runner/v2/runner_functions.py +11 -14
  55. fractal_server/runner/v2/submit_workflow.py +22 -9
  56. fractal_server/tasks/v2/local/_utils.py +2 -2
  57. fractal_server/tasks/v2/local/collect.py +5 -6
  58. fractal_server/tasks/v2/local/collect_pixi.py +5 -6
  59. fractal_server/tasks/v2/local/deactivate.py +7 -7
  60. fractal_server/tasks/v2/local/deactivate_pixi.py +3 -3
  61. fractal_server/tasks/v2/local/delete.py +5 -5
  62. fractal_server/tasks/v2/local/reactivate.py +5 -5
  63. fractal_server/tasks/v2/local/reactivate_pixi.py +5 -5
  64. fractal_server/tasks/v2/ssh/collect.py +5 -5
  65. fractal_server/tasks/v2/ssh/collect_pixi.py +5 -5
  66. fractal_server/tasks/v2/ssh/deactivate.py +7 -7
  67. fractal_server/tasks/v2/ssh/deactivate_pixi.py +2 -2
  68. fractal_server/tasks/v2/ssh/delete.py +5 -5
  69. fractal_server/tasks/v2/ssh/reactivate.py +5 -5
  70. fractal_server/tasks/v2/ssh/reactivate_pixi.py +5 -5
  71. fractal_server/tasks/v2/utils_background.py +7 -7
  72. fractal_server/tasks/v2/utils_database.py +5 -5
  73. fractal_server/types/__init__.py +13 -4
  74. fractal_server/types/validators/__init__.py +3 -1
  75. fractal_server/types/validators/_common_validators.py +23 -1
  76. {fractal_server-2.18.0a3.dist-info → fractal_server-2.18.0a5.dist-info}/METADATA +1 -1
  77. {fractal_server-2.18.0a3.dist-info → fractal_server-2.18.0a5.dist-info}/RECORD +80 -78
  78. {fractal_server-2.18.0a3.dist-info → fractal_server-2.18.0a5.dist-info}/WHEEL +0 -0
  79. {fractal_server-2.18.0a3.dist-info → fractal_server-2.18.0a5.dist-info}/entry_points.txt +0 -0
  80. {fractal_server-2.18.0a3.dist-info → fractal_server-2.18.0a5.dist-info}/licenses/LICENSE +0 -0
fractal_server/app/routes/api/v2/_aux_functions.py

@@ -3,11 +3,10 @@ Auxiliary functions to get object from the database or perform simple checks
  """

  from typing import Any
- from typing import Literal
+ from typing import TypedDict

  from fastapi import HTTPException
  from fastapi import status
- from sqlalchemy.exc import MultipleResultsFound
  from sqlalchemy.orm.attributes import flag_modified
  from sqlmodel import select
  from sqlmodel.sql.expression import SelectOfScalar
@@ -23,7 +22,7 @@ from fractal_server.app.models.v2 import ProjectV2
  from fractal_server.app.models.v2 import TaskV2
  from fractal_server.app.models.v2 import WorkflowTaskV2
  from fractal_server.app.models.v2 import WorkflowV2
- from fractal_server.app.schemas.v2 import JobStatusTypeV2
+ from fractal_server.app.schemas.v2 import JobStatusType
  from fractal_server.app.schemas.v2 import ProjectPermissions
  from fractal_server.logger import set_logger

@@ -252,6 +251,11 @@ async def _check_project_exists(
          )


+ class DatasetOrProject(TypedDict):
+     dataset: DatasetV2
+     project: ProjectV2
+
+
  async def _get_dataset_check_access(
      *,
      project_id: int,
@@ -259,7 +263,7 @@ async def _get_dataset_check_access(
      user_id: int,
      required_permissions: ProjectPermissions,
      db: AsyncSession,
- ) -> dict[Literal["dataset", "project"], DatasetV2 | ProjectV2]:
+ ) -> DatasetOrProject:
      """
      Get a dataset and a project, after access control on the project

@@ -304,6 +308,11 @@ async def _get_dataset_check_access(
      return dict(dataset=dataset, project=project)


+ class JobAndProject(TypedDict):
+     job: JobV2
+     project: ProjectV2
+
+
  async def _get_job_check_access(
      *,
      project_id: int,
@@ -311,7 +320,7 @@ async def _get_job_check_access(
      user_id: int,
      required_permissions: ProjectPermissions,
      db: AsyncSession,
- ) -> dict[Literal["job", "project"], JobV2 | ProjectV2]:
+ ) -> JobAndProject:
      """
      Get a job and a project, after access control on the project

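The `DatasetOrProject` and `JobAndProject` classes above replace `dict[Literal[...], X | Y]` return annotations. A minimal, self-contained sketch of why this helps, using stand-in classes rather than the real `DatasetV2`/`ProjectV2` models: with a `TypedDict`, a type checker resolves `output["dataset"]` to a single type instead of a union that must be narrowed at every call site.

```python
# Illustrative only: stand-in classes, not fractal-server models.
from typing import TypedDict


class Dataset:  # stand-in for DatasetV2
    name = "my-dataset"


class Project:  # stand-in for ProjectV2
    name = "my-project"


class DatasetOrProject(TypedDict):
    dataset: Dataset
    project: Project


def get_dataset_and_project() -> DatasetOrProject:
    # With dict[Literal["dataset", "project"], Dataset | Project], the value type
    # is a union; with the TypedDict, each key has its own precise value type.
    return {"dataset": Dataset(), "project": Project()}


output = get_dataset_and_project()
print(output["dataset"].name)  # type checkers see Dataset here, no narrowing needed
```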
@@ -361,7 +370,7 @@ def _get_submitted_jobs_statement() -> SelectOfScalar:
          A sqlmodel statement that selects all `Job`s with
          `Job.status` equal to `submitted`.
      """
-     stm = select(JobV2).where(JobV2.status == JobStatusTypeV2.SUBMITTED)
+     stm = select(JobV2).where(JobV2.status == JobStatusType.SUBMITTED)
      return stm


@@ -371,7 +380,7 @@ async def _workflow_has_submitted_job(
  ) -> bool:
      res = await db.execute(
          select(JobV2.id)
-         .where(JobV2.status == JobStatusTypeV2.SUBMITTED)
+         .where(JobV2.status == JobStatusType.SUBMITTED)
          .where(JobV2.workflow_id == workflow_id)
          .limit(1)
      )
@@ -453,8 +462,9 @@ async def _workflow_insert_task(
      return wf_task


- async def clean_app_job_list_v2(
-     db: AsyncSession, jobs_list: list[int]
+ async def clean_app_job_list(
+     db: AsyncSession,
+     jobs_list: list[int],
  ) -> list[int]:
      """
      Remove from a job list all jobs with status different from submitted.
@@ -466,14 +476,14 @@ async def clean_app_job_list_v2(
      Return:
          List of IDs for submitted jobs.
      """
+     logger.info(f"[clean_app_job_list] START - {jobs_list=}.")
      stmt = select(JobV2).where(JobV2.id.in_(jobs_list))
      result = await db.execute(stmt)
      db_jobs_list = result.scalars().all()
      submitted_job_ids = [
-         job.id
-         for job in db_jobs_list
-         if job.status == JobStatusTypeV2.SUBMITTED
+         job.id for job in db_jobs_list if job.status == JobStatusType.SUBMITTED
      ]
+     logger.info(f"[clean_app_job_list] END - {submitted_job_ids=}.")
      return submitted_job_ids


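A minimal sketch of the submitted-job filtering pattern used above by `_get_submitted_jobs_statement` and `clean_app_job_list`, written against a toy `Job` model and an in-memory SQLite database rather than the real `JobV2` table:

```python
# Toy model and in-memory SQLite, for illustration only (not the real JobV2 table).
from enum import Enum

from sqlmodel import Field, Session, SQLModel, create_engine, select


class JobStatusType(str, Enum):
    SUBMITTED = "submitted"
    DONE = "done"


class Job(SQLModel, table=True):
    id: int | None = Field(default=None, primary_key=True)
    status: JobStatusType


engine = create_engine("sqlite://")
SQLModel.metadata.create_all(engine)

with Session(engine) as session:
    session.add(Job(status=JobStatusType.SUBMITTED))
    session.add(Job(status=JobStatusType.DONE))
    session.commit()

    # Same shape as _get_submitted_jobs_statement(): select jobs whose status
    # is SUBMITTED, then keep only their IDs, as clean_app_job_list does.
    stm = select(Job).where(Job.status == JobStatusType.SUBMITTED)
    submitted_job_ids = [job.id for job in session.exec(stm).all()]
    print(submitted_job_ids)  # [1]
```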
@@ -543,41 +553,6 @@ async def _get_workflowtask_or_404(
      return wftask


- async def _get_submitted_job_or_none(
-     *,
-     dataset_id: int,
-     workflow_id: int,
-     db: AsyncSession,
- ) -> JobV2 | None:
-     """
-     Get the submitted job for given dataset/workflow, if any.
-
-     This function also handles the invalid branch where more than one job
-     is found.
-
-     Args:
-         dataset_id:
-         workflow_id:
-         db:
-     """
-     res = await db.execute(
-         _get_submitted_jobs_statement()
-         .where(JobV2.dataset_id == dataset_id)
-         .where(JobV2.workflow_id == workflow_id)
-     )
-     try:
-         return res.scalars().one_or_none()
-     except MultipleResultsFound as e:
-         error_msg = (
-             f"Multiple running jobs found for {dataset_id=} and {workflow_id=}."
-         )
-         logger.error(f"{error_msg} Original error: {str(e)}.")
-         raise HTTPException(
-             status_code=status.HTTP_422_UNPROCESSABLE_CONTENT,
-             detail=error_msg,
-         )
-
-
  async def _get_user_resource_id(user_id: int, db: AsyncSession) -> int | None:
      res = await db.execute(
          select(Resource.id)
fractal_server/app/routes/api/v2/_aux_functions_task_lifecycle.py

@@ -14,8 +14,8 @@ from fractal_server.app.models.v2 import TaskGroupV2
  from fractal_server.app.models.v2 import TaskV2
  from fractal_server.app.models.v2 import WorkflowTaskV2
  from fractal_server.app.models.v2 import WorkflowV2
- from fractal_server.app.schemas.v2 import JobStatusTypeV2
- from fractal_server.app.schemas.v2 import TaskGroupActivityStatusV2
+ from fractal_server.app.schemas.v2 import JobStatusType
+ from fractal_server.app.schemas.v2 import TaskGroupActivityStatus
  from fractal_server.logger import set_logger
  from fractal_server.tasks.v2.utils_package_names import normalize_package_name

@@ -171,7 +171,7 @@ async def check_no_ongoing_activity(
      stm = (
          select(TaskGroupActivityV2)
          .where(TaskGroupActivityV2.taskgroupv2_id == task_group_id)
-         .where(TaskGroupActivityV2.status == TaskGroupActivityStatusV2.ONGOING)
+         .where(TaskGroupActivityV2.status == TaskGroupActivityStatus.ONGOING)
      )
      res = await db.execute(stm)
      ongoing_activities = res.scalars().all()
@@ -213,7 +213,7 @@ async def check_no_submitted_job(
          .join(TaskV2, WorkflowTaskV2.task_id == TaskV2.id)
          .where(WorkflowTaskV2.order >= JobV2.first_task_index)
          .where(WorkflowTaskV2.order <= JobV2.last_task_index)
-         .where(JobV2.status == JobStatusTypeV2.SUBMITTED)
+         .where(JobV2.status == JobStatusType.SUBMITTED)
          .where(TaskV2.taskgroupv2_id == task_group_id)
      )
      res = await db.execute(stm)
fractal_server/app/routes/api/v2/_aux_functions_tasks.py

@@ -27,7 +27,7 @@ from fractal_server.app.routes.auth._aux_auth import (
  from fractal_server.app.routes.auth._aux_auth import (
      _verify_user_belongs_to_group,
  )
- from fractal_server.app.schemas.v2 import TaskGroupActivityActionV2
+ from fractal_server.app.schemas.v2 import TaskGroupActivityAction
  from fractal_server.images.tools import merge_type_filters
  from fractal_server.logger import set_logger

@@ -252,7 +252,7 @@ async def _get_collection_task_group_activity_status_message(
      res = await db.execute(
          select(TaskGroupActivityV2)
          .where(TaskGroupActivityV2.taskgroupv2_id == task_group_id)
-         .where(TaskGroupActivityV2.action == TaskGroupActivityActionV2.COLLECT)
+         .where(TaskGroupActivityV2.action == TaskGroupActivityAction.COLLECT)
      )
      task_group_activity_list = res.scalars().all()
      if len(task_group_activity_list) > 1:
fractal_server/app/routes/api/v2/dataset.py

@@ -1,3 +1,4 @@
+ import os
  from pathlib import Path

  from fastapi import APIRouter
@@ -13,11 +14,11 @@ from fractal_server.app.models import UserOAuth
  from fractal_server.app.models.v2 import DatasetV2
  from fractal_server.app.models.v2 import JobV2
  from fractal_server.app.routes.auth import current_user_act_ver_prof
- from fractal_server.app.schemas.v2 import DatasetCreateV2
- from fractal_server.app.schemas.v2 import DatasetReadV2
- from fractal_server.app.schemas.v2 import DatasetUpdateV2
- from fractal_server.app.schemas.v2.dataset import DatasetExportV2
- from fractal_server.app.schemas.v2.dataset import DatasetImportV2
+ from fractal_server.app.schemas.v2 import DatasetCreate
+ from fractal_server.app.schemas.v2 import DatasetRead
+ from fractal_server.app.schemas.v2 import DatasetUpdate
+ from fractal_server.app.schemas.v2.dataset import DatasetExport
+ from fractal_server.app.schemas.v2.dataset import DatasetImport
  from fractal_server.app.schemas.v2.sharing import ProjectPermissions
  from fractal_server.string_tools import sanitize_string
  from fractal_server.urls import normalize_url
@@ -31,15 +32,15 @@ router = APIRouter()

  @router.post(
      "/project/{project_id}/dataset/",
-     response_model=DatasetReadV2,
+     response_model=DatasetRead,
      status_code=status.HTTP_201_CREATED,
  )
  async def create_dataset(
      project_id: int,
-     dataset: DatasetCreateV2,
+     dataset: DatasetCreate,
      user: UserOAuth = Depends(current_user_act_ver_prof),
      db: AsyncSession = Depends(get_async_db),
- ) -> DatasetReadV2 | None:
+ ) -> DatasetRead | None:
      """
      Add new dataset to current project
      """
@@ -50,56 +51,54 @@ async def create_dataset(
          db=db,
      )

-     if dataset.zarr_dir is None:
-         db_dataset = DatasetV2(
-             project_id=project_id,
-             zarr_dir="__PLACEHOLDER__",
-             **dataset.model_dump(exclude={"zarr_dir"}),
-         )
-         db.add(db_dataset)
-         await db.commit()
-         await db.refresh(db_dataset)
-         path = (
-             f"{user.project_dirs[0]}/fractal/"
-             f"{project_id}_{sanitize_string(project.name)}/"
-             f"{db_dataset.id}_{sanitize_string(db_dataset.name)}"
-         )
-         normalized_path = normalize_url(path)
-         db_dataset.zarr_dir = normalized_path
+     db_dataset = DatasetV2(
+         project_id=project_id,
+         zarr_dir="__PLACEHOLDER__",
+         **dataset.model_dump(exclude={"project_dir", "zarr_subfolder"}),
+     )
+     db.add(db_dataset)
+     await db.commit()
+     await db.refresh(db_dataset)

-         db.add(db_dataset)
-         await db.commit()
-         await db.refresh(db_dataset)
+     if dataset.project_dir is None:
+         project_dir = user.project_dirs[0]
      else:
-         if not any(
-             Path(dataset.zarr_dir).is_relative_to(project_dir)
-             for project_dir in user.project_dirs
-         ):
+         if dataset.project_dir not in user.project_dirs:
+             await db.delete(db_dataset)
+             await db.commit()
              raise HTTPException(
-                 status_code=status.HTTP_422_UNPROCESSABLE_CONTENT,
-                 detail=(
-                     "Dataset zarr_dir is not relative to any of the user "
-                     "project directories."
-                 ),
+                 status_code=status.HTTP_403_FORBIDDEN,
+                 detail=f"You are not allowed to use {dataset.project_dir=}.",
              )
+         project_dir = dataset.project_dir

-         db_dataset = DatasetV2(project_id=project_id, **dataset.model_dump())
-         db.add(db_dataset)
-         await db.commit()
-         await db.refresh(db_dataset)
+     if dataset.zarr_subfolder is None:
+         zarr_subfolder = (
+             f"fractal/{project_id}_{sanitize_string(project.name)}/"
+             f"{db_dataset.id}_{sanitize_string(db_dataset.name)}"
+         )
+     else:
+         zarr_subfolder = dataset.zarr_subfolder
+
+     zarr_dir = os.path.join(project_dir, zarr_subfolder)
+     db_dataset.zarr_dir = normalize_url(zarr_dir)
+
+     db.add(db_dataset)
+     await db.commit()
+     await db.refresh(db_dataset)

      return db_dataset


  @router.get(
      "/project/{project_id}/dataset/",
-     response_model=list[DatasetReadV2],
+     response_model=list[DatasetRead],
  )
  async def read_dataset_list(
      project_id: int,
      user: UserOAuth = Depends(current_user_act_ver_prof),
      db: AsyncSession = Depends(get_async_db),
- ) -> list[DatasetReadV2] | None:
+ ) -> list[DatasetRead] | None:
      """
      Get dataset list for given project
      """
@@ -122,14 +121,14 @@ async def read_dataset_list(

  @router.get(
      "/project/{project_id}/dataset/{dataset_id}/",
-     response_model=DatasetReadV2,
+     response_model=DatasetRead,
  )
  async def read_dataset(
      project_id: int,
      dataset_id: int,
      user: UserOAuth = Depends(current_user_act_ver_prof),
      db: AsyncSession = Depends(get_async_db),
- ) -> DatasetReadV2 | None:
+ ) -> DatasetRead | None:
      """
      Get info on a dataset associated to the current project
      """
@@ -146,15 +145,15 @@ async def read_dataset(

  @router.patch(
      "/project/{project_id}/dataset/{dataset_id}/",
-     response_model=DatasetReadV2,
+     response_model=DatasetRead,
  )
  async def update_dataset(
      project_id: int,
      dataset_id: int,
-     dataset_update: DatasetUpdateV2,
+     dataset_update: DatasetUpdate,
      user: UserOAuth = Depends(current_user_act_ver_prof),
      db: AsyncSession = Depends(get_async_db),
- ) -> DatasetReadV2 | None:
+ ) -> DatasetRead | None:
      """
      Edit a dataset associated to the current project
      """
@@ -168,27 +167,6 @@ async def update_dataset(
      )
      db_dataset = output["dataset"]

-     if dataset_update.zarr_dir is not None:
-         if db_dataset.images:
-             raise HTTPException(
-                 status_code=status.HTTP_422_UNPROCESSABLE_CONTENT,
-                 detail=(
-                     "Cannot modify `zarr_dir` because the dataset has a "
-                     "non-empty image list."
-                 ),
-             )
-         if not any(
-             Path(dataset_update.zarr_dir).is_relative_to(project_dir)
-             for project_dir in user.project_dirs
-         ):
-             raise HTTPException(
-                 status_code=status.HTTP_422_UNPROCESSABLE_CONTENT,
-                 detail=(
-                     "Dataset zarr_dir is not relative to any of the user "
-                     "project directories."
-                 ),
-             )
-
      for key, value in dataset_update.model_dump(exclude_unset=True).items():
          setattr(db_dataset, key, value)

@@ -243,14 +221,14 @@ async def delete_dataset(

  @router.get(
      "/project/{project_id}/dataset/{dataset_id}/export/",
-     response_model=DatasetExportV2,
+     response_model=DatasetExport,
  )
  async def export_dataset(
      project_id: int,
      dataset_id: int,
      user: UserOAuth = Depends(current_user_act_ver_prof),
      db: AsyncSession = Depends(get_async_db),
- ) -> DatasetExportV2 | None:
+ ) -> DatasetExport | None:
      """
      Export an existing dataset
      """
@@ -268,15 +246,15 @@ async def export_dataset(

  @router.post(
      "/project/{project_id}/dataset/import/",
-     response_model=DatasetReadV2,
+     response_model=DatasetRead,
      status_code=status.HTTP_201_CREATED,
  )
  async def import_dataset(
      project_id: int,
-     dataset: DatasetImportV2,
+     dataset: DatasetImport,
      user: UserOAuth = Depends(current_user_act_ver_prof),
      db: AsyncSession = Depends(get_async_db),
- ) -> DatasetReadV2 | None:
+ ) -> DatasetRead | None:
      """
      Import an existing dataset into a project
      """
@@ -289,6 +267,18 @@ async def import_dataset(
          db=db,
      )

+     if not any(
+         Path(dataset.zarr_dir).is_relative_to(project_dir)
+         for project_dir in user.project_dirs
+     ):
+         raise HTTPException(
+             status_code=status.HTTP_422_UNPROCESSABLE_CONTENT,
+             detail=(
+                 f"{dataset.zarr_dir=} is not relative to any of user's project "
+                 "dirs."
+             ),
+         )
+
      for image in dataset.images:
          if not image.zarr_url.startswith(dataset.zarr_dir):
              raise HTTPException(
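A standalone sketch of the `zarr_dir` check that `import_dataset` now performs, using `pathlib.Path.is_relative_to` against a made-up list of project directories:

```python
# Standalone illustration; paths are made up.
from pathlib import Path

project_dirs = ["/data/user1", "/scratch/user1"]


def zarr_dir_is_allowed(zarr_dir: str) -> bool:
    # Same check as in import_dataset: zarr_dir must live under one project dir.
    return any(Path(zarr_dir).is_relative_to(d) for d in project_dirs)


print(zarr_dir_is_allowed("/data/user1/fractal/7_p/42_d"))  # True
print(zarr_dir_is_allowed("/tmp/elsewhere"))  # False
```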
fractal_server/app/routes/api/v2/history.py

@@ -33,7 +33,7 @@ from fractal_server.images.tools import filter_image_list
  from fractal_server.logger import set_logger

  from ._aux_functions import _get_dataset_check_access
- from ._aux_functions import _get_submitted_job_or_none
+ from ._aux_functions import _get_submitted_jobs_statement
  from ._aux_functions import _get_workflow_check_access
  from ._aux_functions_history import _verify_workflow_and_dataset_access
  from ._aux_functions_history import get_history_run_or_404
@@ -90,11 +90,13 @@ async def get_workflow_tasks_statuses(
          db=db,
      )

-     running_job = await _get_submitted_job_or_none(
-         db=db,
-         dataset_id=dataset_id,
-         workflow_id=workflow_id,
+     res = await db.execute(
+         _get_submitted_jobs_statement()
+         .where(JobV2.dataset_id == dataset_id)
+         .where(JobV2.workflow_id == workflow_id)
      )
+     running_job = res.scalars().one_or_none()
+
      if running_job is not None:
          running_wftasks = workflow.task_list[
              running_job.first_task_index : running_job.last_task_index + 1
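The inlined query above relies on standard `one_or_none()` semantics: `None` for zero matches, the single row for exactly one, and `MultipleResultsFound` for more than one, which is the case the removed `_get_submitted_job_or_none` helper used to catch and which the new `f0702066b007_one_submitted_job_per_dataset` migration listed above is presumably meant to rule out at the database level. A toy sketch, not fractal-server code:

```python
# Toy model and in-memory SQLite, for illustration only.
from sqlalchemy.exc import MultipleResultsFound
from sqlmodel import Field, Session, SQLModel, create_engine, select


class ToyJob(SQLModel, table=True):
    id: int | None = Field(default=None, primary_key=True)
    dataset_id: int
    status: str = "submitted"


engine = create_engine("sqlite://")
SQLModel.metadata.create_all(engine)

with Session(engine) as session:
    session.add(ToyJob(dataset_id=1))
    session.commit()

    stm = select(ToyJob).where(ToyJob.dataset_id == 1, ToyJob.status == "submitted")
    print(session.exec(stm).one_or_none())  # the single submitted job

    session.add(ToyJob(dataset_id=1))
    session.commit()
    try:
        session.exec(stm).one_or_none()
    except MultipleResultsFound:
        print("more than one submitted job for this dataset")
```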
fractal_server/app/routes/api/v2/job.py

@@ -18,8 +18,8 @@ from fractal_server.app.models.v2 import LinkUserProjectV2
  from fractal_server.app.routes.auth import current_user_act_ver_prof
  from fractal_server.app.routes.aux._job import _write_shutdown_file
  from fractal_server.app.routes.aux._runner import _check_shutdown_is_supported
- from fractal_server.app.schemas.v2 import JobReadV2
- from fractal_server.app.schemas.v2 import JobStatusTypeV2
+ from fractal_server.app.schemas.v2 import JobRead
+ from fractal_server.app.schemas.v2 import JobStatusType
  from fractal_server.app.schemas.v2.sharing import ProjectPermissions
  from fractal_server.runner.filenames import WORKFLOW_LOG_FILENAME
  from fractal_server.zip_tools import _zip_folder_to_byte_stream_iterator
@@ -39,12 +39,12 @@ async def zip_folder_threaded(folder: str) -> Iterator[bytes]:
  router = APIRouter()


- @router.get("/job/", response_model=list[JobReadV2])
+ @router.get("/job/", response_model=list[JobRead])
  async def get_user_jobs(
      user: UserOAuth = Depends(current_user_act_ver_prof),
      log: bool = True,
      db: AsyncSession = Depends(get_async_db),
- ) -> list[JobReadV2]:
+ ) -> list[JobRead]:
      """
      Returns all the jobs of the current user
      """
@@ -68,14 +68,14 @@ async def get_user_jobs(

  @router.get(
      "/project/{project_id}/workflow/{workflow_id}/job/",
-     response_model=list[JobReadV2],
+     response_model=list[JobRead],
  )
  async def get_workflow_jobs(
      project_id: int,
      workflow_id: int,
      user: UserOAuth = Depends(current_user_act_ver_prof),
      db: AsyncSession = Depends(get_async_db),
- ) -> list[JobReadV2] | None:
+ ) -> list[JobRead] | None:
      """
      Returns all the jobs related to a specific workflow
      """
@@ -99,7 +99,7 @@ async def get_latest_job(
      dataset_id: int,
      user: UserOAuth = Depends(current_user_act_ver_prof),
      db: AsyncSession = Depends(get_async_db),
- ) -> JobReadV2:
+ ) -> JobRead:
      await _get_workflow_check_access(
          project_id=project_id,
          workflow_id=workflow_id,
@@ -127,7 +127,7 @@ async def get_latest_job(

  @router.get(
      "/project/{project_id}/job/{job_id}/",
-     response_model=JobReadV2,
+     response_model=JobRead,
  )
  async def read_job(
      project_id: int,
@@ -135,7 +135,7 @@ async def read_job(
      show_tmp_logs: bool = False,
      user: UserOAuth = Depends(current_user_act_ver_prof),
      db: AsyncSession = Depends(get_async_db),
- ) -> JobReadV2 | None:
+ ) -> JobRead | None:
      """
      Return info on an existing job
      """
@@ -150,7 +150,7 @@ async def read_job(
      job = output["job"]
      await db.close()

-     if show_tmp_logs and (job.status == JobStatusTypeV2.SUBMITTED):
+     if show_tmp_logs and (job.status == JobStatusType.SUBMITTED):
          try:
              with open(f"{job.working_dir}/{WORKFLOW_LOG_FILENAME}") as f:
                  job.log = f.read()
@@ -194,14 +194,14 @@ async def download_job_logs(

  @router.get(
      "/project/{project_id}/job/",
-     response_model=list[JobReadV2],
+     response_model=list[JobRead],
  )
  async def get_job_list(
      project_id: int,
      user: UserOAuth = Depends(current_user_act_ver_prof),
      log: bool = True,
      db: AsyncSession = Depends(get_async_db),
- ) -> list[JobReadV2] | None:
+ ) -> list[JobRead] | None:
      """
      Get job list for given project
      """
fractal_server/app/routes/api/v2/project.py

@@ -15,10 +15,10 @@ from fractal_server.app.routes.auth import current_user_act_ver_prof
  from fractal_server.app.routes.aux.validate_user_profile import (
      validate_user_profile,
  )
- from fractal_server.app.schemas.v2 import ProjectCreateV2
+ from fractal_server.app.schemas.v2 import ProjectCreate
  from fractal_server.app.schemas.v2 import ProjectPermissions
- from fractal_server.app.schemas.v2 import ProjectReadV2
- from fractal_server.app.schemas.v2 import ProjectUpdateV2
+ from fractal_server.app.schemas.v2 import ProjectRead
+ from fractal_server.app.schemas.v2 import ProjectUpdate
  from fractal_server.logger import set_logger

  from ._aux_functions import _check_project_exists
@@ -29,7 +29,7 @@ logger = set_logger(__name__)
  router = APIRouter()


- @router.get("/project/", response_model=list[ProjectReadV2])
+ @router.get("/project/", response_model=list[ProjectRead])
  async def get_list_project(
      is_owner: bool = True,
      user: UserOAuth = Depends(current_user_act_ver_prof),
@@ -51,12 +51,12 @@ async def get_list_project(
      return project_list


- @router.post("/project/", response_model=ProjectReadV2, status_code=201)
+ @router.post("/project/", response_model=ProjectRead, status_code=201)
  async def create_project(
-     project: ProjectCreateV2,
+     project: ProjectCreate,
      user: UserOAuth = Depends(current_user_act_ver_prof),
      db: AsyncSession = Depends(get_async_db),
- ) -> ProjectReadV2 | None:
+ ) -> ProjectRead | None:
      """
      Create new project
      """
@@ -92,12 +92,12 @@ async def create_project(
      return db_project


- @router.get("/project/{project_id}/", response_model=ProjectReadV2)
+ @router.get("/project/{project_id}/", response_model=ProjectRead)
  async def read_project(
      project_id: int,
      user: UserOAuth = Depends(current_user_act_ver_prof),
      db: AsyncSession = Depends(get_async_db),
- ) -> ProjectReadV2 | None:
+ ) -> ProjectRead | None:
      """
      Return info on an existing project
      """
@@ -111,10 +111,10 @@ async def read_project(
      return project


- @router.patch("/project/{project_id}/", response_model=ProjectReadV2)
+ @router.patch("/project/{project_id}/", response_model=ProjectRead)
  async def update_project(
      project_id: int,
-     project_update: ProjectUpdateV2,
+     project_update: ProjectUpdate,
      user: UserOAuth = Depends(current_user_act_ver_prof),
      db: AsyncSession = Depends(get_async_db),
  ):