fractal-server 2.17.2__py3-none-any.whl → 2.18.0__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (108)
  1. fractal_server/__init__.py +1 -1
  2. fractal_server/__main__.py +2 -1
  3. fractal_server/app/models/linkuserproject.py +40 -0
  4. fractal_server/app/models/security.py +7 -5
  5. fractal_server/app/models/v2/job.py +13 -2
  6. fractal_server/app/models/v2/resource.py +13 -0
  7. fractal_server/app/routes/admin/v2/__init__.py +11 -11
  8. fractal_server/app/routes/admin/v2/accounting.py +2 -2
  9. fractal_server/app/routes/admin/v2/job.py +34 -23
  10. fractal_server/app/routes/admin/v2/sharing.py +103 -0
  11. fractal_server/app/routes/admin/v2/task.py +9 -8
  12. fractal_server/app/routes/admin/v2/task_group.py +94 -16
  13. fractal_server/app/routes/admin/v2/task_group_lifecycle.py +20 -20
  14. fractal_server/app/routes/api/__init__.py +0 -9
  15. fractal_server/app/routes/api/v2/__init__.py +47 -47
  16. fractal_server/app/routes/api/v2/_aux_functions.py +65 -64
  17. fractal_server/app/routes/api/v2/_aux_functions_history.py +8 -3
  18. fractal_server/app/routes/api/v2/_aux_functions_sharing.py +97 -0
  19. fractal_server/app/routes/api/v2/_aux_functions_task_lifecycle.py +4 -4
  20. fractal_server/app/routes/api/v2/_aux_functions_tasks.py +2 -2
  21. fractal_server/app/routes/api/v2/dataset.py +89 -77
  22. fractal_server/app/routes/api/v2/history.py +28 -16
  23. fractal_server/app/routes/api/v2/images.py +22 -8
  24. fractal_server/app/routes/api/v2/job.py +40 -24
  25. fractal_server/app/routes/api/v2/pre_submission_checks.py +13 -6
  26. fractal_server/app/routes/api/v2/project.py +48 -25
  27. fractal_server/app/routes/api/v2/sharing.py +311 -0
  28. fractal_server/app/routes/api/v2/status_legacy.py +22 -33
  29. fractal_server/app/routes/api/v2/submit.py +76 -71
  30. fractal_server/app/routes/api/v2/task.py +15 -17
  31. fractal_server/app/routes/api/v2/task_collection.py +18 -18
  32. fractal_server/app/routes/api/v2/task_collection_custom.py +11 -13
  33. fractal_server/app/routes/api/v2/task_collection_pixi.py +9 -9
  34. fractal_server/app/routes/api/v2/task_group.py +18 -18
  35. fractal_server/app/routes/api/v2/task_group_lifecycle.py +26 -26
  36. fractal_server/app/routes/api/v2/task_version_update.py +12 -9
  37. fractal_server/app/routes/api/v2/workflow.py +41 -29
  38. fractal_server/app/routes/api/v2/workflow_import.py +25 -23
  39. fractal_server/app/routes/api/v2/workflowtask.py +25 -17
  40. fractal_server/app/routes/auth/_aux_auth.py +100 -0
  41. fractal_server/app/routes/auth/current_user.py +0 -63
  42. fractal_server/app/routes/auth/group.py +1 -30
  43. fractal_server/app/routes/auth/router.py +2 -0
  44. fractal_server/app/routes/auth/users.py +9 -0
  45. fractal_server/app/routes/auth/viewer_paths.py +43 -0
  46. fractal_server/app/schemas/user.py +29 -12
  47. fractal_server/app/schemas/user_group.py +0 -15
  48. fractal_server/app/schemas/v2/__init__.py +55 -48
  49. fractal_server/app/schemas/v2/dataset.py +35 -13
  50. fractal_server/app/schemas/v2/dumps.py +9 -9
  51. fractal_server/app/schemas/v2/job.py +11 -11
  52. fractal_server/app/schemas/v2/project.py +3 -3
  53. fractal_server/app/schemas/v2/resource.py +13 -4
  54. fractal_server/app/schemas/v2/sharing.py +99 -0
  55. fractal_server/app/schemas/v2/status_legacy.py +3 -3
  56. fractal_server/app/schemas/v2/task.py +6 -6
  57. fractal_server/app/schemas/v2/task_collection.py +4 -4
  58. fractal_server/app/schemas/v2/task_group.py +16 -16
  59. fractal_server/app/schemas/v2/workflow.py +16 -16
  60. fractal_server/app/schemas/v2/workflowtask.py +14 -14
  61. fractal_server/app/security/__init__.py +1 -1
  62. fractal_server/app/shutdown.py +6 -6
  63. fractal_server/config/__init__.py +0 -6
  64. fractal_server/config/_data.py +0 -79
  65. fractal_server/config/_main.py +6 -1
  66. fractal_server/data_migrations/2_18_0.py +30 -0
  67. fractal_server/images/models.py +1 -2
  68. fractal_server/main.py +72 -11
  69. fractal_server/migrations/versions/7910eed4cf97_user_project_dirs_and_usergroup_viewer_.py +60 -0
  70. fractal_server/migrations/versions/88270f589c9b_add_prevent_new_submissions.py +39 -0
  71. fractal_server/migrations/versions/bc0e8b3327a7_project_sharing.py +72 -0
  72. fractal_server/migrations/versions/f0702066b007_one_submitted_job_per_dataset.py +40 -0
  73. fractal_server/runner/config/_slurm.py +2 -0
  74. fractal_server/runner/executors/slurm_common/_batching.py +4 -10
  75. fractal_server/runner/executors/slurm_common/slurm_config.py +1 -0
  76. fractal_server/runner/executors/slurm_ssh/runner.py +1 -1
  77. fractal_server/runner/executors/slurm_sudo/runner.py +1 -1
  78. fractal_server/runner/v2/_local.py +4 -3
  79. fractal_server/runner/v2/_slurm_ssh.py +4 -3
  80. fractal_server/runner/v2/_slurm_sudo.py +4 -3
  81. fractal_server/runner/v2/runner.py +36 -17
  82. fractal_server/runner/v2/runner_functions.py +11 -14
  83. fractal_server/runner/v2/submit_workflow.py +22 -9
  84. fractal_server/tasks/v2/local/_utils.py +2 -2
  85. fractal_server/tasks/v2/local/collect.py +5 -6
  86. fractal_server/tasks/v2/local/collect_pixi.py +5 -6
  87. fractal_server/tasks/v2/local/deactivate.py +7 -7
  88. fractal_server/tasks/v2/local/deactivate_pixi.py +3 -3
  89. fractal_server/tasks/v2/local/delete.py +5 -5
  90. fractal_server/tasks/v2/local/reactivate.py +5 -5
  91. fractal_server/tasks/v2/local/reactivate_pixi.py +5 -5
  92. fractal_server/tasks/v2/ssh/collect.py +5 -5
  93. fractal_server/tasks/v2/ssh/collect_pixi.py +5 -5
  94. fractal_server/tasks/v2/ssh/deactivate.py +7 -7
  95. fractal_server/tasks/v2/ssh/deactivate_pixi.py +2 -2
  96. fractal_server/tasks/v2/ssh/delete.py +5 -5
  97. fractal_server/tasks/v2/ssh/reactivate.py +5 -5
  98. fractal_server/tasks/v2/ssh/reactivate_pixi.py +5 -5
  99. fractal_server/tasks/v2/utils_background.py +7 -7
  100. fractal_server/tasks/v2/utils_database.py +5 -5
  101. fractal_server/types/__init__.py +22 -0
  102. fractal_server/types/validators/__init__.py +3 -0
  103. fractal_server/types/validators/_common_validators.py +32 -0
  104. {fractal_server-2.17.2.dist-info → fractal_server-2.18.0.dist-info}/METADATA +3 -2
  105. {fractal_server-2.17.2.dist-info → fractal_server-2.18.0.dist-info}/RECORD +108 -98
  106. {fractal_server-2.17.2.dist-info → fractal_server-2.18.0.dist-info}/WHEEL +0 -0
  107. {fractal_server-2.17.2.dist-info → fractal_server-2.18.0.dist-info}/entry_points.txt +0 -0
  108. {fractal_server-2.17.2.dist-info → fractal_server-2.18.0.dist-info}/licenses/LICENSE +0 -0
fractal_server/app/routes/api/v2/status_legacy.py (+22 -33)
@@ -1,20 +1,19 @@
 from fastapi import APIRouter
 from fastapi import Depends
-from fastapi import HTTPException
-from fastapi import status
 
 from fractal_server.app.db import AsyncSession
 from fractal_server.app.db import get_async_db
 from fractal_server.app.models import UserOAuth
 from fractal_server.app.models.v2 import JobV2
 from fractal_server.app.routes.auth import current_user_act_ver_prof
-from fractal_server.app.schemas.v2.status_legacy import LegacyStatusReadV2
-from fractal_server.app.schemas.v2.status_legacy import WorkflowTaskStatusTypeV2
+from fractal_server.app.schemas.v2.sharing import ProjectPermissions
+from fractal_server.app.schemas.v2.status_legacy import LegacyStatusRead
+from fractal_server.app.schemas.v2.status_legacy import WorkflowTaskStatusType
 from fractal_server.logger import set_logger
 
-from ._aux_functions import _get_dataset_check_owner
+from ._aux_functions import _get_dataset_check_access
 from ._aux_functions import _get_submitted_jobs_statement
-from ._aux_functions import _get_workflow_check_owner
+from ._aux_functions import _get_workflow_check_access
 
 router = APIRouter()
 
@@ -23,7 +22,7 @@ logger = set_logger(__name__)
 
 @router.get(
     "/project/{project_id}/status-legacy/",
-    response_model=LegacyStatusReadV2,
+    response_model=LegacyStatusRead,
 )
 async def get_workflowtask_status(
     project_id: int,
@@ -31,7 +30,7 @@ async def get_workflowtask_status(
     workflow_id: int,
     user: UserOAuth = Depends(current_user_act_ver_prof),
     db: AsyncSession = Depends(get_async_db),
-) -> LegacyStatusReadV2 | None:
+) -> LegacyStatusRead | None:
     """
     Extract the status of all `WorkflowTaskV2` of a given `WorkflowV2` that ran
     on a given `DatasetV2`.
@@ -42,43 +41,33 @@ async def get_workflowtask_status(
     order). See fractal-server GitHub issues: 793, 1083.
     """
     # Get the dataset DB entry
-    output = await _get_dataset_check_owner(
+    output = await _get_dataset_check_access(
         project_id=project_id,
         dataset_id=dataset_id,
         user_id=user.id,
+        required_permissions=ProjectPermissions.READ,
         db=db,
     )
     dataset = output["dataset"]
 
     # Get the workflow DB entry
-    workflow = await _get_workflow_check_owner(
+    workflow = await _get_workflow_check_access(
         project_id=project_id,
         workflow_id=workflow_id,
         user_id=user.id,
+        required_permissions=ProjectPermissions.READ,
         db=db,
     )
 
     # Check whether there exists a submitted job associated to this
     # workflow/dataset pair. If it does exist, it will be used later.
     # If there are multiple jobs, raise an error.
-    stm = _get_submitted_jobs_statement()
-    stm = stm.where(JobV2.dataset_id == dataset_id)
-    stm = stm.where(JobV2.workflow_id == workflow_id)
-    res = await db.execute(stm)
-    running_jobs = res.scalars().all()
-    if len(running_jobs) == 0:
-        running_job = None
-    elif len(running_jobs) == 1:
-        running_job = running_jobs[0]
-    else:
-        string_ids = str([job.id for job in running_jobs])[1:-1]
-        raise HTTPException(
-            status_code=status.HTTP_422_UNPROCESSABLE_CONTENT,
-            detail=(
-                f"Cannot get WorkflowTaskV2 statuses as DatasetV2 {dataset.id}"
-                f" is linked to multiple active jobs: {string_ids}."
-            ),
-        )
+    res = await db.execute(
+        _get_submitted_jobs_statement()
+        .where(JobV2.dataset_id == dataset_id)
+        .where(JobV2.workflow_id == workflow_id)
+    )
+    running_job = res.scalars().one_or_none()
 
     # Initialize empty dictionary for WorkflowTaskV2 status
     workflow_tasks_status_dict: dict = {}
@@ -113,18 +102,18 @@
         ]
         try:
            first_submitted_index = running_job_statuses.index(
-                WorkflowTaskStatusTypeV2.SUBMITTED
+                WorkflowTaskStatusType.SUBMITTED
            )
        except ValueError:
            logger.warning(
                f"Job {running_job.id} is submitted but its task list does not"
-                f" contain a {WorkflowTaskStatusTypeV2.SUBMITTED} task."
+                f" contain a {WorkflowTaskStatusType.SUBMITTED} task."
            )
            first_submitted_index = 0
 
        for wftask in running_job_wftasks[first_submitted_index:]:
            workflow_tasks_status_dict[wftask.id] = (
-                WorkflowTaskStatusTypeV2.SUBMITTED
+                WorkflowTaskStatusType.SUBMITTED
            )
 
        # The last workflow task that is included in the submitted job is also
@@ -154,7 +143,7 @@
            # If a wftask ID was not found, ignore it and continue
            continue
        clean_workflow_tasks_status_dict[str(wf_task.id)] = wf_task_status
-        if wf_task_status == WorkflowTaskStatusTypeV2.FAILED:
+        if wf_task_status == WorkflowTaskStatusType.FAILED:
            # Starting from the beginning of `workflow.task_list`, stop the
            # first time that you hit a failed job
            break
@@ -163,5 +152,5 @@
            # first time that you hit `last_valid_wftask_id``
            break
 
-    response_body = LegacyStatusReadV2(status=clean_workflow_tasks_status_dict)
+    response_body = LegacyStatusRead(status=clean_workflow_tasks_status_dict)
     return response_body
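
The status_legacy.py hunks above replace the manual zero/one/many branching over submitted jobs with a single scalars().one_or_none() call; the new f0702066b007_one_submitted_job_per_dataset migration presumably enforces the at-most-one invariant at the database level, so the multi-job error path is no longer needed. A minimal sketch of the query pattern, using an illustrative Job model and an in-memory SQLite engine rather than fractal-server's own models:

from sqlmodel import Field, Session, SQLModel, create_engine, select


class Job(SQLModel, table=True):
    # Illustrative stand-in for JobV2, not the real fractal-server model.
    id: int | None = Field(default=None, primary_key=True)
    dataset_id: int
    workflow_id: int
    status: str


engine = create_engine("sqlite://")
SQLModel.metadata.create_all(engine)

with Session(engine) as session:
    session.add(Job(dataset_id=1, workflow_id=2, status="submitted"))
    session.commit()
    stm = (
        select(Job)
        .where(Job.dataset_id == 1)
        .where(Job.workflow_id == 2)
        .where(Job.status == "submitted")
    )
    # one_or_none(): None for zero matches, the single row for exactly one
    # match, and sqlalchemy.exc.MultipleResultsFound if several rows match.
    running_job = session.exec(stm).one_or_none()
    print(running_job)
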
fractal_server/app/routes/api/v2/submit.py (+76 -71)
@@ -9,6 +9,7 @@ from fastapi import HTTPException
 from fastapi import Request
 from fastapi import status
 from sqlmodel import select
+from sqlmodel import update
 
 from fractal_server.app.db import AsyncSession
 from fractal_server.app.db import get_async_db
@@ -23,10 +24,11 @@ from fractal_server.app.routes.auth import current_user_act_ver_prof
 from fractal_server.app.routes.aux.validate_user_profile import (
     validate_user_profile,
 )
-from fractal_server.app.schemas.v2 import JobCreateV2
-from fractal_server.app.schemas.v2 import JobReadV2
-from fractal_server.app.schemas.v2 import JobStatusTypeV2
+from fractal_server.app.schemas.v2 import JobCreate
+from fractal_server.app.schemas.v2 import JobRead
+from fractal_server.app.schemas.v2 import JobStatusType
 from fractal_server.app.schemas.v2 import ResourceType
+from fractal_server.app.schemas.v2.sharing import ProjectPermissions
 from fractal_server.config import get_settings
 from fractal_server.logger import set_logger
 from fractal_server.runner.set_start_and_last_task_index import (
@@ -35,9 +37,9 @@ from fractal_server.runner.set_start_and_last_task_index import (
 from fractal_server.runner.v2.submit_workflow import submit_workflow
 from fractal_server.syringe import Inject
 
-from ._aux_functions import _get_dataset_check_owner
-from ._aux_functions import _get_workflow_check_owner
-from ._aux_functions import clean_app_job_list_v2
+from ._aux_functions import _get_dataset_check_access
+from ._aux_functions import _get_workflow_check_access
+from ._aux_functions import clean_app_job_list
 from ._aux_functions_tasks import _check_type_filters_compatibility
 
 FRACTAL_CACHE_DIR = ".fractal_cache"
@@ -48,34 +50,33 @@ logger = set_logger(__name__)
 @router.post(
     "/project/{project_id}/job/submit/",
     status_code=status.HTTP_202_ACCEPTED,
-    response_model=JobReadV2,
+    response_model=JobRead,
 )
-async def apply_workflow(
+async def submit_job(
     project_id: int,
     workflow_id: int,
     dataset_id: int,
-    job_create: JobCreateV2,
+    job_create: JobCreate,
     background_tasks: BackgroundTasks,
     request: Request,
     user: UserOAuth = Depends(current_user_act_ver_prof),
     db: AsyncSession = Depends(get_async_db),
-) -> JobReadV2 | None:
-    # Remove non-submitted V2 jobs from the app state when the list grows
+) -> JobRead | None:
+    # Remove non-submitted Jobs from the app state when the list grows
     # beyond a threshold
-    # NOTE: this may lead to a race condition on `app.state.jobsV2` if two
-    # requests take place at the same time and `clean_app_job_list_v2` is
+    # NOTE: this may lead to a race condition on `app.state.jobs` if two
+    # requests take place at the same time and `clean_app_job_list` is
     # somewhat slow.
     settings = Inject(get_settings)
-    if len(request.app.state.jobsV2) > settings.FRACTAL_API_MAX_JOB_LIST_LENGTH:
-        new_jobs_list = await clean_app_job_list_v2(
-            db, request.app.state.jobsV2
-        )
-        request.app.state.jobsV2 = new_jobs_list
+    if len(request.app.state.jobs) > settings.FRACTAL_API_MAX_JOB_LIST_LENGTH:
+        new_jobs_list = await clean_app_job_list(db, request.app.state.jobs)
+        request.app.state.jobs = new_jobs_list
 
-    output = await _get_dataset_check_owner(
+    output = await _get_dataset_check_access(
         project_id=project_id,
         dataset_id=dataset_id,
         user_id=user.id,
+        required_permissions=ProjectPermissions.EXECUTE,
         db=db,
     )
     project = output["project"]
@@ -92,8 +93,12 @@
         detail="Project resource does not match with user's resource",
     )
 
-    workflow = await _get_workflow_check_owner(
-        project_id=project_id, workflow_id=workflow_id, user_id=user.id, db=db
+    workflow = await _get_workflow_check_access(
+        project_id=project_id,
+        workflow_id=workflow_id,
+        user_id=user.id,
+        required_permissions=ProjectPermissions.EXECUTE,
+        db=db,
     )
     num_tasks = len(workflow.task_list)
     if num_tasks == 0:
@@ -141,35 +146,15 @@
        user=user,
        db=db,
    )
-
-    # Check that no other job with the same dataset_id is SUBMITTED
-    stm = (
-        select(JobV2)
-        .where(JobV2.dataset_id == dataset_id)
-        .where(JobV2.status == JobStatusTypeV2.SUBMITTED)
-    )
-    res = await db.execute(stm)
-    if res.scalars().all():
+    if resource.prevent_new_submissions:
         raise HTTPException(
             status_code=status.HTTP_422_UNPROCESSABLE_CONTENT,
             detail=(
-                f"Dataset {dataset_id} is already in use in submitted job(s)."
+                f"The '{resource.name}' resource does not currently accept "
+                "new job submissions."
             ),
         )
 
-    if job_create.slurm_account is not None:
-        if job_create.slurm_account not in user.slurm_accounts:
-            raise HTTPException(
-                status_code=status.HTTP_422_UNPROCESSABLE_CONTENT,
-                detail=(
-                    f"SLURM account '{job_create.slurm_account}' is not "
-                    "among those available to the current user"
-                ),
-            )
-    else:
-        if len(user.slurm_accounts) > 0:
-            job_create.slurm_account = user.slurm_accounts[0]
-
     # User appropriate FractalSSH object
     if resource.type == ResourceType.SLURM_SSH:
         ssh_config = dict(
@@ -192,6 +177,35 @@
     else:
         fractal_ssh = None
 
+    # Assign `job_create.slurm_account`
+    if job_create.slurm_account is not None:
+        if job_create.slurm_account not in user.slurm_accounts:
+            raise HTTPException(
+                status_code=status.HTTP_422_UNPROCESSABLE_CONTENT,
+                detail=(
+                    f"SLURM account '{job_create.slurm_account}' is not "
+                    "among those available to the current user"
+                ),
+            )
+    else:
+        if len(user.slurm_accounts) > 0:
+            job_create.slurm_account = user.slurm_accounts[0]
+
+    # Check that no other job with the same dataset_id is SUBMITTED
+    stm = (
+        select(JobV2)
+        .where(JobV2.dataset_id == dataset_id)
+        .where(JobV2.status == JobStatusType.SUBMITTED)
+    )
+    res = await db.execute(stm)
+    if res.scalars().all():
+        raise HTTPException(
+            status_code=status.HTTP_422_UNPROCESSABLE_CONTENT,
+            detail=(
+                f"Dataset {dataset_id} is already in use in submitted job(s)."
+            ),
+        )
+
     # Add new Job object to DB
     job = JobV2(
         project_id=project_id,
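
The hunk above moves the SLURM-account assignment after the FractalSSH setup: an explicitly requested account must be one of the user's configured accounts, otherwise the first configured account (if any) becomes the default. A standalone sketch of that rule, with an illustrative function name and a plain ValueError instead of HTTPException:

def resolve_slurm_account(
    requested: str | None, user_accounts: list[str]
) -> str | None:
    # Explicit request: it must be one of the user's accounts.
    if requested is not None:
        if requested not in user_accounts:
            raise ValueError(
                f"SLURM account '{requested}' is not among those available "
                "to the current user"
            )
        return requested
    # No explicit request: default to the first configured account, if any.
    return user_accounts[0] if user_accounts else None


assert resolve_slurm_account(None, ["proj-a", "proj-b"]) == "proj-a"
assert resolve_slurm_account("proj-b", ["proj-a", "proj-b"]) == "proj-b"
assert resolve_slurm_account(None, []) is None
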
@@ -215,38 +229,31 @@
     await db.refresh(job)
 
     # Update TaskGroupV2.timestamp_last_used
-    res = await db.execute(
-        select(TaskGroupV2).where(TaskGroupV2.id.in_(used_task_group_ids))
+    await db.execute(
+        update(TaskGroupV2)
+        .where(TaskGroupV2.id.in_(used_task_group_ids))
+        .values(timestamp_last_used=job.start_timestamp)
     )
-    used_task_groups = res.scalars().all()
-    for used_task_group in used_task_groups:
-        used_task_group.timestamp_last_used = job.start_timestamp
-        db.add(used_task_group)
     await db.commit()
 
-    # Define server-side job directory
-    timestamp_string = job.start_timestamp.strftime("%Y%m%d_%H%M%S")
-    WORKFLOW_DIR_LOCAL = Path(resource.jobs_local_dir) / (
+    # Define `cache_dir`
+    cache_dir = Path(user.project_dirs[0], FRACTAL_CACHE_DIR)
+
+    # Define server-side and user-side job directories
+    timestamp_string = job.start_timestamp.strftime(r"%Y%m%d_%H%M%S")
+    working_dir = Path(resource.jobs_local_dir) / (
         f"proj_v2_{project_id:07d}_wf_{workflow_id:07d}_job_{job.id:07d}"
         f"_{timestamp_string}"
     )
-
-    # Define user-side job directory
-    cache_dir = Path(user.project_dir, FRACTAL_CACHE_DIR)
     match resource.type:
         case ResourceType.LOCAL:
-            WORKFLOW_DIR_REMOTE = WORKFLOW_DIR_LOCAL
+            working_dir_user = working_dir
         case ResourceType.SLURM_SUDO:
-            WORKFLOW_DIR_REMOTE = cache_dir / WORKFLOW_DIR_LOCAL.name
+            working_dir_user = cache_dir / working_dir.name
         case ResourceType.SLURM_SSH:
-            WORKFLOW_DIR_REMOTE = Path(
-                profile.jobs_remote_dir,
-                WORKFLOW_DIR_LOCAL.name,
-            )
-
-    # Update job folders in the db
-    job.working_dir = WORKFLOW_DIR_LOCAL.as_posix()
-    job.working_dir_user = WORKFLOW_DIR_REMOTE.as_posix()
+            working_dir_user = Path(profile.jobs_remote_dir, working_dir.name)
+    job.working_dir = working_dir.as_posix()
+    job.working_dir_user = working_dir_user.as_posix()
     await db.merge(job)
     await db.commit()
 
@@ -262,11 +269,9 @@
         resource=resource,
         profile=profile,
     )
-    request.app.state.jobsV2.append(job.id)
+    request.app.state.jobs.append(job.id)
     logger.info(
-        f"Current worker's pid is {os.getpid()}. "
-        f"Current status of worker job's list "
-        f"{request.app.state.jobsV2}"
+        f"Job {job.id}, worker with pid {os.getpid()}. "
+        f"Worker jobs list: {request.app.state.jobs}."
     )
-    await db.close()
     return job
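
In the hunk above, TaskGroupV2.timestamp_last_used is now written with a single bulk UPDATE statement instead of selecting the rows and modifying them one by one, which saves a round trip and the per-row session bookkeeping. A minimal sketch of the pattern, with an illustrative TaskGroup model and an in-memory SQLite engine rather than fractal-server's own models:

from datetime import datetime, timezone

from sqlmodel import Field, Session, SQLModel, create_engine, update


class TaskGroup(SQLModel, table=True):
    # Illustrative stand-in for TaskGroupV2, not the real fractal-server model.
    id: int | None = Field(default=None, primary_key=True)
    timestamp_last_used: datetime | None = None


engine = create_engine("sqlite://")
SQLModel.metadata.create_all(engine)

with Session(engine) as session:
    session.add(TaskGroup())
    session.add(TaskGroup())
    session.commit()
    used_task_group_ids = [1, 2]  # SQLite assigns ids 1 and 2 above
    # One statement updates every matching row; no per-row select/add loop.
    session.execute(
        update(TaskGroup)
        .where(TaskGroup.id.in_(used_task_group_ids))
        .values(timestamp_last_used=datetime.now(timezone.utc))
    )
    session.commit()
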
fractal_server/app/routes/api/v2/task.py (+15 -17)
@@ -25,11 +25,11 @@ from fractal_server.app.models import UserOAuth
 from fractal_server.app.models.v2 import TaskGroupV2
 from fractal_server.app.models.v2 import TaskV2
 from fractal_server.app.routes.auth import current_user_act_ver_prof
-from fractal_server.app.schemas.v2 import TaskCreateV2
-from fractal_server.app.schemas.v2 import TaskGroupV2OriginEnum
-from fractal_server.app.schemas.v2 import TaskReadV2
+from fractal_server.app.schemas.v2 import TaskCreate
+from fractal_server.app.schemas.v2 import TaskGroupOriginEnum
+from fractal_server.app.schemas.v2 import TaskRead
 from fractal_server.app.schemas.v2 import TaskType
-from fractal_server.app.schemas.v2 import TaskUpdateV2
+from fractal_server.app.schemas.v2 import TaskUpdate
 from fractal_server.logger import set_logger
 
 router = APIRouter()
@@ -37,7 +37,7 @@ router = APIRouter()
 logger = set_logger(__name__)
 
 
-@router.get("/", response_model=list[TaskReadV2])
+@router.get("/", response_model=list[TaskRead])
 async def get_list_task(
     args_schema: bool = True,
     category: str | None = None,
@@ -45,7 +45,7 @@ async def get_list_task(
     author: str | None = None,
     user: UserOAuth = Depends(current_user_act_ver_prof),
     db: AsyncSession = Depends(get_async_db),
-) -> list[TaskReadV2]:
+) -> list[TaskRead]:
     """
     Get list of available tasks
     """
@@ -86,12 +86,12 @@ async def get_list_task(
     return task_list
 
 
-@router.get("/{task_id}/", response_model=TaskReadV2)
+@router.get("/{task_id}/", response_model=TaskRead)
 async def get_task(
     task_id: int,
     user: UserOAuth = Depends(current_user_act_ver_prof),
     db: AsyncSession = Depends(get_async_db),
-) -> TaskReadV2:
+) -> TaskRead:
     """
     Get info on a specific task
     """
@@ -99,13 +99,13 @@ async def get_task(
     return task
 
 
-@router.patch("/{task_id}/", response_model=TaskReadV2)
+@router.patch("/{task_id}/", response_model=TaskRead)
 async def patch_task(
     task_id: int,
-    task_update: TaskUpdateV2,
+    task_update: TaskUpdate,
     user: UserOAuth = Depends(current_user_act_ver_prof),
     db: AsyncSession = Depends(get_async_db),
-) -> TaskReadV2 | None:
+) -> TaskRead | None:
     """
     Edit a specific task (restricted to task owner)
     """
@@ -137,16 +137,14 @@ async def patch_task(
     return db_task
 
 
-@router.post(
-    "/", response_model=TaskReadV2, status_code=status.HTTP_201_CREATED
-)
+@router.post("/", response_model=TaskRead, status_code=status.HTTP_201_CREATED)
 async def create_task(
-    task: TaskCreateV2,
+    task: TaskCreate,
     user_group_id: int | None = None,
     private: bool = False,
     user: UserOAuth = Depends(current_user_act_ver_prof),
     db: AsyncSession = Depends(get_async_db),
-) -> TaskReadV2 | None:
+) -> TaskRead | None:
     """
     Create a new task
     """
@@ -211,7 +209,7 @@ async def create_task(
         resource_id=resource_id,
         active=True,
         task_list=[db_task],
-        origin=TaskGroupV2OriginEnum.OTHER,
+        origin=TaskGroupOriginEnum.OTHER,
         version=db_task.version,
         pkg_name=pkg_name,
     )
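
The task.py hunks above are part of a package-wide rename that drops the V2 suffix from the v2 schema classes (TaskCreateV2 becomes TaskCreate, TaskReadV2 becomes TaskRead, TaskUpdateV2 becomes TaskUpdate, TaskGroupV2OriginEnum becomes TaskGroupOriginEnum). Code that imports these schemas needs the matching one-line change, for example:

# Before, against fractal-server 2.17.2:
# from fractal_server.app.schemas.v2 import TaskCreateV2, TaskReadV2, TaskUpdateV2
# After, against fractal-server 2.18.0:
from fractal_server.app.schemas.v2 import TaskCreate, TaskRead, TaskUpdate
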
fractal_server/app/routes/api/v2/task_collection.py (+18 -18)
@@ -25,12 +25,12 @@ from fractal_server.app.routes.aux.validate_user_profile import (
 )
 from fractal_server.app.schemas.v2 import FractalUploadedFile
 from fractal_server.app.schemas.v2 import ResourceType
-from fractal_server.app.schemas.v2 import TaskCollectPipV2
-from fractal_server.app.schemas.v2 import TaskGroupActivityActionV2
-from fractal_server.app.schemas.v2 import TaskGroupActivityStatusV2
-from fractal_server.app.schemas.v2 import TaskGroupActivityV2Read
-from fractal_server.app.schemas.v2 import TaskGroupCreateV2Strict
-from fractal_server.app.schemas.v2 import TaskGroupV2OriginEnum
+from fractal_server.app.schemas.v2 import TaskCollectPip
+from fractal_server.app.schemas.v2 import TaskGroupActivityAction
+from fractal_server.app.schemas.v2 import TaskGroupActivityRead
+from fractal_server.app.schemas.v2 import TaskGroupActivityStatus
+from fractal_server.app.schemas.v2 import TaskGroupCreateStrict
+from fractal_server.app.schemas.v2 import TaskGroupOriginEnum
 from fractal_server.logger import reset_logger_handlers
 from fractal_server.logger import set_logger
 from fractal_server.tasks.v2.local.collect import collect_local
@@ -59,9 +59,9 @@ class CollectionRequestData(BaseModel):
     Validate form data _and_ wheel file.
     """
 
-    task_collect: TaskCollectPipV2
+    task_collect: TaskCollectPip
     file: UploadFile | None = None
-    origin: TaskGroupV2OriginEnum
+    origin: TaskGroupOriginEnum
 
     @model_validator(mode="before")
     @classmethod
@@ -75,7 +75,7 @@ class CollectionRequestData(BaseModel):
                 raise ValueError(
                     "When no `file` is provided, `package` is required."
                 )
-            values["origin"] = TaskGroupV2OriginEnum.PYPI
+            values["origin"] = TaskGroupOriginEnum.PYPI
         else:
             if package is not None:
                 raise ValueError(
@@ -87,7 +87,7 @@ class CollectionRequestData(BaseModel):
                     "Cannot set `package_version` when `file` is "
                     f"provided (given package_version='{package_version}')."
                 )
-            values["origin"] = TaskGroupV2OriginEnum.WHEELFILE
+            values["origin"] = TaskGroupOriginEnum.WHEELFILE
 
         for forbidden_char in FORBIDDEN_CHAR_WHEEL:
             if forbidden_char in file.filename:
@@ -125,7 +125,7 @@ def parse_request_data(
         else None
     )
     # Validate and coerce form data
-    task_collect_pip = TaskCollectPipV2(
+    task_collect_pip = TaskCollectPip(
         package=package,
         package_version=package_version,
         package_extras=package_extras,
@@ -150,7 +150,7 @@
 
 @router.post(
     "/collect/pip/",
-    response_model=TaskGroupActivityV2Read,
+    response_model=TaskGroupActivityRead,
 )
 async def collect_tasks_pip(
     response: Response,
@@ -160,7 +160,7 @@ async def collect_tasks_pip(
     user_group_id: int | None = None,
     user: UserOAuth = Depends(current_user_act_ver_prof),
     db: AsyncSession = Depends(get_async_db),
-) -> TaskGroupActivityV2Read:
+) -> TaskGroupActivityRead:
     """
     Task-collection endpoint
     """
@@ -221,7 +221,7 @@ async def collect_tasks_pip(
         wheel_file = None
 
     # Set pkg_name, version, origin and archive_path
-    if request_data.origin == TaskGroupV2OriginEnum.WHEELFILE:
+    if request_data.origin == TaskGroupOriginEnum.WHEELFILE:
         try:
             wheel_filename = request_data.file.filename
             wheel_info = _parse_wheel_filename(wheel_filename)
@@ -242,7 +242,7 @@ async def collect_tasks_pip(
             wheel_info["distribution"]
         )
         task_group_attrs["version"] = wheel_info["version"]
-    elif request_data.origin == TaskGroupV2OriginEnum.PYPI:
+    elif request_data.origin == TaskGroupOriginEnum.PYPI:
         pkg_name = task_collect.package
         task_group_attrs["pkg_name"] = normalize_package_name(pkg_name)
         latest_version = await get_package_version_from_pypi(
@@ -278,7 +278,7 @@
 
     # Validate TaskGroupV2 attributes
     try:
-        TaskGroupCreateV2Strict(**task_group_attrs)
+        TaskGroupCreateStrict(**task_group_attrs)
     except ValidationError as e:
         raise HTTPException(
             status_code=status.HTTP_422_UNPROCESSABLE_CONTENT,
@@ -328,8 +328,8 @@
     task_group_activity = TaskGroupActivityV2(
         user_id=task_group.user_id,
         taskgroupv2_id=task_group.id,
-        status=TaskGroupActivityStatusV2.PENDING,
-        action=TaskGroupActivityActionV2.COLLECT,
+        status=TaskGroupActivityStatus.PENDING,
+        action=TaskGroupActivityAction.COLLECT,
         pkg_name=task_group.pkg_name,
         version=task_group.version,
     )
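
CollectionRequestData (above) derives its origin field inside a pydantic model_validator(mode="before"): PYPI when only a package name is given, WHEELFILE when a file is uploaded, with inconsistent combinations rejected before field validation runs. A self-contained sketch of that validator pattern, with an illustrative enum and field names rather than fractal-server's own schema:

from enum import Enum

from pydantic import BaseModel, model_validator


class Origin(str, Enum):
    PYPI = "pypi"
    WHEELFILE = "wheelfile"


class CollectionRequest(BaseModel):
    package: str | None = None
    filename: str | None = None
    origin: Origin  # filled in by the validator below

    @model_validator(mode="before")
    @classmethod
    def _set_origin(cls, values: dict) -> dict:
        if values.get("filename") is None:
            if values.get("package") is None:
                raise ValueError("When no file is provided, `package` is required.")
            values["origin"] = Origin.PYPI
        else:
            if values.get("package") is not None:
                raise ValueError("Cannot set `package` when a file is provided.")
            values["origin"] = Origin.WHEELFILE
        return values


print(CollectionRequest(package="fractal-tasks-core").origin)  # Origin.PYPI
print(CollectionRequest(filename="pkg-1.0-py3-none-any.whl").origin)  # Origin.WHEELFILE
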
fractal_server/app/routes/api/v2/task_collection_custom.py (+11 -13)
@@ -17,11 +17,11 @@ from fractal_server.app.routes.aux.validate_user_profile import (
     validate_user_profile,
 )
 from fractal_server.app.schemas.v2 import ResourceType
-from fractal_server.app.schemas.v2 import TaskCollectCustomV2
-from fractal_server.app.schemas.v2 import TaskCreateV2
-from fractal_server.app.schemas.v2 import TaskGroupCreateV2
-from fractal_server.app.schemas.v2 import TaskGroupV2OriginEnum
-from fractal_server.app.schemas.v2 import TaskReadV2
+from fractal_server.app.schemas.v2 import TaskCollectCustom
+from fractal_server.app.schemas.v2 import TaskCreate
+from fractal_server.app.schemas.v2 import TaskGroupCreate
+from fractal_server.app.schemas.v2 import TaskGroupOriginEnum
+from fractal_server.app.schemas.v2 import TaskRead
 from fractal_server.logger import set_logger
 from fractal_server.string_tools import validate_cmd
 from fractal_server.tasks.v2.utils_background import prepare_tasks_metadata
@@ -38,16 +38,14 @@ router = APIRouter()
 logger = set_logger(__name__)
 
 
-@router.post(
-    "/collect/custom/", status_code=201, response_model=list[TaskReadV2]
-)
+@router.post("/collect/custom/", status_code=201, response_model=list[TaskRead])
 async def collect_task_custom(
-    task_collect: TaskCollectCustomV2,
+    task_collect: TaskCollectCustom,
     private: bool = False,
     user_group_id: int | None = None,
     user: UserOAuth = Depends(current_user_act_ver_prof),
     db: AsyncSession = Depends(get_async_db),
-) -> list[TaskReadV2]:
+) -> list[TaskRead]:
     # Get validated resource and profile
     resource, profile = await validate_user_profile(user=user, db=db)
     resource_id = resource.id
@@ -139,7 +137,7 @@ async def collect_task_custom(
     else:
         package_root = Path(task_collect.package_root)
 
-    task_list: list[TaskCreateV2] = prepare_tasks_metadata(
+    task_list: list[TaskCreate] = prepare_tasks_metadata(
         package_manifest=task_collect.manifest,
         python_bin=Path(task_collect.python_interpreter),
         package_root=package_root,
@@ -148,14 +146,14 @@ async def collect_task_custom(
 
     # Prepare task-group attributes
     task_group_attrs = dict(
-        origin=TaskGroupV2OriginEnum.OTHER,
+        origin=TaskGroupOriginEnum.OTHER,
         pkg_name=task_collect.label,
         user_id=user.id,
         user_group_id=user_group_id,
         version=task_collect.version,
         resource_id=resource_id,
     )
-    TaskGroupCreateV2(**task_group_attrs)
+    TaskGroupCreate(**task_group_attrs)
 
     # Verify non-duplication constraints
     await _verify_non_duplication_user_constraint(
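
collect_task_custom (above) assembles task_group_attrs as a plain dict and then instantiates the create schema purely to trigger validation, discarding the instance. A minimal sketch of that validate-only step with an illustrative pydantic model:

from pydantic import BaseModel, field_validator


class TaskGroupCreateSketch(BaseModel):
    # Illustrative stand-in for TaskGroupCreate, not the real schema.
    pkg_name: str
    user_id: int
    version: str | None = None

    @field_validator("pkg_name")
    @classmethod
    def _non_empty(cls, value: str) -> str:
        if not value.strip():
            raise ValueError("pkg_name cannot be empty")
        return value


task_group_attrs = dict(pkg_name="my-package", user_id=1, version="1.0.0")
# Raises pydantic.ValidationError early if the attributes are inconsistent;
# the constructed instance itself is discarded.
TaskGroupCreateSketch(**task_group_attrs)
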