fractal-server 2.11.1__py3-none-any.whl → 2.12.0a1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (64)
  1. fractal_server/__init__.py +1 -1
  2. fractal_server/app/models/__init__.py +0 -2
  3. fractal_server/app/models/linkuserproject.py +0 -9
  4. fractal_server/app/routes/aux/_job.py +1 -3
  5. fractal_server/app/runner/executors/slurm/ssh/executor.py +9 -6
  6. fractal_server/app/runner/executors/slurm/sudo/executor.py +1 -5
  7. fractal_server/app/runner/filenames.py +0 -2
  8. fractal_server/app/runner/shutdown.py +3 -27
  9. fractal_server/app/schemas/_validators.py +0 -19
  10. fractal_server/config.py +1 -15
  11. fractal_server/main.py +1 -12
  12. fractal_server/migrations/versions/1eac13a26c83_drop_v1_tables.py +67 -0 (new file; see the sketch after this list)
  13. fractal_server/string_tools.py +0 -21
  14. fractal_server/tasks/utils.py +0 -28
  15. {fractal_server-2.11.1.dist-info → fractal_server-2.12.0a1.dist-info}/METADATA +1 -1
  16. {fractal_server-2.11.1.dist-info → fractal_server-2.12.0a1.dist-info}/RECORD +19 -63
  17. fractal_server/app/models/v1/__init__.py +0 -13
  18. fractal_server/app/models/v1/dataset.py +0 -71
  19. fractal_server/app/models/v1/job.py +0 -101
  20. fractal_server/app/models/v1/project.py +0 -29
  21. fractal_server/app/models/v1/state.py +0 -34
  22. fractal_server/app/models/v1/task.py +0 -85
  23. fractal_server/app/models/v1/workflow.py +0 -133
  24. fractal_server/app/routes/admin/v1.py +0 -377
  25. fractal_server/app/routes/api/v1/__init__.py +0 -26
  26. fractal_server/app/routes/api/v1/_aux_functions.py +0 -478
  27. fractal_server/app/routes/api/v1/dataset.py +0 -554
  28. fractal_server/app/routes/api/v1/job.py +0 -195
  29. fractal_server/app/routes/api/v1/project.py +0 -475
  30. fractal_server/app/routes/api/v1/task.py +0 -203
  31. fractal_server/app/routes/api/v1/task_collection.py +0 -239
  32. fractal_server/app/routes/api/v1/workflow.py +0 -355
  33. fractal_server/app/routes/api/v1/workflowtask.py +0 -187
  34. fractal_server/app/runner/async_wrap_v1.py +0 -27
  35. fractal_server/app/runner/v1/__init__.py +0 -415
  36. fractal_server/app/runner/v1/_common.py +0 -620
  37. fractal_server/app/runner/v1/_local/__init__.py +0 -186
  38. fractal_server/app/runner/v1/_local/_local_config.py +0 -105
  39. fractal_server/app/runner/v1/_local/_submit_setup.py +0 -48
  40. fractal_server/app/runner/v1/_local/executor.py +0 -100
  41. fractal_server/app/runner/v1/_slurm/__init__.py +0 -312
  42. fractal_server/app/runner/v1/_slurm/_submit_setup.py +0 -81
  43. fractal_server/app/runner/v1/_slurm/get_slurm_config.py +0 -163
  44. fractal_server/app/runner/v1/common.py +0 -117
  45. fractal_server/app/runner/v1/handle_failed_job.py +0 -141
  46. fractal_server/app/schemas/v1/__init__.py +0 -37
  47. fractal_server/app/schemas/v1/applyworkflow.py +0 -161
  48. fractal_server/app/schemas/v1/dataset.py +0 -165
  49. fractal_server/app/schemas/v1/dumps.py +0 -64
  50. fractal_server/app/schemas/v1/manifest.py +0 -126
  51. fractal_server/app/schemas/v1/project.py +0 -66
  52. fractal_server/app/schemas/v1/state.py +0 -18
  53. fractal_server/app/schemas/v1/task.py +0 -167
  54. fractal_server/app/schemas/v1/task_collection.py +0 -110
  55. fractal_server/app/schemas/v1/workflow.py +0 -212
  56. fractal_server/tasks/v1/_TaskCollectPip.py +0 -103
  57. fractal_server/tasks/v1/__init__.py +0 -0
  58. fractal_server/tasks/v1/background_operations.py +0 -352
  59. fractal_server/tasks/v1/endpoint_operations.py +0 -156
  60. fractal_server/tasks/v1/get_collection_data.py +0 -14
  61. fractal_server/tasks/v1/utils.py +0 -67
  62. {fractal_server-2.11.1.dist-info → fractal_server-2.12.0a1.dist-info}/LICENSE +0 -0
  63. {fractal_server-2.11.1.dist-info → fractal_server-2.12.0a1.dist-info}/WHEEL +0 -0
  64. {fractal_server-2.11.1.dist-info → fractal_server-2.12.0a1.dist-info}/entry_points.txt +0 -0
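
Of the 64 files touched, the only new source module is the migration `1eac13a26c83_drop_v1_tables.py` (item 12 above); every other change removes the legacy V1 stack (models, routes, runner, schemas, task collection). The migration body is not included in this diff, so the following is only a minimal sketch of what a drop-V1-tables migration could plausibly look like: Alembic is assumed (fractal-server ships a `migrations/versions/` tree in that style), the table names are inferred from the deleted `app/models/v1` modules, and the parent revision is not visible here.

"""drop V1 tables

Revision ID: 1eac13a26c83
"""
from alembic import op

# Revision identifiers, used by Alembic.
revision = "1eac13a26c83"
down_revision = None  # assumption: the real parent revision is not shown in this diff


def upgrade() -> None:
    # Hypothetical table names, inferred from the deleted V1 models
    # (ApplyWorkflow, Workflow, Dataset, Task, State, Project,
    # LinkUserProject); the released migration may use different names
    # and a different drop order.
    for table in (
        "applyworkflow",
        "workflowtask",
        "resource",
        "dataset",
        "workflow",
        "task",
        "state",
        "linkuserproject",
        "project",
    ):
        op.drop_table(table)


def downgrade() -> None:
    # Dropping the V1 tables is one-way; no downgrade path is sketched here.
    raise RuntimeError("Cannot recreate dropped V1 tables")

Raising in `downgrade()` rather than recreating the tables matches the one-way nature of dropping V1 support in 2.12; the deleted route modules below show exactly which API surface that removes.
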
fractal_server/app/routes/api/v1/job.py (deleted)
@@ -1,195 +0,0 @@
- from pathlib import Path
- from typing import Optional
-
- from fastapi import APIRouter
- from fastapi import Depends
- from fastapi import Response
- from fastapi import status
- from fastapi.responses import StreamingResponse
- from sqlmodel import select
-
- from .....zip_tools import _zip_folder_to_byte_stream_iterator
- from ....db import AsyncSession
- from ....db import get_async_db
- from ....models.v1 import ApplyWorkflow
- from ....models.v1 import JobStatusTypeV1
- from ....models.v1 import Project
- from ....runner.filenames import WORKFLOW_LOG_FILENAME
- from ....schemas.v1 import ApplyWorkflowReadV1
- from ...aux._job import _write_shutdown_file
- from ...aux._runner import _check_shutdown_is_supported
- from ._aux_functions import _get_job_check_owner
- from ._aux_functions import _get_project_check_owner
- from ._aux_functions import _get_workflow_check_owner
- from fractal_server.app.models import UserOAuth
- from fractal_server.app.routes.auth import current_active_user
-
- router = APIRouter()
-
-
- @router.get("/job/", response_model=list[ApplyWorkflowReadV1])
- async def get_user_jobs(
-     user: UserOAuth = Depends(current_active_user),
-     log: bool = True,
-     db: AsyncSession = Depends(get_async_db),
- ) -> list[ApplyWorkflowReadV1]:
-     """
-     Returns all the jobs of the current user
-     """
-     stm = select(ApplyWorkflow)
-     stm = stm.join(Project).where(
-         Project.user_list.any(UserOAuth.id == user.id)
-     )
-     res = await db.execute(stm)
-     job_list = res.scalars().all()
-     await db.close()
-     if not log:
-         for job in job_list:
-             setattr(job, "log", None)
-
-     return job_list
-
-
- @router.get(
-     "/project/{project_id}/workflow/{workflow_id}/job/",
-     response_model=list[ApplyWorkflowReadV1],
- )
- async def get_workflow_jobs(
-     project_id: int,
-     workflow_id: int,
-     user: UserOAuth = Depends(current_active_user),
-     db: AsyncSession = Depends(get_async_db),
- ) -> Optional[list[ApplyWorkflowReadV1]]:
-     """
-     Returns all the jobs related to a specific workflow
-     """
-     await _get_workflow_check_owner(
-         project_id=project_id, workflow_id=workflow_id, user_id=user.id, db=db
-     )
-     stm = select(ApplyWorkflow).where(ApplyWorkflow.workflow_id == workflow_id)
-     res = await db.execute(stm)
-     job_list = res.scalars().all()
-     return job_list
-
-
- @router.get(
-     "/project/{project_id}/job/{job_id}/",
-     response_model=ApplyWorkflowReadV1,
- )
- async def read_job(
-     project_id: int,
-     job_id: int,
-     show_tmp_logs: bool = False,
-     user: UserOAuth = Depends(current_active_user),
-     db: AsyncSession = Depends(get_async_db),
- ) -> Optional[ApplyWorkflowReadV1]:
-     """
-     Return info on an existing job
-     """
-
-     output = await _get_job_check_owner(
-         project_id=project_id,
-         job_id=job_id,
-         user_id=user.id,
-         db=db,
-     )
-     job = output["job"]
-     await db.close()
-
-     if show_tmp_logs and (job.status == JobStatusTypeV1.SUBMITTED):
-         try:
-             with open(f"{job.working_dir}/{WORKFLOW_LOG_FILENAME}", "r") as f:
-                 job.log = f.read()
-         except FileNotFoundError:
-             pass
-
-     return job
-
-
- @router.get(
-     "/project/{project_id}/job/{job_id}/download/",
-     response_class=StreamingResponse,
- )
- async def download_job_logs(
-     project_id: int,
-     job_id: int,
-     user: UserOAuth = Depends(current_active_user),
-     db: AsyncSession = Depends(get_async_db),
- ) -> StreamingResponse:
-     """
-     Download job folder
-     """
-     output = await _get_job_check_owner(
-         project_id=project_id,
-         job_id=job_id,
-         user_id=user.id,
-         db=db,
-     )
-     job = output["job"]
-
-     # Create and return byte stream for zipped log folder
-     PREFIX_ZIP = Path(job.working_dir).name
-     zip_filename = f"{PREFIX_ZIP}_archive.zip"
-     return StreamingResponse(
-         _zip_folder_to_byte_stream_iterator(folder=job.working_dir),
-         media_type="application/x-zip-compressed",
-         headers={"Content-Disposition": f"attachment;filename={zip_filename}"},
-     )
-
-
- @router.get(
-     "/project/{project_id}/job/",
-     response_model=list[ApplyWorkflowReadV1],
- )
- async def get_job_list(
-     project_id: int,
-     user: UserOAuth = Depends(current_active_user),
-     log: bool = True,
-     db: AsyncSession = Depends(get_async_db),
- ) -> Optional[list[ApplyWorkflowReadV1]]:
-     """
-     Get job list for given project
-     """
-     project = await _get_project_check_owner(
-         project_id=project_id, user_id=user.id, db=db
-     )
-
-     stm = select(ApplyWorkflow).where(ApplyWorkflow.project_id == project.id)
-     res = await db.execute(stm)
-     job_list = res.scalars().all()
-     await db.close()
-     if not log:
-         for job in job_list:
-             setattr(job, "log", None)
-
-     return job_list
-
-
- @router.get(
-     "/project/{project_id}/job/{job_id}/stop/",
-     status_code=202,
- )
- async def stop_job(
-     project_id: int,
-     job_id: int,
-     user: UserOAuth = Depends(current_active_user),
-     db: AsyncSession = Depends(get_async_db),
- ) -> Response:
-     """
-     Stop execution of a workflow job.
-     """
-
-     _check_shutdown_is_supported()
-
-     # Get job from DB
-     output = await _get_job_check_owner(
-         project_id=project_id,
-         job_id=job_id,
-         user_id=user.id,
-         db=db,
-     )
-     job = output["job"]
-
-     _write_shutdown_file(job=job)
-
-     return Response(status_code=status.HTTP_202_ACCEPTED)
fractal_server/app/routes/api/v1/project.py (deleted)
@@ -1,475 +0,0 @@
- import json
- import os
- from datetime import timedelta
- from datetime import timezone
- from typing import Optional
-
- from fastapi import APIRouter
- from fastapi import BackgroundTasks
- from fastapi import Depends
- from fastapi import HTTPException
- from fastapi import Request
- from fastapi import Response
- from fastapi import status
- from sqlmodel import select
-
- from .....config import get_settings
- from .....logger import set_logger
- from .....syringe import Inject
- from ....db import AsyncSession
- from ....db import get_async_db
- from ....models.v1 import ApplyWorkflow
- from ....models.v1 import Dataset
- from ....models.v1 import LinkUserProject
- from ....models.v1 import Project
- from ....models.v1 import Workflow
- from ....runner.set_start_and_last_task_index import (
-     set_start_and_last_task_index,
- )
- from ....runner.v1 import submit_workflow
- from ....runner.v1 import validate_workflow_compatibility
- from ....schemas.v1 import ApplyWorkflowCreateV1
- from ....schemas.v1 import ApplyWorkflowReadV1
- from ....schemas.v1 import JobStatusTypeV1
- from ....schemas.v1 import ProjectCreateV1
- from ....schemas.v1 import ProjectReadV1
- from ....schemas.v1 import ProjectUpdateV1
- from ...aux.validate_user_settings import validate_user_settings
- from ._aux_functions import _check_project_exists
- from ._aux_functions import _get_dataset_check_owner
- from ._aux_functions import _get_project_check_owner
- from ._aux_functions import _get_submitted_jobs_statement
- from ._aux_functions import _get_workflow_check_owner
- from ._aux_functions import _raise_if_v1_is_read_only
- from ._aux_functions import clean_app_job_list_v1
- from fractal_server.app.models import UserOAuth
- from fractal_server.app.routes.auth import current_active_user
- from fractal_server.app.routes.auth import current_active_verified_user
-
- router = APIRouter()
- logger = set_logger(__name__)
-
-
- @router.get("/", response_model=list[ProjectReadV1])
- async def get_list_project(
-     user: UserOAuth = Depends(current_active_user),
-     db: AsyncSession = Depends(get_async_db),
- ) -> list[Project]:
-     """
-     Return list of projects user is member of
-     """
-     stm = (
-         select(Project)
-         .join(LinkUserProject)
-         .where(LinkUserProject.user_id == user.id)
-     )
-     res = await db.execute(stm)
-     project_list = res.scalars().all()
-     await db.close()
-     return project_list
-
-
- @router.post("/", response_model=ProjectReadV1, status_code=201)
- async def create_project(
-     project: ProjectCreateV1,
-     user: UserOAuth = Depends(current_active_user),
-     db: AsyncSession = Depends(get_async_db),
- ) -> Optional[ProjectReadV1]:
-     """
-     Create new poject
-     """
-     _raise_if_v1_is_read_only()
-     # Check that there is no project with the same user and name
-     await _check_project_exists(
-         project_name=project.name, user_id=user.id, db=db
-     )
-
-     db_project = Project(**project.dict())
-     db_project.user_list.append(user)
-
-     db.add(db_project)
-     await db.commit()
-     await db.refresh(db_project)
-     await db.close()
-
-     return db_project
-
-
- @router.get("/{project_id}/", response_model=ProjectReadV1)
- async def read_project(
-     project_id: int,
-     user: UserOAuth = Depends(current_active_user),
-     db: AsyncSession = Depends(get_async_db),
- ) -> Optional[ProjectReadV1]:
-     """
-     Return info on an existing project
-     """
-     project = await _get_project_check_owner(
-         project_id=project_id, user_id=user.id, db=db
-     )
-     await db.close()
-     return project
-
-
- @router.patch("/{project_id}/", response_model=ProjectReadV1)
- async def update_project(
-     project_id: int,
-     project_update: ProjectUpdateV1,
-     user: UserOAuth = Depends(current_active_user),
-     db: AsyncSession = Depends(get_async_db),
- ):
-     _raise_if_v1_is_read_only()
-     project = await _get_project_check_owner(
-         project_id=project_id, user_id=user.id, db=db
-     )
-
-     # Check that there is no project with the same user and name
-     if project_update.name is not None:
-         await _check_project_exists(
-             project_name=project_update.name, user_id=user.id, db=db
-         )
-
-     for key, value in project_update.dict(exclude_unset=True).items():
-         setattr(project, key, value)
-
-     await db.commit()
-     await db.refresh(project)
-     await db.close()
-     return project
-
-
- @router.delete("/{project_id}/", status_code=204)
- async def delete_project(
-     project_id: int,
-     user: UserOAuth = Depends(current_active_user),
-     db: AsyncSession = Depends(get_async_db),
- ) -> Response:
-     """
-     Delete project
-     """
-     _raise_if_v1_is_read_only()
-     project = await _get_project_check_owner(
-         project_id=project_id, user_id=user.id, db=db
-     )
-
-     # Fail if there exist jobs that are submitted and in relation with the
-     # current project.
-     stm = _get_submitted_jobs_statement().where(
-         ApplyWorkflow.project_id == project_id
-     )
-     res = await db.execute(stm)
-     jobs = res.scalars().all()
-     if jobs:
-         string_ids = str([job.id for job in jobs])[1:-1]
-         raise HTTPException(
-             status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
-             detail=(
-                 f"Cannot delete project {project.id} because it "
-                 f"is linked to active job(s) {string_ids}."
-             ),
-         )
-
-     # Cascade operations
-
-     # Workflows
-     stm = select(Workflow).where(Workflow.project_id == project_id)
-     res = await db.execute(stm)
-     workflows = res.scalars().all()
-     for wf in workflows:
-         # Cascade operations: set foreign-keys to null for jobs which are in
-         # relationship with the current workflow
-         stm = select(ApplyWorkflow).where(ApplyWorkflow.workflow_id == wf.id)
-         res = await db.execute(stm)
-         jobs = res.scalars().all()
-         for job in jobs:
-             job.workflow_id = None
-             await db.merge(job)
-         await db.commit()
-         # Delete workflow
-         await db.delete(wf)
-
-     # Dataset
-     stm = select(Dataset).where(Dataset.project_id == project_id)
-     res = await db.execute(stm)
-     datasets = res.scalars().all()
-     for ds in datasets:
-         # Cascade operations: set foreign-keys to null for jobs which are in
-         # relationship with the current dataset
-         # input_dataset
-         stm = select(ApplyWorkflow).where(
-             ApplyWorkflow.input_dataset_id == ds.id
-         )
-         res = await db.execute(stm)
-         jobs = res.scalars().all()
-         for job in jobs:
-             job.input_dataset_id = None
-             await db.merge(job)
-         await db.commit()
-         # output_dataset
-         stm = select(ApplyWorkflow).where(
-             ApplyWorkflow.output_dataset_id == ds.id
-         )
-         res = await db.execute(stm)
-         jobs = res.scalars().all()
-         for job in jobs:
-             job.output_dataset_id = None
-             await db.merge(job)
-         await db.commit()
-         await db.delete(ds)
-
-     # Job
-     stm = select(ApplyWorkflow).where(ApplyWorkflow.project_id == project_id)
-     res = await db.execute(stm)
-     jobs = res.scalars().all()
-     for job in jobs:
-         job.project_id = None
-         await db.merge(job)
-
-     await db.commit()
-
-     await db.delete(project)
-     await db.commit()
-
-     return Response(status_code=status.HTTP_204_NO_CONTENT)
-
-
- @router.post(
-     "/{project_id}/workflow/{workflow_id}/apply/",
-     status_code=status.HTTP_202_ACCEPTED,
-     response_model=ApplyWorkflowReadV1,
- )
- async def apply_workflow(
-     project_id: int,
-     workflow_id: int,
-     apply_workflow: ApplyWorkflowCreateV1,
-     background_tasks: BackgroundTasks,
-     input_dataset_id: int,
-     output_dataset_id: int,
-     request: Request,
-     user: UserOAuth = Depends(current_active_verified_user),
-     db: AsyncSession = Depends(get_async_db),
- ) -> Optional[ApplyWorkflowReadV1]:
-     _raise_if_v1_is_read_only()
-     settings = Inject(get_settings)
-
-     # Remove non-submitted V1 jobs from the app state when the list grows
-     # beyond a threshold
-     if (
-         len(request.app.state.jobsV1)
-         > settings.FRACTAL_API_MAX_JOB_LIST_LENGTH
-     ):
-         new_jobs_list = await clean_app_job_list_v1(
-             db, request.app.state.jobsV1
-         )
-         request.app.state.jobsV1 = new_jobs_list
-
-     output = await _get_dataset_check_owner(
-         project_id=project_id,
-         dataset_id=input_dataset_id,
-         user_id=user.id,
-         db=db,
-     )
-     project = output["project"]
-     input_dataset = output["dataset"]
-
-     output = await _get_dataset_check_owner(
-         project_id=project_id,
-         dataset_id=output_dataset_id,
-         user_id=user.id,
-         db=db,
-     )
-     output_dataset = output["dataset"]
-     if output_dataset.read_only:
-         raise HTTPException(
-             status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
-             detail=(
-                 "Cannot apply workflow because output dataset "
-                 f"({output_dataset_id=}) is read_only."
-             ),
-         )
-
-     workflow = await _get_workflow_check_owner(
-         project_id=project_id, workflow_id=workflow_id, user_id=user.id, db=db
-     )
-
-     if not workflow.task_list:
-         raise HTTPException(
-             status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
-             detail=f"Workflow {workflow_id} has empty task list",
-         )
-
-     # Set values of first_task_index and last_task_index
-     num_tasks = len(workflow.task_list)
-     try:
-         first_task_index, last_task_index = set_start_and_last_task_index(
-             num_tasks,
-             first_task_index=apply_workflow.first_task_index,
-             last_task_index=apply_workflow.last_task_index,
-         )
-         apply_workflow.first_task_index = first_task_index
-         apply_workflow.last_task_index = last_task_index
-     except ValueError as e:
-         raise HTTPException(
-             status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
-             detail=(
-                 "Invalid values for first_task_index or last_task_index "
-                 f"(with {num_tasks=}).\n"
-                 f"Original error: {str(e)}"
-             ),
-         )
-
-     # Validate user settings
-     FRACTAL_RUNNER_BACKEND = settings.FRACTAL_RUNNER_BACKEND
-     user_settings = await validate_user_settings(
-         user=user, backend=FRACTAL_RUNNER_BACKEND, db=db
-     )
-
-     # Check that datasets have the right number of resources
-     if not input_dataset.resource_list:
-         raise HTTPException(
-             status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
-             detail="Input dataset has empty resource_list",
-         )
-     if len(output_dataset.resource_list) != 1:
-         raise HTTPException(
-             status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
-             detail=(
-                 "Output dataset must have a single resource, "
-                 f"but it has {len(output_dataset.resource_list)}"
-             ),
-         )
-
-     try:
-         validate_workflow_compatibility(
-             workflow=workflow,
-             input_dataset=input_dataset,
-             output_dataset=output_dataset,
-             first_task_index=apply_workflow.first_task_index,
-             last_task_index=apply_workflow.last_task_index,
-         )
-     except TypeError as e:
-         raise HTTPException(
-             status_code=status.HTTP_422_UNPROCESSABLE_ENTITY, detail=str(e)
-         )
-
-     # Check that no other job with the same output_dataset_id is SUBMITTED
-     stm = (
-         select(ApplyWorkflow)
-         .where(ApplyWorkflow.output_dataset_id == output_dataset_id)
-         .where(ApplyWorkflow.status == JobStatusTypeV1.SUBMITTED)
-     )
-     res = await db.execute(stm)
-     if res.scalars().all():
-         raise HTTPException(
-             status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
-             detail=(
-                 f"Output dataset {output_dataset_id} is already in use "
-                 "in submitted job(s)."
-             ),
-         )
-
-     if apply_workflow.slurm_account is not None:
-         if apply_workflow.slurm_account not in user_settings.slurm_accounts:
-             raise HTTPException(
-                 status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
-                 detail=(
-                     f"SLURM account '{apply_workflow.slurm_account}' is not "
-                     "among those available to the current user"
-                 ),
-             )
-     else:
-         if len(user_settings.slurm_accounts) > 0:
-             apply_workflow.slurm_account = user_settings.slurm_accounts[0]
-
-     # Add new ApplyWorkflow object to DB
-     job = ApplyWorkflow(
-         project_id=project_id,
-         input_dataset_id=input_dataset_id,
-         output_dataset_id=output_dataset_id,
-         workflow_id=workflow_id,
-         user_email=user.email,
-         input_dataset_dump=dict(
-             **json.loads(
-                 input_dataset.json(exclude={"resource_list", "history"})
-             ),
-             resource_list=[
-                 resource.model_dump()
-                 for resource in input_dataset.resource_list
-             ],
-         ),
-         output_dataset_dump=dict(
-             **json.loads(
-                 output_dataset.json(exclude={"resource_list", "history"})
-             ),
-             resource_list=[
-                 resource.model_dump()
-                 for resource in output_dataset.resource_list
-             ],
-         ),
-         workflow_dump=json.loads(workflow.json(exclude={"task_list"})),
-         project_dump=json.loads(project.json(exclude={"user_list"})),
-         **apply_workflow.dict(),
-     )
-
-     # Rate Limiting:
-     # raise `429 TOO MANY REQUESTS` if this endpoint has been called with the
-     # same database keys (Project, Workflow and Datasets) during the last
-     # `settings.FRACTAL_API_SUBMIT_RATE_LIMIT` seconds.
-     stm = (
-         select(ApplyWorkflow)
-         .where(ApplyWorkflow.project_id == project_id)
-         .where(ApplyWorkflow.workflow_id == workflow_id)
-         .where(ApplyWorkflow.input_dataset_id == input_dataset_id)
-         .where(ApplyWorkflow.output_dataset_id == output_dataset_id)
-     )
-     res = await db.execute(stm)
-     db_jobs = res.scalars().all()
-     if db_jobs and any(
-         abs(
-             job.start_timestamp
-             - db_job.start_timestamp.replace(tzinfo=timezone.utc)
-         )
-         < timedelta(seconds=settings.FRACTAL_API_SUBMIT_RATE_LIMIT)
-         for db_job in db_jobs
-     ):
-         raise HTTPException(
-             status_code=status.HTTP_429_TOO_MANY_REQUESTS,
-             detail=(
-                 f"The endpoint 'POST /api/v1/project/{project_id}/workflow/"
-                 f"{workflow_id}/apply/' "
-                 "was called several times within an interval of less "
-                 f"than {settings.FRACTAL_API_SUBMIT_RATE_LIMIT} seconds, using"
-                 " the same foreign keys. If it was intentional, please wait "
-                 "and try again."
-             ),
-         )
-
-     db.add(job)
-     await db.commit()
-     await db.refresh(job)
-
-     cache_dir = (
-         f"{user_settings.project_dir}/.fractal_cache"
-         if user_settings.project_dir is not None
-         else None
-     )
-
-     background_tasks.add_task(
-         submit_workflow,
-         workflow_id=workflow.id,
-         input_dataset_id=input_dataset.id,
-         output_dataset_id=output_dataset.id,
-         job_id=job.id,
-         worker_init=apply_workflow.worker_init,
-         slurm_user=user_settings.slurm_user,
-         user_cache_dir=cache_dir,
-     )
-     request.app.state.jobsV1.append(job.id)
-     logger.info(
-         f"Current worker's pid is {os.getpid()}. "
-         f"Current status of worker job's list "
-         f"{request.app.state.jobsV1}"
-     )
-     await db.close()
-
-     return job