fractal-server 1.4.10__py3-none-any.whl → 2.0.0a1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (126)
  1. fractal_server/__init__.py +1 -1
  2. fractal_server/app/models/__init__.py +3 -7
  3. fractal_server/app/models/linkuserproject.py +9 -0
  4. fractal_server/app/models/security.py +6 -0
  5. fractal_server/app/models/state.py +1 -1
  6. fractal_server/app/models/v1/__init__.py +11 -0
  7. fractal_server/app/models/{dataset.py → v1/dataset.py} +5 -5
  8. fractal_server/app/models/{job.py → v1/job.py} +5 -5
  9. fractal_server/app/models/{project.py → v1/project.py} +5 -5
  10. fractal_server/app/models/{task.py → v1/task.py} +7 -2
  11. fractal_server/app/models/{workflow.py → v1/workflow.py} +5 -5
  12. fractal_server/app/models/v2/__init__.py +20 -0
  13. fractal_server/app/models/v2/dataset.py +55 -0
  14. fractal_server/app/models/v2/job.py +51 -0
  15. fractal_server/app/models/v2/project.py +31 -0
  16. fractal_server/app/models/v2/task.py +93 -0
  17. fractal_server/app/models/v2/workflow.py +43 -0
  18. fractal_server/app/models/v2/workflowtask.py +90 -0
  19. fractal_server/app/routes/{admin.py → admin/v1.py} +42 -42
  20. fractal_server/app/routes/admin/v2.py +274 -0
  21. fractal_server/app/routes/api/v1/__init__.py +7 -7
  22. fractal_server/app/routes/api/v1/_aux_functions.py +2 -2
  23. fractal_server/app/routes/api/v1/dataset.py +37 -37
  24. fractal_server/app/routes/api/v1/job.py +14 -14
  25. fractal_server/app/routes/api/v1/project.py +23 -21
  26. fractal_server/app/routes/api/v1/task.py +24 -14
  27. fractal_server/app/routes/api/v1/task_collection.py +16 -14
  28. fractal_server/app/routes/api/v1/workflow.py +24 -24
  29. fractal_server/app/routes/api/v1/workflowtask.py +10 -10
  30. fractal_server/app/routes/api/v2/__init__.py +28 -0
  31. fractal_server/app/routes/api/v2/_aux_functions.py +497 -0
  32. fractal_server/app/routes/api/v2/dataset.py +309 -0
  33. fractal_server/app/routes/api/v2/images.py +207 -0
  34. fractal_server/app/routes/api/v2/job.py +200 -0
  35. fractal_server/app/routes/api/v2/project.py +202 -0
  36. fractal_server/app/routes/api/v2/submit.py +220 -0
  37. fractal_server/app/routes/api/v2/task.py +222 -0
  38. fractal_server/app/routes/api/v2/task_collection.py +229 -0
  39. fractal_server/app/routes/api/v2/workflow.py +397 -0
  40. fractal_server/app/routes/api/v2/workflowtask.py +269 -0
  41. fractal_server/app/routes/aux/_job.py +1 -1
  42. fractal_server/app/runner/async_wrap.py +27 -0
  43. fractal_server/app/runner/components.py +5 -0
  44. fractal_server/app/runner/exceptions.py +129 -0
  45. fractal_server/app/runner/executors/slurm/__init__.py +3 -0
  46. fractal_server/app/runner/{_slurm → executors/slurm}/_batching.py +1 -1
  47. fractal_server/app/runner/{_slurm → executors/slurm}/_check_jobs_status.py +1 -1
  48. fractal_server/app/runner/{_slurm → executors/slurm}/_executor_wait_thread.py +1 -1
  49. fractal_server/app/runner/{_slurm → executors/slurm}/_slurm_config.py +3 -152
  50. fractal_server/app/runner/{_slurm → executors/slurm}/_subprocess_run_as_user.py +1 -1
  51. fractal_server/app/runner/{_slurm → executors/slurm}/executor.py +32 -19
  52. fractal_server/app/runner/filenames.py +6 -0
  53. fractal_server/app/runner/set_start_and_last_task_index.py +39 -0
  54. fractal_server/app/runner/task_files.py +103 -0
  55. fractal_server/app/runner/{__init__.py → v1/__init__.py} +22 -20
  56. fractal_server/app/runner/{_common.py → v1/_common.py} +13 -120
  57. fractal_server/app/runner/{_local → v1/_local}/__init__.py +5 -5
  58. fractal_server/app/runner/{_local → v1/_local}/_local_config.py +6 -7
  59. fractal_server/app/runner/{_local → v1/_local}/_submit_setup.py +1 -5
  60. fractal_server/app/runner/v1/_slurm/__init__.py +310 -0
  61. fractal_server/app/runner/{_slurm → v1/_slurm}/_submit_setup.py +3 -9
  62. fractal_server/app/runner/v1/_slurm/get_slurm_config.py +163 -0
  63. fractal_server/app/runner/v1/common.py +117 -0
  64. fractal_server/app/runner/{handle_failed_job.py → v1/handle_failed_job.py} +8 -8
  65. fractal_server/app/runner/v2/__init__.py +336 -0
  66. fractal_server/app/runner/v2/_local/__init__.py +167 -0
  67. fractal_server/app/runner/v2/_local/_local_config.py +118 -0
  68. fractal_server/app/runner/v2/_local/_submit_setup.py +52 -0
  69. fractal_server/app/runner/v2/_local/executor.py +100 -0
  70. fractal_server/app/runner/{_slurm → v2/_slurm}/__init__.py +34 -45
  71. fractal_server/app/runner/v2/_slurm/_submit_setup.py +83 -0
  72. fractal_server/app/runner/v2/_slurm/get_slurm_config.py +179 -0
  73. fractal_server/app/runner/v2/deduplicate_list.py +22 -0
  74. fractal_server/app/runner/v2/handle_failed_job.py +156 -0
  75. fractal_server/app/runner/v2/merge_outputs.py +38 -0
  76. fractal_server/app/runner/v2/runner.py +267 -0
  77. fractal_server/app/runner/v2/runner_functions.py +341 -0
  78. fractal_server/app/runner/v2/runner_functions_low_level.py +134 -0
  79. fractal_server/app/runner/v2/task_interface.py +43 -0
  80. fractal_server/app/runner/v2/v1_compat.py +21 -0
  81. fractal_server/app/schemas/__init__.py +4 -42
  82. fractal_server/app/schemas/v1/__init__.py +42 -0
  83. fractal_server/app/schemas/{applyworkflow.py → v1/applyworkflow.py} +18 -18
  84. fractal_server/app/schemas/{dataset.py → v1/dataset.py} +30 -30
  85. fractal_server/app/schemas/{dumps.py → v1/dumps.py} +8 -8
  86. fractal_server/app/schemas/{manifest.py → v1/manifest.py} +5 -5
  87. fractal_server/app/schemas/{project.py → v1/project.py} +9 -9
  88. fractal_server/app/schemas/{task.py → v1/task.py} +12 -12
  89. fractal_server/app/schemas/{task_collection.py → v1/task_collection.py} +7 -7
  90. fractal_server/app/schemas/{workflow.py → v1/workflow.py} +38 -38
  91. fractal_server/app/schemas/v2/__init__.py +34 -0
  92. fractal_server/app/schemas/v2/dataset.py +89 -0
  93. fractal_server/app/schemas/v2/dumps.py +87 -0
  94. fractal_server/app/schemas/v2/job.py +114 -0
  95. fractal_server/app/schemas/v2/manifest.py +159 -0
  96. fractal_server/app/schemas/v2/project.py +37 -0
  97. fractal_server/app/schemas/v2/task.py +120 -0
  98. fractal_server/app/schemas/v2/task_collection.py +105 -0
  99. fractal_server/app/schemas/v2/workflow.py +79 -0
  100. fractal_server/app/schemas/v2/workflowtask.py +119 -0
  101. fractal_server/config.py +5 -4
  102. fractal_server/images/__init__.py +2 -0
  103. fractal_server/images/models.py +50 -0
  104. fractal_server/images/tools.py +85 -0
  105. fractal_server/main.py +11 -3
  106. fractal_server/migrations/env.py +0 -2
  107. fractal_server/migrations/versions/d71e732236cd_v2.py +239 -0
  108. fractal_server/tasks/__init__.py +0 -5
  109. fractal_server/tasks/endpoint_operations.py +13 -19
  110. fractal_server/tasks/utils.py +35 -0
  111. fractal_server/tasks/{_TaskCollectPip.py → v1/_TaskCollectPip.py} +3 -3
  112. fractal_server/tasks/{background_operations.py → v1/background_operations.py} +18 -50
  113. fractal_server/tasks/v1/get_collection_data.py +14 -0
  114. fractal_server/tasks/v2/_TaskCollectPip.py +103 -0
  115. fractal_server/tasks/v2/background_operations.py +381 -0
  116. fractal_server/tasks/v2/get_collection_data.py +14 -0
  117. {fractal_server-1.4.10.dist-info → fractal_server-2.0.0a1.dist-info}/METADATA +1 -1
  118. fractal_server-2.0.0a1.dist-info/RECORD +160 -0
  119. fractal_server/app/runner/_slurm/.gitignore +0 -2
  120. fractal_server/app/runner/common.py +0 -311
  121. fractal_server-1.4.10.dist-info/RECORD +0 -98
  122. /fractal_server/app/runner/{_slurm → executors/slurm}/remote.py +0 -0
  123. /fractal_server/app/runner/{_local → v1/_local}/executor.py +0 -0
  124. {fractal_server-1.4.10.dist-info → fractal_server-2.0.0a1.dist-info}/LICENSE +0 -0
  125. {fractal_server-1.4.10.dist-info → fractal_server-2.0.0a1.dist-info}/WHEEL +0 -0
  126. {fractal_server-1.4.10.dist-info → fractal_server-2.0.0a1.dist-info}/entry_points.txt +0 -0
@@ -0,0 +1,229 @@
1
+ from pathlib import Path
2
+ from shutil import copy as shell_copy
3
+ from tempfile import TemporaryDirectory
4
+
5
+ from fastapi import APIRouter
6
+ from fastapi import BackgroundTasks
7
+ from fastapi import Depends
8
+ from fastapi import HTTPException
9
+ from fastapi import Response
10
+ from fastapi import status
11
+ from pydantic.error_wrappers import ValidationError
12
+ from sqlmodel import select
13
+
14
+ from .....config import get_settings
15
+ from .....logger import close_logger
16
+ from .....logger import set_logger
17
+ from .....syringe import Inject
18
+ from ....db import AsyncSession
19
+ from ....db import get_async_db
20
+ from ....models import State
21
+ from ....models.v2 import TaskV2
22
+ from ....schemas import StateRead
23
+ from ....schemas.v2 import TaskCollectPipV2
24
+ from ....schemas.v2 import TaskCollectStatusV2
25
+ from ....security import current_active_user
26
+ from ....security import current_active_verified_user
27
+ from ....security import User
28
+ from fractal_server.tasks.endpoint_operations import create_package_dir_pip
29
+ from fractal_server.tasks.endpoint_operations import download_package
30
+ from fractal_server.tasks.endpoint_operations import inspect_package
31
+ from fractal_server.tasks.utils import get_collection_log
32
+ from fractal_server.tasks.utils import slugify_task_name
33
+ from fractal_server.tasks.v2._TaskCollectPip import _TaskCollectPip
34
+ from fractal_server.tasks.v2.background_operations import (
35
+ background_collect_pip,
36
+ )
37
+ from fractal_server.tasks.v2.get_collection_data import get_collection_data
38
+
39
# Router collecting the task-collection endpoints of the v2 API.
router = APIRouter()

# Module-level logger; note that endpoints below create their own
# dedicated loggers via set_logger(logger_name=...).
logger = set_logger(__name__)
42
+
43
+
44
@router.post(
    "/collect/pip/",
    response_model=StateRead,
    responses={
        201: dict(
            description=(
                "Task collection successfully started in the background"
            )
        ),
        200: dict(
            description=(
                "Package already collected. Returning info on already "
                "available tasks"
            )
        ),
    },
)
async def collect_tasks_pip(
    task_collect: TaskCollectPipV2,
    background_tasks: BackgroundTasks,
    response: Response,
    user: User = Depends(current_active_verified_user),
    db: AsyncSession = Depends(get_async_db),
) -> StateRead:  # State[TaskCollectStatus]
    """
    Task collection endpoint

    Trigger the creation of a dedicated virtual environment, the installation
    of a package and the collection of tasks as advertised in the manifest.

    Returns a `State` object: with status code 201 when a background
    collection was started, or 200 when the package was already collected
    and the corresponding tasks are available.
    """

    logger = set_logger(logger_name="collect_tasks_pip")

    # Validate payload as _TaskCollectPip, which has more strict checks than
    # TaskCollectPip
    try:
        task_pkg = _TaskCollectPip(**task_collect.dict(exclude_unset=True))
    except ValidationError as e:
        raise HTTPException(
            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
            detail=f"Invalid task-collection object. Original error: {e}",
        )

    with TemporaryDirectory() as tmpdir:
        try:
            # Copy or download the package wheel file to tmpdir
            if task_pkg.is_local_package:
                shell_copy(task_pkg.package_path.as_posix(), tmpdir)
                pkg_path = Path(tmpdir) / task_pkg.package_path.name
            else:
                pkg_path = await download_package(
                    task_pkg=task_pkg, dest=tmpdir
                )
            # Read package info from wheel file, and override the ones coming
            # from the request body
            pkg_info = inspect_package(pkg_path)
            task_pkg.package_name = pkg_info["pkg_name"]
            task_pkg.package_version = pkg_info["pkg_version"]
            task_pkg.package_manifest = pkg_info["pkg_manifest"]
            task_pkg.check()
        except Exception as e:
            raise HTTPException(
                status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
                detail=f"Invalid package or manifest. Original error: {e}",
            )

    try:
        venv_path = create_package_dir_pip(task_pkg=task_pkg)
    except FileExistsError:
        # The venv folder already exists: if it corresponds to a complete,
        # consistent previous collection, return its status (200); otherwise
        # fail with 422.
        venv_path = create_package_dir_pip(task_pkg=task_pkg, create=False)
        try:
            task_collect_status = get_collection_data(venv_path)
            for task in task_collect_status.task_list:
                db_task = await db.get(TaskV2, task.id)
                if (
                    (not db_task)
                    or db_task.source != task.source
                    or db_task.name != task.name
                ):
                    await db.close()
                    raise HTTPException(
                        status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
                        detail=(
                            "Cannot collect package. Folder already exists, "
                            f"but task {task.id} does not exists or it does "
                            f"not have the expected source ({task.source}) or "
                            f"name ({task.name})."
                        ),
                    )
        except FileNotFoundError as e:
            await db.close()
            raise HTTPException(
                status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
                detail=(
                    "Cannot collect package. Possible reason: another "
                    "collection of the same package is in progress. "
                    f"Original error: {e}"
                ),
            )
        task_collect_status.info = "Already installed"
        state = State(data=task_collect_status.sanitised_dict())
        # FIX: this line previously used `==` (a no-op comparison), so the
        # response carried the default status code instead of 200.
        response.status_code = status.HTTP_200_OK
        await db.close()
        return state
    settings = Inject(get_settings)

    # Check that tasks are not already in the DB
    for new_task in task_pkg.package_manifest.task_list:
        new_task_name_slug = slugify_task_name(new_task.name)
        new_task_source = f"{task_pkg.package_source}:{new_task_name_slug}"
        stm = select(TaskV2).where(TaskV2.source == new_task_source)
        res = await db.execute(stm)
        if res.scalars().all():
            raise HTTPException(
                status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
                detail=(
                    "Cannot collect package. Task with source "
                    f'"{new_task_source}" already exists in the database.'
                ),
            )

    # All checks are OK, proceed with task collection
    full_venv_path = venv_path.relative_to(settings.FRACTAL_TASKS_DIR)
    collection_status = TaskCollectStatusV2(
        status="pending", venv_path=full_venv_path, package=task_pkg.package
    )

    # Create State object (after casting venv_path to string)
    collection_status_dict = collection_status.dict()
    collection_status_dict["venv_path"] = str(collection_status.venv_path)
    state = State(data=collection_status_dict)
    db.add(state)
    await db.commit()
    await db.refresh(state)

    # Run the actual collection in the background, keyed by the State id
    background_tasks.add_task(
        background_collect_pip,
        state_id=state.id,
        venv_path=venv_path,
        task_pkg=task_pkg,
    )
    logger.debug(
        "Task-collection endpoint: start background collection "
        "and return state"
    )
    close_logger(logger)
    info = (
        "Collecting tasks in the background. "
        f"GET /task/collect/{state.id} to query collection status"
    )
    state.data["info"] = info
    response.status_code = status.HTTP_201_CREATED
    await db.close()

    return state
199
+
200
+
201
@router.get("/collect/{state_id}/", response_model=StateRead)
async def check_collection_status(
    state_id: int,
    user: User = Depends(current_active_user),
    verbose: bool = False,
    db: AsyncSession = Depends(get_async_db),
) -> StateRead:  # State[TaskCollectStatus]
    """
    Check status of background task collection
    """
    logger = set_logger(logger_name="check_collection_status")
    logger.debug(f"Querying state for state.id={state_id}")

    # Fetch the State row; 404 if no collection with this id exists
    state = await db.get(State, state_id)
    if state is None:
        await db.close()
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"No task collection info with id={state_id}",
        )

    collection_status = TaskCollectStatusV2(**state.data)

    # In some cases (i.e. a successful or ongoing task collection), the log
    # attribute is not set; if so (and if requested), read the current logs
    # from the venv folder and refresh the stored data.
    if verbose and not collection_status.log:
        collection_status.log = get_collection_log(collection_status.venv_path)
        state.data = collection_status.sanitised_dict()

    close_logger(logger)
    await db.close()
    return state
@@ -0,0 +1,397 @@
1
+ from typing import Optional
2
+
3
+ from fastapi import APIRouter
4
+ from fastapi import Depends
5
+ from fastapi import HTTPException
6
+ from fastapi import Response
7
+ from fastapi import status
8
+ from sqlmodel import select
9
+
10
+ from .....logger import close_logger
11
+ from .....logger import set_logger
12
+ from ....db import AsyncSession
13
+ from ....db import get_async_db
14
+ from ....models.v1 import Task as TaskV1
15
+ from ....models.v2 import JobV2
16
+ from ....models.v2 import ProjectV2
17
+ from ....models.v2 import TaskV2
18
+ from ....models.v2 import WorkflowV2
19
+ from ....schemas.v1 import WorkflowTaskCreateV1
20
+ from ....schemas.v2 import WorkflowCreateV2
21
+ from ....schemas.v2 import WorkflowExportV2
22
+ from ....schemas.v2 import WorkflowImportV2
23
+ from ....schemas.v2 import WorkflowReadV2
24
+ from ....schemas.v2 import WorkflowTaskCreateV2
25
+ from ....schemas.v2 import WorkflowUpdateV2
26
+ from ....security import current_active_user
27
+ from ....security import User
28
+ from ._aux_functions import _check_workflow_exists
29
+ from ._aux_functions import _get_project_check_owner
30
+ from ._aux_functions import _get_submitted_jobs_statement
31
+ from ._aux_functions import _get_workflow_check_owner
32
+ from ._aux_functions import _workflow_insert_task
33
+
34
+
35
# Router collecting the v2 workflow endpoints.
router = APIRouter()
36
+
37
+
38
@router.get(
    "/project/{project_id}/workflow/",
    response_model=list[WorkflowReadV2],
)
async def get_workflow_list(
    project_id: int,
    user: User = Depends(current_active_user),
    db: AsyncSession = Depends(get_async_db),
) -> Optional[list[WorkflowReadV2]]:
    """
    Get workflow list for given project
    """
    # Access control
    current_project = await _get_project_check_owner(
        project_id=project_id, user_id=user.id, db=db
    )
    # Query workflows of this project directly; a select/where statement
    # scales much better than refreshing every element of
    # `project.workflow_list` - ref
    # https://github.com/fractal-analytics-platform/fractal-server/pull/1082#issuecomment-1856676097.
    query = select(WorkflowV2).where(
        WorkflowV2.project_id == current_project.id
    )
    db_result = await db.execute(query)
    return db_result.scalars().all()
61
+
62
+
63
@router.post(
    "/project/{project_id}/workflow/",
    response_model=WorkflowReadV2,
    status_code=status.HTTP_201_CREATED,
)
async def create_workflow(
    project_id: int,
    workflow: WorkflowCreateV2,
    user: User = Depends(current_active_user),
    db: AsyncSession = Depends(get_async_db),
) -> Optional[WorkflowReadV2]:
    """
    Create a workflow, associate to a project
    """
    # Access control, then reject duplicate workflow names within the project
    await _get_project_check_owner(
        project_id=project_id, user_id=user.id, db=db
    )
    await _check_workflow_exists(
        name=workflow.name, project_id=project_id, db=db
    )

    # Persist the new workflow and return the refreshed row
    new_workflow = WorkflowV2(project_id=project_id, **workflow.dict())
    db.add(new_workflow)
    await db.commit()
    await db.refresh(new_workflow)
    await db.close()
    return new_workflow
90
+
91
+
92
@router.get(
    "/project/{project_id}/workflow/{workflow_id}/",
    response_model=WorkflowReadV2,
)
async def read_workflow(
    project_id: int,
    workflow_id: int,
    user: User = Depends(current_active_user),
    db: AsyncSession = Depends(get_async_db),
) -> Optional[WorkflowReadV2]:
    """
    Get info on an existing workflow
    """
    # The helper performs both access control and the 404 check
    return await _get_workflow_check_owner(
        project_id=project_id,
        workflow_id=workflow_id,
        user_id=user.id,
        db=db,
    )
114
+
115
+
116
@router.patch(
    "/project/{project_id}/workflow/{workflow_id}/",
    response_model=WorkflowReadV2,
)
async def update_workflow(
    project_id: int,
    workflow_id: int,
    patch: WorkflowUpdateV2,
    user: User = Depends(current_active_user),
    db: AsyncSession = Depends(get_async_db),
) -> Optional[WorkflowReadV2]:
    """
    Edit a workflow

    Besides scalar attributes, the patch may carry
    `reordered_workflowtask_ids`, which must be a permutation of the IDs of
    the workflow's current tasks; in that case the tasks' `order` attributes
    are rewritten to match the requested ordering.
    """
    # Access control + 404 check
    workflow = await _get_workflow_check_owner(
        project_id=project_id,
        workflow_id=workflow_id,
        user_id=user.id,
        db=db,
    )

    # Renaming must not collide with another workflow of the same project
    if patch.name:
        await _check_workflow_exists(
            name=patch.name, project_id=project_id, db=db
        )

    for key, value in patch.dict(exclude_unset=True).items():
        if key == "reordered_workflowtask_ids":
            current_workflowtask_ids = [
                wftask.id for wftask in workflow.task_list
            ]
            num_tasks = len(workflow.task_list)
            # The provided list must be exactly a permutation of the current
            # workflow-task IDs (same length, same elements)
            if len(value) != num_tasks or set(value) != set(
                current_workflowtask_ids
            ):
                raise HTTPException(
                    status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
                    detail=(
                        "`reordered_workflowtask_ids` must be a permutation of"
                        f" {current_workflowtask_ids} (given {value})"
                    ),
                )
            # Assign each workflow task its new position, i.e. the index of
            # its ID within the requested ordering
            for ind_wftask in range(num_tasks):
                new_order = value.index(workflow.task_list[ind_wftask].id)
                workflow.task_list[ind_wftask].order = new_order
        else:
            # Plain attribute update (e.g. name)
            setattr(workflow, key, value)

    await db.commit()
    await db.refresh(workflow)
    await db.close()

    return workflow
169
+
170
+
171
@router.delete(
    "/project/{project_id}/workflow/{workflow_id}/",
    status_code=status.HTTP_204_NO_CONTENT,
)
async def delete_workflow(
    project_id: int,
    workflow_id: int,
    user: User = Depends(current_active_user),
    db: AsyncSession = Depends(get_async_db),
) -> Response:
    """
    Delete a workflow
    """
    # Access control + 404 check
    workflow = await _get_workflow_check_owner(
        project_id=project_id,
        workflow_id=workflow_id,
        user_id=user.id,
        db=db,
    )

    # Fail if there exist jobs that are submitted and in relation with the
    # current workflow.
    submitted_jobs_query = _get_submitted_jobs_statement().where(
        JobV2.workflow_id == workflow.id
    )
    submitted_jobs = (
        (await db.execute(submitted_jobs_query)).scalars().all()
    )
    if submitted_jobs:
        string_ids = ", ".join(str(job.id) for job in submitted_jobs)
        raise HTTPException(
            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
            detail=(
                f"Cannot delete workflow {workflow.id} because it "
                f"is linked to active job(s) {string_ids}."
            ),
        )

    # Cascade operations: set foreign-keys to null for jobs which are in
    # relationship with the current workflow
    related_jobs_query = select(JobV2).where(JobV2.workflow_id == workflow_id)
    related_jobs = (await db.execute(related_jobs_query)).scalars().all()
    for job in related_jobs:
        job.workflow_id = None
    await db.commit()

    # Delete workflow
    await db.delete(workflow)
    await db.commit()

    return Response(status_code=status.HTTP_204_NO_CONTENT)
223
+
224
+
225
@router.get(
    "/project/{project_id}/workflow/{workflow_id}/export/",
    response_model=WorkflowExportV2,
)
async def export_worfklow(  # NOTE(review): typo in name ("worfklow"); renaming would change the route's operation id
    project_id: int,
    workflow_id: int,
    user: User = Depends(current_active_user),
    db: AsyncSession = Depends(get_async_db),
) -> Optional[WorkflowExportV2]:
    """
    Export an existing workflow, after stripping all IDs

    The ID-stripping is performed by serializing through the
    `WorkflowExportV2` response model.
    """
    # Access control + 404 check
    workflow = await _get_workflow_check_owner(
        project_id=project_id,
        workflow_id=workflow_id,
        user_id=user.id,
        db=db,
    )
    # Emit a warning when exporting a workflow with custom tasks
    # (tasks with a non-null owner are user-specific and not portable)
    logger = set_logger(None)
    for wftask in workflow.task_list:
        if wftask.is_legacy_task:
            # Legacy (v1) task attached to this workflow task
            if wftask.task_legacy.owner is not None:
                logger.warning(
                    f"Custom tasks (like the one with "
                    f"id={wftask.task_legacy_id} and "
                    f"source='{wftask.task_legacy.source}') are not meant to "
                    "be portable; re-importing this workflow may not work as "
                    "expected."
                )
        else:
            # Regular v2 task
            if wftask.task.owner is not None:
                logger.warning(
                    f"Custom tasks (like the one with id={wftask.task_id} and "
                    f'source="{wftask.task.source}") are not meant to be '
                    "portable; re-importing this workflow may not work as "
                    "expected."
                )
    close_logger(logger)

    await db.close()
    return workflow
268
+
269
+
270
@router.post(
    "/project/{project_id}/workflow/import/",
    response_model=WorkflowReadV2,
    status_code=status.HTTP_201_CREATED,
)
async def import_workflow(
    project_id: int,
    workflow: WorkflowImportV2,
    user: User = Depends(current_active_user),
    db: AsyncSession = Depends(get_async_db),
) -> Optional[WorkflowReadV2]:
    """
    Import an existing workflow into a project

    Also create all required objects (i.e. Workflow and WorkflowTask's) along
    the way.

    Tasks are matched by their `source`: every task referenced by the
    imported workflow must resolve to exactly one existing task (v2, or
    legacy v1 for workflow tasks flagged with `is_legacy_task`).
    """

    # Preliminary checks
    await _get_project_check_owner(
        project_id=project_id,
        user_id=user.id,
        db=db,
    )

    await _check_workflow_exists(
        name=workflow.name, project_id=project_id, db=db
    )

    # Check that all required tasks are available
    # (build source -> task-id maps, separately for v2 and legacy v1 tasks;
    # fail with 422 if any source matches zero or multiple tasks)
    source_to_id = {}
    source_to_id_legacy = {}

    for wf_task in workflow.task_list:

        if wf_task.is_legacy_task is True:
            source = wf_task.task_legacy.source
            if source not in source_to_id_legacy.keys():
                stm = select(TaskV1).where(TaskV1.source == source)
                tasks_by_source = (await db.execute(stm)).scalars().all()
                if len(tasks_by_source) != 1:
                    raise HTTPException(
                        status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
                        detail=(
                            f"Found {len(tasks_by_source)} tasks legacy "
                            f"with {source=}."
                        ),
                    )
                source_to_id_legacy[source] = tasks_by_source[0].id
        else:
            source = wf_task.task.source
            if source not in source_to_id.keys():
                stm = select(TaskV2).where(TaskV2.source == source)
                tasks_by_source = (await db.execute(stm)).scalars().all()
                if len(tasks_by_source) != 1:
                    raise HTTPException(
                        status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
                        detail=(
                            f"Found {len(tasks_by_source)} tasks "
                            f"with {source=}."
                        ),
                    )
                source_to_id[source] = tasks_by_source[0].id

    # Create new Workflow (with empty task_list)
    db_workflow = WorkflowV2(
        project_id=project_id,
        **workflow.dict(exclude_none=True, exclude={"task_list"}),
    )
    db.add(db_workflow)
    await db.commit()
    await db.refresh(db_workflow)

    # Insert tasks
    async with db:  # FIXME why?

        for wf_task in workflow.task_list:
            if wf_task.is_legacy_task is True:
                # Identify task_id
                source = wf_task.task_legacy.source
                task_id = source_to_id_legacy[source]
                # Prepare new_wf_task
                new_wf_task = WorkflowTaskCreateV1(
                    **wf_task.dict(exclude_none=True)
                )
                # Insert task
                await _workflow_insert_task(
                    **new_wf_task.dict(),
                    is_legacy_task=True,
                    workflow_id=db_workflow.id,
                    task_id=task_id,
                    db=db,
                )
            else:
                # Identify task_id
                source = wf_task.task.source
                task_id = source_to_id[source]
                # Prepare new_wf_task
                new_wf_task = WorkflowTaskCreateV2(
                    **wf_task.dict(exclude_none=True)
                )
                # Insert task
                await _workflow_insert_task(
                    **new_wf_task.dict(),
                    workflow_id=db_workflow.id,
                    task_id=task_id,
                    db=db,
                )

    await db.close()
    return db_workflow
381
+
382
+
383
@router.get("/workflow/", response_model=list[WorkflowReadV2])
async def get_user_workflows(
    user: User = Depends(current_active_user),
    db: AsyncSession = Depends(get_async_db),
) -> list[WorkflowReadV2]:
    """
    Returns all the workflows of the current user
    """
    # Join workflows to their projects and keep only those whose project
    # lists the current user among its members
    query = (
        select(WorkflowV2)
        .join(ProjectV2)
        .where(ProjectV2.user_list.any(User.id == user.id))
    )
    query_result = await db.execute(query)
    return query_result.scalars().all()