fractal-server 1.4.10__py3-none-any.whl → 2.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (138)
  1. fractal_server/__init__.py +1 -1
  2. fractal_server/app/models/__init__.py +6 -8
  3. fractal_server/app/models/linkuserproject.py +9 -0
  4. fractal_server/app/models/security.py +6 -0
  5. fractal_server/app/models/v1/__init__.py +12 -0
  6. fractal_server/app/models/{dataset.py → v1/dataset.py} +5 -5
  7. fractal_server/app/models/{job.py → v1/job.py} +5 -5
  8. fractal_server/app/models/{project.py → v1/project.py} +5 -5
  9. fractal_server/app/models/{state.py → v1/state.py} +2 -2
  10. fractal_server/app/models/{task.py → v1/task.py} +7 -2
  11. fractal_server/app/models/{workflow.py → v1/workflow.py} +5 -5
  12. fractal_server/app/models/v2/__init__.py +22 -0
  13. fractal_server/app/models/v2/collection_state.py +21 -0
  14. fractal_server/app/models/v2/dataset.py +54 -0
  15. fractal_server/app/models/v2/job.py +51 -0
  16. fractal_server/app/models/v2/project.py +30 -0
  17. fractal_server/app/models/v2/task.py +93 -0
  18. fractal_server/app/models/v2/workflow.py +35 -0
  19. fractal_server/app/models/v2/workflowtask.py +49 -0
  20. fractal_server/app/routes/admin/__init__.py +0 -0
  21. fractal_server/app/routes/{admin.py → admin/v1.py} +42 -42
  22. fractal_server/app/routes/admin/v2.py +309 -0
  23. fractal_server/app/routes/api/v1/__init__.py +7 -7
  24. fractal_server/app/routes/api/v1/_aux_functions.py +8 -8
  25. fractal_server/app/routes/api/v1/dataset.py +41 -41
  26. fractal_server/app/routes/api/v1/job.py +14 -14
  27. fractal_server/app/routes/api/v1/project.py +27 -25
  28. fractal_server/app/routes/api/v1/task.py +26 -16
  29. fractal_server/app/routes/api/v1/task_collection.py +28 -16
  30. fractal_server/app/routes/api/v1/workflow.py +28 -28
  31. fractal_server/app/routes/api/v1/workflowtask.py +11 -11
  32. fractal_server/app/routes/api/v2/__init__.py +34 -0
  33. fractal_server/app/routes/api/v2/_aux_functions.py +502 -0
  34. fractal_server/app/routes/api/v2/dataset.py +293 -0
  35. fractal_server/app/routes/api/v2/images.py +279 -0
  36. fractal_server/app/routes/api/v2/job.py +200 -0
  37. fractal_server/app/routes/api/v2/project.py +186 -0
  38. fractal_server/app/routes/api/v2/status.py +150 -0
  39. fractal_server/app/routes/api/v2/submit.py +210 -0
  40. fractal_server/app/routes/api/v2/task.py +222 -0
  41. fractal_server/app/routes/api/v2/task_collection.py +239 -0
  42. fractal_server/app/routes/api/v2/task_legacy.py +59 -0
  43. fractal_server/app/routes/api/v2/workflow.py +380 -0
  44. fractal_server/app/routes/api/v2/workflowtask.py +265 -0
  45. fractal_server/app/routes/aux/_job.py +2 -2
  46. fractal_server/app/runner/__init__.py +0 -364
  47. fractal_server/app/runner/async_wrap.py +27 -0
  48. fractal_server/app/runner/components.py +5 -0
  49. fractal_server/app/runner/exceptions.py +129 -0
  50. fractal_server/app/runner/executors/__init__.py +0 -0
  51. fractal_server/app/runner/executors/slurm/__init__.py +3 -0
  52. fractal_server/app/runner/{_slurm → executors/slurm}/_batching.py +1 -1
  53. fractal_server/app/runner/{_slurm → executors/slurm}/_check_jobs_status.py +1 -1
  54. fractal_server/app/runner/{_slurm → executors/slurm}/_executor_wait_thread.py +1 -1
  55. fractal_server/app/runner/{_slurm → executors/slurm}/_slurm_config.py +3 -152
  56. fractal_server/app/runner/{_slurm → executors/slurm}/_subprocess_run_as_user.py +1 -1
  57. fractal_server/app/runner/{_slurm → executors/slurm}/executor.py +32 -21
  58. fractal_server/app/runner/filenames.py +6 -0
  59. fractal_server/app/runner/set_start_and_last_task_index.py +39 -0
  60. fractal_server/app/runner/task_files.py +103 -0
  61. fractal_server/app/runner/v1/__init__.py +366 -0
  62. fractal_server/app/runner/{_common.py → v1/_common.py} +14 -121
  63. fractal_server/app/runner/{_local → v1/_local}/__init__.py +5 -4
  64. fractal_server/app/runner/{_local → v1/_local}/_local_config.py +6 -7
  65. fractal_server/app/runner/{_local → v1/_local}/_submit_setup.py +1 -5
  66. fractal_server/app/runner/v1/_slurm/__init__.py +312 -0
  67. fractal_server/app/runner/{_slurm → v1/_slurm}/_submit_setup.py +5 -11
  68. fractal_server/app/runner/v1/_slurm/get_slurm_config.py +163 -0
  69. fractal_server/app/runner/v1/common.py +117 -0
  70. fractal_server/app/runner/{handle_failed_job.py → v1/handle_failed_job.py} +8 -8
  71. fractal_server/app/runner/v2/__init__.py +336 -0
  72. fractal_server/app/runner/v2/_local/__init__.py +162 -0
  73. fractal_server/app/runner/v2/_local/_local_config.py +118 -0
  74. fractal_server/app/runner/v2/_local/_submit_setup.py +52 -0
  75. fractal_server/app/runner/v2/_local/executor.py +100 -0
  76. fractal_server/app/runner/{_slurm → v2/_slurm}/__init__.py +38 -47
  77. fractal_server/app/runner/v2/_slurm/_submit_setup.py +82 -0
  78. fractal_server/app/runner/v2/_slurm/get_slurm_config.py +182 -0
  79. fractal_server/app/runner/v2/deduplicate_list.py +23 -0
  80. fractal_server/app/runner/v2/handle_failed_job.py +165 -0
  81. fractal_server/app/runner/v2/merge_outputs.py +38 -0
  82. fractal_server/app/runner/v2/runner.py +343 -0
  83. fractal_server/app/runner/v2/runner_functions.py +374 -0
  84. fractal_server/app/runner/v2/runner_functions_low_level.py +130 -0
  85. fractal_server/app/runner/v2/task_interface.py +62 -0
  86. fractal_server/app/runner/v2/v1_compat.py +31 -0
  87. fractal_server/app/schemas/__init__.py +1 -42
  88. fractal_server/app/schemas/_validators.py +28 -5
  89. fractal_server/app/schemas/v1/__init__.py +36 -0
  90. fractal_server/app/schemas/{applyworkflow.py → v1/applyworkflow.py} +18 -18
  91. fractal_server/app/schemas/{dataset.py → v1/dataset.py} +30 -30
  92. fractal_server/app/schemas/{dumps.py → v1/dumps.py} +8 -8
  93. fractal_server/app/schemas/{manifest.py → v1/manifest.py} +5 -5
  94. fractal_server/app/schemas/{project.py → v1/project.py} +9 -9
  95. fractal_server/app/schemas/{task.py → v1/task.py} +12 -12
  96. fractal_server/app/schemas/{task_collection.py → v1/task_collection.py} +7 -7
  97. fractal_server/app/schemas/{workflow.py → v1/workflow.py} +38 -38
  98. fractal_server/app/schemas/v2/__init__.py +37 -0
  99. fractal_server/app/schemas/v2/dataset.py +126 -0
  100. fractal_server/app/schemas/v2/dumps.py +87 -0
  101. fractal_server/app/schemas/v2/job.py +114 -0
  102. fractal_server/app/schemas/v2/manifest.py +159 -0
  103. fractal_server/app/schemas/v2/project.py +34 -0
  104. fractal_server/app/schemas/v2/status.py +16 -0
  105. fractal_server/app/schemas/v2/task.py +151 -0
  106. fractal_server/app/schemas/v2/task_collection.py +109 -0
  107. fractal_server/app/schemas/v2/workflow.py +79 -0
  108. fractal_server/app/schemas/v2/workflowtask.py +208 -0
  109. fractal_server/config.py +5 -4
  110. fractal_server/images/__init__.py +4 -0
  111. fractal_server/images/models.py +136 -0
  112. fractal_server/images/tools.py +84 -0
  113. fractal_server/main.py +11 -3
  114. fractal_server/migrations/env.py +0 -2
  115. fractal_server/migrations/versions/5bf02391cfef_v2.py +245 -0
  116. fractal_server/tasks/__init__.py +0 -5
  117. fractal_server/tasks/endpoint_operations.py +13 -19
  118. fractal_server/tasks/utils.py +35 -0
  119. fractal_server/tasks/{_TaskCollectPip.py → v1/_TaskCollectPip.py} +3 -3
  120. fractal_server/tasks/v1/__init__.py +0 -0
  121. fractal_server/tasks/{background_operations.py → v1/background_operations.py} +20 -52
  122. fractal_server/tasks/v1/get_collection_data.py +14 -0
  123. fractal_server/tasks/v2/_TaskCollectPip.py +103 -0
  124. fractal_server/tasks/v2/__init__.py +0 -0
  125. fractal_server/tasks/v2/background_operations.py +381 -0
  126. fractal_server/tasks/v2/get_collection_data.py +14 -0
  127. fractal_server/urls.py +13 -0
  128. {fractal_server-1.4.10.dist-info → fractal_server-2.0.0.dist-info}/METADATA +10 -10
  129. fractal_server-2.0.0.dist-info/RECORD +169 -0
  130. fractal_server/app/runner/_slurm/.gitignore +0 -2
  131. fractal_server/app/runner/common.py +0 -311
  132. fractal_server/app/schemas/json_schemas/manifest.json +0 -81
  133. fractal_server-1.4.10.dist-info/RECORD +0 -98
  134. /fractal_server/app/runner/{_slurm → executors/slurm}/remote.py +0 -0
  135. /fractal_server/app/runner/{_local → v1/_local}/executor.py +0 -0
  136. {fractal_server-1.4.10.dist-info → fractal_server-2.0.0.dist-info}/LICENSE +0 -0
  137. {fractal_server-1.4.10.dist-info → fractal_server-2.0.0.dist-info}/WHEEL +0 -0
  138. {fractal_server-1.4.10.dist-info → fractal_server-2.0.0.dist-info}/entry_points.txt +0 -0
@@ -0,0 +1,59 @@
1
+ from fastapi import APIRouter
2
+ from fastapi import Depends
3
+ from fastapi import HTTPException
4
+ from fastapi import status
5
+ from sqlmodel import select
6
+
7
+ from .....logger import set_logger
8
+ from ....db import AsyncSession
9
+ from ....db import get_async_db
10
+ from ....models.v1 import Task as TaskV1
11
+ from ....schemas.v2 import TaskLegacyReadV2
12
+ from ....security import current_active_user
13
+ from ....security import User
14
+
15
+ router = APIRouter()
16
+
17
+ logger = set_logger(__name__)
18
+
19
+
20
@router.get("/", response_model=list[TaskLegacyReadV2])
async def get_list_task_legacy(
    args_schema: bool = True,
    only_v2_compatible: bool = False,
    user: User = Depends(current_active_user),
    db: AsyncSession = Depends(get_async_db),
) -> list[TaskLegacyReadV2]:
    """
    Get list of available legacy tasks
    """
    # Build the query, optionally restricting to V2-compatible tasks.
    query = select(TaskV1)
    if only_v2_compatible:
        query = query.where(TaskV1.is_v2_compatible)
    result = await db.execute(query)
    tasks = result.scalars().all()
    await db.close()
    # Drop the (potentially large) args_schema payload when not requested.
    if not args_schema:
        for legacy_task in tasks:
            legacy_task.args_schema = None
    return tasks
41
+
42
+
43
@router.get("/{task_id}/", response_model=TaskLegacyReadV2)
async def get_task_legacy(
    task_id: int,
    user: User = Depends(current_active_user),
    db: AsyncSession = Depends(get_async_db),
) -> TaskLegacyReadV2:
    """
    Get info on a specific legacy task
    """
    legacy_task = await db.get(TaskV1, task_id)
    await db.close()
    if legacy_task is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"TaskV1[{task_id}] not found",
        )
    return legacy_task
@@ -0,0 +1,380 @@
1
+ from typing import Optional
2
+
3
+ from fastapi import APIRouter
4
+ from fastapi import Depends
5
+ from fastapi import HTTPException
6
+ from fastapi import Response
7
+ from fastapi import status
8
+ from sqlmodel import select
9
+
10
+ from .....logger import close_logger
11
+ from .....logger import set_logger
12
+ from ....db import AsyncSession
13
+ from ....db import get_async_db
14
+ from ....models.v1 import Task as TaskV1
15
+ from ....models.v2 import JobV2
16
+ from ....models.v2 import ProjectV2
17
+ from ....models.v2 import TaskV2
18
+ from ....models.v2 import WorkflowV2
19
+ from ....schemas.v2 import WorkflowCreateV2
20
+ from ....schemas.v2 import WorkflowExportV2
21
+ from ....schemas.v2 import WorkflowImportV2
22
+ from ....schemas.v2 import WorkflowReadV2
23
+ from ....schemas.v2 import WorkflowTaskCreateV2
24
+ from ....schemas.v2 import WorkflowUpdateV2
25
+ from ....security import current_active_user
26
+ from ....security import User
27
+ from ._aux_functions import _check_workflow_exists
28
+ from ._aux_functions import _get_project_check_owner
29
+ from ._aux_functions import _get_submitted_jobs_statement
30
+ from ._aux_functions import _get_workflow_check_owner
31
+ from ._aux_functions import _workflow_insert_task
32
+
33
+
34
+ router = APIRouter()
35
+
36
+
37
@router.get(
    "/project/{project_id}/workflow/",
    response_model=list[WorkflowReadV2],
)
async def get_workflow_list(
    project_id: int,
    user: User = Depends(current_active_user),
    db: AsyncSession = Depends(get_async_db),
) -> Optional[list[WorkflowReadV2]]:
    """
    Get workflow list for given project
    """
    # Access control (raises if the project is missing or not owned).
    project = await _get_project_check_owner(
        project_id=project_id, user_id=user.id, db=db
    )
    # Query workflows directly instead of refreshing every element of
    # `project.workflow_list`; the select/where approach scales much
    # better - ref
    # https://github.com/fractal-analytics-platform/fractal-server/pull/1082#issuecomment-1856676097.
    res = await db.execute(
        select(WorkflowV2).where(WorkflowV2.project_id == project.id)
    )
    return res.scalars().all()
60
+
61
+
62
@router.post(
    "/project/{project_id}/workflow/",
    response_model=WorkflowReadV2,
    status_code=status.HTTP_201_CREATED,
)
async def create_workflow(
    project_id: int,
    workflow: WorkflowCreateV2,
    user: User = Depends(current_active_user),
    db: AsyncSession = Depends(get_async_db),
) -> Optional[WorkflowReadV2]:
    """
    Create a workflow, associate to a project
    """
    # Access control, then reject duplicate names within the project.
    await _get_project_check_owner(
        project_id=project_id, user_id=user.id, db=db
    )
    await _check_workflow_exists(
        name=workflow.name, project_id=project_id, db=db
    )

    new_workflow = WorkflowV2(project_id=project_id, **workflow.dict())
    db.add(new_workflow)
    await db.commit()
    await db.refresh(new_workflow)
    await db.close()
    return new_workflow
89
+
90
+
91
@router.get(
    "/project/{project_id}/workflow/{workflow_id}/",
    response_model=WorkflowReadV2,
)
async def read_workflow(
    project_id: int,
    workflow_id: int,
    user: User = Depends(current_active_user),
    db: AsyncSession = Depends(get_async_db),
) -> Optional[WorkflowReadV2]:
    """
    Get info on an existing workflow
    """
    # Existence/ownership checks are delegated to the aux helper, which
    # raises the appropriate HTTPException on failure.
    return await _get_workflow_check_owner(
        project_id=project_id,
        workflow_id=workflow_id,
        user_id=user.id,
        db=db,
    )
113
+
114
+
115
@router.patch(
    "/project/{project_id}/workflow/{workflow_id}/",
    response_model=WorkflowReadV2,
)
async def update_workflow(
    project_id: int,
    workflow_id: int,
    patch: WorkflowUpdateV2,
    user: User = Depends(current_active_user),
    db: AsyncSession = Depends(get_async_db),
) -> Optional[WorkflowReadV2]:
    """
    Edit a workflow

    Supports plain attribute updates (e.g. renaming) and reordering of the
    workflow's tasks via the special `reordered_workflowtask_ids` key,
    which must be a permutation of the current WorkflowTask IDs.

    Raises:
        HTTPException(422): If `reordered_workflowtask_ids` is not a
            permutation of the current WorkflowTask IDs.
    """
    # Access control (raises 404/403 inside the helper on failure).
    workflow = await _get_workflow_check_owner(
        project_id=project_id,
        workflow_id=workflow_id,
        user_id=user.id,
        db=db,
    )

    # A rename must not collide with another workflow of the same project.
    if patch.name:
        await _check_workflow_exists(
            name=patch.name, project_id=project_id, db=db
        )

    for key, value in patch.dict(exclude_unset=True).items():
        if key == "reordered_workflowtask_ids":
            current_workflowtask_ids = [
                wftask.id for wftask in workflow.task_list
            ]
            num_tasks = len(workflow.task_list)
            # The submitted ID list must be a permutation of the current
            # WorkflowTask IDs (same length, same set of values).
            if len(value) != num_tasks or set(value) != set(
                current_workflowtask_ids
            ):
                raise HTTPException(
                    status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
                    detail=(
                        "`reordered_workflowtask_ids` must be a permutation of"
                        f" {current_workflowtask_ids} (given {value})"
                    ),
                )
            # Re-assign each WorkflowTask's `order` according to its
            # position in the submitted ID list.
            for ind_wftask in range(num_tasks):
                new_order = value.index(workflow.task_list[ind_wftask].id)
                workflow.task_list[ind_wftask].order = new_order
        else:
            # Plain attribute update.
            setattr(workflow, key, value)

    await db.commit()
    await db.refresh(workflow)
    await db.close()

    return workflow
168
+
169
+
170
@router.delete(
    "/project/{project_id}/workflow/{workflow_id}/",
    status_code=status.HTTP_204_NO_CONTENT,
)
async def delete_workflow(
    project_id: int,
    workflow_id: int,
    user: User = Depends(current_active_user),
    db: AsyncSession = Depends(get_async_db),
) -> Response:
    """
    Delete a workflow
    """
    workflow = await _get_workflow_check_owner(
        project_id=project_id,
        workflow_id=workflow_id,
        user_id=user.id,
        db=db,
    )

    # Refuse deletion while submitted jobs still reference this workflow.
    submitted_stm = _get_submitted_jobs_statement().where(
        JobV2.workflow_id == workflow.id
    )
    submitted_jobs = (await db.execute(submitted_stm)).scalars().all()
    if submitted_jobs:
        string_ids = str([job.id for job in submitted_jobs])[1:-1]
        raise HTTPException(
            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
            detail=(
                f"Cannot delete workflow {workflow.id} because it "
                f"is linked to active job(s) {string_ids}."
            ),
        )

    # Manual cascade: detach every remaining job from this workflow by
    # nulling its foreign key before the delete.
    all_jobs_stm = select(JobV2).where(JobV2.workflow_id == workflow_id)
    for job in (await db.execute(all_jobs_stm)).scalars().all():
        job.workflow_id = None

    # Delete the workflow itself.
    await db.delete(workflow)
    await db.commit()

    return Response(status_code=status.HTTP_204_NO_CONTENT)
221
+
222
+
223
@router.get(
    "/project/{project_id}/workflow/{workflow_id}/export/",
    response_model=WorkflowExportV2,
)
async def export_worfklow(
    project_id: int,
    workflow_id: int,
    user: User = Depends(current_active_user),
    db: AsyncSession = Depends(get_async_db),
) -> Optional[WorkflowExportV2]:
    """
    Export an existing workflow, after stripping all IDs

    The ID-stripping itself is handled by the `WorkflowExportV2` response
    model; this endpoint only fetches the workflow and warns about
    non-portable custom tasks.
    """
    # Access control (raises inside the helper on failure).
    workflow = await _get_workflow_check_owner(
        project_id=project_id,
        workflow_id=workflow_id,
        user_id=user.id,
        db=db,
    )
    # Emit a warning when exporting a workflow with custom tasks
    # (a non-None `owner` marks a custom task); legacy (V1) and V2 tasks
    # expose the relevant attributes under different names.
    logger = set_logger(None)
    for wftask in workflow.task_list:
        if wftask.is_legacy_task:
            if wftask.task_legacy.owner is not None:
                logger.warning(
                    f"Custom tasks (like the one with "
                    f"id={wftask.task_legacy_id} and "
                    f"source='{wftask.task_legacy.source}') are not meant to "
                    "be portable; re-importing this workflow may not work as "
                    "expected."
                )
        else:
            if wftask.task.owner is not None:
                logger.warning(
                    f"Custom tasks (like the one with id={wftask.task_id} and "
                    f'source="{wftask.task.source}") are not meant to be '
                    "portable; re-importing this workflow may not work as "
                    "expected."
                )
    close_logger(logger)

    await db.close()
    return workflow
266
+
267
+
268
@router.post(
    "/project/{project_id}/workflow/import/",
    response_model=WorkflowReadV2,
    status_code=status.HTTP_201_CREATED,
)
async def import_workflow(
    project_id: int,
    workflow: WorkflowImportV2,
    user: User = Depends(current_active_user),
    db: AsyncSession = Depends(get_async_db),
) -> Optional[WorkflowReadV2]:
    """
    Import an existing workflow into a project

    Also create all required objects (i.e. Workflow and WorkflowTask's) along
    the way.

    Raises:
        HTTPException(422): If any task `source` does not identify exactly
            one task in the database, or (via the aux helpers) if the
            workflow name already exists in the project.
    """

    # Preliminary checks
    await _get_project_check_owner(
        project_id=project_id,
        user_id=user.id,
        db=db,
    )

    await _check_workflow_exists(
        name=workflow.name, project_id=project_id, db=db
    )

    # Check that all required tasks are available
    # Resolve each task `source` to a database ID before creating anything;
    # legacy (V1) and V2 tasks live in different tables, hence two maps.
    source_to_id = {}
    source_to_id_legacy = {}

    for wf_task in workflow.task_list:

        if wf_task.is_legacy_task is True:
            source = wf_task.task_legacy.source
            # Each source is looked up at most once.
            if source not in source_to_id_legacy.keys():
                stm = select(TaskV1).where(TaskV1.source == source)
                tasks_by_source = (await db.execute(stm)).scalars().all()
                # `source` must identify exactly one legacy task.
                if len(tasks_by_source) != 1:
                    raise HTTPException(
                        status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
                        detail=(
                            f"Found {len(tasks_by_source)} tasks legacy "
                            f"with {source=}."
                        ),
                    )
                source_to_id_legacy[source] = tasks_by_source[0].id
        else:
            source = wf_task.task.source
            if source not in source_to_id.keys():
                stm = select(TaskV2).where(TaskV2.source == source)
                tasks_by_source = (await db.execute(stm)).scalars().all()
                # `source` must identify exactly one V2 task.
                if len(tasks_by_source) != 1:
                    raise HTTPException(
                        status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
                        detail=(
                            f"Found {len(tasks_by_source)} tasks "
                            f"with {source=}."
                        ),
                    )
                source_to_id[source] = tasks_by_source[0].id

    # Create new Workflow (with empty task_list)
    db_workflow = WorkflowV2(
        project_id=project_id,
        **workflow.dict(exclude_none=True, exclude={"task_list"}),
    )
    db.add(db_workflow)
    await db.commit()
    await db.refresh(db_workflow)

    # Insert tasks

    for wf_task in workflow.task_list:
        # Pick the task ID from the map populated in the first pass.
        if wf_task.is_legacy_task is True:
            source = wf_task.task_legacy.source
            task_id = source_to_id_legacy[source]
        else:
            source = wf_task.task.source
            task_id = source_to_id[source]

        new_wf_task = WorkflowTaskCreateV2(
            **wf_task.dict(exclude_none=True, exclude={"task", "task_legacy"})
        )
        # Insert task
        await _workflow_insert_task(
            **new_wf_task.dict(),
            workflow_id=db_workflow.id,
            task_id=task_id,
            db=db,
        )

    await db.close()
    return db_workflow
364
+
365
+
366
@router.get("/workflow/", response_model=list[WorkflowReadV2])
async def get_user_workflows(
    user: User = Depends(current_active_user),
    db: AsyncSession = Depends(get_async_db),
) -> list[WorkflowReadV2]:
    """
    Returns all the workflows of the current user
    """
    # Join through ProjectV2 so that only workflows belonging to one of
    # the user's projects are returned.
    query = (
        select(WorkflowV2)
        .join(ProjectV2)
        .where(ProjectV2.user_list.any(User.id == user.id))
    )
    res = await db.execute(query)
    return res.scalars().all()
@@ -0,0 +1,265 @@
1
+ from copy import deepcopy
2
+ from typing import Optional
3
+
4
+ from fastapi import APIRouter
5
+ from fastapi import Depends
6
+ from fastapi import HTTPException
7
+ from fastapi import Response
8
+ from fastapi import status
9
+
10
+ from ....db import AsyncSession
11
+ from ....db import get_async_db
12
+ from ....models.v1 import Task
13
+ from ....models.v2 import TaskV2
14
+ from ....schemas.v2 import WorkflowTaskCreateV2
15
+ from ....schemas.v2 import WorkflowTaskReadV2
16
+ from ....schemas.v2 import WorkflowTaskUpdateV2
17
+ from ....security import current_active_user
18
+ from ....security import User
19
+ from ._aux_functions import _get_workflow_check_owner
20
+ from ._aux_functions import _get_workflow_task_check_owner
21
+ from ._aux_functions import _workflow_insert_task
22
+
23
+ router = APIRouter()
24
+
25
+
26
@router.post(
    "/project/{project_id}/workflow/{workflow_id}/wftask/",
    response_model=WorkflowTaskReadV2,
    status_code=status.HTTP_201_CREATED,
)
async def create_workflowtask(
    project_id: int,
    workflow_id: int,
    task_id: int,
    new_task: WorkflowTaskCreateV2,
    user: User = Depends(current_active_user),
    db: AsyncSession = Depends(get_async_db),
) -> Optional[WorkflowTaskReadV2]:
    """
    Add a WorkflowTask to a Workflow

    Raises:
        HTTPException(404): If the referenced (legacy or V2) task does
            not exist.
        HTTPException(422): If a legacy task is not V2-compatible, or if
            the provided args/meta fields are incompatible with the task
            type (parallel vs non_parallel).
    """

    # Access control (raises inside the helper on failure).
    workflow = await _get_workflow_check_owner(
        project_id=project_id, workflow_id=workflow_id, user_id=user.id, db=db
    )

    # Resolve `task_id` against the legacy (V1) or V2 task table,
    # depending on the request.
    if new_task.is_legacy_task is True:
        task = await db.get(Task, task_id)
        if not task:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail=f"Task {task_id} not found.",
            )
        # Legacy tasks must be explicitly flagged as usable from V2.
        if not task.is_v2_compatible:
            raise HTTPException(
                status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
                detail=f"Task {task_id} is not V2-compatible.",
            )
    else:
        task = await db.get(TaskV2, task_id)
        if not task:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail=f"TaskV2 {task_id} not found.",
            )

    # Parallel (and legacy) tasks cannot take non-parallel args/meta;
    # non-parallel tasks cannot take parallel args/meta.
    if new_task.is_legacy_task is True or task.type == "parallel":
        if (
            new_task.meta_non_parallel is not None
            or new_task.args_non_parallel is not None
        ):
            raise HTTPException(
                status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
                detail=(
                    "Cannot set `WorkflowTaskV2.meta_non_parallel` or "
                    "`WorkflowTask.args_non_parallel` if the associated Task "
                    "is `parallel` (or legacy)."
                ),
            )
    elif task.type == "non_parallel":
        if (
            new_task.meta_parallel is not None
            or new_task.args_parallel is not None
        ):
            raise HTTPException(
                status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
                detail=(
                    "Cannot set `WorkflowTaskV2.meta_parallel` or "
                    "`WorkflowTask.args_parallel` if the associated Task "
                    "is `non_parallel`."
                ),
            )

    # Delegate the actual insertion (including `order` handling) to the
    # aux helper.
    workflow_task = await _workflow_insert_task(
        workflow_id=workflow.id,
        is_legacy_task=new_task.is_legacy_task,
        task_id=task_id,
        order=new_task.order,
        meta_non_parallel=new_task.meta_non_parallel,
        meta_parallel=new_task.meta_parallel,
        args_non_parallel=new_task.args_non_parallel,
        args_parallel=new_task.args_parallel,
        input_filters=new_task.input_filters,
        db=db,
    )

    await db.close()

    return workflow_task
110
+
111
+
112
@router.get(
    "/project/{project_id}/workflow/{workflow_id}/wftask/{workflow_task_id}/",
    response_model=WorkflowTaskReadV2,
)
async def read_workflowtask(
    project_id: int,
    workflow_id: int,
    workflow_task_id: int,
    user: User = Depends(current_active_user),
    db: AsyncSession = Depends(get_async_db),
):
    """
    Get info on a WorkflowTask of a Workflow
    """
    # The helper also returns the parent workflow, which is not needed here.
    wftask, _ = await _get_workflow_task_check_owner(
        project_id=project_id,
        workflow_task_id=workflow_task_id,
        workflow_id=workflow_id,
        user_id=user.id,
        db=db,
    )
    return wftask
131
+
132
+
133
@router.patch(
    "/project/{project_id}/workflow/{workflow_id}/wftask/{workflow_task_id}/",
    response_model=WorkflowTaskReadV2,
)
async def update_workflowtask(
    project_id: int,
    workflow_id: int,
    workflow_task_id: int,
    workflow_task_update: WorkflowTaskUpdateV2,
    user: User = Depends(current_active_user),
    db: AsyncSession = Depends(get_async_db),
) -> Optional[WorkflowTaskReadV2]:
    """
    Edit a WorkflowTask of a Workflow

    Patched `args_*` values are merged on top of the defaults extracted
    from the task's args-schema; `meta_*` and `input_filters` are set
    verbatim.

    Raises:
        HTTPException(422): If the patched fields are incompatible with
            the task type (parallel vs non_parallel), or if an
            unsupported key is patched.
    """

    # Access control (raises inside the helper on failure).
    db_wf_task, db_workflow = await _get_workflow_task_check_owner(
        project_id=project_id,
        workflow_task_id=workflow_task_id,
        workflow_id=workflow_id,
        user_id=user.id,
        db=db,
    )

    # Parallel tasks cannot take non-parallel args/meta, and vice versa.
    if db_wf_task.task_type == "parallel" and (
        workflow_task_update.args_non_parallel is not None
        or workflow_task_update.meta_non_parallel is not None
    ):
        raise HTTPException(
            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
            detail=(
                "Cannot patch `WorkflowTaskV2.args_non_parallel` or "
                "`WorkflowTask.meta_non_parallel` if the associated Task is "
                "parallel."
            ),
        )
    elif db_wf_task.task_type == "non_parallel" and (
        workflow_task_update.args_parallel is not None
        or workflow_task_update.meta_parallel is not None
    ):
        raise HTTPException(
            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
            detail=(
                "Cannot patch `WorkflowTaskV2.args_parallel` or "
                "`WorkflowTask.meta_parallel` if the associated Task is "
                "non parallel."
            ),
        )

    for key, value in workflow_task_update.dict(exclude_unset=True).items():
        if key == "args_parallel":
            # Get default arguments via a Task property method
            if db_wf_task.is_legacy_task:
                default_args = (
                    db_wf_task.task_legacy.default_args_from_args_schema
                )
            else:
                default_args = (
                    db_wf_task.task.default_args_parallel_from_args_schema
                )
            # Override default_args with args value items
            actual_args = deepcopy(default_args)
            if value is not None:
                for k, v in value.items():
                    actual_args[k] = v
            # An empty merge result is stored as None, not {}.
            if not actual_args:
                actual_args = None
            setattr(db_wf_task, key, actual_args)
        elif key == "args_non_parallel":
            # Get default arguments via a Task property method
            if db_wf_task.is_legacy_task:
                # This is only needed so that we don't have to modify the rest
                # of this block, but legacy task cannot take any non-parallel
                # args (see checks above).
                default_args = {}
            else:
                default_args = deepcopy(
                    db_wf_task.task.default_args_non_parallel_from_args_schema
                )
            # Override default_args with args value items
            actual_args = default_args.copy()
            if value is not None:
                for k, v in value.items():
                    actual_args[k] = v
            # An empty merge result is stored as None, not {}.
            if not actual_args:
                actual_args = None
            setattr(db_wf_task, key, actual_args)
        elif key in ["meta_parallel", "meta_non_parallel", "input_filters"]:
            # These fields are replaced verbatim (no default merging).
            setattr(db_wf_task, key, value)
        else:
            raise HTTPException(
                status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
                detail=f"patch_workflow_task endpoint cannot set {key=}",
            )

    await db.commit()
    await db.refresh(db_wf_task)
    await db.close()

    return db_wf_task
233
+
234
+
235
@router.delete(
    "/project/{project_id}/workflow/{workflow_id}/wftask/{workflow_task_id}/",
    status_code=status.HTTP_204_NO_CONTENT,
)
async def delete_workflowtask(
    project_id: int,
    workflow_id: int,
    workflow_task_id: int,
    user: User = Depends(current_active_user),
    db: AsyncSession = Depends(get_async_db),
) -> Response:
    """
    Delete a WorkflowTask of a Workflow
    """
    # Access control: resolves both the WorkflowTask and its parent
    # Workflow, raising on failure.
    wftask_to_delete, parent_workflow = await _get_workflow_task_check_owner(
        project_id=project_id,
        workflow_task_id=workflow_task_id,
        workflow_id=workflow_id,
        user_id=user.id,
        db=db,
    )

    await db.delete(wftask_to_delete)
    await db.commit()

    # Compact the `order` values of the remaining tasks after the delete.
    await db.refresh(parent_workflow)
    parent_workflow.task_list.reorder()
    await db.commit()

    return Response(status_code=status.HTTP_204_NO_CONTENT)