fractal-server 1.4.10__py3-none-any.whl → 2.0.0a0__py3-none-any.whl
This diff compares the contents of two publicly released versions of this package, as they appear in their respective public registries. It is provided for informational purposes only.
- fractal_server/__init__.py +1 -1
- fractal_server/app/models/__init__.py +4 -7
- fractal_server/app/models/linkuserproject.py +9 -0
- fractal_server/app/models/security.py +6 -0
- fractal_server/app/models/state.py +1 -1
- fractal_server/app/models/v1/__init__.py +10 -0
- fractal_server/app/models/{dataset.py → v1/dataset.py} +5 -5
- fractal_server/app/models/{job.py → v1/job.py} +5 -5
- fractal_server/app/models/{project.py → v1/project.py} +5 -5
- fractal_server/app/models/{task.py → v1/task.py} +7 -2
- fractal_server/app/models/{workflow.py → v1/workflow.py} +5 -5
- fractal_server/app/models/v2/__init__.py +20 -0
- fractal_server/app/models/v2/dataset.py +55 -0
- fractal_server/app/models/v2/job.py +51 -0
- fractal_server/app/models/v2/project.py +31 -0
- fractal_server/app/models/v2/task.py +93 -0
- fractal_server/app/models/v2/workflow.py +43 -0
- fractal_server/app/models/v2/workflowtask.py +90 -0
- fractal_server/app/routes/{admin.py → admin/v1.py} +42 -42
- fractal_server/app/routes/admin/v2.py +275 -0
- fractal_server/app/routes/api/v1/__init__.py +7 -7
- fractal_server/app/routes/api/v1/_aux_functions.py +2 -2
- fractal_server/app/routes/api/v1/dataset.py +37 -37
- fractal_server/app/routes/api/v1/job.py +12 -12
- fractal_server/app/routes/api/v1/project.py +23 -21
- fractal_server/app/routes/api/v1/task.py +24 -14
- fractal_server/app/routes/api/v1/task_collection.py +16 -14
- fractal_server/app/routes/api/v1/workflow.py +24 -24
- fractal_server/app/routes/api/v1/workflowtask.py +10 -10
- fractal_server/app/routes/api/v2/__init__.py +28 -0
- fractal_server/app/routes/api/v2/_aux_functions.py +497 -0
- fractal_server/app/routes/api/v2/apply.py +220 -0
- fractal_server/app/routes/api/v2/dataset.py +310 -0
- fractal_server/app/routes/api/v2/images.py +212 -0
- fractal_server/app/routes/api/v2/job.py +200 -0
- fractal_server/app/routes/api/v2/project.py +205 -0
- fractal_server/app/routes/api/v2/task.py +222 -0
- fractal_server/app/routes/api/v2/task_collection.py +229 -0
- fractal_server/app/routes/api/v2/workflow.py +398 -0
- fractal_server/app/routes/api/v2/workflowtask.py +269 -0
- fractal_server/app/routes/aux/_job.py +1 -1
- fractal_server/app/runner/async_wrap.py +27 -0
- fractal_server/app/runner/exceptions.py +129 -0
- fractal_server/app/runner/executors/local/__init__.py +3 -0
- fractal_server/app/runner/{_local → executors/local}/executor.py +2 -2
- fractal_server/app/runner/executors/slurm/__init__.py +3 -0
- fractal_server/app/runner/{_slurm → executors/slurm}/_batching.py +1 -1
- fractal_server/app/runner/{_slurm → executors/slurm}/_check_jobs_status.py +1 -1
- fractal_server/app/runner/{_slurm → executors/slurm}/_executor_wait_thread.py +1 -1
- fractal_server/app/runner/{_slurm → executors/slurm}/_slurm_config.py +3 -152
- fractal_server/app/runner/{_slurm → executors/slurm}/_subprocess_run_as_user.py +1 -1
- fractal_server/app/runner/{_slurm → executors/slurm}/executor.py +9 -9
- fractal_server/app/runner/filenames.py +6 -0
- fractal_server/app/runner/set_start_and_last_task_index.py +39 -0
- fractal_server/app/runner/task_files.py +105 -0
- fractal_server/app/runner/{__init__.py → v1/__init__.py} +24 -22
- fractal_server/app/runner/{_common.py → v1/_common.py} +13 -120
- fractal_server/app/runner/{_local → v1/_local}/__init__.py +6 -6
- fractal_server/app/runner/{_local → v1/_local}/_local_config.py +6 -7
- fractal_server/app/runner/{_local → v1/_local}/_submit_setup.py +1 -5
- fractal_server/app/runner/v1/_slurm/__init__.py +310 -0
- fractal_server/app/runner/{_slurm → v1/_slurm}/_submit_setup.py +3 -9
- fractal_server/app/runner/v1/_slurm/get_slurm_config.py +163 -0
- fractal_server/app/runner/v1/common.py +117 -0
- fractal_server/app/runner/{handle_failed_job.py → v1/handle_failed_job.py} +8 -8
- fractal_server/app/runner/v2/__init__.py +337 -0
- fractal_server/app/runner/v2/_local/__init__.py +169 -0
- fractal_server/app/runner/v2/_local/_local_config.py +118 -0
- fractal_server/app/runner/v2/_local/_submit_setup.py +52 -0
- fractal_server/app/runner/v2/_slurm/__init__.py +157 -0
- fractal_server/app/runner/v2/_slurm/_submit_setup.py +83 -0
- fractal_server/app/runner/v2/_slurm/get_slurm_config.py +179 -0
- fractal_server/app/runner/v2/components.py +5 -0
- fractal_server/app/runner/v2/deduplicate_list.py +24 -0
- fractal_server/app/runner/v2/handle_failed_job.py +156 -0
- fractal_server/app/runner/v2/merge_outputs.py +41 -0
- fractal_server/app/runner/v2/runner.py +264 -0
- fractal_server/app/runner/v2/runner_functions.py +339 -0
- fractal_server/app/runner/v2/runner_functions_low_level.py +134 -0
- fractal_server/app/runner/v2/task_interface.py +43 -0
- fractal_server/app/runner/v2/v1_compat.py +21 -0
- fractal_server/app/schemas/__init__.py +4 -42
- fractal_server/app/schemas/v1/__init__.py +42 -0
- fractal_server/app/schemas/{applyworkflow.py → v1/applyworkflow.py} +18 -18
- fractal_server/app/schemas/{dataset.py → v1/dataset.py} +30 -30
- fractal_server/app/schemas/{dumps.py → v1/dumps.py} +8 -8
- fractal_server/app/schemas/{manifest.py → v1/manifest.py} +5 -5
- fractal_server/app/schemas/{project.py → v1/project.py} +9 -9
- fractal_server/app/schemas/{task.py → v1/task.py} +12 -12
- fractal_server/app/schemas/{task_collection.py → v1/task_collection.py} +7 -7
- fractal_server/app/schemas/{workflow.py → v1/workflow.py} +38 -38
- fractal_server/app/schemas/v2/__init__.py +34 -0
- fractal_server/app/schemas/v2/dataset.py +88 -0
- fractal_server/app/schemas/v2/dumps.py +87 -0
- fractal_server/app/schemas/v2/job.py +113 -0
- fractal_server/app/schemas/v2/manifest.py +109 -0
- fractal_server/app/schemas/v2/project.py +36 -0
- fractal_server/app/schemas/v2/task.py +121 -0
- fractal_server/app/schemas/v2/task_collection.py +105 -0
- fractal_server/app/schemas/v2/workflow.py +78 -0
- fractal_server/app/schemas/v2/workflowtask.py +118 -0
- fractal_server/config.py +5 -4
- fractal_server/images/__init__.py +50 -0
- fractal_server/images/tools.py +86 -0
- fractal_server/main.py +11 -3
- fractal_server/migrations/versions/4b35c5cefbe3_tmp_is_v2_compatible.py +39 -0
- fractal_server/migrations/versions/56af171b0159_v2.py +217 -0
- fractal_server/migrations/versions/876f28db9d4e_tmp_split_task_and_wftask_meta.py +68 -0
- fractal_server/migrations/versions/974c802f0dd0_tmp_workflowtaskv2_type_in_db.py +37 -0
- fractal_server/migrations/versions/9cd305cd6023_tmp_workflowtaskv2.py +40 -0
- fractal_server/migrations/versions/a6231ed6273c_tmp_args_schemas_in_taskv2.py +42 -0
- fractal_server/migrations/versions/b9e9eed9d442_tmp_taskv2_type.py +37 -0
- fractal_server/migrations/versions/e3e639454d4b_tmp_make_task_meta_non_optional.py +50 -0
- fractal_server/tasks/__init__.py +0 -5
- fractal_server/tasks/endpoint_operations.py +13 -19
- fractal_server/tasks/utils.py +35 -0
- fractal_server/tasks/{_TaskCollectPip.py → v1/_TaskCollectPip.py} +3 -3
- fractal_server/tasks/{background_operations.py → v1/background_operations.py} +18 -50
- fractal_server/tasks/v1/get_collection_data.py +14 -0
- fractal_server/tasks/v2/_TaskCollectPip.py +103 -0
- fractal_server/tasks/v2/background_operations.py +382 -0
- fractal_server/tasks/v2/get_collection_data.py +14 -0
- {fractal_server-1.4.10.dist-info → fractal_server-2.0.0a0.dist-info}/METADATA +1 -1
- fractal_server-2.0.0a0.dist-info/RECORD +166 -0
- fractal_server/app/runner/_slurm/.gitignore +0 -2
- fractal_server/app/runner/_slurm/__init__.py +0 -150
- fractal_server/app/runner/common.py +0 -311
- fractal_server-1.4.10.dist-info/RECORD +0 -98
- /fractal_server/app/runner/{_slurm → executors/slurm}/remote.py +0 -0
- {fractal_server-1.4.10.dist-info → fractal_server-2.0.0a0.dist-info}/LICENSE +0 -0
- {fractal_server-1.4.10.dist-info → fractal_server-2.0.0a0.dist-info}/WHEEL +0 -0
- {fractal_server-1.4.10.dist-info → fractal_server-2.0.0a0.dist-info}/entry_points.txt +0 -0
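
The dominant change in this release is the split of models, schemas, routes, runners, and task modules into parallel `v1` and `v2` packages, with new `v2` counterparts added alongside the relocated `v1` code. As a minimal sketch of what this implies for downstream imports (the two new import lines below are taken verbatim from the `workflow.py` diff further down; the old path is inferred from the rename entries above):

```python
# Import-path migration implied by the file moves above.
# 1.4.10 kept models directly under fractal_server/app/models/
# (e.g. models/task.py); 2.0.0a0 moves them under models/v1/ and
# introduces new V2 models under models/v2/.

# 1.4.10 (old layout, now relocated):
#   from fractal_server.app.models.task import Task

# 2.0.0a0 (both lines appear in the new v2 route modules):
from fractal_server.app.models.v1 import Task as TaskV1
from fractal_server.app.models.v2 import TaskV2
```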
```diff
--- /dev/null
+++ b/fractal_server/app/routes/api/v2/task_collection.py
@@ -0,0 +1,229 @@
+from pathlib import Path
+from shutil import copy as shell_copy
+from tempfile import TemporaryDirectory
+
+from fastapi import APIRouter
+from fastapi import BackgroundTasks
+from fastapi import Depends
+from fastapi import HTTPException
+from fastapi import Response
+from fastapi import status
+from pydantic.error_wrappers import ValidationError
+from sqlmodel import select
+
+from .....config import get_settings
+from .....logger import close_logger
+from .....logger import set_logger
+from .....syringe import Inject
+from ....db import AsyncSession
+from ....db import get_async_db
+from ....models import State
+from ....models.v2 import TaskV2
+from ....schemas import StateRead
+from ....schemas.v2 import TaskCollectPipV2
+from ....schemas.v2 import TaskCollectStatusV2
+from ....security import current_active_user
+from ....security import current_active_verified_user
+from ....security import User
+from fractal_server.tasks.endpoint_operations import create_package_dir_pip
+from fractal_server.tasks.endpoint_operations import download_package
+from fractal_server.tasks.endpoint_operations import inspect_package
+from fractal_server.tasks.utils import get_collection_log
+from fractal_server.tasks.utils import slugify_task_name
+from fractal_server.tasks.v2._TaskCollectPip import _TaskCollectPip
+from fractal_server.tasks.v2.background_operations import (
+    background_collect_pip,
+)
+from fractal_server.tasks.v2.get_collection_data import get_collection_data
+
+router = APIRouter()
+
+logger = set_logger(__name__)
+
+
+@router.post(
+    "/collect/pip/",
+    response_model=StateRead,
+    responses={
+        201: dict(
+            description=(
+                "Task collection successfully started in the background"
+            )
+        ),
+        200: dict(
+            description=(
+                "Package already collected. Returning info on already "
+                "available tasks"
+            )
+        ),
+    },
+)
+async def collect_tasks_pip(
+    task_collect: TaskCollectPipV2,
+    background_tasks: BackgroundTasks,
+    response: Response,
+    user: User = Depends(current_active_verified_user),
+    db: AsyncSession = Depends(get_async_db),
+) -> StateRead:  # State[TaskCollectStatus]
+    """
+    Task collection endpoint
+
+    Trigger the creation of a dedicated virtual environment, the installation
+    of a package and the collection of tasks as advertised in the manifest.
+    """
+
+    logger = set_logger(logger_name="collect_tasks_pip")
+
+    # Validate payload as _TaskCollectPip, which has more strict checks than
+    # TaskCollectPip
+    try:
+        task_pkg = _TaskCollectPip(**task_collect.dict(exclude_unset=True))
+    except ValidationError as e:
+        raise HTTPException(
+            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+            detail=f"Invalid task-collection object. Original error: {e}",
+        )
+
+    with TemporaryDirectory() as tmpdir:
+        try:
+            # Copy or download the package wheel file to tmpdir
+            if task_pkg.is_local_package:
+                shell_copy(task_pkg.package_path.as_posix(), tmpdir)
+                pkg_path = Path(tmpdir) / task_pkg.package_path.name
+            else:
+                pkg_path = await download_package(
+                    task_pkg=task_pkg, dest=tmpdir
+                )
+            # Read package info from wheel file, and override the ones coming
+            # from the request body
+            pkg_info = inspect_package(pkg_path)
+            task_pkg.package_name = pkg_info["pkg_name"]
+            task_pkg.package_version = pkg_info["pkg_version"]
+            task_pkg.package_manifest = pkg_info["pkg_manifest"]
+            task_pkg.check()
+        except Exception as e:
+            raise HTTPException(
+                status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+                detail=f"Invalid package or manifest. Original error: {e}",
+            )
+
+    try:
+        venv_path = create_package_dir_pip(task_pkg=task_pkg)
+    except FileExistsError:
+        venv_path = create_package_dir_pip(task_pkg=task_pkg, create=False)
+        try:
+            task_collect_status = get_collection_data(venv_path)
+            for task in task_collect_status.task_list:
+                db_task = await db.get(TaskV2, task.id)
+                if (
+                    (not db_task)
+                    or db_task.source != task.source
+                    or db_task.name != task.name
+                ):
+                    await db.close()
+                    raise HTTPException(
+                        status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+                        detail=(
+                            "Cannot collect package. Folder already exists, "
+                            f"but task {task.id} does not exists or it does "
+                            f"not have the expected source ({task.source}) or "
+                            f"name ({task.name})."
+                        ),
+                    )
+        except FileNotFoundError as e:
+            await db.close()
+            raise HTTPException(
+                status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+                detail=(
+                    "Cannot collect package. Possible reason: another "
+                    "collection of the same package is in progress. "
+                    f"Original error: {e}"
+                ),
+            )
+        task_collect_status.info = "Already installed"
+        state = State(data=task_collect_status.sanitised_dict())
+        response.status_code == status.HTTP_200_OK
+        await db.close()
+        return state
+    settings = Inject(get_settings)
+
+    # Check that tasks are not already in the DB
+    for new_task in task_pkg.package_manifest.task_list:
+        new_task_name_slug = slugify_task_name(new_task.name)
+        new_task_source = f"{task_pkg.package_source}:{new_task_name_slug}"
+        stm = select(TaskV2).where(TaskV2.source == new_task_source)
+        res = await db.execute(stm)
+        if res.scalars().all():
+            raise HTTPException(
+                status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+                detail=(
+                    "Cannot collect package. Task with source "
+                    f'"{new_task_source}" already exists in the database.'
+                ),
+            )
+
+    # All checks are OK, proceed with task collection
+    full_venv_path = venv_path.relative_to(settings.FRACTAL_TASKS_DIR)
+    collection_status = TaskCollectStatusV2(
+        status="pending", venv_path=full_venv_path, package=task_pkg.package
+    )
+
+    # Create State object (after casting venv_path to string)
+    collection_status_dict = collection_status.dict()
+    collection_status_dict["venv_path"] = str(collection_status.venv_path)
+    state = State(data=collection_status_dict)
+    db.add(state)
+    await db.commit()
+    await db.refresh(state)
+
+    background_tasks.add_task(
+        background_collect_pip,
+        state_id=state.id,
+        venv_path=venv_path,
+        task_pkg=task_pkg,
+    )
+    logger.debug(
+        "Task-collection endpoint: start background collection "
+        "and return state"
+    )
+    close_logger(logger)
+    info = (
+        "Collecting tasks in the background. "
+        f"GET /task/collect/{state.id} to query collection status"
+    )
+    state.data["info"] = info
+    response.status_code = status.HTTP_201_CREATED
+    await db.close()
+
+    return state
+
+
+@router.get("/collect/{state_id}/", response_model=StateRead)
+async def check_collection_status(
+    state_id: int,
+    user: User = Depends(current_active_user),
+    verbose: bool = False,
+    db: AsyncSession = Depends(get_async_db),
+) -> StateRead:  # State[TaskCollectStatus]
+    """
+    Check status of background task collection
+    """
+    logger = set_logger(logger_name="check_collection_status")
+    logger.debug(f"Querying state for state.id={state_id}")
+    state = await db.get(State, state_id)
+    if not state:
+        await db.close()
+        raise HTTPException(
+            status_code=status.HTTP_404_NOT_FOUND,
+            detail=f"No task collection info with id={state_id}",
+        )
+    data = TaskCollectStatusV2(**state.data)
+
+    # In some cases (i.e. a successful or ongoing task collection), data.log is
+    # not set; if so, we collect the current logs
+    if verbose and not data.log:
+        data.log = get_collection_log(data.venv_path)
+        state.data = data.sanitised_dict()
+    close_logger(logger)
+    await db.close()
+    return state
```
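
For context, a hedged sketch of how a client would drive these two endpoints. The `/api/v2/task` prefix and the request payload are assumptions (they depend on how `api/v2/__init__.py` mounts this router and on the `TaskCollectPipV2` schema, neither of which is shown in this diff); the server URL and token are placeholders.

```python
# Hypothetical client for the task-collection endpoints above; the URL
# prefix and payload fields are assumptions, not taken from this diff.
import httpx

BASE = "http://localhost:8000/api/v2/task"  # assumed mount point
HEADERS = {"Authorization": "Bearer <token>"}

# POST /collect/pip/ starts a background collection (201) or reports an
# already-collected package (200); the response body is a State object.
state = httpx.post(
    f"{BASE}/collect/pip/",
    json={"package": "fractal-tasks-core"},  # assumed TaskCollectPipV2 body
    headers=HEADERS,
).json()

# GET /collect/{state_id}/ polls the status; verbose=True also fills in
# the collection log when it is not already set.
status = httpx.get(
    f"{BASE}/collect/{state['id']}/",
    params={"verbose": True},
    headers=HEADERS,
).json()
print(status["data"]["status"])
```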
```diff
--- /dev/null
+++ b/fractal_server/app/routes/api/v2/workflow.py
@@ -0,0 +1,398 @@
+from typing import Optional
+
+from fastapi import APIRouter
+from fastapi import Depends
+from fastapi import HTTPException
+from fastapi import Response
+from fastapi import status
+from sqlmodel import select
+
+from .....logger import close_logger
+from .....logger import set_logger
+from ....db import AsyncSession
+from ....db import get_async_db
+from ....models.v1 import Task as TaskV1
+from ....models.v2 import JobV2
+from ....models.v2 import ProjectV2
+from ....models.v2 import TaskV2
+from ....models.v2 import WorkflowV2
+from ....schemas.v1 import WorkflowTaskCreateV1
+from ....schemas.v2 import WorkflowCreateV2
+from ....schemas.v2 import WorkflowExportV2
+from ....schemas.v2 import WorkflowImportV2
+from ....schemas.v2 import WorkflowReadV2
+from ....schemas.v2 import WorkflowTaskCreateV2
+from ....schemas.v2 import WorkflowUpdateV2
+from ....security import current_active_user
+from ....security import User
+from ._aux_functions import _check_workflow_exists
+from ._aux_functions import _get_project_check_owner
+from ._aux_functions import _get_submitted_jobs_statement
+from ._aux_functions import _get_workflow_check_owner
+from ._aux_functions import _workflow_insert_task
+
+
+router = APIRouter()
+
+
+@router.get(
+    "/project/{project_id}/workflow/",
+    response_model=list[WorkflowReadV2],
+)
+async def get_workflow_list(
+    project_id: int,
+    user: User = Depends(current_active_user),
+    db: AsyncSession = Depends(get_async_db),
+) -> Optional[list[WorkflowReadV2]]:
+    """
+    Get workflow list for given project
+    """
+    # Access control
+    project = await _get_project_check_owner(
+        project_id=project_id, user_id=user.id, db=db
+    )
+    # Find workflows of the current project. Note: this select/where approach
+    # has much better scaling than refreshing all elements of
+    # `project.workflow_list` - ref
+    # https://github.com/fractal-analytics-platform/fractal-server/pull/1082#issuecomment-1856676097.
+    stm = select(WorkflowV2).where(WorkflowV2.project_id == project.id)
+    workflow_list = (await db.execute(stm)).scalars().all()
+    return workflow_list
+
+
+@router.post(
+    "/project/{project_id}/workflow/",
+    response_model=WorkflowReadV2,
+    status_code=status.HTTP_201_CREATED,
+)
+async def create_workflow(
+    project_id: int,
+    workflow: WorkflowCreateV2,
+    user: User = Depends(current_active_user),
+    db: AsyncSession = Depends(get_async_db),
+) -> Optional[WorkflowReadV2]:
+    """
+    Create a workflow, associate to a project
+    """
+    await _get_project_check_owner(
+        project_id=project_id, user_id=user.id, db=db
+    )
+    await _check_workflow_exists(
+        name=workflow.name, project_id=project_id, db=db
+    )
+
+    db_workflow = WorkflowV2(project_id=project_id, **workflow.dict())
+    db.add(db_workflow)
+    await db.commit()
+    await db.refresh(db_workflow)
+    await db.close()
+    return db_workflow
+
+
+@router.get(
+    "/project/{project_id}/workflow/{workflow_id}/",
+    response_model=WorkflowReadV2,
+)
+async def read_workflow(
+    project_id: int,
+    workflow_id: int,
+    user: User = Depends(current_active_user),
+    db: AsyncSession = Depends(get_async_db),
+) -> Optional[WorkflowReadV2]:
+    """
+    Get info on an existing workflow
+    """
+
+    workflow = await _get_workflow_check_owner(
+        project_id=project_id,
+        workflow_id=workflow_id,
+        user_id=user.id,
+        db=db,
+    )
+
+    return workflow
+
+
+@router.patch(
+    "/project/{project_id}/workflow/{workflow_id}/",
+    response_model=WorkflowReadV2,
+)
+async def update_workflow(
+    project_id: int,
+    workflow_id: int,
+    patch: WorkflowUpdateV2,
+    user: User = Depends(current_active_user),
+    db: AsyncSession = Depends(get_async_db),
+) -> Optional[WorkflowReadV2]:
+    """
+    Edit a workflow
+    """
+    workflow = await _get_workflow_check_owner(
+        project_id=project_id,
+        workflow_id=workflow_id,
+        user_id=user.id,
+        db=db,
+    )
+
+    if patch.name:
+        await _check_workflow_exists(
+            name=patch.name, project_id=project_id, db=db
+        )
+
+    for key, value in patch.dict(exclude_unset=True).items():
+        if key == "reordered_workflowtask_ids":
+            current_workflowtask_ids = [
+                wftask.id for wftask in workflow.task_list
+            ]
+            num_tasks = len(workflow.task_list)
+            if len(value) != num_tasks or set(value) != set(
+                current_workflowtask_ids
+            ):
+                raise HTTPException(
+                    status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+                    detail=(
+                        "`reordered_workflowtask_ids` must be a permutation of"
+                        f" {current_workflowtask_ids} (given {value})"
+                    ),
+                )
+            for ind_wftask in range(num_tasks):
+                new_order = value.index(workflow.task_list[ind_wftask].id)
+                workflow.task_list[ind_wftask].order = new_order
+        else:
+            setattr(workflow, key, value)
+
+    await db.commit()
+    await db.refresh(workflow)
+    await db.close()
+
+    return workflow
+
+
+@router.delete(
+    "/project/{project_id}/workflow/{workflow_id}/",
+    status_code=status.HTTP_204_NO_CONTENT,
+)
+async def delete_workflow(
+    project_id: int,
+    workflow_id: int,
+    user: User = Depends(current_active_user),
+    db: AsyncSession = Depends(get_async_db),
+) -> Response:
+    """
+    Delete a workflow
+    """
+
+    workflow = await _get_workflow_check_owner(
+        project_id=project_id,
+        workflow_id=workflow_id,
+        user_id=user.id,
+        db=db,
+    )
+
+    # Fail if there exist jobs that are submitted and in relation with the
+    # current workflow.
+    stm = _get_submitted_jobs_statement().where(
+        JobV2.workflow_id == workflow.id
+    )
+    res = await db.execute(stm)
+    jobs = res.scalars().all()
+    if jobs:
+        string_ids = str([job.id for job in jobs])[1:-1]
+        raise HTTPException(
+            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+            detail=(
+                f"Cannot delete workflow {workflow.id} because it "
+                f"is linked to active job(s) {string_ids}."
+            ),
+        )
+
+    # Cascade operations: set foreign-keys to null for jobs which are in
+    # relationship with the current workflow
+    stm = select(JobV2).where(JobV2.workflow_id == workflow_id)
+    res = await db.execute(stm)
+    jobs = res.scalars().all()
+    for job in jobs:
+        job.workflow_id = None
+        await db.merge(job)
+    await db.commit()
+
+    # Delete workflow
+    await db.delete(workflow)
+    await db.commit()
+
+    return Response(status_code=status.HTTP_204_NO_CONTENT)
+
+
+@router.get(
+    "/project/{project_id}/workflow/{workflow_id}/export/",
+    response_model=WorkflowExportV2,
+)
+async def export_worfklow(
+    project_id: int,
+    workflow_id: int,
+    user: User = Depends(current_active_user),
+    db: AsyncSession = Depends(get_async_db),
+) -> Optional[WorkflowExportV2]:
+    """
+    Export an existing workflow, after stripping all IDs
+    """
+    workflow = await _get_workflow_check_owner(
+        project_id=project_id,
+        workflow_id=workflow_id,
+        user_id=user.id,
+        db=db,
+    )
+    # Emit a warning when exporting a workflow with custom tasks
+    logger = set_logger(None)
+    for wftask in workflow.task_list:
+        if wftask.is_legacy_task:
+            if wftask.task_legacy.owner is not None:
+                logger.warning(
+                    f"Custom tasks (like the one with "
+                    f"id={wftask.task_legacy_id} and "
+                    f"source='{wftask.task_legacy.source}') are not meant to "
+                    "be portable; re-importing this workflow may not work as "
+                    "expected."
+                )
+        else:
+            if wftask.task.owner is not None:
+                logger.warning(
+                    f"Custom tasks (like the one with id={wftask.task_id} and "
+                    f'source="{wftask.task.source}") are not meant to be '
+                    "portable; re-importing this workflow may not work as "
+                    "expected."
+                )
+    close_logger(logger)
+
+    await db.close()
+    return workflow
+
+
+@router.post(
+    "/project/{project_id}/workflow/import/",
+    response_model=WorkflowReadV2,
+    status_code=status.HTTP_201_CREATED,
+)
+async def import_workflow(
+    project_id: int,
+    workflow: WorkflowImportV2,
+    user: User = Depends(current_active_user),
+    db: AsyncSession = Depends(get_async_db),
+) -> Optional[WorkflowReadV2]:
+    """
+    Import an existing workflow into a project
+
+    Also create all required objects (i.e. Workflow and WorkflowTask's) along
+    the way.
+    """
+
+    # Preliminary checks
+    await _get_project_check_owner(
+        project_id=project_id,
+        user_id=user.id,
+        db=db,
+    )
+
+    await _check_workflow_exists(
+        name=workflow.name, project_id=project_id, db=db
+    )
+
+    # Check that all required tasks are available
+    source_to_id = {}
+    source_to_id_legacy = {}
+
+    for wf_task in workflow.task_list:
+
+        if wf_task.is_legacy_task is True:
+            source = wf_task.task_legacy.source
+            if source not in source_to_id_legacy.keys():
+                stm = select(TaskV1).where(TaskV1.source == source)
+                tasks_by_source = (await db.execute(stm)).scalars().all()
+                if len(tasks_by_source) != 1:
+                    raise HTTPException(
+                        status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+                        detail=(
+                            f"Found {len(tasks_by_source)} tasks legacy "
+                            f"with {source=}."
+                        ),
+                    )
+                source_to_id_legacy[source] = tasks_by_source[0].id
+        else:
+            source = wf_task.task.source
+            if source not in source_to_id.keys():
+                stm = select(TaskV2).where(TaskV2.source == source)
+                tasks_by_source = (await db.execute(stm)).scalars().all()
+                if len(tasks_by_source) != 1:
+                    raise HTTPException(
+                        status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+                        detail=(
+                            f"Found {len(tasks_by_source)} tasks "
+                            f"with {source=}."
+                        ),
+                    )
+                source_to_id[source] = tasks_by_source[0].id
+
+    # Create new Workflow (with empty task_list)
+    db_workflow = WorkflowV2(
+        project_id=project_id,
+        **workflow.dict(exclude_none=True, exclude={"task_list"}),
+    )
+    db.add(db_workflow)
+    await db.commit()
+    await db.refresh(db_workflow)
+
+    # Insert tasks
+    async with db:  # FIXME why?
+
+        for wf_task in workflow.task_list:
+            if wf_task.is_legacy_task is True:
+                # Identify task_id
+                source = wf_task.task_legacy.source
+                task_id = source_to_id_legacy[source]
+                # Prepare new_wf_task
+                new_wf_task = WorkflowTaskCreateV1(
+                    **wf_task.dict(exclude_none=True)
+                )
+                # Insert task
+                await _workflow_insert_task(
+                    **new_wf_task.dict(),
+                    is_legacy_task=True,
+                    workflow_id=db_workflow.id,
+                    task_id=task_id,
+                    db=db,
+                )
+            else:
+                # Identify task_id
+                source = wf_task.task.source
+                task_id = source_to_id[source]
+                # Prepare new_wf_task
+                new_wf_task = WorkflowTaskCreateV2(
+                    **wf_task.dict(exclude_none=True)
+                )
+                # Insert task
+                await _workflow_insert_task(
+                    **new_wf_task.dict(),
+                    workflow_id=db_workflow.id,
+                    task_id=task_id,
+                    db=db,
+                )
+
+    await db.close()
+    return db_workflow
+
+
+@router.get("/workflow/", response_model=list[WorkflowReadV2])
+async def get_user_workflows(
+    user: User = Depends(current_active_user),
+    db: AsyncSession = Depends(get_async_db),
+) -> list[WorkflowReadV2]:
+    """
+    Returns all the workflows of the current user
+    """
+    stm = select(WorkflowV2)
+    stm = stm.join(ProjectV2).where(
+        ProjectV2.user_list.any(User.id == user.id)
+    )
+    res = await db.execute(stm)
+    workflow_list = res.scalars().all()
+    return workflow_list
```
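
And a similar hedged sketch for the workflow router above. The `/api/v2` mount point is an assumption, the IDs and token are placeholders, and the request bodies are inferred from the `WorkflowCreateV2`/`WorkflowUpdateV2`/`WorkflowImportV2` parameters visible in the handlers rather than from their schema definitions.

```python
# Hypothetical walk-through of the workflow endpoints above; the /api/v2
# prefix and payload fields are assumptions, not taken from this diff.
import httpx

BASE = "http://localhost:8000/api/v2"  # assumed mount point
HEADERS = {"Authorization": "Bearer <token>"}
project_id = 1  # placeholder

# Create a workflow inside a project (201; 422 if the name already exists)
wf = httpx.post(
    f"{BASE}/project/{project_id}/workflow/",
    json={"name": "my-workflow"},
    headers=HEADERS,
).json()

# PATCH accepts `reordered_workflowtask_ids`, which must be a permutation
# of the current workflowtask IDs (for this empty workflow, the empty list)
httpx.patch(
    f"{BASE}/project/{project_id}/workflow/{wf['id']}/",
    json={"reordered_workflowtask_ids": []},
    headers=HEADERS,
)

# Export strips all IDs so the workflow can be re-imported; re-import it
# under a new name (the same name in the same project would fail the
# name-uniqueness check)
exported = httpx.get(
    f"{BASE}/project/{project_id}/workflow/{wf['id']}/export/",
    headers=HEADERS,
).json()
exported["name"] = "my-workflow-copy"
httpx.post(
    f"{BASE}/project/{project_id}/workflow/import/",
    json=exported,
    headers=HEADERS,
)

# Delete the original (204); this would fail with 422 if submitted jobs
# still referenced it
httpx.delete(
    f"{BASE}/project/{project_id}/workflow/{wf['id']}/", headers=HEADERS
)
```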