fractal-server 1.4.6__py3-none-any.whl → 2.0.0__py3-none-any.whl
This diff compares the contents of two publicly available package versions, as released to one of the supported registries. It is provided for informational purposes only and reflects the packages exactly as they appear in their respective public registries.
- fractal_server/__init__.py +1 -1
- fractal_server/app/db/__init__.py +0 -1
- fractal_server/app/models/__init__.py +6 -8
- fractal_server/app/models/linkuserproject.py +9 -0
- fractal_server/app/models/security.py +6 -0
- fractal_server/app/models/v1/__init__.py +12 -0
- fractal_server/app/models/{dataset.py → v1/dataset.py} +5 -5
- fractal_server/app/models/{job.py → v1/job.py} +5 -5
- fractal_server/app/models/{project.py → v1/project.py} +5 -5
- fractal_server/app/models/{state.py → v1/state.py} +2 -2
- fractal_server/app/models/{task.py → v1/task.py} +7 -2
- fractal_server/app/models/{workflow.py → v1/workflow.py} +5 -5
- fractal_server/app/models/v2/__init__.py +22 -0
- fractal_server/app/models/v2/collection_state.py +21 -0
- fractal_server/app/models/v2/dataset.py +54 -0
- fractal_server/app/models/v2/job.py +51 -0
- fractal_server/app/models/v2/project.py +30 -0
- fractal_server/app/models/v2/task.py +93 -0
- fractal_server/app/models/v2/workflow.py +35 -0
- fractal_server/app/models/v2/workflowtask.py +49 -0
- fractal_server/app/routes/admin/__init__.py +0 -0
- fractal_server/app/routes/{admin.py → admin/v1.py} +42 -42
- fractal_server/app/routes/admin/v2.py +309 -0
- fractal_server/app/routes/api/v1/__init__.py +7 -7
- fractal_server/app/routes/api/v1/_aux_functions.py +8 -8
- fractal_server/app/routes/api/v1/dataset.py +48 -41
- fractal_server/app/routes/api/v1/job.py +14 -14
- fractal_server/app/routes/api/v1/project.py +30 -27
- fractal_server/app/routes/api/v1/task.py +26 -16
- fractal_server/app/routes/api/v1/task_collection.py +28 -16
- fractal_server/app/routes/api/v1/workflow.py +28 -28
- fractal_server/app/routes/api/v1/workflowtask.py +11 -11
- fractal_server/app/routes/api/v2/__init__.py +34 -0
- fractal_server/app/routes/api/v2/_aux_functions.py +502 -0
- fractal_server/app/routes/api/v2/dataset.py +293 -0
- fractal_server/app/routes/api/v2/images.py +279 -0
- fractal_server/app/routes/api/v2/job.py +200 -0
- fractal_server/app/routes/api/v2/project.py +186 -0
- fractal_server/app/routes/api/v2/status.py +150 -0
- fractal_server/app/routes/api/v2/submit.py +210 -0
- fractal_server/app/routes/api/v2/task.py +222 -0
- fractal_server/app/routes/api/v2/task_collection.py +239 -0
- fractal_server/app/routes/api/v2/task_legacy.py +59 -0
- fractal_server/app/routes/api/v2/workflow.py +380 -0
- fractal_server/app/routes/api/v2/workflowtask.py +265 -0
- fractal_server/app/routes/aux/_job.py +2 -2
- fractal_server/app/runner/__init__.py +0 -379
- fractal_server/app/runner/async_wrap.py +27 -0
- fractal_server/app/runner/components.py +5 -0
- fractal_server/app/runner/exceptions.py +129 -0
- fractal_server/app/runner/executors/__init__.py +0 -0
- fractal_server/app/runner/executors/slurm/__init__.py +3 -0
- fractal_server/app/runner/{_slurm → executors/slurm}/_batching.py +1 -1
- fractal_server/app/runner/executors/slurm/_check_jobs_status.py +72 -0
- fractal_server/app/runner/{_slurm → executors/slurm}/_executor_wait_thread.py +3 -4
- fractal_server/app/runner/{_slurm → executors/slurm}/_slurm_config.py +3 -152
- fractal_server/app/runner/{_slurm → executors/slurm}/_subprocess_run_as_user.py +42 -1
- fractal_server/app/runner/{_slurm → executors/slurm}/executor.py +46 -27
- fractal_server/app/runner/filenames.py +6 -0
- fractal_server/app/runner/set_start_and_last_task_index.py +39 -0
- fractal_server/app/runner/task_files.py +103 -0
- fractal_server/app/runner/v1/__init__.py +366 -0
- fractal_server/app/runner/{_common.py → v1/_common.py} +56 -111
- fractal_server/app/runner/{_local → v1/_local}/__init__.py +5 -4
- fractal_server/app/runner/{_local → v1/_local}/_local_config.py +6 -7
- fractal_server/app/runner/{_local → v1/_local}/_submit_setup.py +1 -5
- fractal_server/app/runner/v1/_slurm/__init__.py +312 -0
- fractal_server/app/runner/{_slurm → v1/_slurm}/_submit_setup.py +5 -11
- fractal_server/app/runner/v1/_slurm/get_slurm_config.py +163 -0
- fractal_server/app/runner/v1/common.py +117 -0
- fractal_server/app/runner/{handle_failed_job.py → v1/handle_failed_job.py} +8 -8
- fractal_server/app/runner/v2/__init__.py +336 -0
- fractal_server/app/runner/v2/_local/__init__.py +162 -0
- fractal_server/app/runner/v2/_local/_local_config.py +118 -0
- fractal_server/app/runner/v2/_local/_submit_setup.py +52 -0
- fractal_server/app/runner/v2/_local/executor.py +100 -0
- fractal_server/app/runner/{_slurm → v2/_slurm}/__init__.py +38 -47
- fractal_server/app/runner/v2/_slurm/_submit_setup.py +82 -0
- fractal_server/app/runner/v2/_slurm/get_slurm_config.py +182 -0
- fractal_server/app/runner/v2/deduplicate_list.py +23 -0
- fractal_server/app/runner/v2/handle_failed_job.py +165 -0
- fractal_server/app/runner/v2/merge_outputs.py +38 -0
- fractal_server/app/runner/v2/runner.py +343 -0
- fractal_server/app/runner/v2/runner_functions.py +374 -0
- fractal_server/app/runner/v2/runner_functions_low_level.py +130 -0
- fractal_server/app/runner/v2/task_interface.py +62 -0
- fractal_server/app/runner/v2/v1_compat.py +31 -0
- fractal_server/app/schemas/__init__.py +1 -42
- fractal_server/app/schemas/_validators.py +28 -5
- fractal_server/app/schemas/v1/__init__.py +36 -0
- fractal_server/app/schemas/{applyworkflow.py → v1/applyworkflow.py} +18 -18
- fractal_server/app/schemas/{dataset.py → v1/dataset.py} +30 -30
- fractal_server/app/schemas/{dumps.py → v1/dumps.py} +8 -8
- fractal_server/app/schemas/{manifest.py → v1/manifest.py} +5 -5
- fractal_server/app/schemas/{project.py → v1/project.py} +9 -9
- fractal_server/app/schemas/{task.py → v1/task.py} +12 -12
- fractal_server/app/schemas/{task_collection.py → v1/task_collection.py} +7 -7
- fractal_server/app/schemas/{workflow.py → v1/workflow.py} +38 -38
- fractal_server/app/schemas/v2/__init__.py +37 -0
- fractal_server/app/schemas/v2/dataset.py +126 -0
- fractal_server/app/schemas/v2/dumps.py +87 -0
- fractal_server/app/schemas/v2/job.py +114 -0
- fractal_server/app/schemas/v2/manifest.py +159 -0
- fractal_server/app/schemas/v2/project.py +34 -0
- fractal_server/app/schemas/v2/status.py +16 -0
- fractal_server/app/schemas/v2/task.py +151 -0
- fractal_server/app/schemas/v2/task_collection.py +109 -0
- fractal_server/app/schemas/v2/workflow.py +79 -0
- fractal_server/app/schemas/v2/workflowtask.py +208 -0
- fractal_server/config.py +13 -10
- fractal_server/images/__init__.py +4 -0
- fractal_server/images/models.py +136 -0
- fractal_server/images/tools.py +84 -0
- fractal_server/main.py +11 -3
- fractal_server/migrations/env.py +0 -2
- fractal_server/migrations/versions/5bf02391cfef_v2.py +245 -0
- fractal_server/tasks/__init__.py +0 -5
- fractal_server/tasks/endpoint_operations.py +13 -19
- fractal_server/tasks/utils.py +35 -0
- fractal_server/tasks/{_TaskCollectPip.py → v1/_TaskCollectPip.py} +3 -3
- fractal_server/tasks/v1/__init__.py +0 -0
- fractal_server/tasks/{background_operations.py → v1/background_operations.py} +20 -52
- fractal_server/tasks/v1/get_collection_data.py +14 -0
- fractal_server/tasks/v2/_TaskCollectPip.py +103 -0
- fractal_server/tasks/v2/__init__.py +0 -0
- fractal_server/tasks/v2/background_operations.py +381 -0
- fractal_server/tasks/v2/get_collection_data.py +14 -0
- fractal_server/urls.py +13 -0
- {fractal_server-1.4.6.dist-info → fractal_server-2.0.0.dist-info}/METADATA +11 -12
- fractal_server-2.0.0.dist-info/RECORD +169 -0
- fractal_server/app/runner/_slurm/.gitignore +0 -2
- fractal_server/app/runner/common.py +0 -307
- fractal_server/app/schemas/json_schemas/manifest.json +0 -81
- fractal_server-1.4.6.dist-info/RECORD +0 -97
- /fractal_server/app/runner/{_slurm → executors/slurm}/remote.py +0 -0
- /fractal_server/app/runner/{_local → v1/_local}/executor.py +0 -0
- {fractal_server-1.4.6.dist-info → fractal_server-2.0.0.dist-info}/LICENSE +0 -0
- {fractal_server-1.4.6.dist-info → fractal_server-2.0.0.dist-info}/WHEEL +0 -0
- {fractal_server-1.4.6.dist-info → fractal_server-2.0.0.dist-info}/entry_points.txt +0 -0
--- /dev/null
+++ b/fractal_server/app/routes/api/v2/submit.py
@@ -0,0 +1,210 @@
+from datetime import datetime
+from datetime import timedelta
+from datetime import timezone
+from typing import Optional
+
+from fastapi import APIRouter
+from fastapi import BackgroundTasks
+from fastapi import Depends
+from fastapi import HTTPException
+from fastapi import status
+from sqlmodel import select
+
+from .....config import get_settings
+from .....syringe import Inject
+from ....db import AsyncSession
+from ....db import get_async_db
+from ....models.v2 import JobV2
+from ....runner.set_start_and_last_task_index import (
+    set_start_and_last_task_index,
+)
+from ....runner.v2 import submit_workflow
+from ....schemas.v2 import JobCreateV2
+from ....schemas.v2 import JobReadV2
+from ....schemas.v2 import JobStatusTypeV2
+from ....security import current_active_verified_user
+from ....security import User
+from ._aux_functions import _get_dataset_check_owner
+from ._aux_functions import _get_workflow_check_owner
+
+
+def _encode_as_utc(dt: datetime):
+    return dt.replace(tzinfo=timezone.utc).isoformat()
+
+
+router = APIRouter()
+
+
+@router.post(
+    "/project/{project_id}/job/submit/",
+    status_code=status.HTTP_202_ACCEPTED,
+    response_model=JobReadV2,
+)
+async def apply_workflow(
+    project_id: int,
+    workflow_id: int,
+    dataset_id: int,
+    job_create: JobCreateV2,
+    background_tasks: BackgroundTasks,
+    user: User = Depends(current_active_verified_user),
+    db: AsyncSession = Depends(get_async_db),
+) -> Optional[JobReadV2]:
+
+    output = await _get_dataset_check_owner(
+        project_id=project_id,
+        dataset_id=dataset_id,
+        user_id=user.id,
+        db=db,
+    )
+    project = output["project"]
+    dataset = output["dataset"]
+
+    workflow = await _get_workflow_check_owner(
+        project_id=project_id, workflow_id=workflow_id, user_id=user.id, db=db
+    )
+
+    if not workflow.task_list:
+        raise HTTPException(
+            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+            detail=f"Workflow {workflow_id} has empty task list",
+        )
+
+    # Set values of first_task_index and last_task_index
+    num_tasks = len(workflow.task_list)
+    try:
+        first_task_index, last_task_index = set_start_and_last_task_index(
+            num_tasks,
+            first_task_index=job_create.first_task_index,
+            last_task_index=job_create.last_task_index,
+        )
+        job_create.first_task_index = first_task_index
+        job_create.last_task_index = last_task_index
+    except ValueError as e:
+        raise HTTPException(
+            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+            detail=(
+                "Invalid values for first_task_index or last_task_index "
+                f"(with {num_tasks=}).\n"
+                f"Original error: {str(e)}"
+            ),
+        )
+
+    # If backend is SLURM, check that the user has required attributes
+    settings = Inject(get_settings)
+    backend = settings.FRACTAL_RUNNER_BACKEND
+    if backend == "slurm":
+        if not user.slurm_user:
+            raise HTTPException(
+                status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+                detail=(
+                    f"FRACTAL_RUNNER_BACKEND={backend}, "
+                    f"but {user.slurm_user=}."
+                ),
+            )
+        if not user.cache_dir:
+            raise HTTPException(
+                status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+                detail=(
+                    f"FRACTAL_RUNNER_BACKEND={backend}, "
+                    f"but {user.cache_dir=}."
+                ),
+            )
+
+    # Check that no other job with the same dataset_id is SUBMITTED
+    stm = (
+        select(JobV2)
+        .where(JobV2.dataset_id == dataset_id)
+        .where(JobV2.status == JobStatusTypeV2.SUBMITTED)
+    )
+    res = await db.execute(stm)
+    if res.scalars().all():
+        raise HTTPException(
+            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+            detail=(
+                f"Dataset {dataset_id} is already in use "
+                "in submitted job(s)."
+            ),
+        )
+
+    if job_create.slurm_account is not None:
+        if job_create.slurm_account not in user.slurm_accounts:
+            raise HTTPException(
+                status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+                detail=(
+                    f"SLURM account '{job_create.slurm_account}' is not "
+                    "among those available to the current user"
+                ),
+            )
+    else:
+        if len(user.slurm_accounts) > 0:
+            job_create.slurm_account = user.slurm_accounts[0]
+
+    # Add new Job object to DB
+    job = JobV2(
+        project_id=project_id,
+        dataset_id=dataset_id,
+        workflow_id=workflow_id,
+        user_email=user.email,
+        dataset_dump=dict(
+            **dataset.model_dump(exclude={"timestamp_created"}),
+            timestamp_created=_encode_as_utc(dataset.timestamp_created),
+        ),
+        workflow_dump=dict(
+            **workflow.model_dump(exclude={"task_list", "timestamp_created"}),
+            timestamp_created=_encode_as_utc(workflow.timestamp_created),
+        ),
+        project_dump=dict(
+            **project.model_dump(exclude={"user_list", "timestamp_created"}),
+            timestamp_created=_encode_as_utc(project.timestamp_created),
+        ),
+        **job_create.dict(),
+    )
+
+    # Rate Limiting:
+    # raise `429 TOO MANY REQUESTS` if this endpoint has been called with the
+    # same database keys (Project, Workflow and Datasets) during the last
+    # `settings.FRACTAL_API_SUBMIT_RATE_LIMIT` seconds.
+    stm = (
+        select(JobV2)
+        .where(JobV2.project_id == project_id)
+        .where(JobV2.workflow_id == workflow_id)
+        .where(JobV2.dataset_id == dataset_id)
+    )
+    res = await db.execute(stm)
+    db_jobs = res.scalars().all()
+    if db_jobs and any(
+        abs(
+            job.start_timestamp
+            - db_job.start_timestamp.replace(tzinfo=timezone.utc)
+        )
+        < timedelta(seconds=settings.FRACTAL_API_SUBMIT_RATE_LIMIT)
+        for db_job in db_jobs
+    ):
+        raise HTTPException(
+            status_code=status.HTTP_429_TOO_MANY_REQUESTS,
+            detail=(
+                f"The endpoint 'POST /api/v2/project/{project_id}/job/submit/'"
+                " was called several times within an interval of less "
+                f"than {settings.FRACTAL_API_SUBMIT_RATE_LIMIT} seconds, using"
+                " the same foreign keys. If it was intentional, please wait "
+                "and try again."
+            ),
+        )
+
+    db.add(job)
+    await db.commit()
+    await db.refresh(job)
+
+    background_tasks.add_task(
+        submit_workflow,
+        workflow_id=workflow.id,
+        dataset_id=dataset.id,
+        job_id=job.id,
+        worker_init=job.worker_init,
+        slurm_user=user.slurm_user,
+        user_cache_dir=user.cache_dir,
+    )
+
+    await db.close()
+
+    return job
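
For context, the new `submit.py` module above exposes job submission as `POST /project/{project_id}/job/submit/`, with `workflow_id` and `dataset_id` passed as query parameters (they appear in the signature but not in the path). A minimal client-side sketch, assuming the router is mounted under `/api/v2` (consistent with the endpoint's own rate-limit message) and using placeholder IDs, token, and `JobCreateV2` fields:

```python
# Hypothetical sketch of calling the new V2 submission endpoint.
# BASE_URL, the IDs, the token, and the request body are placeholders.
import httpx

BASE_URL = "http://localhost:8000"
HEADERS = {"Authorization": "Bearer <token>"}

response = httpx.post(
    f"{BASE_URL}/api/v2/project/1/job/submit/",
    params={"workflow_id": 2, "dataset_id": 3},  # query params, per the signature
    json={"first_task_index": 0},  # subset of JobCreateV2
    headers=HEADERS,
)
# 202 + JobReadV2 on success; 422 for the validation checks above; 429 if rate-limited
print(response.status_code, response.json())
```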
--- /dev/null
+++ b/fractal_server/app/routes/api/v2/task.py
@@ -0,0 +1,222 @@
+from copy import deepcopy  # noqa
+from typing import Optional
+
+from fastapi import APIRouter
+from fastapi import Depends
+from fastapi import HTTPException
+from fastapi import Response
+from fastapi import status
+from sqlmodel import select
+
+from .....logger import set_logger
+from ....db import AsyncSession
+from ....db import get_async_db
+from ....models.v1 import Task as TaskV1
+from ....models.v2 import TaskV2
+from ....models.v2 import WorkflowTaskV2
+from ....schemas.v2 import TaskCreateV2
+from ....schemas.v2 import TaskReadV2
+from ....schemas.v2 import TaskUpdateV2
+from ....security import current_active_user
+from ....security import current_active_verified_user
+from ....security import User
+from ._aux_functions import _get_task_check_owner
+
+router = APIRouter()
+
+logger = set_logger(__name__)
+
+
+@router.get("/", response_model=list[TaskReadV2])
+async def get_list_task(
+    args_schema_parallel: bool = True,
+    args_schema_non_parallel: bool = True,
+    user: User = Depends(current_active_user),
+    db: AsyncSession = Depends(get_async_db),
+) -> list[TaskReadV2]:
+    """
+    Get list of available tasks
+    """
+    stm = select(TaskV2)
+    res = await db.execute(stm)
+    task_list = res.scalars().all()
+    await db.close()
+    if args_schema_parallel is False:
+        for task in task_list:
+            setattr(task, "args_schema_parallel", None)
+    if args_schema_non_parallel is False:
+        for task in task_list:
+            setattr(task, "args_schema_non_parallel", None)
+
+    return task_list
+
+
+@router.get("/{task_id}/", response_model=TaskReadV2)
+async def get_task(
+    task_id: int,
+    user: User = Depends(current_active_user),
+    db: AsyncSession = Depends(get_async_db),
+) -> TaskReadV2:
+    """
+    Get info on a specific task
+    """
+    task = await db.get(TaskV2, task_id)
+    await db.close()
+    if not task:
+        raise HTTPException(
+            status_code=status.HTTP_404_NOT_FOUND, detail="TaskV2 not found"
+        )
+    return task
+
+
+@router.patch("/{task_id}/", response_model=TaskReadV2)
+async def patch_task(
+    task_id: int,
+    task_update: TaskUpdateV2,
+    user: User = Depends(current_active_verified_user),
+    db: AsyncSession = Depends(get_async_db),
+) -> Optional[TaskReadV2]:
+    """
+    Edit a specific task (restricted to superusers and task owner)
+    """
+
+    # Retrieve task from database
+    db_task = await _get_task_check_owner(task_id=task_id, user=user, db=db)
+    update = task_update.dict(exclude_unset=True)
+
+    # Forbid changes that set a previously unset command
+    if db_task.type == "non_parallel" and "command_parallel" in update:
+        raise HTTPException(
+            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+            detail="Cannot set an unset `command_parallel`.",
+        )
+    if db_task.type == "parallel" and "command_non_parallel" in update:
+        raise HTTPException(
+            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+            detail="Cannot set an unset `command_non_parallel`.",
+        )
+
+    for key, value in update.items():
+        setattr(db_task, key, value)
+
+    await db.commit()
+    await db.refresh(db_task)
+    await db.close()
+    return db_task
+
+
+@router.post(
+    "/", response_model=TaskReadV2, status_code=status.HTTP_201_CREATED
+)
+async def create_task(
+    task: TaskCreateV2,
+    user: User = Depends(current_active_verified_user),
+    db: AsyncSession = Depends(get_async_db),
+) -> Optional[TaskReadV2]:
+    """
+    Create a new task
+    """
+
+    if task.command_non_parallel is None:
+        task_type = "parallel"
+    elif task.command_parallel is None:
+        task_type = "non_parallel"
+    else:
+        task_type = "compound"
+
+    if task_type == "parallel" and (
+        task.args_schema_non_parallel is not None
+        or task.meta_non_parallel is not None
+    ):
+        raise HTTPException(
+            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+            detail=(
+                "Cannot set `TaskV2.args_schema_non_parallel` or "
+                "`TaskV2.args_schema_non_parallel` if TaskV2 is parallel"
+            ),
+        )
+    elif task_type == "non_parallel" and (
+        task.args_schema_parallel is not None or task.meta_parallel is not None
+    ):
+        raise HTTPException(
+            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+            detail=(
+                "Cannot set `TaskV2.args_schema_parallel` or "
+                "`TaskV2.args_schema_parallel` if TaskV2 is non_parallel"
+            ),
+        )
+
+    # Set task.owner attribute
+    if user.username:
+        owner = user.username
+    elif user.slurm_user:
+        owner = user.slurm_user
+    else:
+        raise HTTPException(
+            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+            detail=(
+                "Cannot add a new task because current user does not "
+                "have `username` or `slurm_user` attributes."
+            ),
+        )
+
+    # Prepend owner to task.source
+    task.source = f"{owner}:{task.source}"
+
+    # Verify that source is not already in use (note: this check is only useful
+    # to provide a user-friendly error message, but `task.source` uniqueness is
+    # already guaranteed by a constraint in the table definition).
+    stm = select(TaskV2).where(TaskV2.source == task.source)
+    res = await db.execute(stm)
+    if res.scalars().all():
+        raise HTTPException(
+            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+            detail=f"Source '{task.source}' already used by some TaskV2",
+        )
+    stm = select(TaskV1).where(TaskV1.source == task.source)
+    res = await db.execute(stm)
+    if res.scalars().all():
+        raise HTTPException(
+            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+            detail=f"Source '{task.source}' already used by some TaskV1",
+        )
+    # Add task
+    db_task = TaskV2(**task.dict(), owner=owner, type=task_type)
+    db.add(db_task)
+    await db.commit()
+    await db.refresh(db_task)
+    await db.close()
+    return db_task
+
+
+@router.delete("/{task_id}/", status_code=204)
+async def delete_task(
+    task_id: int,
+    user: User = Depends(current_active_user),
+    db: AsyncSession = Depends(get_async_db),
+) -> Response:
+    """
+    Delete a task
+    """
+
+    db_task = await _get_task_check_owner(task_id=task_id, user=user, db=db)
+
+    # Check that the TaskV2 is not in relationship with some WorkflowTaskV2
+    stm = select(WorkflowTaskV2).filter(WorkflowTaskV2.task_id == task_id)
+    res = await db.execute(stm)
+    workflowtask_list = res.scalars().all()
+    if workflowtask_list:
+        raise HTTPException(
+            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+            detail=(
+                f"Cannot remove TaskV2 {task_id} because it is currently "
+                "imported in WorkflowsV2 "
+                f"{[x.workflow_id for x in workflowtask_list]}. "
+                "If you want to remove this task, then you should first remove"
+                " the workflows.",
+            ),
+        )
+
+    await db.delete(db_task)
+    await db.commit()
+    return Response(status_code=status.HTTP_204_NO_CONTENT)
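
Note how `create_task` infers the task type from which commands are provided: only `command_parallel` gives `parallel`, only `command_non_parallel` gives `non_parallel`, and both give `compound`; the owner (`username` or `slurm_user`) is then prepended to `source`. A hedged sketch of creating a compound task, assuming the router is mounted under `/api/v2/task`:

```python
# Hypothetical sketch: creating a "compound" TaskV2 (both commands set).
# The /api/v2/task mount prefix, URL, token, and commands are assumptions.
import httpx

BASE_URL = "http://localhost:8000"
HEADERS = {"Authorization": "Bearer <token>"}

payload = {
    "name": "my-task",
    "source": "my-source",  # the endpoint rewrites this to "{owner}:my-source"
    "command_non_parallel": "python /srv/tasks/init.py",
    "command_parallel": "python /srv/tasks/compute.py",
}
response = httpx.post(f"{BASE_URL}/api/v2/task/", json=payload, headers=HEADERS)
# 201 + TaskReadV2 on success; 422 if the source is already used by a V1 or V2 task
print(response.status_code, response.json())
```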
--- /dev/null
+++ b/fractal_server/app/routes/api/v2/task_collection.py
@@ -0,0 +1,239 @@
+from pathlib import Path
+from shutil import copy as shell_copy
+from tempfile import TemporaryDirectory
+
+from fastapi import APIRouter
+from fastapi import BackgroundTasks
+from fastapi import Depends
+from fastapi import HTTPException
+from fastapi import Response
+from fastapi import status
+from pydantic.error_wrappers import ValidationError
+from sqlmodel import select
+
+from .....config import get_settings
+from .....logger import close_logger
+from .....logger import set_logger
+from .....syringe import Inject
+from ....db import AsyncSession
+from ....db import get_async_db
+from ....models.v2 import CollectionStateV2
+from ....models.v2 import TaskV2
+from ....schemas.state import StateRead
+from ....schemas.v2 import TaskCollectPipV2
+from ....schemas.v2 import TaskCollectStatusV2
+from ....security import current_active_user
+from ....security import current_active_verified_user
+from ....security import User
+from fractal_server.tasks.endpoint_operations import create_package_dir_pip
+from fractal_server.tasks.endpoint_operations import download_package
+from fractal_server.tasks.endpoint_operations import inspect_package
+from fractal_server.tasks.utils import get_collection_log
+from fractal_server.tasks.utils import slugify_task_name
+from fractal_server.tasks.v2._TaskCollectPip import _TaskCollectPip
+from fractal_server.tasks.v2.background_operations import (
+    background_collect_pip,
+)
+from fractal_server.tasks.v2.get_collection_data import get_collection_data
+
+router = APIRouter()
+
+logger = set_logger(__name__)
+
+
+@router.post(
+    "/collect/pip/",
+    response_model=StateRead,
+    responses={
+        201: dict(
+            description=(
+                "Task collection successfully started in the background"
+            )
+        ),
+        200: dict(
+            description=(
+                "Package already collected. Returning info on already "
+                "available tasks"
+            )
+        ),
+    },
+)
+async def collect_tasks_pip(
+    task_collect: TaskCollectPipV2,
+    background_tasks: BackgroundTasks,
+    response: Response,
+    user: User = Depends(current_active_verified_user),
+    db: AsyncSession = Depends(get_async_db),
+) -> StateRead:  # State[TaskCollectStatus]
+    """
+    Task collection endpoint
+
+    Trigger the creation of a dedicated virtual environment, the installation
+    of a package and the collection of tasks as advertised in the manifest.
+    """
+
+    logger = set_logger(logger_name="collect_tasks_pip")
+
+    # Validate payload as _TaskCollectPip, which has more strict checks than
+    # TaskCollectPip
+    try:
+        task_pkg = _TaskCollectPip(**task_collect.dict(exclude_unset=True))
+    except ValidationError as e:
+        raise HTTPException(
+            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+            detail=f"Invalid task-collection object. Original error: {e}",
+        )
+
+    with TemporaryDirectory() as tmpdir:
+        try:
+            # Copy or download the package wheel file to tmpdir
+            if task_pkg.is_local_package:
+                shell_copy(task_pkg.package_path.as_posix(), tmpdir)
+                pkg_path = Path(tmpdir) / task_pkg.package_path.name
+            else:
+                pkg_path = await download_package(
+                    task_pkg=task_pkg, dest=tmpdir
+                )
+            # Read package info from wheel file, and override the ones coming
+            # from the request body
+            pkg_info = inspect_package(pkg_path)
+            task_pkg.package_name = pkg_info["pkg_name"]
+            task_pkg.package_version = pkg_info["pkg_version"]
+            task_pkg.package_manifest = pkg_info["pkg_manifest"]
+            task_pkg.check()
+        except Exception as e:
+            raise HTTPException(
+                status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+                detail=f"Invalid package or manifest. Original error: {e}",
+            )
+
+    try:
+        venv_path = create_package_dir_pip(task_pkg=task_pkg)
+    except FileExistsError:
+        venv_path = create_package_dir_pip(task_pkg=task_pkg, create=False)
+        try:
+            task_collect_status = get_collection_data(venv_path)
+            for task in task_collect_status.task_list:
+                db_task = await db.get(TaskV2, task.id)
+                if (
+                    (not db_task)
+                    or db_task.source != task.source
+                    or db_task.name != task.name
+                ):
+                    await db.close()
+                    raise HTTPException(
+                        status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+                        detail=(
+                            "Cannot collect package. Folder already exists, "
+                            f"but task {task.id} does not exists or it does "
+                            f"not have the expected source ({task.source}) or "
+                            f"name ({task.name})."
+                        ),
+                    )
+        except FileNotFoundError as e:
+            await db.close()
+            raise HTTPException(
+                status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+                detail=(
+                    "Cannot collect package. Possible reason: another "
+                    "collection of the same package is in progress. "
+                    f"Original FileNotFoundError: {e}"
+                ),
+            )
+        except ValidationError as e:
+            await db.close()
+            raise HTTPException(
+                status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+                detail=(
+                    "Cannot collect package. Possible reason: an old version "
+                    "of the same package has already been collected. "
+                    f"Original ValidationError: {e}"
+                ),
+            )
+        task_collect_status.info = "Already installed"
+        state = CollectionStateV2(data=task_collect_status.sanitised_dict())
+        response.status_code == status.HTTP_200_OK
+        await db.close()
+        return state
+    settings = Inject(get_settings)
+
+    # Check that tasks are not already in the DB
+    for new_task in task_pkg.package_manifest.task_list:
+        new_task_name_slug = slugify_task_name(new_task.name)
+        new_task_source = f"{task_pkg.package_source}:{new_task_name_slug}"
+        stm = select(TaskV2).where(TaskV2.source == new_task_source)
+        res = await db.execute(stm)
+        if res.scalars().all():
+            raise HTTPException(
+                status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+                detail=(
+                    "Cannot collect package. Task with source "
+                    f'"{new_task_source}" already exists in the database.'
+                ),
+            )
+
+    # All checks are OK, proceed with task collection
+    full_venv_path = venv_path.relative_to(settings.FRACTAL_TASKS_DIR)
+    collection_status = TaskCollectStatusV2(
+        status="pending", venv_path=full_venv_path, package=task_pkg.package
+    )
+
+    # Create State object (after casting venv_path to string)
+    collection_status_dict = collection_status.dict()
+    collection_status_dict["venv_path"] = str(collection_status.venv_path)
+    state = CollectionStateV2(data=collection_status_dict)
+    db.add(state)
+    await db.commit()
+    await db.refresh(state)
+
+    background_tasks.add_task(
+        background_collect_pip,
+        state_id=state.id,
+        venv_path=venv_path,
+        task_pkg=task_pkg,
+    )
+    logger.debug(
+        "Task-collection endpoint: start background collection "
+        "and return state"
+    )
+    close_logger(logger)
+    info = (
+        "Collecting tasks in the background. "
+        f"GET /task/collect/{state.id} to query collection status"
+    )
+    state.data["info"] = info
+    response.status_code = status.HTTP_201_CREATED
+    await db.close()
+
+    return state
+
+
+@router.get("/collect/{state_id}/", response_model=StateRead)
+async def check_collection_status(
+    state_id: int,
+    user: User = Depends(current_active_user),
+    verbose: bool = False,
+    db: AsyncSession = Depends(get_async_db),
+) -> StateRead:  # State[TaskCollectStatus]
+    """
+    Check status of background task collection
+    """
+    logger = set_logger(logger_name="check_collection_status")
+    logger.debug(f"Querying state for state.id={state_id}")
+    state = await db.get(CollectionStateV2, state_id)
+    if not state:
+        await db.close()
+        raise HTTPException(
+            status_code=status.HTTP_404_NOT_FOUND,
+            detail=f"No task collection info with id={state_id}",
+        )
+    data = TaskCollectStatusV2(**state.data)
+
+    # In some cases (i.e. a successful or ongoing task collection), data.log is
+    # not set; if so, we collect the current logs
+    if verbose and not data.log:
+        data.log = get_collection_log(data.venv_path)
+        state.data = data.sanitised_dict()
+    close_logger(logger)
+    await db.close()
+    return state
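
Together, the two endpoints above implement a trigger-then-poll flow: `POST /collect/pip/` validates the package, records a `CollectionStateV2` with `status="pending"`, and schedules `background_collect_pip`; `GET /collect/{state_id}/` reports progress and, with `verbose=true`, attaches the collection log. A hedged sketch, assuming the `/api/v2/task` mount prefix and a minimal `TaskCollectPipV2` payload (terminal status values are not shown in this diff):

```python
# Hypothetical sketch of the trigger-then-poll task-collection flow.
import time

import httpx

BASE_URL = "http://localhost:8000"
HEADERS = {"Authorization": "Bearer <token>"}

# 1. Trigger the background collection (minimal payload; assumes a bare
#    package name is accepted, by analogy with pip-install semantics).
state = httpx.post(
    f"{BASE_URL}/api/v2/task/collect/pip/",
    json={"package": "fractal-tasks-core"},
    headers=HEADERS,
).json()

# 2. Poll for a while; the state starts as status="pending", and verbose=True
#    asks the server to attach the current collection log.
for _ in range(30):
    state = httpx.get(
        f"{BASE_URL}/api/v2/task/collect/{state['id']}/",
        params={"verbose": True},
        headers=HEADERS,
    ).json()
    if state["data"]["status"] != "pending":  # terminal values not shown here
        break
    time.sleep(2)
print(state["data"]["status"], state["data"].get("log", ""))
```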