fractal-server 2.18.0a4__py3-none-any.whl → 2.18.0a5__py3-none-any.whl
This diff covers publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the versions exactly as they appear in their public registries.
- fractal_server/__init__.py +1 -1
- fractal_server/app/models/v2/job.py +13 -2
- fractal_server/app/models/v2/resource.py +13 -0
- fractal_server/app/routes/admin/v2/__init__.py +10 -12
- fractal_server/app/routes/admin/v2/job.py +15 -15
- fractal_server/app/routes/admin/v2/task.py +7 -7
- fractal_server/app/routes/admin/v2/task_group.py +11 -11
- fractal_server/app/routes/admin/v2/task_group_lifecycle.py +20 -20
- fractal_server/app/routes/api/v2/__init__.py +47 -49
- fractal_server/app/routes/api/v2/_aux_functions.py +22 -47
- fractal_server/app/routes/api/v2/_aux_functions_task_lifecycle.py +4 -4
- fractal_server/app/routes/api/v2/_aux_functions_tasks.py +2 -2
- fractal_server/app/routes/api/v2/dataset.py +63 -73
- fractal_server/app/routes/api/v2/history.py +7 -5
- fractal_server/app/routes/api/v2/job.py +12 -12
- fractal_server/app/routes/api/v2/project.py +11 -11
- fractal_server/app/routes/api/v2/status_legacy.py +15 -29
- fractal_server/app/routes/api/v2/submit.py +65 -66
- fractal_server/app/routes/api/v2/task.py +15 -17
- fractal_server/app/routes/api/v2/task_collection.py +18 -18
- fractal_server/app/routes/api/v2/task_collection_custom.py +11 -13
- fractal_server/app/routes/api/v2/task_collection_pixi.py +9 -9
- fractal_server/app/routes/api/v2/task_group.py +18 -18
- fractal_server/app/routes/api/v2/task_group_lifecycle.py +26 -26
- fractal_server/app/routes/api/v2/task_version_update.py +5 -5
- fractal_server/app/routes/api/v2/workflow.py +18 -18
- fractal_server/app/routes/api/v2/workflow_import.py +11 -11
- fractal_server/app/routes/api/v2/workflowtask.py +10 -10
- fractal_server/app/routes/auth/_aux_auth.py +99 -0
- fractal_server/app/routes/auth/users.py +9 -0
- fractal_server/app/schemas/user.py +1 -1
- fractal_server/app/schemas/v2/__init__.py +48 -48
- fractal_server/app/schemas/v2/dataset.py +25 -13
- fractal_server/app/schemas/v2/dumps.py +9 -9
- fractal_server/app/schemas/v2/job.py +11 -11
- fractal_server/app/schemas/v2/project.py +3 -3
- fractal_server/app/schemas/v2/resource.py +13 -4
- fractal_server/app/schemas/v2/status_legacy.py +3 -3
- fractal_server/app/schemas/v2/task.py +6 -6
- fractal_server/app/schemas/v2/task_collection.py +4 -4
- fractal_server/app/schemas/v2/task_group.py +16 -16
- fractal_server/app/schemas/v2/workflow.py +16 -16
- fractal_server/app/schemas/v2/workflowtask.py +14 -14
- fractal_server/app/shutdown.py +6 -6
- fractal_server/config/_main.py +1 -1
- fractal_server/data_migrations/2_18_0.py +2 -1
- fractal_server/main.py +8 -12
- fractal_server/migrations/versions/88270f589c9b_add_prevent_new_submissions.py +39 -0
- fractal_server/migrations/versions/f0702066b007_one_submitted_job_per_dataset.py +40 -0
- fractal_server/runner/v2/_local.py +3 -2
- fractal_server/runner/v2/_slurm_ssh.py +3 -2
- fractal_server/runner/v2/_slurm_sudo.py +3 -2
- fractal_server/runner/v2/runner.py +36 -17
- fractal_server/runner/v2/runner_functions.py +11 -14
- fractal_server/runner/v2/submit_workflow.py +22 -9
- fractal_server/tasks/v2/local/_utils.py +2 -2
- fractal_server/tasks/v2/local/collect.py +5 -6
- fractal_server/tasks/v2/local/collect_pixi.py +5 -6
- fractal_server/tasks/v2/local/deactivate.py +7 -7
- fractal_server/tasks/v2/local/deactivate_pixi.py +3 -3
- fractal_server/tasks/v2/local/delete.py +5 -5
- fractal_server/tasks/v2/local/reactivate.py +5 -5
- fractal_server/tasks/v2/local/reactivate_pixi.py +5 -5
- fractal_server/tasks/v2/ssh/collect.py +5 -5
- fractal_server/tasks/v2/ssh/collect_pixi.py +5 -5
- fractal_server/tasks/v2/ssh/deactivate.py +7 -7
- fractal_server/tasks/v2/ssh/deactivate_pixi.py +2 -2
- fractal_server/tasks/v2/ssh/delete.py +5 -5
- fractal_server/tasks/v2/ssh/reactivate.py +5 -5
- fractal_server/tasks/v2/ssh/reactivate_pixi.py +5 -5
- fractal_server/tasks/v2/utils_background.py +7 -7
- fractal_server/tasks/v2/utils_database.py +5 -5
- fractal_server/types/__init__.py +13 -4
- fractal_server/types/validators/__init__.py +3 -1
- fractal_server/types/validators/_common_validators.py +23 -1
- {fractal_server-2.18.0a4.dist-info → fractal_server-2.18.0a5.dist-info}/METADATA +1 -1
- {fractal_server-2.18.0a4.dist-info → fractal_server-2.18.0a5.dist-info}/RECORD +80 -78
- {fractal_server-2.18.0a4.dist-info → fractal_server-2.18.0a5.dist-info}/WHEEL +0 -0
- {fractal_server-2.18.0a4.dist-info → fractal_server-2.18.0a5.dist-info}/entry_points.txt +0 -0
- {fractal_server-2.18.0a4.dist-info → fractal_server-2.18.0a5.dist-info}/licenses/LICENSE +0 -0
--- fractal_server/app/routes/api/v2/status_legacy.py (2.18.0a4)
+++ fractal_server/app/routes/api/v2/status_legacy.py (2.18.0a5)
@@ -1,7 +1,5 @@
 from fastapi import APIRouter
 from fastapi import Depends
-from fastapi import HTTPException
-from fastapi import status

 from fractal_server.app.db import AsyncSession
 from fractal_server.app.db import get_async_db
@@ -9,8 +7,8 @@ from fractal_server.app.models import UserOAuth
 from fractal_server.app.models.v2 import JobV2
 from fractal_server.app.routes.auth import current_user_act_ver_prof
 from fractal_server.app.schemas.v2.sharing import ProjectPermissions
-from fractal_server.app.schemas.v2.status_legacy import
-from fractal_server.app.schemas.v2.status_legacy import
+from fractal_server.app.schemas.v2.status_legacy import LegacyStatusRead
+from fractal_server.app.schemas.v2.status_legacy import WorkflowTaskStatusType
 from fractal_server.logger import set_logger

 from ._aux_functions import _get_dataset_check_access
@@ -24,7 +22,7 @@ logger = set_logger(__name__)

 @router.get(
     "/project/{project_id}/status-legacy/",
-    response_model=
+    response_model=LegacyStatusRead,
 )
 async def get_workflowtask_status(
     project_id: int,
@@ -32,7 +30,7 @@ async def get_workflowtask_status(
     workflow_id: int,
     user: UserOAuth = Depends(current_user_act_ver_prof),
     db: AsyncSession = Depends(get_async_db),
-) ->
+) -> LegacyStatusRead | None:
     """
     Extract the status of all `WorkflowTaskV2` of a given `WorkflowV2` that ran
     on a given `DatasetV2`.
@@ -64,24 +62,12 @@ async def get_workflowtask_status(
     # Check whether there exists a submitted job associated to this
     # workflow/dataset pair. If it does exist, it will be used later.
     # If there are multiple jobs, raise an error.
-
-
-
-
-
-
-        running_job = None
-    elif len(running_jobs) == 1:
-        running_job = running_jobs[0]
-    else:
-        string_ids = str([job.id for job in running_jobs])[1:-1]
-        raise HTTPException(
-            status_code=status.HTTP_422_UNPROCESSABLE_CONTENT,
-            detail=(
-                f"Cannot get WorkflowTaskV2 statuses as DatasetV2 {dataset.id}"
-                f" is linked to multiple active jobs: {string_ids}."
-            ),
-        )
+    res = await db.execute(
+        _get_submitted_jobs_statement()
+        .where(JobV2.dataset_id == dataset_id)
+        .where(JobV2.workflow_id == workflow_id)
+    )
+    running_job = res.scalars().one_or_none()

     # Initialize empty dictionary for WorkflowTaskV2 status
     workflow_tasks_status_dict: dict = {}
@@ -116,18 +102,18 @@ async def get_workflowtask_status(
         ]
         try:
             first_submitted_index = running_job_statuses.index(
-
+                WorkflowTaskStatusType.SUBMITTED
             )
         except ValueError:
             logger.warning(
                 f"Job {running_job.id} is submitted but its task list does not"
-                f" contain a {
+                f" contain a {WorkflowTaskStatusType.SUBMITTED} task."
             )
             first_submitted_index = 0

         for wftask in running_job_wftasks[first_submitted_index:]:
             workflow_tasks_status_dict[wftask.id] = (
-
+                WorkflowTaskStatusType.SUBMITTED
             )

         # The last workflow task that is included in the submitted job is also
@@ -157,7 +143,7 @@ async def get_workflowtask_status(
             # If a wftask ID was not found, ignore it and continue
             continue
         clean_workflow_tasks_status_dict[str(wf_task.id)] = wf_task_status
-        if wf_task_status ==
+        if wf_task_status == WorkflowTaskStatusType.FAILED:
             # Starting from the beginning of `workflow.task_list`, stop the
             # first time that you hit a failed job
             break
@@ -166,5 +152,5 @@ async def get_workflowtask_status(
             # first time that you hit `last_valid_wftask_id``
             break

-    response_body =
+    response_body = LegacyStatusRead(status=clean_workflow_tasks_status_dict)
     return response_body
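The rewritten status-legacy hunk drops the old three-way branch over `running_jobs` and instead expects at most one submitted job per workflow/dataset pair, consistent with the new `f0702066b007_one_submitted_job_per_dataset` migration listed above. A minimal sketch of the lookup pattern, assuming (not shown in these hunks) that `_get_submitted_jobs_statement()` returns a `select(JobV2)` already narrowed to submitted jobs:

# Sketch only: fetch at most one submitted job for a workflow/dataset pair.
from sqlmodel import select

from fractal_server.app.models.v2 import JobV2
from fractal_server.app.schemas.v2 import JobStatusType


def _get_submitted_jobs_statement():
    # Assumption: the helper narrows the query to submitted jobs.
    return select(JobV2).where(JobV2.status == JobStatusType.SUBMITTED)


async def _get_running_job(db, dataset_id: int, workflow_id: int) -> JobV2 | None:
    res = await db.execute(
        _get_submitted_jobs_statement()
        .where(JobV2.dataset_id == dataset_id)
        .where(JobV2.workflow_id == workflow_id)
    )
    # `one_or_none()` raises if more than one row matches, which cannot
    # happen once the database enforces a single submitted job per dataset.
    return res.scalars().one_or_none()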
--- fractal_server/app/routes/api/v2/submit.py (2.18.0a4)
+++ fractal_server/app/routes/api/v2/submit.py (2.18.0a5)
@@ -9,6 +9,7 @@ from fastapi import HTTPException
 from fastapi import Request
 from fastapi import status
 from sqlmodel import select
+from sqlmodel import update

 from fractal_server.app.db import AsyncSession
 from fractal_server.app.db import get_async_db
@@ -23,9 +24,9 @@ from fractal_server.app.routes.auth import current_user_act_ver_prof
 from fractal_server.app.routes.aux.validate_user_profile import (
     validate_user_profile,
 )
-from fractal_server.app.schemas.v2 import
-from fractal_server.app.schemas.v2 import
-from fractal_server.app.schemas.v2 import
+from fractal_server.app.schemas.v2 import JobCreate
+from fractal_server.app.schemas.v2 import JobRead
+from fractal_server.app.schemas.v2 import JobStatusType
 from fractal_server.app.schemas.v2 import ResourceType
 from fractal_server.app.schemas.v2.sharing import ProjectPermissions
 from fractal_server.config import get_settings
@@ -38,7 +39,7 @@ from fractal_server.syringe import Inject

 from ._aux_functions import _get_dataset_check_access
 from ._aux_functions import _get_workflow_check_access
-from ._aux_functions import
+from ._aux_functions import clean_app_job_list
 from ._aux_functions_tasks import _check_type_filters_compatibility

 FRACTAL_CACHE_DIR = ".fractal_cache"
@@ -49,29 +50,27 @@ logger = set_logger(__name__)
 @router.post(
     "/project/{project_id}/job/submit/",
     status_code=status.HTTP_202_ACCEPTED,
-    response_model=
+    response_model=JobRead,
 )
-async def
+async def submit_job(
     project_id: int,
     workflow_id: int,
     dataset_id: int,
-    job_create:
+    job_create: JobCreate,
     background_tasks: BackgroundTasks,
     request: Request,
     user: UserOAuth = Depends(current_user_act_ver_prof),
     db: AsyncSession = Depends(get_async_db),
-) ->
-    # Remove non-submitted
+) -> JobRead | None:
+    # Remove non-submitted Jobs from the app state when the list grows
     # beyond a threshold
-    # NOTE: this may lead to a race condition on `app.state.
-    # requests take place at the same time and `
+    # NOTE: this may lead to a race condition on `app.state.jobs` if two
+    # requests take place at the same time and `clean_app_job_list` is
     # somewhat slow.
     settings = Inject(get_settings)
-    if len(request.app.state.
-        new_jobs_list = await
-
-        )
-        request.app.state.jobsV2 = new_jobs_list
+    if len(request.app.state.jobs) > settings.FRACTAL_API_MAX_JOB_LIST_LENGTH:
+        new_jobs_list = await clean_app_job_list(db, request.app.state.jobs)
+        request.app.state.jobs = new_jobs_list

     output = await _get_dataset_check_access(
         project_id=project_id,
@@ -147,35 +146,15 @@ async def apply_workflow(
         user=user,
         db=db,
     )
-
-    # Check that no other job with the same dataset_id is SUBMITTED
-    stm = (
-        select(JobV2)
-        .where(JobV2.dataset_id == dataset_id)
-        .where(JobV2.status == JobStatusTypeV2.SUBMITTED)
-    )
-    res = await db.execute(stm)
-    if res.scalars().all():
+    if resource.prevent_new_submissions:
         raise HTTPException(
             status_code=status.HTTP_422_UNPROCESSABLE_CONTENT,
             detail=(
-                f"
+                f"The '{resource.name}' resource does not currently accept "
+                "new job submissions."
             ),
         )

-    if job_create.slurm_account is not None:
-        if job_create.slurm_account not in user.slurm_accounts:
-            raise HTTPException(
-                status_code=status.HTTP_422_UNPROCESSABLE_CONTENT,
-                detail=(
-                    f"SLURM account '{job_create.slurm_account}' is not "
-                    "among those available to the current user"
-                ),
-            )
-    else:
-        if len(user.slurm_accounts) > 0:
-            job_create.slurm_account = user.slurm_accounts[0]
-
     # User appropriate FractalSSH object
     if resource.type == ResourceType.SLURM_SSH:
         ssh_config = dict(
@@ -198,6 +177,35 @@ async def apply_workflow(
     else:
         fractal_ssh = None

+    # Assign `job_create.slurm_account`
+    if job_create.slurm_account is not None:
+        if job_create.slurm_account not in user.slurm_accounts:
+            raise HTTPException(
+                status_code=status.HTTP_422_UNPROCESSABLE_CONTENT,
+                detail=(
+                    f"SLURM account '{job_create.slurm_account}' is not "
+                    "among those available to the current user"
+                ),
+            )
+    else:
+        if len(user.slurm_accounts) > 0:
+            job_create.slurm_account = user.slurm_accounts[0]
+
+    # Check that no other job with the same dataset_id is SUBMITTED
+    stm = (
+        select(JobV2)
+        .where(JobV2.dataset_id == dataset_id)
+        .where(JobV2.status == JobStatusType.SUBMITTED)
+    )
+    res = await db.execute(stm)
+    if res.scalars().all():
+        raise HTTPException(
+            status_code=status.HTTP_422_UNPROCESSABLE_CONTENT,
+            detail=(
+                f"Dataset {dataset_id} is already in use in submitted job(s)."
+            ),
+        )
+
     # Add new Job object to DB
     job = JobV2(
         project_id=project_id,
@@ -221,38 +229,31 @@ async def apply_workflow(
     await db.refresh(job)

     # Update TaskGroupV2.timestamp_last_used
-
-
+    await db.execute(
+        update(TaskGroupV2)
+        .where(TaskGroupV2.id.in_(used_task_group_ids))
+        .values(timestamp_last_used=job.start_timestamp)
     )
-    used_task_groups = res.scalars().all()
-    for used_task_group in used_task_groups:
-        used_task_group.timestamp_last_used = job.start_timestamp
-        db.add(used_task_group)
     await db.commit()

-    # Define
-
-
+    # Define `cache_dir`
+    cache_dir = Path(user.project_dirs[0], FRACTAL_CACHE_DIR)
+
+    # Define server-side and user-side job directories
+    timestamp_string = job.start_timestamp.strftime(r"%Y%m%d_%H%M%S")
+    working_dir = Path(resource.jobs_local_dir) / (
         f"proj_v2_{project_id:07d}_wf_{workflow_id:07d}_job_{job.id:07d}"
         f"_{timestamp_string}"
     )
-
-    # Define user-side job directory
-    cache_dir = Path(user.project_dirs[0], FRACTAL_CACHE_DIR)
     match resource.type:
         case ResourceType.LOCAL:
-
+            working_dir_user = working_dir
         case ResourceType.SLURM_SUDO:
-
+            working_dir_user = cache_dir / working_dir.name
         case ResourceType.SLURM_SSH:
-
-
-
-            )
-
-    # Update job folders in the db
-    job.working_dir = WORKFLOW_DIR_LOCAL.as_posix()
-    job.working_dir_user = WORKFLOW_DIR_REMOTE.as_posix()
+            working_dir_user = Path(profile.jobs_remote_dir, working_dir.name)
+    job.working_dir = working_dir.as_posix()
+    job.working_dir_user = working_dir_user.as_posix()
     await db.merge(job)
     await db.commit()

@@ -268,11 +269,9 @@ async def apply_workflow(
         resource=resource,
         profile=profile,
     )
-    request.app.state.
+    request.app.state.jobs.append(job.id)
     logger.info(
-        f"
-        f"
-        f"{request.app.state.jobsV2}"
+        f"Job {job.id}, worker with pid {os.getpid()}. "
+        f"Worker jobs list: {request.app.state.jobs}."
     )
-    await db.close()
     return job
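Two behavioral changes stand out in these hunks: the per-dataset SUBMITTED check now sits next to a new `resource.prevent_new_submissions` guard (backed by the `88270f589c9b_add_prevent_new_submissions` migration listed above), and `TaskGroupV2.timestamp_last_used` is refreshed with a single bulk UPDATE instead of a load-modify-add loop. A minimal sketch of that bulk-update pattern; the surrounding endpoint code is omitted and `used_task_group_ids` is assumed to have been collected earlier from the workflow's task list:

# Sketch only: one UPDATE statement touches every used task group at once.
from sqlmodel import update

from fractal_server.app.models.v2 import TaskGroupV2


async def touch_task_groups(db, used_task_group_ids: list[int], timestamp) -> None:
    await db.execute(
        update(TaskGroupV2)
        .where(TaskGroupV2.id.in_(used_task_group_ids))
        .values(timestamp_last_used=timestamp)
    )
    await db.commit()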
--- fractal_server/app/routes/api/v2/task.py (2.18.0a4)
+++ fractal_server/app/routes/api/v2/task.py (2.18.0a5)
@@ -25,11 +25,11 @@ from fractal_server.app.models import UserOAuth
 from fractal_server.app.models.v2 import TaskGroupV2
 from fractal_server.app.models.v2 import TaskV2
 from fractal_server.app.routes.auth import current_user_act_ver_prof
-from fractal_server.app.schemas.v2 import
-from fractal_server.app.schemas.v2 import
-from fractal_server.app.schemas.v2 import
+from fractal_server.app.schemas.v2 import TaskCreate
+from fractal_server.app.schemas.v2 import TaskGroupOriginEnum
+from fractal_server.app.schemas.v2 import TaskRead
 from fractal_server.app.schemas.v2 import TaskType
-from fractal_server.app.schemas.v2 import
+from fractal_server.app.schemas.v2 import TaskUpdate
 from fractal_server.logger import set_logger

 router = APIRouter()
@@ -37,7 +37,7 @@ router = APIRouter()
 logger = set_logger(__name__)


-@router.get("/", response_model=list[
+@router.get("/", response_model=list[TaskRead])
 async def get_list_task(
     args_schema: bool = True,
     category: str | None = None,
@@ -45,7 +45,7 @@ async def get_list_task(
     author: str | None = None,
     user: UserOAuth = Depends(current_user_act_ver_prof),
     db: AsyncSession = Depends(get_async_db),
-) -> list[
+) -> list[TaskRead]:
     """
     Get list of available tasks
     """
@@ -86,12 +86,12 @@ async def get_list_task(
     return task_list


-@router.get("/{task_id}/", response_model=
+@router.get("/{task_id}/", response_model=TaskRead)
 async def get_task(
     task_id: int,
     user: UserOAuth = Depends(current_user_act_ver_prof),
     db: AsyncSession = Depends(get_async_db),
-) ->
+) -> TaskRead:
     """
     Get info on a specific task
     """
@@ -99,13 +99,13 @@ async def get_task(
     return task


-@router.patch("/{task_id}/", response_model=
+@router.patch("/{task_id}/", response_model=TaskRead)
 async def patch_task(
     task_id: int,
-    task_update:
+    task_update: TaskUpdate,
     user: UserOAuth = Depends(current_user_act_ver_prof),
     db: AsyncSession = Depends(get_async_db),
-) ->
+) -> TaskRead | None:
     """
     Edit a specific task (restricted to task owner)
     """
@@ -137,16 +137,14 @@ async def patch_task(
     return db_task


-@router.post(
-    "/", response_model=TaskReadV2, status_code=status.HTTP_201_CREATED
-)
+@router.post("/", response_model=TaskRead, status_code=status.HTTP_201_CREATED)
 async def create_task(
-    task:
+    task: TaskCreate,
     user_group_id: int | None = None,
     private: bool = False,
     user: UserOAuth = Depends(current_user_act_ver_prof),
     db: AsyncSession = Depends(get_async_db),
-) ->
+) -> TaskRead | None:
     """
     Create a new task
     """
@@ -211,7 +209,7 @@ async def create_task(
         resource_id=resource_id,
         active=True,
         task_list=[db_task],
-        origin=
+        origin=TaskGroupOriginEnum.OTHER,
         version=db_task.version,
         pkg_name=pkg_name,
     )
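All of the task.py hunks change the same two spots in each endpoint: the `response_model=` argument, which FastAPI uses to serialize the response and to build the OpenAPI schema, and the return annotation, which only matters to static type checkers. A stripped-down illustration of that pattern; the real endpoints also take the user and database dependencies shown above, and `TaskRead` here is a reduced stand-in for the real schema:

# Illustration only; not the packaged endpoint.
from fastapi import APIRouter
from pydantic import BaseModel

router = APIRouter()


class TaskRead(BaseModel):
    id: int
    name: str


@router.get("/{task_id}/", response_model=TaskRead)
async def get_task(task_id: int) -> TaskRead:
    # `response_model` drives serialization and the OpenAPI schema;
    # the annotation mirrors it for type checkers.
    return TaskRead(id=task_id, name="example")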
--- fractal_server/app/routes/api/v2/task_collection.py (2.18.0a4)
+++ fractal_server/app/routes/api/v2/task_collection.py (2.18.0a5)
@@ -25,12 +25,12 @@ from fractal_server.app.routes.aux.validate_user_profile import (
 )
 from fractal_server.app.schemas.v2 import FractalUploadedFile
 from fractal_server.app.schemas.v2 import ResourceType
-from fractal_server.app.schemas.v2 import
-from fractal_server.app.schemas.v2 import
-from fractal_server.app.schemas.v2 import
-from fractal_server.app.schemas.v2 import
-from fractal_server.app.schemas.v2 import
-from fractal_server.app.schemas.v2 import
+from fractal_server.app.schemas.v2 import TaskCollectPip
+from fractal_server.app.schemas.v2 import TaskGroupActivityAction
+from fractal_server.app.schemas.v2 import TaskGroupActivityRead
+from fractal_server.app.schemas.v2 import TaskGroupActivityStatus
+from fractal_server.app.schemas.v2 import TaskGroupCreateStrict
+from fractal_server.app.schemas.v2 import TaskGroupOriginEnum
 from fractal_server.logger import reset_logger_handlers
 from fractal_server.logger import set_logger
 from fractal_server.tasks.v2.local.collect import collect_local
@@ -59,9 +59,9 @@ class CollectionRequestData(BaseModel):
     Validate form data _and_ wheel file.
     """

-    task_collect:
+    task_collect: TaskCollectPip
     file: UploadFile | None = None
-    origin:
+    origin: TaskGroupOriginEnum

     @model_validator(mode="before")
     @classmethod
@@ -75,7 +75,7 @@ class CollectionRequestData(BaseModel):
                 raise ValueError(
                     "When no `file` is provided, `package` is required."
                 )
-            values["origin"] =
+            values["origin"] = TaskGroupOriginEnum.PYPI
         else:
             if package is not None:
                 raise ValueError(
@@ -87,7 +87,7 @@ class CollectionRequestData(BaseModel):
                     "Cannot set `package_version` when `file` is "
                     f"provided (given package_version='{package_version}')."
                 )
-            values["origin"] =
+            values["origin"] = TaskGroupOriginEnum.WHEELFILE

             for forbidden_char in FORBIDDEN_CHAR_WHEEL:
                 if forbidden_char in file.filename:
@@ -125,7 +125,7 @@ def parse_request_data(
         else None
     )
     # Validate and coerce form data
-    task_collect_pip =
+    task_collect_pip = TaskCollectPip(
         package=package,
         package_version=package_version,
         package_extras=package_extras,
@@ -150,7 +150,7 @@ def parse_request_data(

 @router.post(
     "/collect/pip/",
-    response_model=
+    response_model=TaskGroupActivityRead,
 )
 async def collect_tasks_pip(
     response: Response,
@@ -160,7 +160,7 @@ async def collect_tasks_pip(
     user_group_id: int | None = None,
     user: UserOAuth = Depends(current_user_act_ver_prof),
     db: AsyncSession = Depends(get_async_db),
-) ->
+) -> TaskGroupActivityRead:
     """
     Task-collection endpoint
     """
@@ -221,7 +221,7 @@ async def collect_tasks_pip(
         wheel_file = None

     # Set pkg_name, version, origin and archive_path
-    if request_data.origin ==
+    if request_data.origin == TaskGroupOriginEnum.WHEELFILE:
         try:
             wheel_filename = request_data.file.filename
             wheel_info = _parse_wheel_filename(wheel_filename)
@@ -242,7 +242,7 @@ async def collect_tasks_pip(
             wheel_info["distribution"]
         )
         task_group_attrs["version"] = wheel_info["version"]
-    elif request_data.origin ==
+    elif request_data.origin == TaskGroupOriginEnum.PYPI:
         pkg_name = task_collect.package
         task_group_attrs["pkg_name"] = normalize_package_name(pkg_name)
         latest_version = await get_package_version_from_pypi(
@@ -278,7 +278,7 @@ async def collect_tasks_pip(

     # Validate TaskGroupV2 attributes
     try:
-
+        TaskGroupCreateStrict(**task_group_attrs)
     except ValidationError as e:
         raise HTTPException(
             status_code=status.HTTP_422_UNPROCESSABLE_CONTENT,
@@ -328,8 +328,8 @@ async def collect_tasks_pip(
     task_group_activity = TaskGroupActivityV2(
         user_id=task_group.user_id,
         taskgroupv2_id=task_group.id,
-        status=
-        action=
+        status=TaskGroupActivityStatus.PENDING,
+        action=TaskGroupActivityAction.COLLECT,
         pkg_name=task_group.pkg_name,
         version=task_group.version,
     )
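The `CollectionRequestData` hunks keep the existing `mode="before"` model validator, which decides the task-group `origin` from whether a wheel `file` or a `package` name was supplied. A reduced sketch of that validator pattern; field names and enum values below are simplified stand-ins for the ones imported above:

# Sketch only: derive `origin` from the raw input before field validation.
from pydantic import BaseModel, model_validator


class CollectionRequest(BaseModel):
    package: str | None = None
    filename: str | None = None  # stand-in for the uploaded wheel file
    origin: str | None = None

    @model_validator(mode="before")
    @classmethod
    def _set_origin(cls, values: dict) -> dict:
        if values.get("filename") is None:
            if values.get("package") is None:
                raise ValueError("When no `file` is provided, `package` is required.")
            values["origin"] = "pypi"
        else:
            if values.get("package") is not None:
                raise ValueError("Cannot provide both `file` and `package`.")
            values["origin"] = "wheel-file"
        return values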
--- fractal_server/app/routes/api/v2/task_collection_custom.py (2.18.0a4)
+++ fractal_server/app/routes/api/v2/task_collection_custom.py (2.18.0a5)
@@ -17,11 +17,11 @@ from fractal_server.app.routes.aux.validate_user_profile import (
     validate_user_profile,
 )
 from fractal_server.app.schemas.v2 import ResourceType
-from fractal_server.app.schemas.v2 import
-from fractal_server.app.schemas.v2 import
-from fractal_server.app.schemas.v2 import
-from fractal_server.app.schemas.v2 import
-from fractal_server.app.schemas.v2 import
+from fractal_server.app.schemas.v2 import TaskCollectCustom
+from fractal_server.app.schemas.v2 import TaskCreate
+from fractal_server.app.schemas.v2 import TaskGroupCreate
+from fractal_server.app.schemas.v2 import TaskGroupOriginEnum
+from fractal_server.app.schemas.v2 import TaskRead
 from fractal_server.logger import set_logger
 from fractal_server.string_tools import validate_cmd
 from fractal_server.tasks.v2.utils_background import prepare_tasks_metadata
@@ -38,16 +38,14 @@ router = APIRouter()
 logger = set_logger(__name__)


-@router.post(
-    "/collect/custom/", status_code=201, response_model=list[TaskReadV2]
-)
+@router.post("/collect/custom/", status_code=201, response_model=list[TaskRead])
 async def collect_task_custom(
-    task_collect:
+    task_collect: TaskCollectCustom,
     private: bool = False,
     user_group_id: int | None = None,
     user: UserOAuth = Depends(current_user_act_ver_prof),
     db: AsyncSession = Depends(get_async_db),
-) -> list[
+) -> list[TaskRead]:
     # Get validated resource and profile
     resource, profile = await validate_user_profile(user=user, db=db)
     resource_id = resource.id
@@ -139,7 +137,7 @@ async def collect_task_custom(
     else:
         package_root = Path(task_collect.package_root)

-    task_list: list[
+    task_list: list[TaskCreate] = prepare_tasks_metadata(
         package_manifest=task_collect.manifest,
         python_bin=Path(task_collect.python_interpreter),
         package_root=package_root,
@@ -148,14 +146,14 @@ async def collect_task_custom(

     # Prepare task-group attributes
     task_group_attrs = dict(
-        origin=
+        origin=TaskGroupOriginEnum.OTHER,
         pkg_name=task_collect.label,
         user_id=user.id,
         user_group_id=user_group_id,
         version=task_collect.version,
         resource_id=resource_id,
     )
-
+    TaskGroupCreate(**task_group_attrs)

     # Verify non-duplication constraints
     await _verify_non_duplication_user_constraint(
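The custom-collection hunk keeps the validate-and-discard step: `TaskGroupCreate(**task_group_attrs)` is instantiated only so that a `ValidationError` surfaces before the plain dict is used any further. A tiny sketch of the idiom, with an assumed, reduced field set:

# Sketch only: build the schema purely for its validation side effect.
from pydantic import BaseModel


class TaskGroupCreate(BaseModel):  # reduced, assumed field set
    pkg_name: str
    user_id: int
    version: str | None = None


task_group_attrs = dict(pkg_name="my-tasks", user_id=1, version="0.1.0")
TaskGroupCreate(**task_group_attrs)  # raises ValidationError on bad input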
--- fractal_server/app/routes/api/v2/task_collection_pixi.py (2.18.0a4)
+++ fractal_server/app/routes/api/v2/task_collection_pixi.py (2.18.0a5)
@@ -33,10 +33,10 @@ from fractal_server.app.routes.aux.validate_user_profile import (
 )
 from fractal_server.app.schemas.v2 import FractalUploadedFile
 from fractal_server.app.schemas.v2 import ResourceType
-from fractal_server.app.schemas.v2 import
-from fractal_server.app.schemas.v2 import
-from fractal_server.app.schemas.v2 import
-from fractal_server.app.schemas.v2.task_group import
+from fractal_server.app.schemas.v2 import TaskGroupActivityAction
+from fractal_server.app.schemas.v2 import TaskGroupActivityRead
+from fractal_server.app.schemas.v2 import TaskGroupActivityStatus
+from fractal_server.app.schemas.v2.task_group import TaskGroupOriginEnum
 from fractal_server.logger import set_logger
 from fractal_server.tasks.v2.local import collect_local_pixi
 from fractal_server.tasks.v2.ssh import collect_ssh_pixi
@@ -74,7 +74,7 @@ def validate_pkgname_and_version(filename: str) -> tuple[str, str]:
 @router.post(
     "/collect/pixi/",
     status_code=202,
-    response_model=
+    response_model=TaskGroupActivityRead,
 )
 async def collect_task_pixi(
     response: Response,
@@ -85,7 +85,7 @@ async def collect_task_pixi(
     user_group_id: int | None = None,
     user: UserOAuth = Depends(current_user_act_ver_prof),
     db: AsyncSession = Depends(get_async_db),
-) ->
+) -> TaskGroupActivityRead:
     # Get validated resource and profile
     resource, profile = await validate_user_profile(user=user, db=db)
     resource_id = resource.id
@@ -136,7 +136,7 @@ async def collect_task_pixi(
         user_id=user.id,
         user_group_id=user_group_id,
         resource_id=resource_id,
-        origin=
+        origin=TaskGroupOriginEnum.PIXI,
         pixi_version=pixi_version,
         pkg_name=pkg_name,
         version=version,
@@ -178,8 +178,8 @@ async def collect_task_pixi(
     task_group_activity = TaskGroupActivityV2(
         user_id=task_group.user_id,
         taskgroupv2_id=task_group.id,
-        status=
-        action=
+        status=TaskGroupActivityStatus.PENDING,
+        action=TaskGroupActivityAction.COLLECT,
         pkg_name=task_group.pkg_name,
         version=task_group.version,
     )