fractal-server 2.17.2__py3-none-any.whl → 2.18.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fractal_server/__init__.py +1 -1
- fractal_server/__main__.py +2 -1
- fractal_server/app/models/linkuserproject.py +40 -0
- fractal_server/app/models/security.py +7 -5
- fractal_server/app/models/v2/job.py +13 -2
- fractal_server/app/models/v2/resource.py +13 -0
- fractal_server/app/routes/admin/v2/__init__.py +11 -11
- fractal_server/app/routes/admin/v2/accounting.py +2 -2
- fractal_server/app/routes/admin/v2/job.py +34 -23
- fractal_server/app/routes/admin/v2/sharing.py +103 -0
- fractal_server/app/routes/admin/v2/task.py +9 -8
- fractal_server/app/routes/admin/v2/task_group.py +94 -16
- fractal_server/app/routes/admin/v2/task_group_lifecycle.py +20 -20
- fractal_server/app/routes/api/__init__.py +0 -9
- fractal_server/app/routes/api/v2/__init__.py +47 -47
- fractal_server/app/routes/api/v2/_aux_functions.py +65 -64
- fractal_server/app/routes/api/v2/_aux_functions_history.py +8 -3
- fractal_server/app/routes/api/v2/_aux_functions_sharing.py +97 -0
- fractal_server/app/routes/api/v2/_aux_functions_task_lifecycle.py +4 -4
- fractal_server/app/routes/api/v2/_aux_functions_tasks.py +2 -2
- fractal_server/app/routes/api/v2/dataset.py +89 -77
- fractal_server/app/routes/api/v2/history.py +28 -16
- fractal_server/app/routes/api/v2/images.py +22 -8
- fractal_server/app/routes/api/v2/job.py +40 -24
- fractal_server/app/routes/api/v2/pre_submission_checks.py +13 -6
- fractal_server/app/routes/api/v2/project.py +48 -25
- fractal_server/app/routes/api/v2/sharing.py +311 -0
- fractal_server/app/routes/api/v2/status_legacy.py +22 -33
- fractal_server/app/routes/api/v2/submit.py +76 -71
- fractal_server/app/routes/api/v2/task.py +15 -17
- fractal_server/app/routes/api/v2/task_collection.py +18 -18
- fractal_server/app/routes/api/v2/task_collection_custom.py +11 -13
- fractal_server/app/routes/api/v2/task_collection_pixi.py +9 -9
- fractal_server/app/routes/api/v2/task_group.py +18 -18
- fractal_server/app/routes/api/v2/task_group_lifecycle.py +26 -26
- fractal_server/app/routes/api/v2/task_version_update.py +12 -9
- fractal_server/app/routes/api/v2/workflow.py +41 -29
- fractal_server/app/routes/api/v2/workflow_import.py +25 -23
- fractal_server/app/routes/api/v2/workflowtask.py +25 -17
- fractal_server/app/routes/auth/_aux_auth.py +100 -0
- fractal_server/app/routes/auth/current_user.py +0 -63
- fractal_server/app/routes/auth/group.py +1 -30
- fractal_server/app/routes/auth/router.py +2 -0
- fractal_server/app/routes/auth/users.py +9 -0
- fractal_server/app/routes/auth/viewer_paths.py +43 -0
- fractal_server/app/schemas/user.py +29 -12
- fractal_server/app/schemas/user_group.py +0 -15
- fractal_server/app/schemas/v2/__init__.py +55 -48
- fractal_server/app/schemas/v2/dataset.py +35 -13
- fractal_server/app/schemas/v2/dumps.py +9 -9
- fractal_server/app/schemas/v2/job.py +11 -11
- fractal_server/app/schemas/v2/project.py +3 -3
- fractal_server/app/schemas/v2/resource.py +13 -4
- fractal_server/app/schemas/v2/sharing.py +99 -0
- fractal_server/app/schemas/v2/status_legacy.py +3 -3
- fractal_server/app/schemas/v2/task.py +6 -6
- fractal_server/app/schemas/v2/task_collection.py +4 -4
- fractal_server/app/schemas/v2/task_group.py +16 -16
- fractal_server/app/schemas/v2/workflow.py +16 -16
- fractal_server/app/schemas/v2/workflowtask.py +14 -14
- fractal_server/app/security/__init__.py +1 -1
- fractal_server/app/shutdown.py +6 -6
- fractal_server/config/__init__.py +0 -6
- fractal_server/config/_data.py +0 -79
- fractal_server/config/_main.py +6 -1
- fractal_server/data_migrations/2_18_0.py +30 -0
- fractal_server/images/models.py +1 -2
- fractal_server/main.py +72 -11
- fractal_server/migrations/versions/7910eed4cf97_user_project_dirs_and_usergroup_viewer_.py +60 -0
- fractal_server/migrations/versions/88270f589c9b_add_prevent_new_submissions.py +39 -0
- fractal_server/migrations/versions/bc0e8b3327a7_project_sharing.py +72 -0
- fractal_server/migrations/versions/f0702066b007_one_submitted_job_per_dataset.py +40 -0
- fractal_server/runner/config/_slurm.py +2 -0
- fractal_server/runner/executors/slurm_common/_batching.py +4 -10
- fractal_server/runner/executors/slurm_common/slurm_config.py +1 -0
- fractal_server/runner/executors/slurm_ssh/runner.py +1 -1
- fractal_server/runner/executors/slurm_sudo/runner.py +1 -1
- fractal_server/runner/v2/_local.py +4 -3
- fractal_server/runner/v2/_slurm_ssh.py +4 -3
- fractal_server/runner/v2/_slurm_sudo.py +4 -3
- fractal_server/runner/v2/runner.py +36 -17
- fractal_server/runner/v2/runner_functions.py +11 -14
- fractal_server/runner/v2/submit_workflow.py +22 -9
- fractal_server/tasks/v2/local/_utils.py +2 -2
- fractal_server/tasks/v2/local/collect.py +5 -6
- fractal_server/tasks/v2/local/collect_pixi.py +5 -6
- fractal_server/tasks/v2/local/deactivate.py +7 -7
- fractal_server/tasks/v2/local/deactivate_pixi.py +3 -3
- fractal_server/tasks/v2/local/delete.py +5 -5
- fractal_server/tasks/v2/local/reactivate.py +5 -5
- fractal_server/tasks/v2/local/reactivate_pixi.py +5 -5
- fractal_server/tasks/v2/ssh/collect.py +5 -5
- fractal_server/tasks/v2/ssh/collect_pixi.py +5 -5
- fractal_server/tasks/v2/ssh/deactivate.py +7 -7
- fractal_server/tasks/v2/ssh/deactivate_pixi.py +2 -2
- fractal_server/tasks/v2/ssh/delete.py +5 -5
- fractal_server/tasks/v2/ssh/reactivate.py +5 -5
- fractal_server/tasks/v2/ssh/reactivate_pixi.py +5 -5
- fractal_server/tasks/v2/utils_background.py +7 -7
- fractal_server/tasks/v2/utils_database.py +5 -5
- fractal_server/types/__init__.py +22 -0
- fractal_server/types/validators/__init__.py +3 -0
- fractal_server/types/validators/_common_validators.py +32 -0
- {fractal_server-2.17.2.dist-info → fractal_server-2.18.0.dist-info}/METADATA +3 -2
- {fractal_server-2.17.2.dist-info → fractal_server-2.18.0.dist-info}/RECORD +108 -98
- {fractal_server-2.17.2.dist-info → fractal_server-2.18.0.dist-info}/WHEEL +0 -0
- {fractal_server-2.17.2.dist-info → fractal_server-2.18.0.dist-info}/entry_points.txt +0 -0
- {fractal_server-2.17.2.dist-info → fractal_server-2.18.0.dist-info}/licenses/LICENSE +0 -0
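
The remainder of this page lists per-file diffs between the two wheels. A comparable report can be generated locally with just the Python standard library, since wheels are plain zip archives; the following is a minimal sketch, and the wheel filenames are assumptions (download both artifacts first, e.g. with `pip download fractal-server==2.17.2 --no-deps` and the same for 2.18.0):

```python
# Minimal sketch: compare the Python files shipped in two wheels (wheels are zip
# archives). The filenames below are assumptions; adjust them to the downloaded files.
import difflib
import zipfile

OLD_WHEEL = "fractal_server-2.17.2-py3-none-any.whl"
NEW_WHEEL = "fractal_server-2.18.0-py3-none-any.whl"


def python_files(wheel_path: str) -> dict[str, list[str]]:
    # Map each *.py member of the archive to its decoded lines.
    with zipfile.ZipFile(wheel_path) as zf:
        return {
            name: zf.read(name).decode("utf-8", errors="replace").splitlines(keepends=True)
            for name in zf.namelist()
            if name.endswith(".py")
        }


old_files = python_files(OLD_WHEEL)
new_files = python_files(NEW_WHEEL)
for name in sorted(set(old_files) | set(new_files)):
    diff = difflib.unified_diff(
        old_files.get(name, []),
        new_files.get(name, []),
        fromfile=f"2.17.2/{name}",
        tofile=f"2.18.0/{name}",
    )
    print("".join(diff), end="")
```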
fractal_server/app/routes/api/v2/status_legacy.py (+22 -33)

@@ -1,20 +1,19 @@
 from fastapi import APIRouter
 from fastapi import Depends
-from fastapi import HTTPException
-from fastapi import status

 from fractal_server.app.db import AsyncSession
 from fractal_server.app.db import get_async_db
 from fractal_server.app.models import UserOAuth
 from fractal_server.app.models.v2 import JobV2
 from fractal_server.app.routes.auth import current_user_act_ver_prof
-from fractal_server.app.schemas.v2.
-from fractal_server.app.schemas.v2.status_legacy import
+from fractal_server.app.schemas.v2.sharing import ProjectPermissions
+from fractal_server.app.schemas.v2.status_legacy import LegacyStatusRead
+from fractal_server.app.schemas.v2.status_legacy import WorkflowTaskStatusType
 from fractal_server.logger import set_logger

-from ._aux_functions import
+from ._aux_functions import _get_dataset_check_access
 from ._aux_functions import _get_submitted_jobs_statement
-from ._aux_functions import
+from ._aux_functions import _get_workflow_check_access

 router = APIRouter()

@@ -23,7 +22,7 @@ logger = set_logger(__name__)

 @router.get(
     "/project/{project_id}/status-legacy/",
-    response_model=
+    response_model=LegacyStatusRead,
 )
 async def get_workflowtask_status(
     project_id: int,
@@ -31,7 +30,7 @@ async def get_workflowtask_status(
     workflow_id: int,
     user: UserOAuth = Depends(current_user_act_ver_prof),
     db: AsyncSession = Depends(get_async_db),
-) ->
+) -> LegacyStatusRead | None:
     """
     Extract the status of all `WorkflowTaskV2` of a given `WorkflowV2` that ran
     on a given `DatasetV2`.
@@ -42,43 +41,33 @@
     order). See fractal-server GitHub issues: 793, 1083.
     """
     # Get the dataset DB entry
-    output = await
+    output = await _get_dataset_check_access(
         project_id=project_id,
         dataset_id=dataset_id,
         user_id=user.id,
+        required_permissions=ProjectPermissions.READ,
         db=db,
     )
     dataset = output["dataset"]

     # Get the workflow DB entry
-    workflow = await
+    workflow = await _get_workflow_check_access(
         project_id=project_id,
         workflow_id=workflow_id,
         user_id=user.id,
+        required_permissions=ProjectPermissions.READ,
         db=db,
     )

     # Check whether there exists a submitted job associated to this
     # workflow/dataset pair. If it does exist, it will be used later.
     # If there are multiple jobs, raise an error.
-
-
-
-
-
-
-        running_job = None
-    elif len(running_jobs) == 1:
-        running_job = running_jobs[0]
-    else:
-        string_ids = str([job.id for job in running_jobs])[1:-1]
-        raise HTTPException(
-            status_code=status.HTTP_422_UNPROCESSABLE_CONTENT,
-            detail=(
-                f"Cannot get WorkflowTaskV2 statuses as DatasetV2 {dataset.id}"
-                f" is linked to multiple active jobs: {string_ids}."
-            ),
-        )
+    res = await db.execute(
+        _get_submitted_jobs_statement()
+        .where(JobV2.dataset_id == dataset_id)
+        .where(JobV2.workflow_id == workflow_id)
+    )
+    running_job = res.scalars().one_or_none()

     # Initialize empty dictionary for WorkflowTaskV2 status
     workflow_tasks_status_dict: dict = {}
@@ -113,18 +102,18 @@
         ]
         try:
             first_submitted_index = running_job_statuses.index(
-
+                WorkflowTaskStatusType.SUBMITTED
             )
         except ValueError:
             logger.warning(
                 f"Job {running_job.id} is submitted but its task list does not"
-                f" contain a {
+                f" contain a {WorkflowTaskStatusType.SUBMITTED} task."
             )
             first_submitted_index = 0

         for wftask in running_job_wftasks[first_submitted_index:]:
             workflow_tasks_status_dict[wftask.id] = (
-
+                WorkflowTaskStatusType.SUBMITTED
             )

         # The last workflow task that is included in the submitted job is also
@@ -154,7 +143,7 @@
             # If a wftask ID was not found, ignore it and continue
             continue
         clean_workflow_tasks_status_dict[str(wf_task.id)] = wf_task_status
-        if wf_task_status ==
+        if wf_task_status == WorkflowTaskStatusType.FAILED:
             # Starting from the beginning of `workflow.task_list`, stop the
             # first time that you hit a failed job
             break
@@ -163,5 +152,5 @@
             # first time that you hit `last_valid_wftask_id``
             break

-    response_body =
+    response_body = LegacyStatusRead(status=clean_workflow_tasks_status_dict)
     return response_body
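
The status-legacy hunks above replace the hand-rolled "multiple submitted jobs" HTTPException with a single `one_or_none()` call; together with the new `f0702066b007_one_submitted_job_per_dataset.py` migration, this suggests the at-most-one-submitted-job rule is now enforced elsewhere. As a reminder of the `one_or_none()` contract the new query relies on, here is a self-contained SQLAlchemy sketch with a made-up `Job` table (not fractal-server code):

```python
# Made-up Job table; the point is only the one_or_none() contract:
# zero matches -> None, one match -> the row, several matches -> MultipleResultsFound.
from sqlalchemy import Column, Integer, String, create_engine, select
from sqlalchemy.exc import MultipleResultsFound
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()


class Job(Base):
    __tablename__ = "job"
    id = Column(Integer, primary_key=True)
    dataset_id = Column(Integer)
    status = Column(String)


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add_all(
        [Job(dataset_id=1, status="submitted"), Job(dataset_id=1, status="submitted")]
    )
    session.commit()

    no_match = select(Job).where(Job.dataset_id == 2, Job.status == "submitted")
    print(session.execute(no_match).scalars().one_or_none())  # None

    two_matches = select(Job).where(Job.dataset_id == 1, Job.status == "submitted")
    try:
        session.execute(two_matches).scalars().one_or_none()
    except MultipleResultsFound:
        print("more than one submitted job for this dataset")
```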
fractal_server/app/routes/api/v2/submit.py (+76 -71)

@@ -9,6 +9,7 @@ from fastapi import HTTPException
 from fastapi import Request
 from fastapi import status
 from sqlmodel import select
+from sqlmodel import update

 from fractal_server.app.db import AsyncSession
 from fractal_server.app.db import get_async_db
@@ -23,10 +24,11 @@ from fractal_server.app.routes.auth import current_user_act_ver_prof
 from fractal_server.app.routes.aux.validate_user_profile import (
     validate_user_profile,
 )
-from fractal_server.app.schemas.v2 import
-from fractal_server.app.schemas.v2 import
-from fractal_server.app.schemas.v2 import
+from fractal_server.app.schemas.v2 import JobCreate
+from fractal_server.app.schemas.v2 import JobRead
+from fractal_server.app.schemas.v2 import JobStatusType
 from fractal_server.app.schemas.v2 import ResourceType
+from fractal_server.app.schemas.v2.sharing import ProjectPermissions
 from fractal_server.config import get_settings
 from fractal_server.logger import set_logger
 from fractal_server.runner.set_start_and_last_task_index import (
@@ -35,9 +37,9 @@ from fractal_server.runner.set_start_and_last_task_index import (
 from fractal_server.runner.v2.submit_workflow import submit_workflow
 from fractal_server.syringe import Inject

-from ._aux_functions import
-from ._aux_functions import
-from ._aux_functions import
+from ._aux_functions import _get_dataset_check_access
+from ._aux_functions import _get_workflow_check_access
+from ._aux_functions import clean_app_job_list
 from ._aux_functions_tasks import _check_type_filters_compatibility

 FRACTAL_CACHE_DIR = ".fractal_cache"
@@ -48,34 +50,33 @@ logger = set_logger(__name__)
 @router.post(
     "/project/{project_id}/job/submit/",
     status_code=status.HTTP_202_ACCEPTED,
-    response_model=
+    response_model=JobRead,
 )
-async def
+async def submit_job(
     project_id: int,
     workflow_id: int,
     dataset_id: int,
-    job_create:
+    job_create: JobCreate,
     background_tasks: BackgroundTasks,
     request: Request,
     user: UserOAuth = Depends(current_user_act_ver_prof),
     db: AsyncSession = Depends(get_async_db),
-) ->
-    # Remove non-submitted
+) -> JobRead | None:
+    # Remove non-submitted Jobs from the app state when the list grows
     # beyond a threshold
-    # NOTE: this may lead to a race condition on `app.state.
-    # requests take place at the same time and `
+    # NOTE: this may lead to a race condition on `app.state.jobs` if two
+    # requests take place at the same time and `clean_app_job_list` is
    # somewhat slow.
     settings = Inject(get_settings)
-    if len(request.app.state.
-        new_jobs_list = await
-
-        )
-        request.app.state.jobsV2 = new_jobs_list
+    if len(request.app.state.jobs) > settings.FRACTAL_API_MAX_JOB_LIST_LENGTH:
+        new_jobs_list = await clean_app_job_list(db, request.app.state.jobs)
+        request.app.state.jobs = new_jobs_list

-    output = await
+    output = await _get_dataset_check_access(
         project_id=project_id,
         dataset_id=dataset_id,
         user_id=user.id,
+        required_permissions=ProjectPermissions.EXECUTE,
         db=db,
     )
     project = output["project"]
@@ -92,8 +93,12 @@ async def apply_workflow(
             detail="Project resource does not match with user's resource",
         )

-    workflow = await
-        project_id=project_id,
+    workflow = await _get_workflow_check_access(
+        project_id=project_id,
+        workflow_id=workflow_id,
+        user_id=user.id,
+        required_permissions=ProjectPermissions.EXECUTE,
+        db=db,
     )
     num_tasks = len(workflow.task_list)
     if num_tasks == 0:
@@ -141,35 +146,15 @@ async def apply_workflow(
         user=user,
         db=db,
     )
-
-    # Check that no other job with the same dataset_id is SUBMITTED
-    stm = (
-        select(JobV2)
-        .where(JobV2.dataset_id == dataset_id)
-        .where(JobV2.status == JobStatusTypeV2.SUBMITTED)
-    )
-    res = await db.execute(stm)
-    if res.scalars().all():
+    if resource.prevent_new_submissions:
         raise HTTPException(
             status_code=status.HTTP_422_UNPROCESSABLE_CONTENT,
             detail=(
-                f"
+                f"The '{resource.name}' resource does not currently accept "
+                "new job submissions."
             ),
         )

-    if job_create.slurm_account is not None:
-        if job_create.slurm_account not in user.slurm_accounts:
-            raise HTTPException(
-                status_code=status.HTTP_422_UNPROCESSABLE_CONTENT,
-                detail=(
-                    f"SLURM account '{job_create.slurm_account}' is not "
-                    "among those available to the current user"
-                ),
-            )
-    else:
-        if len(user.slurm_accounts) > 0:
-            job_create.slurm_account = user.slurm_accounts[0]
-
     # User appropriate FractalSSH object
     if resource.type == ResourceType.SLURM_SSH:
         ssh_config = dict(
@@ -192,6 +177,35 @@ async def apply_workflow(
     else:
         fractal_ssh = None

+    # Assign `job_create.slurm_account`
+    if job_create.slurm_account is not None:
+        if job_create.slurm_account not in user.slurm_accounts:
+            raise HTTPException(
+                status_code=status.HTTP_422_UNPROCESSABLE_CONTENT,
+                detail=(
+                    f"SLURM account '{job_create.slurm_account}' is not "
+                    "among those available to the current user"
+                ),
+            )
+    else:
+        if len(user.slurm_accounts) > 0:
+            job_create.slurm_account = user.slurm_accounts[0]
+
+    # Check that no other job with the same dataset_id is SUBMITTED
+    stm = (
+        select(JobV2)
+        .where(JobV2.dataset_id == dataset_id)
+        .where(JobV2.status == JobStatusType.SUBMITTED)
+    )
+    res = await db.execute(stm)
+    if res.scalars().all():
+        raise HTTPException(
+            status_code=status.HTTP_422_UNPROCESSABLE_CONTENT,
+            detail=(
+                f"Dataset {dataset_id} is already in use in submitted job(s)."
+            ),
+        )
+
     # Add new Job object to DB
     job = JobV2(
         project_id=project_id,
@@ -215,38 +229,31 @@ async def apply_workflow(
     await db.refresh(job)

     # Update TaskGroupV2.timestamp_last_used
-
-
+    await db.execute(
+        update(TaskGroupV2)
+        .where(TaskGroupV2.id.in_(used_task_group_ids))
+        .values(timestamp_last_used=job.start_timestamp)
     )
-    used_task_groups = res.scalars().all()
-    for used_task_group in used_task_groups:
-        used_task_group.timestamp_last_used = job.start_timestamp
-        db.add(used_task_group)
     await db.commit()

-    # Define
-
-
+    # Define `cache_dir`
+    cache_dir = Path(user.project_dirs[0], FRACTAL_CACHE_DIR)
+
+    # Define server-side and user-side job directories
+    timestamp_string = job.start_timestamp.strftime(r"%Y%m%d_%H%M%S")
+    working_dir = Path(resource.jobs_local_dir) / (
         f"proj_v2_{project_id:07d}_wf_{workflow_id:07d}_job_{job.id:07d}"
         f"_{timestamp_string}"
     )
-
-    # Define user-side job directory
-    cache_dir = Path(user.project_dir, FRACTAL_CACHE_DIR)
     match resource.type:
         case ResourceType.LOCAL:
-
+            working_dir_user = working_dir
         case ResourceType.SLURM_SUDO:
-
+            working_dir_user = cache_dir / working_dir.name
         case ResourceType.SLURM_SSH:
-
-
-
-            )
-
-    # Update job folders in the db
-    job.working_dir = WORKFLOW_DIR_LOCAL.as_posix()
-    job.working_dir_user = WORKFLOW_DIR_REMOTE.as_posix()
+            working_dir_user = Path(profile.jobs_remote_dir, working_dir.name)
+    job.working_dir = working_dir.as_posix()
+    job.working_dir_user = working_dir_user.as_posix()
     await db.merge(job)
     await db.commit()

@@ -262,11 +269,9 @@ async def apply_workflow(
         resource=resource,
         profile=profile,
     )
-    request.app.state.
+    request.app.state.jobs.append(job.id)
     logger.info(
-        f"
-        f"
-        f"{request.app.state.jobsV2}"
+        f"Job {job.id}, worker with pid {os.getpid()}. "
+        f"Worker jobs list: {request.app.state.jobs}."
     )
-    await db.close()
     return job
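
Among the submit-endpoint changes above, the `TaskGroupV2.timestamp_last_used` bookkeeping moves from a load-modify-add loop to a single bulk UPDATE statement. A self-contained sketch of that pattern, using a made-up table and plain SQLAlchemy rather than the application's SQLModel models:

```python
# Made-up TaskGroup table; one UPDATE ... WHERE id IN (...) replaces the former
# "load every row, mutate it, add it back" loop.
from datetime import datetime

from sqlalchemy import Column, DateTime, Integer, create_engine, select, update
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()


class TaskGroup(Base):
    __tablename__ = "task_group"
    id = Column(Integer, primary_key=True)
    timestamp_last_used = Column(DateTime)


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add_all([TaskGroup(id=i) for i in (1, 2, 3)])
    session.commit()

    used_task_group_ids = [1, 3]
    session.execute(
        update(TaskGroup)
        .where(TaskGroup.id.in_(used_task_group_ids))
        .values(timestamp_last_used=datetime.now())
    )
    session.commit()

    for task_group in session.execute(select(TaskGroup)).scalars():
        print(task_group.id, task_group.timestamp_last_used)
```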
fractal_server/app/routes/api/v2/task.py (+15 -17)

@@ -25,11 +25,11 @@ from fractal_server.app.models import UserOAuth
 from fractal_server.app.models.v2 import TaskGroupV2
 from fractal_server.app.models.v2 import TaskV2
 from fractal_server.app.routes.auth import current_user_act_ver_prof
-from fractal_server.app.schemas.v2 import
-from fractal_server.app.schemas.v2 import
-from fractal_server.app.schemas.v2 import
+from fractal_server.app.schemas.v2 import TaskCreate
+from fractal_server.app.schemas.v2 import TaskGroupOriginEnum
+from fractal_server.app.schemas.v2 import TaskRead
 from fractal_server.app.schemas.v2 import TaskType
-from fractal_server.app.schemas.v2 import
+from fractal_server.app.schemas.v2 import TaskUpdate
 from fractal_server.logger import set_logger

 router = APIRouter()
@@ -37,7 +37,7 @@ router = APIRouter()
 logger = set_logger(__name__)


-@router.get("/", response_model=list[
+@router.get("/", response_model=list[TaskRead])
 async def get_list_task(
     args_schema: bool = True,
     category: str | None = None,
@@ -45,7 +45,7 @@ async def get_list_task(
     author: str | None = None,
     user: UserOAuth = Depends(current_user_act_ver_prof),
     db: AsyncSession = Depends(get_async_db),
-) -> list[
+) -> list[TaskRead]:
     """
     Get list of available tasks
     """
@@ -86,12 +86,12 @@ async def get_list_task(
     return task_list


-@router.get("/{task_id}/", response_model=
+@router.get("/{task_id}/", response_model=TaskRead)
 async def get_task(
     task_id: int,
     user: UserOAuth = Depends(current_user_act_ver_prof),
     db: AsyncSession = Depends(get_async_db),
-) ->
+) -> TaskRead:
     """
     Get info on a specific task
     """
@@ -99,13 +99,13 @@ async def get_task(
     return task


-@router.patch("/{task_id}/", response_model=
+@router.patch("/{task_id}/", response_model=TaskRead)
 async def patch_task(
     task_id: int,
-    task_update:
+    task_update: TaskUpdate,
     user: UserOAuth = Depends(current_user_act_ver_prof),
     db: AsyncSession = Depends(get_async_db),
-) ->
+) -> TaskRead | None:
     """
     Edit a specific task (restricted to task owner)
     """
@@ -137,16 +137,14 @@ async def patch_task(
     return db_task


-@router.post(
-    "/", response_model=TaskReadV2, status_code=status.HTTP_201_CREATED
-)
+@router.post("/", response_model=TaskRead, status_code=status.HTTP_201_CREATED)
 async def create_task(
-    task:
+    task: TaskCreate,
     user_group_id: int | None = None,
     private: bool = False,
     user: UserOAuth = Depends(current_user_act_ver_prof),
     db: AsyncSession = Depends(get_async_db),
-) ->
+) -> TaskRead | None:
     """
     Create a new task
     """
@@ -211,7 +209,7 @@ async def create_task(
         resource_id=resource_id,
         active=True,
         task_list=[db_task],
-        origin=
+        origin=TaskGroupOriginEnum.OTHER,
         version=db_task.version,
         pkg_name=pkg_name,
     )
fractal_server/app/routes/api/v2/task_collection.py (+18 -18)

@@ -25,12 +25,12 @@ from fractal_server.app.routes.aux.validate_user_profile import (
 )
 from fractal_server.app.schemas.v2 import FractalUploadedFile
 from fractal_server.app.schemas.v2 import ResourceType
-from fractal_server.app.schemas.v2 import
-from fractal_server.app.schemas.v2 import
-from fractal_server.app.schemas.v2 import
-from fractal_server.app.schemas.v2 import
-from fractal_server.app.schemas.v2 import
-from fractal_server.app.schemas.v2 import
+from fractal_server.app.schemas.v2 import TaskCollectPip
+from fractal_server.app.schemas.v2 import TaskGroupActivityAction
+from fractal_server.app.schemas.v2 import TaskGroupActivityRead
+from fractal_server.app.schemas.v2 import TaskGroupActivityStatus
+from fractal_server.app.schemas.v2 import TaskGroupCreateStrict
+from fractal_server.app.schemas.v2 import TaskGroupOriginEnum
 from fractal_server.logger import reset_logger_handlers
 from fractal_server.logger import set_logger
 from fractal_server.tasks.v2.local.collect import collect_local
@@ -59,9 +59,9 @@ class CollectionRequestData(BaseModel):
     Validate form data _and_ wheel file.
     """

-    task_collect:
+    task_collect: TaskCollectPip
     file: UploadFile | None = None
-    origin:
+    origin: TaskGroupOriginEnum

     @model_validator(mode="before")
     @classmethod
@@ -75,7 +75,7 @@ class CollectionRequestData(BaseModel):
                 raise ValueError(
                     "When no `file` is provided, `package` is required."
                 )
-            values["origin"] =
+            values["origin"] = TaskGroupOriginEnum.PYPI
         else:
             if package is not None:
                 raise ValueError(
@@ -87,7 +87,7 @@ class CollectionRequestData(BaseModel):
                     "Cannot set `package_version` when `file` is "
                     f"provided (given package_version='{package_version}')."
                 )
-            values["origin"] =
+            values["origin"] = TaskGroupOriginEnum.WHEELFILE

             for forbidden_char in FORBIDDEN_CHAR_WHEEL:
                 if forbidden_char in file.filename:
@@ -125,7 +125,7 @@ def parse_request_data(
         else None
     )
     # Validate and coerce form data
-    task_collect_pip =
+    task_collect_pip = TaskCollectPip(
         package=package,
         package_version=package_version,
         package_extras=package_extras,
@@ -150,7 +150,7 @@ def parse_request_data(

 @router.post(
     "/collect/pip/",
-    response_model=
+    response_model=TaskGroupActivityRead,
 )
 async def collect_tasks_pip(
     response: Response,
@@ -160,7 +160,7 @@ async def collect_tasks_pip(
     user_group_id: int | None = None,
     user: UserOAuth = Depends(current_user_act_ver_prof),
     db: AsyncSession = Depends(get_async_db),
-) ->
+) -> TaskGroupActivityRead:
     """
     Task-collection endpoint
     """
@@ -221,7 +221,7 @@ async def collect_tasks_pip(
         wheel_file = None

     # Set pkg_name, version, origin and archive_path
-    if request_data.origin ==
+    if request_data.origin == TaskGroupOriginEnum.WHEELFILE:
         try:
             wheel_filename = request_data.file.filename
             wheel_info = _parse_wheel_filename(wheel_filename)
@@ -242,7 +242,7 @@ async def collect_tasks_pip(
             wheel_info["distribution"]
         )
         task_group_attrs["version"] = wheel_info["version"]
-    elif request_data.origin ==
+    elif request_data.origin == TaskGroupOriginEnum.PYPI:
         pkg_name = task_collect.package
         task_group_attrs["pkg_name"] = normalize_package_name(pkg_name)
         latest_version = await get_package_version_from_pypi(
@@ -278,7 +278,7 @@ async def collect_tasks_pip(

     # Validate TaskGroupV2 attributes
     try:
-
+        TaskGroupCreateStrict(**task_group_attrs)
     except ValidationError as e:
         raise HTTPException(
             status_code=status.HTTP_422_UNPROCESSABLE_CONTENT,
@@ -328,8 +328,8 @@ async def collect_tasks_pip(
     task_group_activity = TaskGroupActivityV2(
         user_id=task_group.user_id,
         taskgroupv2_id=task_group.id,
-        status=
-        action=
+        status=TaskGroupActivityStatus.PENDING,
+        action=TaskGroupActivityAction.COLLECT,
         pkg_name=task_group.pkg_name,
         version=task_group.version,
     )
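
The `CollectionRequestData` hunks above keep the existing validation pattern: a `mode="before"` model validator inspects the raw form data and fills in the task-group `origin` before field validation runs. A self-contained pydantic-v2 sketch of that pattern (the model and enum values here are invented for illustration):

```python
# Invented model: the "before" validator decides `origin` from the raw input,
# mirroring the CollectionRequestData logic (uploaded file vs. PyPI package).
from enum import Enum

from pydantic import BaseModel, model_validator


class Origin(str, Enum):
    PYPI = "pypi"
    WHEELFILE = "wheel-file"


class CollectionRequest(BaseModel):
    package: str | None = None
    filename: str | None = None
    origin: Origin

    @model_validator(mode="before")
    @classmethod
    def set_origin(cls, values: dict) -> dict:
        if values.get("filename") is None:
            if values.get("package") is None:
                raise ValueError("When no file is provided, `package` is required.")
            values["origin"] = Origin.PYPI
        else:
            if values.get("package") is not None:
                raise ValueError("Cannot set `package` when a file is provided.")
            values["origin"] = Origin.WHEELFILE
        return values


print(CollectionRequest(package="my-tasks").origin)  # Origin.PYPI
print(CollectionRequest(filename="pkg-1.0-py3-none-any.whl").origin)  # Origin.WHEELFILE
```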
fractal_server/app/routes/api/v2/task_collection_custom.py (+11 -13)

@@ -17,11 +17,11 @@ from fractal_server.app.routes.aux.validate_user_profile import (
     validate_user_profile,
 )
 from fractal_server.app.schemas.v2 import ResourceType
-from fractal_server.app.schemas.v2 import
-from fractal_server.app.schemas.v2 import
-from fractal_server.app.schemas.v2 import
-from fractal_server.app.schemas.v2 import
-from fractal_server.app.schemas.v2 import
+from fractal_server.app.schemas.v2 import TaskCollectCustom
+from fractal_server.app.schemas.v2 import TaskCreate
+from fractal_server.app.schemas.v2 import TaskGroupCreate
+from fractal_server.app.schemas.v2 import TaskGroupOriginEnum
+from fractal_server.app.schemas.v2 import TaskRead
 from fractal_server.logger import set_logger
 from fractal_server.string_tools import validate_cmd
 from fractal_server.tasks.v2.utils_background import prepare_tasks_metadata
@@ -38,16 +38,14 @@ router = APIRouter()
 logger = set_logger(__name__)


-@router.post(
-    "/collect/custom/", status_code=201, response_model=list[TaskReadV2]
-)
+@router.post("/collect/custom/", status_code=201, response_model=list[TaskRead])
 async def collect_task_custom(
-    task_collect:
+    task_collect: TaskCollectCustom,
     private: bool = False,
     user_group_id: int | None = None,
     user: UserOAuth = Depends(current_user_act_ver_prof),
     db: AsyncSession = Depends(get_async_db),
-) -> list[
+) -> list[TaskRead]:
     # Get validated resource and profile
     resource, profile = await validate_user_profile(user=user, db=db)
     resource_id = resource.id
@@ -139,7 +137,7 @@ async def collect_task_custom(
     else:
         package_root = Path(task_collect.package_root)

-    task_list: list[
+    task_list: list[TaskCreate] = prepare_tasks_metadata(
         package_manifest=task_collect.manifest,
         python_bin=Path(task_collect.python_interpreter),
         package_root=package_root,
@@ -148,14 +146,14 @@ async def collect_task_custom(

     # Prepare task-group attributes
     task_group_attrs = dict(
-        origin=
+        origin=TaskGroupOriginEnum.OTHER,
         pkg_name=task_collect.label,
         user_id=user.id,
         user_group_id=user_group_id,
         version=task_collect.version,
         resource_id=resource_id,
     )
-
+    TaskGroupCreate(**task_group_attrs)

     # Verify non-duplication constraints
     await _verify_non_duplication_user_constraint(