fractal-server 2.14.5__py3-none-any.whl → 2.14.7__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fractal_server/__init__.py +1 -1
- fractal_server/app/db/__init__.py +2 -2
- fractal_server/app/models/security.py +8 -8
- fractal_server/app/models/user_settings.py +8 -10
- fractal_server/app/models/v2/accounting.py +2 -3
- fractal_server/app/models/v2/dataset.py +1 -2
- fractal_server/app/models/v2/history.py +3 -4
- fractal_server/app/models/v2/job.py +10 -11
- fractal_server/app/models/v2/project.py +1 -2
- fractal_server/app/models/v2/task.py +13 -14
- fractal_server/app/models/v2/task_group.py +15 -16
- fractal_server/app/models/v2/workflow.py +1 -2
- fractal_server/app/models/v2/workflowtask.py +6 -7
- fractal_server/app/routes/admin/v2/accounting.py +3 -4
- fractal_server/app/routes/admin/v2/job.py +13 -14
- fractal_server/app/routes/admin/v2/project.py +2 -4
- fractal_server/app/routes/admin/v2/task.py +11 -13
- fractal_server/app/routes/admin/v2/task_group.py +15 -17
- fractal_server/app/routes/admin/v2/task_group_lifecycle.py +5 -8
- fractal_server/app/routes/api/v2/__init__.py +2 -0
- fractal_server/app/routes/api/v2/_aux_functions.py +7 -9
- fractal_server/app/routes/api/v2/_aux_functions_history.py +1 -1
- fractal_server/app/routes/api/v2/_aux_functions_task_lifecycle.py +1 -3
- fractal_server/app/routes/api/v2/_aux_functions_tasks.py +5 -6
- fractal_server/app/routes/api/v2/dataset.py +6 -8
- fractal_server/app/routes/api/v2/history.py +5 -8
- fractal_server/app/routes/api/v2/images.py +2 -3
- fractal_server/app/routes/api/v2/job.py +5 -6
- fractal_server/app/routes/api/v2/pre_submission_checks.py +1 -3
- fractal_server/app/routes/api/v2/project.py +2 -4
- fractal_server/app/routes/api/v2/status_legacy.py +2 -4
- fractal_server/app/routes/api/v2/submit.py +3 -4
- fractal_server/app/routes/api/v2/task.py +6 -7
- fractal_server/app/routes/api/v2/task_collection.py +11 -13
- fractal_server/app/routes/api/v2/task_collection_custom.py +4 -4
- fractal_server/app/routes/api/v2/task_group.py +6 -8
- fractal_server/app/routes/api/v2/task_group_lifecycle.py +6 -9
- fractal_server/app/routes/api/v2/task_version_update.py +270 -0
- fractal_server/app/routes/api/v2/workflow.py +5 -6
- fractal_server/app/routes/api/v2/workflow_import.py +3 -5
- fractal_server/app/routes/api/v2/workflowtask.py +2 -114
- fractal_server/app/routes/auth/current_user.py +2 -2
- fractal_server/app/routes/pagination.py +2 -3
- fractal_server/app/runner/exceptions.py +15 -16
- fractal_server/app/runner/executors/base_runner.py +3 -3
- fractal_server/app/runner/executors/call_command_wrapper.py +1 -1
- fractal_server/app/runner/executors/local/get_local_config.py +2 -3
- fractal_server/app/runner/executors/local/runner.py +1 -1
- fractal_server/app/runner/executors/slurm_common/_batching.py +2 -3
- fractal_server/app/runner/executors/slurm_common/_slurm_config.py +27 -29
- fractal_server/app/runner/executors/slurm_common/base_slurm_runner.py +32 -14
- fractal_server/app/runner/executors/slurm_common/get_slurm_config.py +2 -3
- fractal_server/app/runner/executors/slurm_common/remote.py +2 -2
- fractal_server/app/runner/executors/slurm_common/slurm_job_task_models.py +2 -3
- fractal_server/app/runner/executors/slurm_ssh/run_subprocess.py +2 -3
- fractal_server/app/runner/executors/slurm_ssh/runner.py +5 -4
- fractal_server/app/runner/executors/slurm_sudo/_subprocess_run_as_user.py +1 -2
- fractal_server/app/runner/executors/slurm_sudo/runner.py +7 -8
- fractal_server/app/runner/set_start_and_last_task_index.py +2 -5
- fractal_server/app/runner/shutdown.py +5 -11
- fractal_server/app/runner/task_files.py +3 -5
- fractal_server/app/runner/v2/_local.py +3 -4
- fractal_server/app/runner/v2/_slurm_ssh.py +8 -7
- fractal_server/app/runner/v2/_slurm_sudo.py +8 -9
- fractal_server/app/runner/v2/runner.py +4 -5
- fractal_server/app/runner/v2/runner_functions.py +4 -5
- fractal_server/app/runner/v2/submit_workflow.py +12 -11
- fractal_server/app/runner/v2/task_interface.py +2 -3
- fractal_server/app/runner/versions.py +1 -2
- fractal_server/app/schemas/user.py +2 -4
- fractal_server/app/schemas/user_group.py +1 -2
- fractal_server/app/schemas/user_settings.py +19 -21
- fractal_server/app/schemas/v2/dataset.py +2 -3
- fractal_server/app/schemas/v2/dumps.py +13 -15
- fractal_server/app/schemas/v2/history.py +6 -7
- fractal_server/app/schemas/v2/job.py +17 -18
- fractal_server/app/schemas/v2/manifest.py +12 -13
- fractal_server/app/schemas/v2/status_legacy.py +2 -2
- fractal_server/app/schemas/v2/task.py +29 -30
- fractal_server/app/schemas/v2/task_collection.py +8 -9
- fractal_server/app/schemas/v2/task_group.py +22 -23
- fractal_server/app/schemas/v2/workflow.py +1 -2
- fractal_server/app/schemas/v2/workflowtask.py +27 -29
- fractal_server/app/security/__init__.py +10 -12
- fractal_server/config.py +32 -33
- fractal_server/images/models.py +2 -4
- fractal_server/images/tools.py +4 -7
- fractal_server/logger.py +3 -5
- fractal_server/ssh/_fabric.py +37 -12
- fractal_server/string_tools.py +2 -2
- fractal_server/syringe.py +1 -1
- fractal_server/tasks/v2/local/collect.py +2 -3
- fractal_server/tasks/v2/local/deactivate.py +1 -1
- fractal_server/tasks/v2/local/reactivate.py +1 -1
- fractal_server/tasks/v2/ssh/collect.py +256 -245
- fractal_server/tasks/v2/ssh/deactivate.py +210 -187
- fractal_server/tasks/v2/ssh/reactivate.py +154 -146
- fractal_server/tasks/v2/utils_background.py +2 -3
- fractal_server/types/__init__.py +1 -2
- fractal_server/types/validators/_filter_validators.py +1 -2
- fractal_server/utils.py +4 -5
- fractal_server/zip_tools.py +1 -1
- {fractal_server-2.14.5.dist-info → fractal_server-2.14.7.dist-info}/METADATA +2 -3
- {fractal_server-2.14.5.dist-info → fractal_server-2.14.7.dist-info}/RECORD +107 -107
- fractal_server/app/history/__init__.py +0 -0
- {fractal_server-2.14.5.dist-info → fractal_server-2.14.7.dist-info}/LICENSE +0 -0
- {fractal_server-2.14.5.dist-info → fractal_server-2.14.7.dist-info}/WHEEL +0 -0
- {fractal_server-2.14.5.dist-info → fractal_server-2.14.7.dist-info}/entry_points.txt +0 -0
fractal_server/app/routes/api/v2/task_group_lifecycle.py

@@ -22,6 +22,7 @@ from fractal_server.app.schemas.v2 import TaskGroupReadV2
 from fractal_server.app.schemas.v2 import TaskGroupV2OriginEnum
 from fractal_server.config import get_settings
 from fractal_server.logger import set_logger
+from fractal_server.ssh._fabric import SSHConfig
 from fractal_server.syringe import Inject
 from fractal_server.tasks.v2.local import deactivate_local
 from fractal_server.tasks.v2.local import reactivate_local
@@ -119,19 +120,17 @@ async def deactivate_task_group(
     )

     # User appropriate FractalSSH object
-    ssh_credentials = dict(
+    ssh_config = SSHConfig(
         user=user_settings.ssh_username,
         host=user_settings.ssh_host,
         key_path=user_settings.ssh_private_key_path,
     )
-    fractal_ssh_list = request.app.state.fractal_ssh_list
-    fractal_ssh = fractal_ssh_list.get(**ssh_credentials)

     background_tasks.add_task(
         deactivate_ssh,
         task_group_id=task_group.id,
         task_group_activity_id=task_group_activity.id,
-        fractal_ssh=fractal_ssh,
+        ssh_config=ssh_config,
         tasks_base_dir=user_settings.ssh_tasks_dir,
     )

@@ -241,20 +240,18 @@ async def reactivate_task_group(
         user=user, backend=settings.FRACTAL_RUNNER_BACKEND, db=db
     )

-    # Use appropriate
-    ssh_credentials = dict(
+    # Use appropriate SSH credentials
+    ssh_config = SSHConfig(
         user=user_settings.ssh_username,
         host=user_settings.ssh_host,
         key_path=user_settings.ssh_private_key_path,
     )
-    fractal_ssh_list = request.app.state.fractal_ssh_list
-    fractal_ssh = fractal_ssh_list.get(**ssh_credentials)

     background_tasks.add_task(
         reactivate_ssh,
         task_group_id=task_group.id,
         task_group_activity_id=task_group_activity.id,
-        fractal_ssh=fractal_ssh,
+        ssh_config=ssh_config,
         tasks_base_dir=user_settings.ssh_tasks_dir,
     )

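The hunks above stop resolving a live FractalSSH connection inside the request handler; the endpoint now only packages the user's SSH settings into an SSHConfig and hands it to the background task (deactivate_ssh / reactivate_ssh), which is then responsible for opening the connection itself. The following is a rough, hypothetical sketch of how a background task could consume such a config with Fabric (the library the fractal_server.ssh._fabric module name suggests); the helper name and usage are illustrative only and are not fractal-server's actual implementation:

# Illustrative sketch only, not fractal-server code.
from fabric import Connection


def run_over_ssh(ssh_config, command: str) -> str:
    # Open a connection from an SSHConfig-like object (user, host, key_path)
    # and run a single command, returning its stdout.
    with Connection(
        host=ssh_config.host,
        user=ssh_config.user,
        connect_kwargs={"key_filename": ssh_config.key_path},
    ) as conn:
        result = conn.run(command, hide=True)
        return result.stdout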
fractal_server/app/routes/api/v2/task_version_update.py (new file)

@@ -0,0 +1,270 @@
+from fastapi import APIRouter
+from fastapi import Depends
+from fastapi import HTTPException
+from fastapi import status
+from packaging.version import parse
+from packaging.version import Version
+from pydantic import BaseModel
+from pydantic import ConfigDict
+from sqlmodel import cast
+from sqlmodel import or_
+from sqlmodel import select
+from sqlmodel import String
+
+from ....db import AsyncSession
+from ....db import get_async_db
+from ....models import LinkUserGroup
+from ....models.v2 import TaskV2
+from ._aux_functions import _get_workflow_check_owner
+from ._aux_functions import _get_workflow_task_check_owner
+from ._aux_functions_tasks import _check_type_filters_compatibility
+from ._aux_functions_tasks import _get_task_group_or_404
+from ._aux_functions_tasks import _get_task_read_access
+from fractal_server.app.models import UserOAuth
+from fractal_server.app.models.v2 import TaskGroupV2
+from fractal_server.app.models.v2 import WorkflowTaskV2
+from fractal_server.app.routes.auth import current_active_user
+from fractal_server.app.schemas.v2 import WorkflowTaskReadV2
+from fractal_server.app.schemas.v2 import WorkflowTaskReplaceV2
+
+
+router = APIRouter()
+
+
+VALID_TYPE_UPDATES = {
+    ("non_parallel", "converter_non_parallel"),
+    ("compound", "converter_compound"),
+    ("converter_non_parallel", "converter_non_parallel"),
+    ("converter_compound", "converter_compound"),
+    ("non_parallel", "non_parallel"),
+    ("compound", "compound"),
+    ("parallel", "parallel"),
+}
+
+
+def _is_type_update_valid(*, old_type: str, new_type: str) -> bool:
+    return (old_type, new_type) in VALID_TYPE_UPDATES
+
+
+def _is_version_parsable(version: str) -> bool:
+    try:
+        parse(version)
+        return True
+    except Exception:
+        return False
+
+
+class TaskVersion(BaseModel):
+    model_config = ConfigDict(arbitrary_types_allowed=True)
+
+    task_id: int
+    version: str
+    parsed_version: Version
+
+
+class TaskVersionRead(BaseModel):
+    task_id: int
+    version: str
+
+
+@router.get(
+    "/project/{project_id}/workflow/{workflow_id}/version-update-candidates/"
+)
+async def get_workflow_version_update_candidates(
+    project_id: int,
+    workflow_id: int,
+    user: UserOAuth = Depends(current_active_user),
+    db: AsyncSession = Depends(get_async_db),
+) -> list[list[TaskVersionRead]]:
+
+    workflow = await _get_workflow_check_owner(
+        project_id=project_id,
+        workflow_id=workflow_id,
+        user_id=user.id,
+        db=db,
+    )
+
+    response = []
+    for wftask in workflow.task_list:
+        current_task = wftask.task
+
+        # Skip tasks with no args schemas
+        if not (
+            current_task.args_schema_parallel
+            or current_task.args_schema_non_parallel
+        ):
+            response.append([])
+            continue
+
+        current_task_group = await _get_task_group_or_404(
+            task_group_id=current_task.taskgroupv2_id, db=db
+        )
+
+        # Skip tasks with non-parsable version
+        if _is_version_parsable(current_task_group.version):
+            current_parsed_version = parse(current_task_group.version)
+        else:
+            response.append([])
+            continue
+
+        res = await db.execute(
+            select(TaskV2.id, TaskV2.type, TaskGroupV2.version)
+            .where(
+                or_(
+                    cast(TaskV2.args_schema_parallel, String) != "null",
+                    cast(TaskV2.args_schema_non_parallel, String) != "null",
+                )
+            )
+            .where(TaskV2.name == current_task.name)
+            .where(TaskV2.taskgroupv2_id == TaskGroupV2.id)
+            .where(TaskGroupV2.pkg_name == current_task_group.pkg_name)
+            .where(TaskGroupV2.active.is_(True))
+            .where(
+                or_(
+                    TaskGroupV2.user_id == user.id,
+                    TaskGroupV2.user_group_id.in_(
+                        select(LinkUserGroup.group_id).where(
+                            LinkUserGroup.user_id == user.id
+                        )
+                    ),
+                )
+            )
+        )
+        query_results: list[tuple[int, str, str]] = res.all()
+
+        # Exclude tasks with non-compatible types or non-parsable versions
+        current_task_type = current_task.type
+        update_candidates = [
+            TaskVersion(
+                task_id=task_id,
+                version=version,
+                parsed_version=parse(version),
+            )
+            for task_id, _type, version in query_results
+            if (
+                _is_type_update_valid(
+                    old_type=current_task_type,
+                    new_type=_type,
+                )
+                and _is_version_parsable(version)
+            )
+        ]
+        # Exclude tasks with old versions from update candidates
+        update_candidates = [
+            item
+            for item in update_candidates
+            if item.parsed_version > current_parsed_version
+        ]
+        # Sort update candidates by parsed version
+        update_candidates = sorted(
+            update_candidates,
+            key=lambda obj: obj.parsed_version,
+        )
+        response.append(update_candidates)
+
+    return response
+
+
+@router.post(
+    "/project/{project_id}/workflow/{workflow_id}/wftask/replace-task/",
+    response_model=WorkflowTaskReadV2,
+    status_code=status.HTTP_201_CREATED,
+)
+async def replace_workflowtask(
+    project_id: int,
+    workflow_id: int,
+    workflow_task_id: int,
+    task_id: int,
+    replace: WorkflowTaskReplaceV2,
+    user: UserOAuth = Depends(current_active_user),
+    db: AsyncSession = Depends(get_async_db),
+) -> WorkflowTaskReadV2:
+
+    # Get objects from database
+    old_wftask, workflow = await _get_workflow_task_check_owner(
+        project_id=project_id,
+        workflow_id=workflow_id,
+        workflow_task_id=workflow_task_id,
+        user_id=user.id,
+        db=db,
+    )
+    new_task = await _get_task_read_access(
+        task_id=task_id,
+        user_id=user.id,
+        db=db,
+        require_active=True,
+    )
+
+    # Preliminary checks
+    if not _is_type_update_valid(
+        old_type=old_wftask.task_type,
+        new_type=new_task.type,
+    ):
+        raise HTTPException(
+            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+            detail=(
+                "Cannot change task type from "
+                f"{old_wftask.task_type} to {new_task.type}."
+            ),
+        )
+
+    if replace.args_non_parallel is not None and new_task.type == "parallel":
+        raise HTTPException(
+            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+            detail="Cannot set 'args_non_parallel' for parallel task.",
+        )
+    if replace.args_parallel is not None and new_task.type == "non_parallel":
+        raise HTTPException(
+            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+            detail="Cannot set 'args_parallel' for non-parallel task.",
+        )
+    _check_type_filters_compatibility(
+        task_input_types=new_task.input_types,
+        wftask_type_filters=old_wftask.type_filters,
+    )
+
+    # Task arguments
+    if replace.args_non_parallel is None:
+        _args_non_parallel = old_wftask.args_non_parallel
+    else:
+        _args_non_parallel = replace.args_non_parallel
+    if replace.args_parallel is None:
+        _args_parallel = old_wftask.args_parallel
+    else:
+        _args_parallel = replace.args_parallel
+
+    # If user's changes to `meta_non_parallel` are compatible with new task,
+    # keep them; else, get `meta_non_parallel` from new task
+    if (
+        old_wftask.meta_non_parallel != old_wftask.task.meta_non_parallel
+    ) and (old_wftask.task.meta_non_parallel == new_task.meta_non_parallel):
+        _meta_non_parallel = old_wftask.meta_non_parallel
+    else:
+        _meta_non_parallel = new_task.meta_non_parallel
+    # Same for `meta_parallel`
+    if (old_wftask.meta_parallel != old_wftask.task.meta_parallel) and (
+        old_wftask.task.meta_parallel == new_task.meta_parallel
+    ):
+        _meta_parallel = old_wftask.meta_parallel
+    else:
+        _meta_parallel = new_task.meta_parallel
+
+    new_workflow_task = WorkflowTaskV2(
+        task_id=new_task.id,
+        task_type=new_task.type,
+        task=new_task,
+        # old-task values
+        type_filters=old_wftask.type_filters,
+        # possibly new values
+        args_non_parallel=_args_non_parallel,
+        args_parallel=_args_parallel,
+        meta_non_parallel=_meta_non_parallel,
+        meta_parallel=_meta_parallel,
+    )
+
+    workflow_task_order = old_wftask.order
+    workflow.task_list.remove(old_wftask)
+    workflow.task_list.insert(workflow_task_order, new_workflow_task)
+    await db.commit()
+    await db.refresh(new_workflow_task)
+    return new_workflow_task
fractal_server/app/routes/api/v2/workflow.py

@@ -1,5 +1,4 @@
 from copy import copy
-from typing import Optional

 from fastapi import APIRouter
 from fastapi import Depends
@@ -40,7 +39,7 @@ async def get_workflow_list(
     project_id: int,
     user: UserOAuth = Depends(current_active_user),
     db: AsyncSession = Depends(get_async_db),
-) -> Optional[list[WorkflowReadV2]]:
+) -> list[WorkflowReadV2] | None:
     """
     Get workflow list for given project
     """
@@ -67,7 +66,7 @@ async def create_workflow(
     workflow: WorkflowCreateV2,
     user: UserOAuth = Depends(current_active_user),
     db: AsyncSession = Depends(get_async_db),
-) -> Optional[WorkflowReadV2]:
+) -> WorkflowReadV2 | None:
     """
     Create a workflow, associate to a project
     """
@@ -95,7 +94,7 @@ async def read_workflow(
     workflow_id: int,
     user: UserOAuth = Depends(current_active_user),
     db: AsyncSession = Depends(get_async_db),
-) -> Optional[WorkflowReadV2WithWarnings]:
+) -> WorkflowReadV2WithWarnings | None:
     """
     Get info on an existing workflow
     """
@@ -129,7 +128,7 @@ async def update_workflow(
     patch: WorkflowUpdateV2,
     user: UserOAuth = Depends(current_active_user),
     db: AsyncSession = Depends(get_async_db),
-) -> Optional[WorkflowReadV2WithWarnings]:
+) -> WorkflowReadV2WithWarnings | None:
     """
     Edit a workflow
     """
@@ -237,7 +236,7 @@ async def export_workflow(
     workflow_id: int,
     user: UserOAuth = Depends(current_active_user),
     db: AsyncSession = Depends(get_async_db),
-) -> Optional[WorkflowExportV2]:
+) -> WorkflowExportV2 | None:
     """
     Export an existing workflow, after stripping all IDs
     """
fractal_server/app/routes/api/v2/workflow_import.py

@@ -1,5 +1,3 @@
-from typing import Optional
-
 from fastapi import APIRouter
 from fastapi import Depends
 from fastapi import HTTPException
@@ -64,7 +62,7 @@ async def _get_user_accessible_taskgroups(
 async def _get_task_by_source(
     source: str,
     task_groups_list: list[TaskGroupV2],
-) -> Optional[int]:
+) -> int | None:
     """
     Find task with a given source.

@@ -93,7 +91,7 @@ async def _disambiguate_task_groups(
     user_id: int,
     db: AsyncSession,
     default_group_id: int,
-) -> Optional[TaskV2]:
+) -> TaskV2 | None:
     """
     Disambiguate task groups based on ownership information.
     """
@@ -164,7 +162,7 @@ async def _get_task_by_taskimport(
     user_id: int,
     default_group_id: int,
     db: AsyncSession,
-) -> Optional[int]:
+) -> int | None:
     """
     Find a task based on `task_import`.

fractal_server/app/routes/api/v2/workflowtask.py

@@ -1,5 +1,4 @@
 from copy import deepcopy
-from typing import Optional

 from fastapi import APIRouter
 from fastapi import Depends
@@ -15,125 +14,14 @@ from ._aux_functions import _workflow_insert_task
 from ._aux_functions_tasks import _check_type_filters_compatibility
 from ._aux_functions_tasks import _get_task_read_access
 from fractal_server.app.models import UserOAuth
-from fractal_server.app.models.v2 import WorkflowTaskV2
 from fractal_server.app.routes.auth import current_active_user
 from fractal_server.app.schemas.v2 import WorkflowTaskCreateV2
 from fractal_server.app.schemas.v2 import WorkflowTaskReadV2
-from fractal_server.app.schemas.v2 import WorkflowTaskReplaceV2
 from fractal_server.app.schemas.v2 import WorkflowTaskUpdateV2

 router = APIRouter()


-@router.post(
-    "/project/{project_id}/workflow/{workflow_id}/wftask/replace-task/",
-    response_model=WorkflowTaskReadV2,
-    status_code=status.HTTP_201_CREATED,
-)
-async def replace_workflowtask(
-    project_id: int,
-    workflow_id: int,
-    workflow_task_id: int,
-    task_id: int,
-    replace: WorkflowTaskReplaceV2,
-    user: UserOAuth = Depends(current_active_user),
-    db: AsyncSession = Depends(get_async_db),
-) -> WorkflowTaskReadV2:
-
-    # Get objects from database
-    old_wftask, workflow = await _get_workflow_task_check_owner(
-        project_id=project_id,
-        workflow_id=workflow_id,
-        workflow_task_id=workflow_task_id,
-        user_id=user.id,
-        db=db,
-    )
-    new_task = await _get_task_read_access(
-        task_id=task_id,
-        user_id=user.id,
-        db=db,
-        require_active=True,
-    )
-
-    # Preliminary checks
-    EQUIVALENT_TASK_TYPES = [
-        {"non_parallel", "converter_non_parallel"},
-        {"compound", "converter_compound"},
-    ]
-    if (
-        old_wftask.task_type != new_task.type
-        and {old_wftask.task_type, new_task.type} not in EQUIVALENT_TASK_TYPES
-    ):
-        raise HTTPException(
-            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
-            detail=(
-                "Cannot change task type from "
-                f"{old_wftask.task_type} to {new_task.type}."
-            ),
-        )
-
-    if replace.args_non_parallel is not None and new_task.type == "parallel":
-        raise HTTPException(
-            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
-            detail="Cannot set 'args_non_parallel' for parallel task.",
-        )
-    if replace.args_parallel is not None and new_task.type == "non_parallel":
-        raise HTTPException(
-            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
-            detail="Cannot set 'args_parallel' for non-parallel task.",
-        )
-    _check_type_filters_compatibility(
-        task_input_types=new_task.input_types,
-        wftask_type_filters=old_wftask.type_filters,
-    )
-
-    # Task arguments
-    if replace.args_non_parallel is None:
-        _args_non_parallel = old_wftask.args_non_parallel
-    else:
-        _args_non_parallel = replace.args_non_parallel
-    if replace.args_parallel is None:
-        _args_parallel = old_wftask.args_parallel
-    else:
-        _args_parallel = replace.args_parallel
-
-    # If user's changes to `meta_non_parallel` are compatible with new task,
-    # keep them; else, get `meta_non_parallel` from new task
-    if (
-        old_wftask.meta_non_parallel != old_wftask.task.meta_non_parallel
-    ) and (old_wftask.task.meta_non_parallel == new_task.meta_non_parallel):
-        _meta_non_parallel = old_wftask.meta_non_parallel
-    else:
-        _meta_non_parallel = new_task.meta_non_parallel
-    # Same for `meta_parallel`
-    if (old_wftask.meta_parallel != old_wftask.task.meta_parallel) and (
-        old_wftask.task.meta_parallel == new_task.meta_parallel
-    ):
-        _meta_parallel = old_wftask.meta_parallel
-    else:
-        _meta_parallel = new_task.meta_parallel
-
-    new_workflow_task = WorkflowTaskV2(
-        task_id=new_task.id,
-        task_type=new_task.type,
-        task=new_task,
-        # old-task values
-        type_filters=old_wftask.type_filters,
-        # possibly new values
-        args_non_parallel=_args_non_parallel,
-        args_parallel=_args_parallel,
-        meta_non_parallel=_meta_non_parallel,
-        meta_parallel=_meta_parallel,
-    )
-
-    workflow_task_order = old_wftask.order
-    workflow.task_list.remove(old_wftask)
-    workflow.task_list.insert(workflow_task_order, new_workflow_task)
-    await db.commit()
-    await db.refresh(new_workflow_task)
-    return new_workflow_task
-
-
 @router.post(
     "/project/{project_id}/workflow/{workflow_id}/wftask/",
     response_model=WorkflowTaskReadV2,
@@ -146,7 +34,7 @@ async def create_workflowtask(
     wftask: WorkflowTaskCreateV2,
     user: UserOAuth = Depends(current_active_user),
     db: AsyncSession = Depends(get_async_db),
-) -> Optional[WorkflowTaskReadV2]:
+) -> WorkflowTaskReadV2 | None:
     """
     Add a WorkflowTask to a Workflow
     """
@@ -237,7 +125,7 @@ async def update_workflowtask(
     workflow_task_update: WorkflowTaskUpdateV2,
     user: UserOAuth = Depends(current_active_user),
     db: AsyncSession = Depends(get_async_db),
-) -> Optional[WorkflowTaskReadV2]:
+) -> WorkflowTaskReadV2 | None:
    """
    Edit a WorkflowTask of a Workflow
    """
fractal_server/app/routes/auth/current_user.py

@@ -164,11 +164,11 @@ async def get_current_user_allowed_viewer_paths(
     viewer_paths_nested = res.scalars().all()

     # Flatten a nested object and make its elements unique
-    all_viewer_paths_set = set(
+    all_viewer_paths_set = {
         path
         for _viewer_paths in viewer_paths_nested
         for path in _viewer_paths
-    )
+    }

     authorized_paths.extend(all_viewer_paths_set)

fractal_server/app/routes/pagination.py

@@ -1,5 +1,4 @@
 from typing import Generic
-from typing import Optional
 from typing import TypeVar

 from fastapi import HTTPException
@@ -14,7 +13,7 @@ T = TypeVar("T")
 class PaginationRequest(BaseModel):

     page: int = Field(ge=1)
-    page_size: Optional[int] = Field(ge=1)
+    page_size: int | None = Field(ge=1)

     @model_validator(mode="after")
     def valid_pagination_parameters(self):
@@ -26,7 +25,7 @@ class PaginationRequest(BaseModel):


 def get_pagination_params(
-    page: int = 1, page_size: Optional[int] = None
+    page: int = 1, page_size: int | None = None
 ) -> PaginationRequest:
     try:
         pagination = PaginationRequest(page=page, page_size=page_size)
fractal_server/app/runner/exceptions.py

@@ -1,5 +1,4 @@
 import os
-from typing import Optional


 class TaskExecutionError(RuntimeError):
@@ -20,16 +19,16 @@ class TaskExecutionError(RuntimeError):
     Human readable name of the failing task.
     """

-    workflow_task_id: Optional[int] = None
-    workflow_task_order: Optional[int] = None
-    task_name: Optional[str] = None
+    workflow_task_id: int | None = None
+    workflow_task_order: int | None = None
+    task_name: str | None = None

     def __init__(
         self,
         *args,
-        workflow_task_id: Optional[int] = None,
-        workflow_task_order: Optional[int] = None,
-        task_name: Optional[str] = None,
+        workflow_task_id: int | None = None,
+        workflow_task_order: int | None = None,
+        task_name: str | None = None,
     ):
         super().__init__(*args)
         self.workflow_task_id = workflow_task_id
@@ -63,18 +62,18 @@ class JobExecutionError(RuntimeError):
     Path to the file with the command stderr
     """

-    cmd_file: Optional[str] = None
-    stdout_file: Optional[str] = None
-    stderr_file: Optional[str] = None
-    info: Optional[str] = None
+    cmd_file: str | None = None
+    stdout_file: str | None = None
+    stderr_file: str | None = None
+    info: str | None = None

     def __init__(
         self,
         *args,
-        cmd_file: Optional[str] = None,
-        stdout_file: Optional[str] = None,
-        stderr_file: Optional[str] = None,
-        info: Optional[str] = None,
+        cmd_file: str | None = None,
+        stdout_file: str | None = None,
+        stderr_file: str | None = None,
+        info: str | None = None,
     ):
         super().__init__(*args)
         self.cmd_file = cmd_file
@@ -88,7 +87,7 @@ class JobExecutionError(RuntimeError):
         empty or missing
         """
         if os.path.exists(filepath):
-            with open(filepath, "r") as f:
+            with open(filepath) as f:
                content = f.read()
            if content:
                return f"Content of {filepath}:\n{content}"
fractal_server/app/runner/executors/base_runner.py

@@ -19,7 +19,7 @@ TASK_TYPES_MULTISUBMIT: list[TaskTypeType] = [
 logger = set_logger(__name__)


-class BaseRunner(object):
+class BaseRunner:
     """
     Base class for Fractal runners.
     """
@@ -150,9 +150,9 @@ class BaseRunner(object):
                 f"{len(list_parameters)=}."
             )

-        subfolders = set(
+        subfolders = {
             task_file.wftask_subfolder_local for task_file in list_task_files
-        )
+        }
         if len(subfolders) != 1:
             raise ValueError(f"More than one subfolders: {subfolders}.")

fractal_server/app/runner/executors/call_command_wrapper.py

@@ -44,7 +44,7 @@ def call_command_wrapper(*, cmd: str, log_path: str) -> None:
     if result.returncode != 0:
         stderr = ""
         if os.path.isfile(log_path):
-            with open(log_path, "r") as fp_stderr:
+            with open(log_path) as fp_stderr:
                 stderr = fp_stderr.read()
         raise TaskExecutionError(
             f"Task failed with returncode={result.returncode}.\n"