fractal-server 2.13.0__py3-none-any.whl → 2.14.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fractal_server/__init__.py +1 -1
- fractal_server/__main__.py +3 -1
- fractal_server/app/models/linkusergroup.py +6 -2
- fractal_server/app/models/v2/__init__.py +11 -1
- fractal_server/app/models/v2/accounting.py +35 -0
- fractal_server/app/models/v2/dataset.py +1 -11
- fractal_server/app/models/v2/history.py +78 -0
- fractal_server/app/models/v2/job.py +10 -3
- fractal_server/app/models/v2/task_group.py +2 -2
- fractal_server/app/models/v2/workflow.py +1 -1
- fractal_server/app/models/v2/workflowtask.py +1 -1
- fractal_server/app/routes/admin/v2/__init__.py +4 -0
- fractal_server/app/routes/admin/v2/accounting.py +98 -0
- fractal_server/app/routes/admin/v2/impersonate.py +35 -0
- fractal_server/app/routes/admin/v2/job.py +5 -13
- fractal_server/app/routes/admin/v2/task.py +1 -1
- fractal_server/app/routes/admin/v2/task_group.py +4 -29
- fractal_server/app/routes/api/__init__.py +1 -1
- fractal_server/app/routes/api/v2/__init__.py +8 -2
- fractal_server/app/routes/api/v2/_aux_functions.py +66 -0
- fractal_server/app/routes/api/v2/_aux_functions_history.py +166 -0
- fractal_server/app/routes/api/v2/_aux_functions_task_lifecycle.py +3 -3
- fractal_server/app/routes/api/v2/dataset.py +0 -17
- fractal_server/app/routes/api/v2/history.py +544 -0
- fractal_server/app/routes/api/v2/images.py +31 -43
- fractal_server/app/routes/api/v2/job.py +30 -0
- fractal_server/app/routes/api/v2/project.py +1 -53
- fractal_server/app/routes/api/v2/{status.py → status_legacy.py} +6 -6
- fractal_server/app/routes/api/v2/submit.py +17 -14
- fractal_server/app/routes/api/v2/task.py +3 -10
- fractal_server/app/routes/api/v2/task_collection_custom.py +4 -9
- fractal_server/app/routes/api/v2/task_group.py +2 -22
- fractal_server/app/routes/api/v2/verify_image_types.py +61 -0
- fractal_server/app/routes/api/v2/workflow.py +28 -69
- fractal_server/app/routes/api/v2/workflowtask.py +53 -50
- fractal_server/app/routes/auth/group.py +0 -16
- fractal_server/app/routes/auth/oauth.py +5 -3
- fractal_server/app/routes/aux/__init__.py +0 -20
- fractal_server/app/routes/pagination.py +47 -0
- fractal_server/app/runner/components.py +0 -3
- fractal_server/app/runner/compress_folder.py +57 -29
- fractal_server/app/runner/exceptions.py +4 -0
- fractal_server/app/runner/executors/base_runner.py +157 -0
- fractal_server/app/runner/{v2/_local/_local_config.py → executors/local/get_local_config.py} +7 -9
- fractal_server/app/runner/executors/local/runner.py +248 -0
- fractal_server/app/runner/executors/{slurm → slurm_common}/_batching.py +1 -1
- fractal_server/app/runner/executors/{slurm → slurm_common}/_slurm_config.py +9 -7
- fractal_server/app/runner/executors/slurm_common/base_slurm_runner.py +868 -0
- fractal_server/app/runner/{v2/_slurm_common → executors/slurm_common}/get_slurm_config.py +48 -17
- fractal_server/app/runner/executors/{slurm → slurm_common}/remote.py +36 -47
- fractal_server/app/runner/executors/slurm_common/slurm_job_task_models.py +134 -0
- fractal_server/app/runner/executors/slurm_ssh/runner.py +268 -0
- fractal_server/app/runner/executors/slurm_sudo/__init__.py +0 -0
- fractal_server/app/runner/executors/{slurm/sudo → slurm_sudo}/_subprocess_run_as_user.py +2 -83
- fractal_server/app/runner/executors/slurm_sudo/runner.py +193 -0
- fractal_server/app/runner/extract_archive.py +1 -3
- fractal_server/app/runner/task_files.py +134 -87
- fractal_server/app/runner/v2/__init__.py +0 -395
- fractal_server/app/runner/v2/_local.py +88 -0
- fractal_server/app/runner/v2/{_slurm_ssh/__init__.py → _slurm_ssh.py} +22 -19
- fractal_server/app/runner/v2/{_slurm_sudo/__init__.py → _slurm_sudo.py} +19 -15
- fractal_server/app/runner/v2/db_tools.py +119 -0
- fractal_server/app/runner/v2/runner.py +219 -98
- fractal_server/app/runner/v2/runner_functions.py +491 -189
- fractal_server/app/runner/v2/runner_functions_low_level.py +40 -43
- fractal_server/app/runner/v2/submit_workflow.py +358 -0
- fractal_server/app/runner/v2/task_interface.py +31 -0
- fractal_server/app/schemas/_validators.py +13 -24
- fractal_server/app/schemas/user.py +10 -7
- fractal_server/app/schemas/user_settings.py +9 -21
- fractal_server/app/schemas/v2/__init__.py +10 -1
- fractal_server/app/schemas/v2/accounting.py +18 -0
- fractal_server/app/schemas/v2/dataset.py +12 -94
- fractal_server/app/schemas/v2/dumps.py +26 -9
- fractal_server/app/schemas/v2/history.py +80 -0
- fractal_server/app/schemas/v2/job.py +15 -8
- fractal_server/app/schemas/v2/manifest.py +14 -7
- fractal_server/app/schemas/v2/project.py +9 -7
- fractal_server/app/schemas/v2/status_legacy.py +35 -0
- fractal_server/app/schemas/v2/task.py +72 -77
- fractal_server/app/schemas/v2/task_collection.py +14 -32
- fractal_server/app/schemas/v2/task_group.py +10 -9
- fractal_server/app/schemas/v2/workflow.py +10 -11
- fractal_server/app/schemas/v2/workflowtask.py +2 -21
- fractal_server/app/security/__init__.py +3 -3
- fractal_server/app/security/signup_email.py +2 -2
- fractal_server/config.py +91 -90
- fractal_server/images/tools.py +23 -0
- fractal_server/migrations/versions/47351f8c7ebc_drop_dataset_filters.py +50 -0
- fractal_server/migrations/versions/9db60297b8b2_set_ondelete.py +250 -0
- fractal_server/migrations/versions/af1ef1c83c9b_add_accounting_tables.py +57 -0
- fractal_server/migrations/versions/c90a7c76e996_job_id_in_history_run.py +41 -0
- fractal_server/migrations/versions/e81103413827_add_job_type_filters.py +36 -0
- fractal_server/migrations/versions/f37aceb45062_make_historyunit_logfile_required.py +39 -0
- fractal_server/migrations/versions/fbce16ff4e47_new_history_items.py +120 -0
- fractal_server/ssh/_fabric.py +28 -14
- fractal_server/tasks/v2/local/collect.py +2 -2
- fractal_server/tasks/v2/ssh/collect.py +2 -2
- fractal_server/tasks/v2/templates/2_pip_install.sh +1 -1
- fractal_server/tasks/v2/templates/4_pip_show.sh +1 -1
- fractal_server/tasks/v2/utils_background.py +1 -20
- fractal_server/tasks/v2/utils_database.py +30 -17
- fractal_server/tasks/v2/utils_templates.py +6 -0
- {fractal_server-2.13.0.dist-info → fractal_server-2.14.0.dist-info}/METADATA +4 -4
- {fractal_server-2.13.0.dist-info → fractal_server-2.14.0.dist-info}/RECORD +114 -99
- {fractal_server-2.13.0.dist-info → fractal_server-2.14.0.dist-info}/WHEEL +1 -1
- fractal_server/app/runner/executors/slurm/ssh/_executor_wait_thread.py +0 -126
- fractal_server/app/runner/executors/slurm/ssh/_slurm_job.py +0 -116
- fractal_server/app/runner/executors/slurm/ssh/executor.py +0 -1386
- fractal_server/app/runner/executors/slurm/sudo/_check_jobs_status.py +0 -71
- fractal_server/app/runner/executors/slurm/sudo/_executor_wait_thread.py +0 -130
- fractal_server/app/runner/executors/slurm/sudo/executor.py +0 -1281
- fractal_server/app/runner/v2/_local/__init__.py +0 -129
- fractal_server/app/runner/v2/_local/_submit_setup.py +0 -52
- fractal_server/app/runner/v2/_local/executor.py +0 -100
- fractal_server/app/runner/v2/_slurm_ssh/_submit_setup.py +0 -83
- fractal_server/app/runner/v2/_slurm_sudo/_submit_setup.py +0 -83
- fractal_server/app/runner/v2/handle_failed_job.py +0 -59
- fractal_server/app/schemas/v2/status.py +0 -16
- /fractal_server/app/{runner/executors/slurm → history}/__init__.py +0 -0
- /fractal_server/app/runner/executors/{slurm/ssh → local}/__init__.py +0 -0
- /fractal_server/app/runner/executors/{slurm/sudo → slurm_common}/__init__.py +0 -0
- /fractal_server/app/runner/executors/{_job_states.py → slurm_common/_job_states.py} +0 -0
- /fractal_server/app/runner/executors/{slurm → slurm_common}/utils_executors.py +0 -0
- /fractal_server/app/runner/{v2/_slurm_common → executors/slurm_ssh}/__init__.py +0 -0
- {fractal_server-2.13.0.dist-info → fractal_server-2.14.0.dist-info}/LICENSE +0 -0
- {fractal_server-2.13.0.dist-info → fractal_server-2.14.0.dist-info}/entry_points.txt +0 -0
fractal_server/app/routes/api/v2/workflowtask.py
CHANGED
@@ -35,80 +35,81 @@ async def replace_workflowtask(
     workflow_id: int,
     workflow_task_id: int,
     task_id: int,
-    replace: …
+    replace: WorkflowTaskReplaceV2,
     user: UserOAuth = Depends(current_active_user),
     db: AsyncSession = Depends(get_async_db),
 ) -> WorkflowTaskReadV2:
 
-    old_workflow_task, workflow = await _get_workflow_task_check_owner(
+    # Get objects from database
+    old_wftask, workflow = await _get_workflow_task_check_owner(
         project_id=project_id,
         workflow_id=workflow_id,
         workflow_task_id=workflow_task_id,
         user_id=user.id,
         db=db,
     )
-
     new_task = await _get_task_read_access(
-        task_id=task_id, …
+        task_id=task_id,
+        user_id=user.id,
+        db=db,
+        require_active=True,
    )
 
-    if old_workflow_task.task_type != new_task.type:
+    # Preliminary checks
+    EQUIVALENT_TASK_TYPES = [
+        {"non_parallel", "converter_non_parallel"},
+        {"compound", "converter_compound"},
+    ]
+    if (
+        old_wftask.task_type != new_task.type
+        and {old_wftask.task_type, new_task.type} not in EQUIVALENT_TASK_TYPES
+    ):
         raise HTTPException(
             status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
             detail=(
-                …
-                f"…
+                "Cannot change task type from "
+                f"{old_wftask.task_type} to {new_task.type}."
             ),
         )
 
+    if replace.args_non_parallel is not None and new_task.type == "parallel":
+        raise HTTPException(
+            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+            detail="Cannot set 'args_non_parallel' for parallel task.",
+        )
+    if replace.args_parallel is not None and new_task.type == "non_parallel":
+        raise HTTPException(
+            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+            detail="Cannot set 'args_parallel' for non-parallel task.",
+        )
     _check_type_filters_compatibility(
         task_input_types=new_task.input_types,
-        wftask_type_filters=old_workflow_task.type_filters,
+        wftask_type_filters=old_wftask.type_filters,
     )
 
-    …
-    if replace.args_non_parallel is not None:
-        if new_task.type == "parallel":
-            raise HTTPException(
-                status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
-                detail=(
-                    "Cannot set 'args_non_parallel' "
-                    "when Task is 'parallel'."
-                ),
-            )
-        else:
-            _args_non_parallel = replace.args_non_parallel
-
-    if replace.args_parallel is not None:
-        if new_task.type == "non_parallel":
-            raise HTTPException(
-                status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
-                detail=(
-                    "Cannot set 'args_parallel' "
-                    "when Task is 'non_parallel'."
-                ),
-            )
-        else:
-            _args_parallel = replace.args_parallel
+    # Task arguments
+    if replace.args_non_parallel is None:
+        _args_non_parallel = old_wftask.args_non_parallel
+    else:
+        _args_non_parallel = replace.args_non_parallel
+    if replace.args_parallel is None:
+        _args_parallel = old_wftask.args_parallel
+    else:
+        _args_parallel = replace.args_parallel
 
     # If user's changes to `meta_non_parallel` are compatible with new task,
     # keep them; else, get `meta_non_parallel` from new task
     if (
-        …
-        old_workflow_task.task.meta_non_parallel == new_task.meta_non_parallel
-    ):
-        _meta_non_parallel = old_workflow_task.meta_non_parallel
+        old_wftask.meta_non_parallel != old_wftask.task.meta_non_parallel
+    ) and (old_wftask.task.meta_non_parallel == new_task.meta_non_parallel):
+        _meta_non_parallel = old_wftask.meta_non_parallel
     else:
         _meta_non_parallel = new_task.meta_non_parallel
     # Same for `meta_parallel`
-    if (
-        …
-    ):
-        _meta_parallel = …
+    if (old_wftask.meta_parallel != old_wftask.task.meta_parallel) and (
+        old_wftask.task.meta_parallel == new_task.meta_parallel
+    ):
+        _meta_parallel = old_wftask.meta_parallel
     else:
         _meta_parallel = new_task.meta_parallel
 
@@ -117,7 +118,7 @@ async def replace_workflowtask(
         task_type=new_task.type,
         task=new_task,
         # old-task values
-        type_filters=old_workflow_task.type_filters,
+        type_filters=old_wftask.type_filters,
         # possibly new values
         args_non_parallel=_args_non_parallel,
         args_parallel=_args_parallel,
@@ -125,8 +126,8 @@ async def replace_workflowtask(
         meta_parallel=_meta_parallel,
     )
 
-    workflow_task_order = old_workflow_task.order
-    workflow.task_list.remove(old_workflow_task)
+    workflow_task_order = old_wftask.order
+    workflow.task_list.remove(old_wftask)
     workflow.task_list.insert(workflow_task_order, new_workflow_task)
     await db.commit()
     await db.refresh(new_workflow_task)
@@ -201,8 +202,6 @@ async def create_workflowtask(
         db=db,
     )
 
-    await db.close()
-
     return wftask_db
 
 
@@ -268,7 +267,10 @@ async def update_workflowtask(
                 "parallel."
             ),
         )
-    elif db_wf_task.task_type == "non_parallel" and (
+    elif db_wf_task.task_type in [
+        "non_parallel",
+        "converter_non_parallel",
+    ] and (
         workflow_task_update.args_parallel is not None
         or workflow_task_update.meta_parallel is not None
     ):
@@ -333,6 +335,7 @@ async def delete_workflowtask(
         db=db,
     )
 
+    # Delete WorkflowTask
     await db.delete(db_workflow_task)
     await db.commit()
 
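The replace endpoint's new preliminary checks treat each converter task type as interchangeable with its plain counterpart. The same set-membership trick can be exercised in isolation; a minimal sketch, where `can_replace` is an illustrative helper and not a fractal-server function:

# Re-statement of the equivalence check above; `can_replace` is
# hypothetical, not part of fractal-server.
EQUIVALENT_TASK_TYPES = [
    {"non_parallel", "converter_non_parallel"},
    {"compound", "converter_compound"},
]


def can_replace(old_type: str, new_type: str) -> bool:
    # Unordered pairs: {a, b} == {b, a}, so direction does not matter.
    return old_type == new_type or {old_type, new_type} in EQUIVALENT_TASK_TYPES


assert can_replace("compound", "converter_compound")
assert not can_replace("parallel", "non_parallel")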
fractal_server/app/routes/auth/group.py
CHANGED
@@ -19,7 +19,6 @@ from fractal_server.app.models import LinkUserGroup
 from fractal_server.app.models import UserGroup
 from fractal_server.app.models import UserOAuth
 from fractal_server.app.models import UserSettings
-from fractal_server.app.models.v2 import TaskGroupV2
 from fractal_server.app.schemas.user_group import UserGroupCreate
 from fractal_server.app.schemas.user_group import UserGroupRead
 from fractal_server.app.schemas.user_group import UserGroupUpdate
@@ -156,21 +155,6 @@ async def delete_single_group(
             ),
         )
 
-    # Cascade operations
-
-    res = await db.execute(
-        select(LinkUserGroup).where(LinkUserGroup.group_id == group_id)
-    )
-    for link in res.scalars().all():
-        await db.delete(link)
-
-    res = await db.execute(
-        select(TaskGroupV2).where(TaskGroupV2.user_group_id == group_id)
-    )
-    for task_group in res.scalars().all():
-        task_group.user_group_id = None
-        db.add(task_group)
-
     # Delete
 
     await db.delete(group)
fractal_server/app/routes/auth/oauth.py
CHANGED
@@ -27,20 +27,22 @@ for client_config in settings.OAUTH_CLIENTS_CONFIG:
         from httpx_oauth.clients.google import GoogleOAuth2
 
         client = GoogleOAuth2(
-            client_config.CLIENT_ID, client_config.CLIENT_SECRET
+            client_config.CLIENT_ID,
+            client_config.CLIENT_SECRET.get_secret_value(),
         )
     elif client_name == "github":
         from httpx_oauth.clients.github import GitHubOAuth2
 
         client = GitHubOAuth2(
-            client_config.CLIENT_ID, client_config.CLIENT_SECRET
+            client_config.CLIENT_ID,
+            client_config.CLIENT_SECRET.get_secret_value(),
         )
     else:
         from httpx_oauth.clients.openid import OpenID
 
         client = OpenID(
             client_config.CLIENT_ID,
-            client_config.CLIENT_SECRET,
+            client_config.CLIENT_SECRET.get_secret_value(),
             client_config.OIDC_CONFIGURATION_ENDPOINT,
         )
 
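All three clients now call `.get_secret_value()`, which matches a switch of `CLIENT_SECRET` from plain `str` to pydantic's `SecretStr` (consistent with the `config.py` changes in the file list). A minimal sketch of the pattern; `OAuthClientConfig` here is illustrative, not the actual fractal-server settings class:

from pydantic import BaseModel, SecretStr


class OAuthClientConfig(BaseModel):
    CLIENT_ID: str
    CLIENT_SECRET: SecretStr  # masked in reprs and logs


config = OAuthClientConfig(CLIENT_ID="my-id", CLIENT_SECRET="my-secret")
print(config.CLIENT_SECRET)                     # **********
print(config.CLIENT_SECRET.get_secret_value())  # my-secret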
fractal_server/app/routes/aux/__init__.py
CHANGED
@@ -1,20 +0,0 @@
-from datetime import datetime
-from typing import Optional
-
-from fastapi import HTTPException
-from fastapi import status
-
-
-def _raise_if_naive_datetime(*timestamps: tuple[Optional[datetime]]) -> None:
-    """
-    Raise 422 if any not-null argument is a naive `datetime` object:
-    https://docs.python.org/3/library/datetime.html#determining-if-an-object-is-aware-or-naive
-    """
-    for timestamp in filter(None, timestamps):
-        if (timestamp.tzinfo is None) or (
-            timestamp.tzinfo.utcoffset(timestamp) is None
-        ):
-            raise HTTPException(
-                status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
-                detail=f"{timestamp=} is naive. You must provide a timezone.",
-            )
fractal_server/app/routes/pagination.py
ADDED
@@ -0,0 +1,47 @@
+from typing import Generic
+from typing import Optional
+from typing import TypeVar
+
+from fastapi import HTTPException
+from pydantic import BaseModel
+from pydantic import Field
+from pydantic import model_validator
+from pydantic import ValidationError
+
+T = TypeVar("T")
+
+
+class PaginationRequest(BaseModel):
+
+    page: int = Field(ge=1)
+    page_size: Optional[int] = Field(ge=1)
+
+    @model_validator(mode="after")
+    def valid_pagination_parameters(self):
+        if self.page_size is None and self.page > 1:
+            raise ValueError(
+                f"page_size is None but page={self.page} is greater than 1."
+            )
+        return self
+
+
+def get_pagination_params(
+    page: int = 1, page_size: Optional[int] = None
+) -> PaginationRequest:
+    try:
+        pagination = PaginationRequest(page=page, page_size=page_size)
+    except ValidationError as e:
+        raise HTTPException(
+            status_code=422,
+            detail=f"Invalid pagination parameters. Original error: '{e}'.",
+        )
+    return pagination
+
+
+class PaginationResponse(BaseModel, Generic[T]):
+
+    current_page: int = Field(ge=1)
+    page_size: int = Field(ge=0)
+    total_count: int = Field(ge=0)
+
+    items: list[T]
fractal_server/app/runner/compress_folder.py
CHANGED
@@ -11,8 +11,8 @@ built-in `tarfile` library has to do with performance issues we observed
 when handling files which were just created within a SLURM job, and in the
 context of a CephFS filesystem.
 """
-import shutil
 import sys
+import time
 from pathlib import Path
 
 from fractal_server.app.runner.run_subprocess import run_subprocess
@@ -20,48 +20,66 @@ from fractal_server.logger import get_logger
 from fractal_server.logger import set_logger
 
 
-def …
+def _copy_subfolder(src: Path, dest: Path, logger_name: str):
+    t_start = time.perf_counter()
     cmd_cp = f"cp -r {src.as_posix()} {dest.as_posix()}"
     logger = get_logger(logger_name=logger_name)
     logger.debug(f"{cmd_cp=}")
     res = run_subprocess(cmd=cmd_cp, logger_name=logger_name)
+    elapsed = time.perf_counter() - t_start
+    logger.debug(f"[_copy_subfolder] END {elapsed=} s ({dest.as_posix()})")
     return res
 
 
-def …
-    tarfile_path: …
+def _create_tar_archive(
+    tarfile_path: str,
     subfolder_path_tmp_copy: Path,
     logger_name: str,
-    …
+    filelist_path: str | None,
 ):
     logger = get_logger(logger_name)
-    …
+    logger.debug(f"[_create_tar_archive] START ({tarfile_path})")
+    t_start = time.perf_counter()
+
+    if filelist_path is None:
+        cmd_tar = (
+            f"tar -c -z -f {tarfile_path} "
+            f"--directory={subfolder_path_tmp_copy.as_posix()} "
+            "."
+        )
     else:
-        …
+        cmd_tar = (
+            f"tar -c -z -f {tarfile_path} "
+            f"--directory={subfolder_path_tmp_copy.as_posix()} "
+            f"--files-from={filelist_path} --ignore-failed-read"
+        )
 
-    cmd_tar = (
-        f"tar czf {tarfile_path} "
-        f"{exclude_options} "
-        f"--directory={subfolder_path_tmp_copy.as_posix()} "
-        "."
-    )
     logger.debug(f"cmd tar:\n{cmd_tar}")
+
     run_subprocess(cmd=cmd_tar, logger_name=logger_name, allow_char="*")
+    elapsed = time.perf_counter() - t_start
+    logger.debug(f"[_create_tar_archive] END {elapsed=} s ({tarfile_path})")
 
 
-def …
+def _remove_temp_subfolder(subfolder_path_tmp_copy: Path, logger_name: str):
     logger = get_logger(logger_name)
+    t_start = time.perf_counter()
     try:
-        …
+        cmd_rm = f"rm -rf {subfolder_path_tmp_copy}"
+        logger.debug(f"cmd rm:\n{cmd_rm}")
+        run_subprocess(cmd=cmd_rm, logger_name=logger_name, allow_char="*")
     except Exception as e:
-        logger.debug(f"ERROR during …
+        logger.debug(f"ERROR during {cmd_rm}: {e}")
+    elapsed = time.perf_counter() - t_start
+    logger.debug(
+        f"[_remove_temp_subfolder] END {elapsed=} s "
+        f"({subfolder_path_tmp_copy=})"
+    )
 
 
 def compress_folder(
-    subfolder_path: Path,
+    subfolder_path: Path,
+    filelist_path: str | None,
 ) -> str:
     """
     Compress e.g. `/path/archive` into `/path/archive.tar.gz`
@@ -91,14 +109,16 @@ def compress_folder(
         subfolder_path.parent / f"{subfolder_path.name}_copy"
     )
     try:
-        …
-            subfolder_path,
+        _copy_subfolder(
+            subfolder_path,
+            subfolder_path_tmp_copy,
+            logger_name=logger_name,
         )
-        …
+        _create_tar_archive(
             tarfile_path,
             subfolder_path_tmp_copy,
             logger_name=logger_name,
-            …
+            filelist_path=filelist_path,
         )
         return tarfile_path
 
@@ -107,7 +127,9 @@ def compress_folder(
         sys.exit(1)
 
     finally:
-        …
+        _remove_temp_subfolder(
+            subfolder_path_tmp_copy, logger_name=logger_name
+        )
 
 
 def main(sys_argv: list[str]):
@@ -115,15 +137,21 @@ def main(sys_argv: list[str]):
     help_msg = (
         "Expected use:\n"
         "python -m fractal_server.app.runner.compress_folder "
-        "path/to/folder [--…
+        "path/to/folder [--filelist /path/to/filelist]\n"
     )
     num_args = len(sys_argv[1:])
     if num_args == 0:
         sys.exit(f"Invalid argument.\n{help_msg}\nProvided: {sys_argv[1:]=}")
     elif num_args == 1:
-        compress_folder(
-            …
-        )
+        compress_folder(
+            subfolder_path=Path(sys_argv[1]),
+            filelist_path=None,
+        )
+    elif num_args == 3 and sys_argv[2] == "--filelist":
+        compress_folder(
+            subfolder_path=Path(sys_argv[1]),
+            filelist_path=sys_argv[3],
+        )
     else:
         sys.exit(f"Invalid argument.\n{help_msg}\nProvided: {sys_argv[1:]=}")
 
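Matching the updated `help_msg`, the module can be driven from the command line (`python -m fractal_server.app.runner.compress_folder path/to/folder [--filelist /path/to/filelist]`) or called directly. A short sketch of direct use, with placeholder paths:

from pathlib import Path

from fractal_server.app.runner.compress_folder import compress_folder

# Archive the whole folder into /tmp/example/job0.tar.gz:
compress_folder(subfolder_path=Path("/tmp/example/job0"), filelist_path=None)

# Archive only the files listed (one per line) in a filelist; per the tar
# flags above, missing entries are skipped via --ignore-failed-read:
compress_folder(
    subfolder_path=Path("/tmp/example/job0"),
    filelist_path="/tmp/example/filelist.txt",
)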
fractal_server/app/runner/exceptions.py
CHANGED
@@ -37,6 +37,10 @@ class TaskExecutionError(RuntimeError):
         self.task_name = task_name
 
 
+class TaskOutputValidationError(ValueError):
+    pass
+
+
 class JobExecutionError(RuntimeError):
     """
     Forwards errors in the execution of a task that are due to external factors
fractal_server/app/runner/executors/base_runner.py
ADDED
@@ -0,0 +1,157 @@
+from typing import Any
+
+from fractal_server.app.runner.task_files import TaskFiles
+from fractal_server.app.schemas.v2.task import TaskTypeType
+from fractal_server.logger import set_logger
+
+TASK_TYPES_SUBMIT: list[TaskTypeType] = [
+    "compound",
+    "converter_compound",
+    "non_parallel",
+    "converter_non_parallel",
+]
+TASK_TYPES_MULTISUBMIT: list[TaskTypeType] = [
+    "compound",
+    "converter_compound",
+    "parallel",
+]
+
+logger = set_logger(__name__)
+
+
+class BaseRunner(object):
+    """
+    Base class for Fractal runners.
+    """
+
+    def submit(
+        self,
+        func: callable,
+        parameters: dict[str, Any],
+        history_unit_id: int,
+        task_type: TaskTypeType,
+        task_files: TaskFiles,
+        config: Any,
+        user_id: int,
+    ) -> tuple[Any, BaseException]:
+        """
+        Run a single fractal task.
+
+        Args:
+            func: Function to be executed.
+            parameters: Dictionary of parameters.
+            history_unit_id:
+                Database ID of the corresponding `HistoryUnit` entry.
+            task_type: Task type.
+            task_files: `TaskFiles` object.
+            config: Runner-specific parameters.
+            user_id:
+        """
+        raise NotImplementedError()
+
+    def multisubmit(
+        self,
+        func: callable,
+        list_parameters: list[dict[str, Any]],
+        history_unit_ids: list[int],
+        list_task_files: list[TaskFiles],
+        task_type: TaskTypeType,
+        config: Any,
+        user_id: int,
+    ) -> tuple[dict[int, Any], dict[int, BaseException]]:
+        """
+        Run a parallel fractal task.
+
+        Args:
+            func: Function to be executed.
+            parameters:
+                Dictionary of parameters. Must include `zarr_urls` key.
+            history_unit_ids:
+                Database IDs of the corresponding `HistoryUnit` entries.
+            task_type: Task type.
+            task_files: `TaskFiles` object.
+            config: Runner-specific parameters.
+            user_id
+        """
+        raise NotImplementedError()
+
+    def validate_submit_parameters(
+        self,
+        parameters: dict[str, Any],
+        task_type: TaskTypeType,
+    ) -> None:
+        """
+        Validate parameters for `submit` method
+
+        Args:
+            parameters: Parameters dictionary.
+            task_type: Task type.s
+        """
+        logger.info("[validate_submit_parameters] START")
+        if task_type not in TASK_TYPES_SUBMIT:
+            raise ValueError(f"Invalid {task_type=} for `submit`.")
+        if not isinstance(parameters, dict):
+            raise ValueError("`parameters` must be a dictionary.")
+        if task_type in ["non_parallel", "compound"]:
+            if "zarr_urls" not in parameters.keys():
+                raise ValueError(
+                    f"No 'zarr_urls' key in in {list(parameters.keys())}"
+                )
+        elif task_type in ["converter_non_parallel", "converter_compound"]:
+            if "zarr_urls" in parameters.keys():
+                raise ValueError(
+                    f"Forbidden 'zarr_urls' key in {list(parameters.keys())}"
+                )
+        logger.info("[validate_submit_parameters] END")
+
+    def validate_multisubmit_parameters(
+        self,
+        *,
+        task_type: TaskTypeType,
+        list_parameters: list[dict[str, Any]],
+        list_task_files: list[TaskFiles],
+        history_unit_ids: list[int],
+    ) -> None:
+        """
+        Validate parameters for `multisubmit` method
+
+        Args:
+            task_type: Task type.
+            list_parameters: List of parameters dictionaries.
+            list_task_files:
+            history_unit_ids:
+        """
+        if task_type not in TASK_TYPES_MULTISUBMIT:
+            raise ValueError(f"Invalid {task_type=} for `multisubmit`.")
+
+        if not isinstance(list_parameters, list):
+            raise ValueError("`parameters` must be a list.")
+
+        if len(list_parameters) != len(list_task_files):
+            raise ValueError(
+                f"{len(list_task_files)=} differs from "
+                f"{len(list_parameters)=}."
+            )
+        if len(history_unit_ids) != len(list_parameters):
+            raise ValueError(
+                f"{len(history_unit_ids)=} differs from "
+                f"{len(list_parameters)=}."
+            )
+
+        subfolders = set(
+            task_file.wftask_subfolder_local for task_file in list_task_files
+        )
+        if len(subfolders) != 1:
+            raise ValueError(f"More than one subfolders: {subfolders}.")
+
+        for single_kwargs in list_parameters:
+            if not isinstance(single_kwargs, dict):
+                raise ValueError("kwargs itemt must be a dictionary.")
+            if "zarr_url" not in single_kwargs.keys():
+                raise ValueError(
+                    f"No 'zarr_url' key in in {list(single_kwargs.keys())}"
+                )
+        if task_type == "parallel":
+            zarr_urls = [kwargs["zarr_url"] for kwargs in list_parameters]
+            if len(zarr_urls) != len(set(zarr_urls)):
+                raise ValueError("Non-unique zarr_urls")
fractal_server/app/runner/{v2/_local/_local_config.py → executors/local/get_local_config.py}
RENAMED
@@ -20,9 +20,9 @@ from pydantic import BaseModel
 from pydantic import ConfigDict
 from pydantic import ValidationError
 
-from …
-from …
-from …
+from fractal_server.app.models.v2 import WorkflowTaskV2
+from fractal_server.config import get_settings
+from fractal_server.syringe import Inject
@@ -47,18 +47,16 @@ class LocalBackendConfig(BaseModel):
     model_config = ConfigDict(extra="forbid")
     parallel_tasks_per_job: Optional[int] = None
 
-
-def get_default_local_backend_config():
-    """
-    Return a default `LocalBackendConfig` configuration object
-    """
-    return LocalBackendConfig(parallel_tasks_per_job=None)
+    @property
+    def batch_size(self) -> int:
+        return self.parallel_tasks_per_job or 1
 
 
 def get_local_backend_config(
     wftask: WorkflowTaskV2,
     which_type: Literal["non_parallel", "parallel"],
     config_path: Optional[Path] = None,
+    tot_tasks: int = 1,
 ) -> LocalBackendConfig:
     """
     Prepare a `LocalBackendConfig` configuration object