fractal-server 2.8.1__py3-none-any.whl → 2.9.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fractal_server/__init__.py +1 -1
- fractal_server/app/db/__init__.py +2 -35
- fractal_server/app/models/v2/__init__.py +3 -3
- fractal_server/app/models/v2/task.py +0 -72
- fractal_server/app/models/v2/task_group.py +113 -0
- fractal_server/app/routes/admin/v1.py +13 -30
- fractal_server/app/routes/admin/v2/__init__.py +4 -0
- fractal_server/app/routes/admin/v2/job.py +13 -24
- fractal_server/app/routes/admin/v2/task.py +13 -0
- fractal_server/app/routes/admin/v2/task_group.py +75 -14
- fractal_server/app/routes/admin/v2/task_group_lifecycle.py +267 -0
- fractal_server/app/routes/api/v1/project.py +7 -19
- fractal_server/app/routes/api/v2/__init__.py +11 -2
- fractal_server/app/routes/api/v2/{_aux_functions_task_collection.py → _aux_functions_task_lifecycle.py} +83 -0
- fractal_server/app/routes/api/v2/_aux_functions_tasks.py +27 -17
- fractal_server/app/routes/api/v2/submit.py +19 -24
- fractal_server/app/routes/api/v2/task_collection.py +33 -65
- fractal_server/app/routes/api/v2/task_collection_custom.py +3 -3
- fractal_server/app/routes/api/v2/task_group.py +86 -14
- fractal_server/app/routes/api/v2/task_group_lifecycle.py +272 -0
- fractal_server/app/routes/api/v2/workflow.py +1 -1
- fractal_server/app/routes/api/v2/workflow_import.py +2 -2
- fractal_server/app/routes/auth/current_user.py +60 -17
- fractal_server/app/routes/auth/group.py +67 -39
- fractal_server/app/routes/auth/users.py +97 -99
- fractal_server/app/routes/aux/__init__.py +20 -0
- fractal_server/app/runner/executors/slurm/_slurm_config.py +0 -17
- fractal_server/app/runner/executors/slurm/ssh/executor.py +49 -204
- fractal_server/app/runner/executors/slurm/sudo/executor.py +26 -109
- fractal_server/app/runner/executors/slurm/utils_executors.py +58 -0
- fractal_server/app/runner/v2/_local_experimental/executor.py +2 -1
- fractal_server/app/schemas/_validators.py +0 -15
- fractal_server/app/schemas/user.py +16 -10
- fractal_server/app/schemas/user_group.py +0 -11
- fractal_server/app/schemas/v1/applyworkflow.py +0 -8
- fractal_server/app/schemas/v1/dataset.py +0 -5
- fractal_server/app/schemas/v1/project.py +0 -5
- fractal_server/app/schemas/v1/state.py +0 -5
- fractal_server/app/schemas/v1/workflow.py +0 -5
- fractal_server/app/schemas/v2/__init__.py +4 -2
- fractal_server/app/schemas/v2/dataset.py +0 -6
- fractal_server/app/schemas/v2/job.py +0 -8
- fractal_server/app/schemas/v2/project.py +0 -5
- fractal_server/app/schemas/v2/task_collection.py +0 -21
- fractal_server/app/schemas/v2/task_group.py +59 -8
- fractal_server/app/schemas/v2/workflow.py +0 -5
- fractal_server/app/security/__init__.py +17 -0
- fractal_server/config.py +61 -59
- fractal_server/migrations/versions/d256a7379ab8_taskgroup_activity_and_venv_info_to_.py +117 -0
- fractal_server/ssh/_fabric.py +156 -83
- fractal_server/tasks/utils.py +2 -12
- fractal_server/tasks/v2/local/__init__.py +3 -0
- fractal_server/tasks/v2/local/_utils.py +70 -0
- fractal_server/tasks/v2/local/collect.py +291 -0
- fractal_server/tasks/v2/local/deactivate.py +218 -0
- fractal_server/tasks/v2/local/reactivate.py +159 -0
- fractal_server/tasks/v2/ssh/__init__.py +3 -0
- fractal_server/tasks/v2/ssh/_utils.py +87 -0
- fractal_server/tasks/v2/ssh/collect.py +311 -0
- fractal_server/tasks/v2/ssh/deactivate.py +253 -0
- fractal_server/tasks/v2/ssh/reactivate.py +202 -0
- fractal_server/tasks/v2/templates/{_2_preliminary_pip_operations.sh → 1_create_venv.sh} +6 -7
- fractal_server/tasks/v2/templates/{_3_pip_install.sh → 2_pip_install.sh} +8 -1
- fractal_server/tasks/v2/templates/{_4_pip_freeze.sh → 3_pip_freeze.sh} +0 -7
- fractal_server/tasks/v2/templates/{_5_pip_show.sh → 4_pip_show.sh} +5 -6
- fractal_server/tasks/v2/templates/5_get_venv_size_and_file_number.sh +10 -0
- fractal_server/tasks/v2/templates/6_pip_install_from_freeze.sh +35 -0
- fractal_server/tasks/v2/utils_background.py +42 -127
- fractal_server/tasks/v2/utils_templates.py +32 -2
- fractal_server/utils.py +4 -2
- fractal_server/zip_tools.py +21 -4
- {fractal_server-2.8.1.dist-info → fractal_server-2.9.0.dist-info}/METADATA +3 -5
- {fractal_server-2.8.1.dist-info → fractal_server-2.9.0.dist-info}/RECORD +77 -64
- fractal_server/app/models/v2/collection_state.py +0 -22
- fractal_server/tasks/v2/collection_local.py +0 -357
- fractal_server/tasks/v2/collection_ssh.py +0 -352
- fractal_server/tasks/v2/templates/_1_create_venv.sh +0 -42
- /fractal_server/tasks/v2/{database_operations.py → utils_database.py} +0 -0
- {fractal_server-2.8.1.dist-info → fractal_server-2.9.0.dist-info}/LICENSE +0 -0
- {fractal_server-2.8.1.dist-info → fractal_server-2.9.0.dist-info}/WHEEL +0 -0
- {fractal_server-2.8.1.dist-info → fractal_server-2.9.0.dist-info}/entry_points.txt +0 -0
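The thread running through this release is a reworked task-group lifecycle: the old one-off collection-state records are replaced by `TaskGroupActivityV2` rows that track collect, deactivate, and reactivate operations, with dedicated background-task modules under `fractal_server/tasks/v2/local/` and `fractal_server/tasks/v2/ssh/`. The sketch below is not copied from the package; it only illustrates the rough shape of the status/action enums that the hunks below keep referring to (member values are assumptions; only ONGOING, OK, FAILED, COLLECT, and REACTIVATE literally appear in this diff).

from enum import Enum


class TaskGroupActivityStatusV2(str, Enum):
    # PENDING is assumed as the initial state; the other members appear in the hunks below.
    PENDING = "pending"
    ONGOING = "ongoing"
    FAILED = "failed"
    OK = "OK"


class TaskGroupActivityActionV2(str, Enum):
    COLLECT = "collect"        # used by tasks/v2/{local,ssh}/collect.py
    DEACTIVATE = "deactivate"  # used by tasks/v2/{local,ssh}/deactivate.py
    REACTIVATE = "reactivate"  # used by tasks/v2/{local,ssh}/reactivate.py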
fractal_server/tasks/v2/ssh/reactivate.py
ADDED
@@ -0,0 +1,202 @@
+import logging
+import time
+from pathlib import Path
+from tempfile import TemporaryDirectory
+
+from ..utils_background import add_commit_refresh
+from ..utils_background import fail_and_cleanup
+from ..utils_templates import get_collection_replacements
+from ._utils import _customize_and_run_template
+from fractal_server.app.db import get_sync_db
+from fractal_server.app.models.v2 import TaskGroupActivityV2
+from fractal_server.app.models.v2 import TaskGroupV2
+from fractal_server.app.schemas.v2 import TaskGroupActivityActionV2
+from fractal_server.app.schemas.v2.task_group import TaskGroupActivityStatusV2
+from fractal_server.logger import set_logger
+from fractal_server.ssh._fabric import FractalSSH
+from fractal_server.tasks.utils import get_log_path
+from fractal_server.tasks.v2.utils_background import get_current_log
+from fractal_server.tasks.v2.utils_python_interpreter import (
+    get_python_interpreter_v2,
+)
+from fractal_server.tasks.v2.utils_templates import SCRIPTS_SUBFOLDER
+from fractal_server.utils import get_timestamp
+
+LOGGER_NAME = __name__
+
+
+def reactivate_ssh(
+    *,
+    task_group_activity_id: int,
+    task_group_id: int,
+    fractal_ssh: FractalSSH,
+    tasks_base_dir: str,
+) -> None:
+    """
+    Reactivate a task group venv.
+
+    This function is run as a background task, therefore exceptions must be
+    handled.
+
+    Arguments:
+        task_group_id:
+        task_group_activity_id:
+        fractal_ssh:
+        tasks_base_dir:
+            Only used as a `safe_root` in `remove_dir`, and typically set to
+            `user_settings.ssh_tasks_dir`.
+    """
+
+    with TemporaryDirectory() as tmpdir:
+        log_file_path = get_log_path(Path(tmpdir))
+        logger = set_logger(
+            logger_name=LOGGER_NAME,
+            log_file_path=log_file_path,
+        )
+
+        with next(get_sync_db()) as db:
+
+            # Get main objects from db
+            activity = db.get(TaskGroupActivityV2, task_group_activity_id)
+            task_group = db.get(TaskGroupV2, task_group_id)
+            if activity is None or task_group is None:
+                # Use `logging` directly
+                logging.error(
+                    "Cannot find database rows with "
+                    f"{task_group_id=} and {task_group_activity_id=}:\n"
+                    f"{task_group=}\n{activity=}. Exit."
+                )
+                return
+
+            # Log some info
+            logger.debug("START")
+            for key, value in task_group.model_dump().items():
+                logger.debug(f"task_group.{key}: {value}")
+
+            # Check that SSH connection works
+            try:
+                fractal_ssh.check_connection()
+            except Exception as e:
+                logger.error("Cannot establish SSH connection.")
+                fail_and_cleanup(
+                    task_group=task_group,
+                    task_group_activity=activity,
+                    logger_name=LOGGER_NAME,
+                    log_file_path=log_file_path,
+                    exception=e,
+                    db=db,
+                )
+                return
+
+            # Check that the (remote) task_group venv_path does not exist
+            if fractal_ssh.remote_exists(task_group.venv_path):
+                error_msg = f"{task_group.venv_path} already exists."
+                logger.error(error_msg)
+                fail_and_cleanup(
+                    task_group=task_group,
+                    task_group_activity=activity,
+                    logger_name=LOGGER_NAME,
+                    log_file_path=log_file_path,
+                    exception=FileExistsError(error_msg),
+                    db=db,
+                )
+                return
+
+            try:
+                activity.status = TaskGroupActivityStatusV2.ONGOING
+                activity = add_commit_refresh(obj=activity, db=db)
+
+                # Prepare replacements for templates
+                replacements = get_collection_replacements(
+                    task_group=task_group,
+                    python_bin=get_python_interpreter_v2(
+                        python_version=task_group.python_version
+                    ),
+                )
+
+                # Prepare replacements for templates
+                pip_freeze_file_local = f"{tmpdir}/pip_freeze.txt"
+                pip_freeze_file_remote = (
+                    Path(task_group.path) / "_tmp_pip_freeze.txt"
+                ).as_posix()
+                with open(pip_freeze_file_local, "w") as f:
+                    f.write(task_group.pip_freeze)
+                fractal_ssh.send_file(
+                    local=pip_freeze_file_local, remote=pip_freeze_file_remote
+                )
+                replacements.append(
+                    ("__PIP_FREEZE_FILE__", pip_freeze_file_remote)
+                )
+
+                # Define script_dir_remote and create it if missing
+                script_dir_remote = (
+                    Path(task_group.path) / SCRIPTS_SUBFOLDER
+                ).as_posix()
+                fractal_ssh.mkdir(folder=script_dir_remote, parents=True)
+
+                # Prepare common arguments for `_customize_and_run_template`
+                common_args = dict(
+                    replacements=replacements,
+                    script_dir_local=(
+                        Path(tmpdir) / SCRIPTS_SUBFOLDER
+                    ).as_posix(),
+                    script_dir_remote=script_dir_remote,
+                    prefix=(
+                        f"{int(time.time())}_"
+                        f"{TaskGroupActivityActionV2.REACTIVATE}"
+                    ),
+                    fractal_ssh=fractal_ssh,
+                    logger_name=LOGGER_NAME,
+                )
+
+                # Create remote directory for scripts
+                fractal_ssh.mkdir(folder=script_dir_remote)
+
+                logger.debug("start - create venv")
+                _customize_and_run_template(
+                    template_filename="1_create_venv.sh",
+                    **common_args,
+                )
+                logger.debug("end - create venv")
+                activity.log = get_current_log(log_file_path)
+                activity.timestamp_ended = get_timestamp()
+                activity = add_commit_refresh(obj=activity, db=db)
+
+                logger.debug("start - install from pip freeze")
+                _customize_and_run_template(
+                    template_filename="6_pip_install_from_freeze.sh",
+                    **common_args,
+                )
+                logger.debug("end - install from pip freeze")
+                activity.log = get_current_log(log_file_path)
+                activity.status = TaskGroupActivityStatusV2.OK
+                activity.timestamp_ended = get_timestamp()
+                activity = add_commit_refresh(obj=activity, db=db)
+                task_group.active = True
+                task_group = add_commit_refresh(obj=task_group, db=db)
+                logger.debug("END")
+
+            except Exception as reactivate_e:
+                # Delete corrupted venv_path
+                try:
+                    logger.info(f"Now delete folder {task_group.venv_path}")
+                    fractal_ssh.remove_folder(
+                        folder=task_group.venv_path,
+                        safe_root=tasks_base_dir,
+                    )
+                    logger.info(f"Deleted folder {task_group.venv_path}")
+                except Exception as rm_e:
+                    logger.error(
+                        "Removing folder failed.\n"
+                        f"Original error:\n{str(rm_e)}"
+                    )
+
+                fail_and_cleanup(
+                    task_group=task_group,
+                    task_group_activity=activity,
+                    logger_name=LOGGER_NAME,
+                    log_file_path=log_file_path,
+                    exception=reactivate_e,
+                    db=db,
+                )
+                return
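For context, `reactivate_ssh` is meant to be scheduled from the new lifecycle endpoints (see `fractal_server/app/routes/api/v2/task_group_lifecycle.py` in the file list above), which this diff does not expand. The snippet below is only a hedged sketch of that dispatch pattern using FastAPI's `BackgroundTasks`; the helper name and argument wiring are assumptions, not the package's actual route code.

from fastapi import BackgroundTasks

from fractal_server.ssh._fabric import FractalSSH
from fractal_server.tasks.v2.ssh.reactivate import reactivate_ssh


def schedule_reactivation(  # hypothetical helper, not part of fractal-server
    *,
    background_tasks: BackgroundTasks,
    task_group_id: int,
    task_group_activity_id: int,
    fractal_ssh: FractalSSH,
    tasks_base_dir: str,
) -> None:
    # The worker receives only ids plus the SSH handle, and re-loads the
    # database rows itself (see `reactivate_ssh` above).
    background_tasks.add_task(
        reactivate_ssh,
        task_group_activity_id=task_group_activity_id,
        task_group_id=task_group_id,
        fractal_ssh=fractal_ssh,
        tasks_base_dir=tasks_base_dir,
    )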
fractal_server/tasks/v2/templates/{_2_preliminary_pip_operations.sh → 1_create_venv.sh}
RENAMED
@@ -5,18 +5,17 @@ write_log(){
     echo "[collect-task, $TIMESTAMP] $1"
 }
 
+
 # Variables to be filled within fractal-server
 PACKAGE_ENV_DIR=__PACKAGE_ENV_DIR__
+PYTHON=__PYTHON__
 
 TIME_START=$(date +%s)
 
-
-
-
-write_log "
-"$VENVPYTHON" -m pip install --no-cache-dir "pip<=__FRACTAL_MAX_PIP_VERSION__" --upgrade
-"$VENVPYTHON" -m pip install --no-cache-dir setuptools
-write_log "END upgrade pip"
+# Create venv
+write_log "START create venv in ${PACKAGE_ENV_DIR}"
+"$PYTHON" -m venv "$PACKAGE_ENV_DIR" --copies
+write_log "END create venv in ${PACKAGE_ENV_DIR}"
 echo
 
 # End
fractal_server/tasks/v2/templates/{_3_pip_install.sh → 2_pip_install.sh}
RENAMED
@@ -5,16 +5,23 @@ write_log(){
     echo "[collect-task, $TIMESTAMP] $1"
 }
 
-
 # Variables to be filled within fractal-server
 PACKAGE_ENV_DIR=__PACKAGE_ENV_DIR__
 INSTALL_STRING=__INSTALL_STRING__
 PINNED_PACKAGE_LIST="__PINNED_PACKAGE_LIST__"
+FRACTAL_MAX_PIP_VERSION="__FRACTAL_MAX_PIP_VERSION__"
 
 TIME_START=$(date +%s)
 
 VENVPYTHON=${PACKAGE_ENV_DIR}/bin/python
 
+# Upgrade `pip` and install `setuptools`
+write_log "START upgrade pip and install setuptools"
+"$VENVPYTHON" -m pip install --no-cache-dir "pip<=${FRACTAL_MAX_PIP_VERSION}" --upgrade
+"$VENVPYTHON" -m pip install --no-cache-dir setuptools
+write_log "END upgrade pip and install setuptools"
+echo
+
 # Install package
 write_log "START install ${INSTALL_STRING}"
 "$VENVPYTHON" -m pip install --no-cache-dir "$INSTALL_STRING"
fractal_server/tasks/v2/templates/{_5_pip_show.sh → 4_pip_show.sh}
RENAMED
@@ -11,7 +11,6 @@ PACKAGE_ENV_DIR=__PACKAGE_ENV_DIR__
 PACKAGE_NAME=__PACKAGE_NAME__
 
 
-
 TIME_START=$(date +%s)
 
 VENVPYTHON=${PACKAGE_ENV_DIR}/bin/python
@@ -38,16 +37,16 @@ echo
 MANIFEST_RELATIVE_PATH=$($VENVPYTHON -m pip show "$PACKAGE_NAME" --files | grep "__FRACTAL_MANIFEST__.json" | tr -d "[:space:]")
 write_log "Manifest relative path: $MANIFEST_RELATIVE_PATH"
 echo
-
-write_log "Manifest absolute path: $MANIFEST_ABSOLUTE_PATH"
-echo
-if [ -f "$MANIFEST_ABSOLUTE_PATH" ]; then
+if [ "$MANIFEST_RELATIVE_PATH" != "" ]; then
     write_log "OK: manifest path exists"
     echo
 else
-    write_log "ERROR: manifest path not found"
+    write_log "ERROR: manifest path not found for $PACKAGE_NAME"
     exit 2
 fi
+MANIFEST_ABSOLUTE_PATH="${PACKAGE_PARENT_FOLDER}/${MANIFEST_RELATIVE_PATH}"
+write_log "Manifest absolute path: $MANIFEST_ABSOLUTE_PATH"
+echo
 
 # End
 TIME_END=$(date +%s)
fractal_server/tasks/v2/templates/5_get_venv_size_and_file_number.sh
ADDED
@@ -0,0 +1,10 @@
+set -e
+
+# Variables to be filled within fractal-server
+PACKAGE_ENV_DIR=__PACKAGE_ENV_DIR__
+
+# Find memory usage and file number
+ENV_DISK_USAGE=$(du -sk "${PACKAGE_ENV_DIR}" | cut -f1)
+ENV_FILE_NUMBER=$(find "${PACKAGE_ENV_DIR}" -type f | wc -l)
+
+echo $ENV_DISK_USAGE $ENV_FILE_NUMBER
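The script prints exactly two numbers on one line: disk usage in kB (from `du -sk`) and the file count. Below is a minimal sketch of how a caller could parse that output; the actual consumer lives in the collection modules (e.g. `fractal_server/tasks/v2/ssh/collect.py`), which this diff does not expand, so the helper name is hypothetical.

def parse_venv_size_and_file_number(stdout: str) -> tuple[int, int]:
    # The script echoes "<disk_usage_kB> <file_number>" on a single line.
    venv_size_kb, venv_file_number = stdout.strip().split()
    return int(venv_size_kb), int(venv_file_number)


# Example: "123456 7890\n" -> (123456, 7890)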
fractal_server/tasks/v2/templates/6_pip_install_from_freeze.sh
ADDED
@@ -0,0 +1,35 @@
+set -e
+
+write_log(){
+    TIMESTAMP=$(date -u +"%Y-%m-%dT%H:%M:%SZ")
+    echo "[collect-task, $TIMESTAMP] $1"
+}
+
+# Variables to be filled within fractal-server
+PACKAGE_ENV_DIR=__PACKAGE_ENV_DIR__
+PIP_FREEZE_FILE=__PIP_FREEZE_FILE__
+FRACTAL_MAX_PIP_VERSION=__FRACTAL_MAX_PIP_VERSION__
+
+TIME_START=$(date +%s)
+
+VENVPYTHON=${PACKAGE_ENV_DIR}/bin/python
+
+# Upgrade `pip` and install `setuptools`
+write_log "START upgrade pip and install setuptools"
+"$VENVPYTHON" -m pip install --no-cache-dir "pip<=${FRACTAL_MAX_PIP_VERSION}" --upgrade
+"$VENVPYTHON" -m pip install --no-cache-dir setuptools
+write_log "END upgrade pip and install setuptools"
+echo
+
+# Install from pip-freeze file
+write_log "START installing requirements from ${PIP_FREEZE_FILE}"
+"$VENVPYTHON" -m pip install -r "${PIP_FREEZE_FILE}"
+write_log "END installing requirements from ${PIP_FREEZE_FILE}"
+echo
+
+# End
+TIME_END=$(date +%s)
+write_log "All good up to here."
+write_log "Elapsed: $((TIME_END - TIME_START)) seconds"
+write_log "Exit."
+echo
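This template is what makes reactivation reproducible: instead of re-resolving the package, it reinstalls the exact versions captured at collection time. The `__PIP_FREEZE_FILE__` placeholder is filled with a file written from `TaskGroupV2.pip_freeze` (see `reactivate_ssh` above). The content below is a made-up example of what such a freeze file looks like, not data from the package.

# Hypothetical example of the `pip freeze` text stored on a task group and
# written to __PIP_FREEZE_FILE__ before this script runs (names/versions made up).
EXAMPLE_PIP_FREEZE = """\
fractal-tasks-core==1.3.1
numpy==1.26.4
zarr==2.18.2
"""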
fractal_server/tasks/v2/utils_background.py
CHANGED
@@ -1,121 +1,70 @@
 from pathlib import Path
 from typing import Optional
+from typing import TypeVar
 
 from sqlalchemy.orm import Session as DBSyncSession
-from sqlalchemy.orm.attributes import flag_modified
 from sqlmodel import select
 
-from fractal_server.app.models.v2 import CollectionStateV2
+from fractal_server.app.models.v2 import TaskGroupActivityV2
 from fractal_server.app.models.v2 import TaskGroupV2
-from fractal_server.app.schemas.v2 import CollectionStatusV2
 from fractal_server.app.schemas.v2 import TaskCreateV2
+from fractal_server.app.schemas.v2 import TaskGroupActivityStatusV2
 from fractal_server.app.schemas.v2.manifest import ManifestV2
+from fractal_server.app.schemas.v2.task_group import TaskGroupActivityActionV2
 from fractal_server.logger import get_logger
 from fractal_server.logger import reset_logger_handlers
+from fractal_server.utils import get_timestamp
 
+T = TypeVar("T")
 
-def _set_collection_state_data_status(
-    *,
-    state_id: int,
-    new_status: CollectionStatusV2,
-    logger_name: str,
-    db: DBSyncSession,
-):
-    logger = get_logger(logger_name)
-    logger.debug(f"{state_id=} - set state.data['status'] to {new_status}")
-    collection_state = db.get(CollectionStateV2, state_id)
-    collection_state.data["status"] = CollectionStatusV2(new_status)
-    flag_modified(collection_state, "data")
-    db.commit()
 
-
-def _set_collection_state_data_log(
-    *,
-    state_id: int,
-    new_log: str,
-    logger_name: str,
-    db: DBSyncSession,
-):
-    logger = get_logger(logger_name)
-    logger.debug(f"{state_id=} - set state.data['log']")
-    collection_state = db.get(CollectionStateV2, state_id)
-    collection_state.data["log"] = new_log
-    flag_modified(collection_state, "data")
-    db.commit()
-
-
-def _set_collection_state_data_info(
-    *,
-    state_id: int,
-    new_info: str,
-    logger_name: str,
-    db: DBSyncSession,
-):
-    logger = get_logger(logger_name)
-    logger.debug(f"{state_id=} - set state.data['info']")
-    collection_state = db.get(CollectionStateV2, state_id)
-    collection_state.data["info"] = new_info
-    flag_modified(collection_state, "data")
+def add_commit_refresh(*, obj: T, db: DBSyncSession) -> T:
+    db.add(obj)
     db.commit()
+    db.refresh(obj)
+    return obj
 
 
-def
-
+def fail_and_cleanup(
+    task_group: TaskGroupV2,
+    task_group_activity: TaskGroupActivityV2,
     logger_name: str,
     exception: Exception,
-    db: DBSyncSession,
-    task_group_id: int,
     log_file_path: Path,
+    db: DBSyncSession,
 ):
     logger = get_logger(logger_name)
-    logger.error(
-
-
-        state_id=state_id,
-        new_status=CollectionStatusV2.FAIL,
-        logger_name=logger_name,
-        db=db,
+    logger.error(
+        f"Task {task_group_activity.action} failed. "
+        f"Original error: {str(exception)}"
     )
 
-
-
-
-
-
-
-        db=db,
-    )
-    # For backwards-compatibility, we also set state.data["info"]
-    _set_collection_state_data_info(
-        state_id=state_id,
-        new_info=f"Original error: {exception}",
-        logger_name=logger_name,
-        db=db,
-    )
+    task_group_activity.status = TaskGroupActivityStatusV2.FAILED
+    task_group_activity.timestamp_ended = get_timestamp()
+    task_group_activity.log = get_current_log(log_file_path)
+    task_group_activity = add_commit_refresh(obj=task_group_activity, db=db)
+    if task_group_activity.action == TaskGroupActivityActionV2.COLLECT:
+        logger.info(f"Now delete TaskGroupV2 with {task_group.id=}")
 
-
-
-
-    stm = select(CollectionStateV2).where(
-        CollectionStateV2.taskgroupv2_id == task_group_id
-    )
-    res = db.execute(stm)
-    collection_states = res.scalars().all()
-    for collection_state in collection_states:
-        logger.info(
-            f"Setting CollectionStateV2[{collection_state.id}].taskgroupv2_id "
-            "to None."
+        logger.info("Start of TaskGroupActivityV2 cascade operations.")
+        stm = select(TaskGroupActivityV2).where(
+            TaskGroupActivityV2.taskgroupv2_id == task_group.id
         )
-
-
-
-
-
-
-
+        res = db.execute(stm)
+        task_group_activity_list = res.scalars().all()
+        for task_group_activity in task_group_activity_list:
+            logger.info(
+                f"Setting TaskGroupActivityV2[{task_group_activity.id}]"
+                ".taskgroupv2_id to None."
+            )
+            task_group_activity.taskgroupv2_id = None
+            db.add(task_group_activity)
+        logger.info("End of TaskGroupActivityV2 cascade operations.")
+        logger.info(f"TaskGroupV2 with {task_group.id=} deleted")
 
+        db.delete(task_group)
+        db.commit()
     reset_logger_handlers(logger)
-    return
 
 
 def _prepare_tasks_metadata(
@@ -170,40 +119,6 @@ def _prepare_tasks_metadata(
     return task_list
 
 
-def
-""
-
-
-    Args:
-        task_list:
-    """
-    for _task in task_list:
-        if _task.command_non_parallel is not None:
-            _task_path = _task.command_non_parallel.split()[1]
-            if not Path(_task_path).exists():
-                raise FileNotFoundError(
-                    f"Task `{_task.name}` has `command_non_parallel` "
-                    f"pointing to missing file `{_task_path}`."
-                )
-        if _task.command_parallel is not None:
-            _task_path = _task.command_parallel.split()[1]
-            if not Path(_task_path).exists():
-                raise FileNotFoundError(
-                    f"Task `{_task.name}` has `command_parallel` "
-                    f"pointing to missing file `{_task_path}`."
-                )
-
-
-def _refresh_logs(
-    *,
-    state_id: int,
-    log_file_path: Path,
-    db: DBSyncSession,
-) -> None:
-    """
-    Read logs from file and update them in the db.
-    """
-    collection_state = db.get(CollectionStateV2, state_id)
-    collection_state.data["log"] = log_file_path.open("r").read()
-    flag_modified(collection_state, "data")
-    db.commit()
+def get_current_log(logger_file_path: str) -> str:
+    with open(logger_file_path, "r") as f:
+        return f.read()
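Taken together, `add_commit_refresh`, `fail_and_cleanup`, and `get_current_log` define the pattern that each lifecycle background task follows (compare `reactivate_ssh` above). The function below is a condensed sketch of that pattern for illustration only; it does not exist in the package, and `do_work` stands in for the template-running steps.

def run_lifecycle_activity(*, activity, task_group, log_file_path, db, do_work) -> None:
    try:
        activity.status = TaskGroupActivityStatusV2.ONGOING
        activity = add_commit_refresh(obj=activity, db=db)

        do_work()  # e.g. run the customized shell templates

        activity.log = get_current_log(log_file_path)
        activity.status = TaskGroupActivityStatusV2.OK
        activity.timestamp_ended = get_timestamp()
        activity = add_commit_refresh(obj=activity, db=db)
    except Exception as e:
        # On failure, fail_and_cleanup marks the activity as FAILED, stores the
        # log and, for COLLECT actions, deletes the task group.
        fail_and_cleanup(
            task_group=task_group,
            task_group_activity=activity,
            logger_name=__name__,
            log_file_path=log_file_path,
            exception=e,
            db=db,
        )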
fractal_server/tasks/v2/utils_templates.py
CHANGED
@@ -1,7 +1,13 @@
 from pathlib import Path
 
+from fractal_server.app.models.v2 import TaskGroupV2
+from fractal_server.config import get_settings
+from fractal_server.syringe import Inject
+
 TEMPLATES_DIR = Path(__file__).parent / "templates"
 
+SCRIPTS_SUBFOLDER = "scripts"
+
 
 def customize_template(
     *,
@@ -25,14 +31,16 @@ def customize_template(
     script_data = template_data
     for old_new in replacements:
         script_data = script_data.replace(old_new[0], old_new[1])
+    # Create parent folder if needed
+    Path(script_path).parent.mkdir(exist_ok=True)
     # Write script locally
     with open(script_path, "w") as f:
         f.write(script_data)
 
 
-def parse_script_5_stdout(stdout: str) -> dict[str, str]:
+def parse_script_pip_show_stdout(stdout: str) -> dict[str, str]:
     """
-    Parse standard output of
+    Parse standard output of 4_pip_show.sh
     """
     searches = [
         ("Python interpreter:", "python_bin"),
@@ -57,3 +65,25 @@ def parse_script_5_stdout(stdout: str) -> dict[str, str]:
         attribute_value = actual_line.split(search)[-1].strip(" ")
         attributes[attribute_name] = attribute_value
     return attributes
+
+
+def get_collection_replacements(
+    *, task_group: TaskGroupV2, python_bin: str
+) -> dict[str, str]:
+    settings = Inject(get_settings)
+
+    replacements = [
+        ("__PACKAGE_NAME__", task_group.pkg_name),
+        ("__PACKAGE_ENV_DIR__", task_group.venv_path),
+        ("__PYTHON__", python_bin),
+        ("__INSTALL_STRING__", task_group.pip_install_string),
+        (
+            "__FRACTAL_MAX_PIP_VERSION__",
+            settings.FRACTAL_MAX_PIP_VERSION,
+        ),
+        (
+            "__PINNED_PACKAGE_LIST__",
+            task_group.pinned_package_versions_string,
+        ),
+    ]
+    return replacements
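`get_collection_replacements` produces the placeholder/value pairs that `customize_template` substitutes into the shell templates shown earlier. Below is a minimal sketch of that round trip; the real glue is `_customize_and_run_template` in `tasks/v2/local/_utils.py` and `tasks/v2/ssh/_utils.py`, whose signature this diff does not show, and the helper name here is hypothetical.

def render_reactivate_install_script(
    task_group,                    # a TaskGroupV2 row
    python_bin: str,               # e.g. from get_python_interpreter_v2()
    pip_freeze_file_remote: str,   # remote copy of task_group.pip_freeze
) -> str:
    replacements = get_collection_replacements(
        task_group=task_group,
        python_bin=python_bin,
    )
    replacements.append(("__PIP_FREEZE_FILE__", pip_freeze_file_remote))

    # Same substitution loop that customize_template applies before writing
    # the script to disk.
    script_data = (TEMPLATES_DIR / "6_pip_install_from_freeze.sh").read_text()
    for old, new in replacements:
        script_data = script_data.replace(old, new)
    return script_data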
fractal_server/utils.py
CHANGED
@@ -108,8 +108,10 @@ def execute_command_sync(
     stdout = res.stdout
     stderr = res.stderr
     logger.debug(f"{returncode=}")
-    logger.debug(
-    logger.debug(
+    logger.debug("STDOUT:")
+    logger.debug(stdout)
+    logger.debug("STDERR:")
+    logger.debug(stderr)
     if res.returncode != 0:
         logger.debug(f"ERROR in subprocess call to '{command}'")
         raise RuntimeError(
fractal_server/zip_tools.py
CHANGED
@@ -122,9 +122,26 @@ def _zip_folder_to_file_and_remove(folder: str) -> None:
     3. Checks if the folder can be safely deleted using the
        `_folder_can_be_deleted` function. If so, deletes the original folder.
     """
-
-
+
+    tmp_zipfile = f"{folder}_tmp.zip"
+    zipfile = f"{folder}.zip"
+
+    try:
+        logger.info(f"Start creating temporary zip file at '{tmp_zipfile}'.")
+        _create_zip(folder, tmp_zipfile)
+        logger.info("Zip file created.")
+    except Exception as e:
+        logger.error(
+            f"Error while creating temporary zip file. Original error: '{e}'."
+        )
+        Path(tmp_zipfile).unlink(missing_ok=True)
+        return
+
+    logger.info(f"Moving temporary zip file to {zipfile}.")
+    shutil.move(tmp_zipfile, zipfile)
+    logger.info("Zip file moved.")
+
     if _folder_can_be_deleted(folder):
-        logger.info(f"
+        logger.info(f"Removing folder '{folder}'.")
         shutil.rmtree(folder)
-        logger.info(
+        logger.info("Folder removed.")
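The rewritten helper now stages the archive as `<folder>_tmp.zip`, moves it to `<folder>.zip` only after zipping succeeds, and removes the source folder only if `_folder_can_be_deleted` allows it. A hedged usage sketch (the folder path below is made up):

from fractal_server.zip_tools import _zip_folder_to_file_and_remove

_zip_folder_to_file_and_remove("/data/fractal/artifacts/job_0042")
# On success this leaves /data/fractal/artifacts/job_0042.zip next to the
# original folder and, if the folder passes _folder_can_be_deleted, deletes it.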