fractal-server 2.14.15__py3-none-any.whl → 2.15.0a0__py3-none-any.whl
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- fractal_server/__init__.py +1 -1
- fractal_server/app/models/v2/history.py +2 -0
- fractal_server/app/models/v2/task_group.py +17 -5
- fractal_server/app/routes/admin/v2/task_group_lifecycle.py +2 -2
- fractal_server/app/routes/api/v2/__init__.py +6 -0
- fractal_server/app/routes/api/v2/history.py +2 -2
- fractal_server/app/routes/api/v2/pre_submission_checks.py +3 -3
- fractal_server/app/routes/api/v2/task_collection.py +3 -3
- fractal_server/app/routes/api/v2/task_collection_custom.py +2 -2
- fractal_server/app/routes/api/v2/task_collection_pixi.py +236 -0
- fractal_server/app/routes/api/v2/task_group_lifecycle.py +8 -3
- fractal_server/app/runner/executors/slurm_ssh/runner.py +3 -1
- fractal_server/app/runner/v2/runner.py +2 -2
- fractal_server/app/schemas/v2/__init__.py +2 -1
- fractal_server/app/schemas/v2/dumps.py +1 -1
- fractal_server/app/schemas/v2/task_collection.py +1 -1
- fractal_server/app/schemas/v2/task_group.py +16 -5
- fractal_server/config.py +42 -0
- fractal_server/images/status_tools.py +80 -75
- fractal_server/migrations/versions/791ce783d3d8_add_indices.py +41 -0
- fractal_server/migrations/versions/b1e7f7a1ff71_task_group_for_pixi.py +53 -0
- fractal_server/ssh/_fabric.py +3 -0
- fractal_server/tasks/v2/local/__init__.py +2 -0
- fractal_server/tasks/v2/local/_utils.py +7 -2
- fractal_server/tasks/v2/local/collect.py +14 -12
- fractal_server/tasks/v2/local/collect_pixi.py +222 -0
- fractal_server/tasks/v2/local/deactivate.py +29 -25
- fractal_server/tasks/v2/local/deactivate_pixi.py +110 -0
- fractal_server/tasks/v2/local/reactivate.py +1 -1
- fractal_server/tasks/v2/ssh/__init__.py +1 -0
- fractal_server/tasks/v2/ssh/_utils.py +5 -5
- fractal_server/tasks/v2/ssh/collect.py +16 -15
- fractal_server/tasks/v2/ssh/collect_pixi.py +296 -0
- fractal_server/tasks/v2/ssh/deactivate.py +32 -31
- fractal_server/tasks/v2/ssh/reactivate.py +1 -1
- fractal_server/tasks/v2/templates/pixi_1_collect.sh +70 -0
- fractal_server/tasks/v2/utils_background.py +37 -9
- fractal_server/tasks/v2/utils_pixi.py +36 -0
- {fractal_server-2.14.15.dist-info → fractal_server-2.15.0a0.dist-info}/METADATA +4 -4
- {fractal_server-2.14.15.dist-info → fractal_server-2.15.0a0.dist-info}/RECORD +43 -35
- {fractal_server-2.14.15.dist-info → fractal_server-2.15.0a0.dist-info}/LICENSE +0 -0
- {fractal_server-2.14.15.dist-info → fractal_server-2.15.0a0.dist-info}/WHEEL +0 -0
- {fractal_server-2.14.15.dist-info → fractal_server-2.15.0a0.dist-info}/entry_points.txt +0 -0
fractal_server/tasks/v2/local/collect_pixi.py (new file)

```diff
@@ -0,0 +1,222 @@
+import json
+import logging
+import shutil
+import time
+from pathlib import Path
+from tempfile import TemporaryDirectory
+
+from ..utils_database import create_db_tasks_and_update_task_group_sync
+from ..utils_pixi import parse_collect_stdout
+from ..utils_pixi import SOURCE_DIR_NAME
+from fractal_server.app.db import get_sync_db
+from fractal_server.app.models.v2 import TaskGroupActivityV2
+from fractal_server.app.models.v2 import TaskGroupV2
+from fractal_server.app.schemas.v2 import FractalUploadedFile
+from fractal_server.app.schemas.v2 import TaskGroupActivityActionV2
+from fractal_server.app.schemas.v2 import TaskGroupActivityStatusV2
+from fractal_server.app.schemas.v2.manifest import ManifestV2
+from fractal_server.config import get_settings
+from fractal_server.logger import reset_logger_handlers
+from fractal_server.logger import set_logger
+from fractal_server.syringe import Inject
+from fractal_server.tasks.utils import get_log_path
+from fractal_server.tasks.v2.local._utils import _customize_and_run_template
+from fractal_server.tasks.v2.local._utils import check_task_files_exist
+from fractal_server.tasks.v2.utils_background import add_commit_refresh
+from fractal_server.tasks.v2.utils_background import fail_and_cleanup
+from fractal_server.tasks.v2.utils_background import get_current_log
+from fractal_server.tasks.v2.utils_background import prepare_tasks_metadata
+from fractal_server.tasks.v2.utils_templates import SCRIPTS_SUBFOLDER
+from fractal_server.utils import get_timestamp
+
+
+def collect_local_pixi(
+    *,
+    task_group_activity_id: int,
+    task_group_id: int,
+    tar_gz_file: FractalUploadedFile,
+) -> None:
+    settings = Inject(get_settings)
+
+    LOGGER_NAME = f"{__name__}.ID{task_group_activity_id}"
+
+    with TemporaryDirectory() as tmpdir:
+        log_file_path = get_log_path(Path(tmpdir))
+        logger = set_logger(
+            logger_name=LOGGER_NAME,
+            log_file_path=log_file_path,
+        )
+
+        with next(get_sync_db()) as db:
+            activity = db.get(TaskGroupActivityV2, task_group_activity_id)
+            task_group = db.get(TaskGroupV2, task_group_id)
+            if activity is None or task_group is None:
+                logging.error(
+                    "Cannot find database rows with "
+                    f"{task_group_id=} and {task_group_activity_id=}:\n"
+                    f"{task_group=}\n{activity=}. Exit."
+                )
+                return
+
+            logger.info("START")
+            for key, value in task_group.model_dump().items():
+                logger.debug(f"task_group.{key}: {value}")
+
+            if Path(task_group.path).exists():
+                error_msg = f"{task_group.path} already exists."
+                logger.error(error_msg)
+                fail_and_cleanup(
+                    task_group=task_group,
+                    task_group_activity=activity,
+                    logger_name=LOGGER_NAME,
+                    log_file_path=log_file_path,
+                    exception=FileExistsError(error_msg),
+                    db=db,
+                )
+                return
+
+            # Set `pixi_bin` and check that it exists
+            pixi_home = settings.pixi.versions[task_group.pixi_version]
+            pixi_bin = Path(pixi_home, "bin/pixi").as_posix()
+
+            try:
+                Path(task_group.path).mkdir(parents=True)
+                logger.info(f"Created {task_group.path}")
+                archive_path = Path(
+                    task_group.path, tar_gz_file.filename
+                ).as_posix()
+                logger.info(f"Write tar.gz-file contents into {archive_path}.")
+                with open(archive_path, "wb") as f:
+                    f.write(tar_gz_file.contents)
+                task_group.archive_path = archive_path
+                task_group = add_commit_refresh(obj=task_group, db=db)
+
+                replacements = {
+                    ("__PIXI_HOME__", pixi_home),
+                    ("__PACKAGE_DIR__", task_group.path),
+                    ("__TAR_GZ_PATH__", archive_path),
+                    (
+                        "__IMPORT_PACKAGE_NAME__",
+                        task_group.pkg_name.replace("-", "_"),
+                    ),
+                    ("__SOURCE_DIR_NAME__", SOURCE_DIR_NAME),
+                }
+
+                activity.status = TaskGroupActivityStatusV2.ONGOING
+                activity.log = get_current_log(log_file_path)
+                activity = add_commit_refresh(obj=activity, db=db)
+
+                stdout = _customize_and_run_template(
+                    template_filename="pixi_1_collect.sh",
+                    replacements=replacements,
+                    script_dir=Path(
+                        task_group.path, SCRIPTS_SUBFOLDER
+                    ).as_posix(),
+                    prefix=(
+                        f"{int(time.time())}_"
+                        f"{TaskGroupActivityActionV2.COLLECT}_"
+                    ),
+                    logger_name=LOGGER_NAME,
+                )
+                activity.log = get_current_log(log_file_path)
+                activity = add_commit_refresh(obj=activity, db=db)
+
+                # Parse stdout
+                parsed_output = parse_collect_stdout(stdout)
+                package_root = parsed_output["package_root"]
+                venv_size = parsed_output["venv_size"]
+                venv_file_number = parsed_output["venv_file_number"]
+
+                # Read and validate manifest
+                # NOTE: we are only supporting the manifest path being relative
+                # to the top-level folder
+                manifest_path = f"{package_root}/__FRACTAL_MANIFEST__.json"
+                with open(manifest_path) as json_data:
+                    pkg_manifest_dict = json.load(json_data)
+                logger.info(f"loaded {manifest_path=}")
+                logger.info("now validating manifest content")
+                pkg_manifest = ManifestV2(**pkg_manifest_dict)
+                logger.info("validated manifest content")
+                activity.log = get_current_log(log_file_path)
+                activity = add_commit_refresh(obj=activity, db=db)
+
+                logger.info("_prepare_tasks_metadata - start")
+                task_list = prepare_tasks_metadata(
+                    package_manifest=pkg_manifest,
+                    package_version=task_group.version,
+                    package_root=Path(package_root),
+                    pixi_bin=pixi_bin,
+                    pixi_manifest_path=(
+                        Path(
+                            task_group.path,
+                            SOURCE_DIR_NAME,
+                            "pyproject.toml",
+                        ).as_posix()
+                    ),
+                )
+                check_task_files_exist(task_list=task_list)
+                logger.info("_prepare_tasks_metadata - end")
+                activity.log = get_current_log(log_file_path)
+                activity = add_commit_refresh(obj=activity, db=db)
+
+                logger.info("create_db_tasks_and_update_task_group - start")
+                create_db_tasks_and_update_task_group_sync(
+                    task_list=task_list,
+                    task_group_id=task_group.id,
+                    db=db,
+                )
+                logger.info("create_db_tasks_and_update_task_group - end")
+
+                # Update task_group data
+                logger.info(
+                    "Add env_info, venv_size and venv_file_number "
+                    "to TaskGroupV2 - start"
+                )
+                with Path(
+                    task_group.path,
+                    SOURCE_DIR_NAME,
+                    "pixi.lock",
+                ).open() as f:
+                    pixi_lock_contents = f.read()
+
+                # NOTE: see issue 2626 about whether to keep `pixi.lock` files
+                # in the database
+                task_group.env_info = pixi_lock_contents
+                task_group.venv_size_in_kB = int(venv_size)
+                task_group.venv_file_number = int(venv_file_number)
+                task_group = add_commit_refresh(obj=task_group, db=db)
+                logger.info(
+                    "Add env_info, venv_size and venv_file_number "
+                    "to TaskGroupV2 - end"
+                )
+
+                # Finalize (write metadata to DB)
+                logger.info("finalising - START")
+                activity.status = TaskGroupActivityStatusV2.OK
+                activity.timestamp_ended = get_timestamp()
+                activity = add_commit_refresh(obj=activity, db=db)
+                logger.info("finalising - END")
+                logger.info("END")
+
+                reset_logger_handlers(logger)
+
+            except Exception as collection_e:
+                # Delete corrupted package dir
+                try:
+                    logger.info(f"Now delete folder {task_group.path}")
+                    shutil.rmtree(task_group.path)
+                    logger.info(f"Deleted folder {task_group.path}")
+                except Exception as rm_e:
+                    logger.error(
+                        "Removing folder failed.\n"
+                        f"Original error:\n{str(rm_e)}"
+                    )
+
+                fail_and_cleanup(
+                    task_group=task_group,
+                    task_group_activity=activity,
+                    logger_name=LOGGER_NAME,
+                    log_file_path=log_file_path,
+                    exception=collection_e,
+                    db=db,
+                )
```
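For orientation: `collect_local_pixi` relies on `parse_collect_stdout` (defined in the new `fractal_server/tasks/v2/utils_pixi.py`, whose body is not included in this excerpt) to extract `package_root`, `venv_size` and `venv_file_number` from the collection script's output. Below is a minimal sketch of such a parser; the marker strings are assumptions, since the lines actually printed by `pixi_1_collect.sh` are not shown in this diff.

```python
# Illustrative sketch only: the real parse_collect_stdout lives in
# fractal_server/tasks/v2/utils_pixi.py, and the marker strings below are
# assumptions, not the actual script output.
PREFIXES = {
    "package_root": "Package folder:",
    "venv_size": "Disk usage:",
    "venv_file_number": "Number of files:",
}


def parse_collect_stdout_sketch(stdout: str) -> dict[str, str]:
    """Extract key/value pairs from marker lines in the script's stdout."""
    parsed: dict[str, str] = {}
    for line in stdout.splitlines():
        for key, prefix in PREFIXES.items():
            if line.startswith(prefix):
                # Keep only the value after the marker, stripped of whitespace
                parsed[key] = line[len(prefix):].strip()
    return parsed
```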
fractal_server/tasks/v2/local/deactivate.py

```diff
@@ -87,10 +87,10 @@ def deactivate_local(
                 activity.status = TaskGroupActivityStatusV2.ONGOING
                 activity = add_commit_refresh(obj=activity, db=db)
 
-                if task_group.
+                if task_group.env_info is None:
                     logger.warning(
                         "Recreate pip-freeze information, since "
-                        f"{task_group.
+                        f"{task_group.env_info=}. NOTE: this should only "
                         "happen for task groups created before 2.9.0."
                     )
                     # Prepare replacements for templates
@@ -120,7 +120,7 @@ def deactivate_local(
                     logger.info("Add pip freeze stdout to TaskGroupV2 - start")
                     activity.log = get_current_log(log_file_path)
                     activity = add_commit_refresh(obj=activity, db=db)
-                    task_group.
+                    task_group.env_info = pip_freeze_stdout
                     task_group = add_commit_refresh(obj=task_group, db=db)
                     logger.info("Add pip freeze stdout to TaskGroupV2 - end")
 
@@ -131,15 +131,15 @@ def deactivate_local(
                         f"Handle specific cases for {task_group.origin=}."
                     )
 
-                    # Blocking situation: `
+                    # Blocking situation: `archive_path` is not set or points
                     # to a missing path
                     if (
-                        task_group.
-                        or not Path(task_group.
+                        task_group.archive_path is None
+                        or not Path(task_group.archive_path).exists()
                     ):
                         error_msg = (
                             "Invalid wheel path for task group with "
-                            f"{task_group_id=}. {task_group.
+                            f"{task_group_id=}. {task_group.archive_path=} is "
                             "unset or does not exist."
                         )
                         logger.error(error_msg)
@@ -153,48 +153,52 @@ def deactivate_local(
                         )
                         return
 
-                    # Recoverable situation: `
+                    # Recoverable situation: `archive_path` was not yet copied
                     # over to the correct server-side folder
-
-
+                    archive_path_parent_dir = Path(
+                        task_group.archive_path
+                    ).parent
+                    if archive_path_parent_dir != Path(task_group.path):
                         logger.warning(
-                            f"{
-                            f"{task_group.path}. NOTE: this should only "
+                            f"{archive_path_parent_dir.as_posix()} differs "
+                            f"from {task_group.path}. NOTE: this should only "
                             "happen for task groups created before 2.9.0."
                         )
 
-                        if task_group.
+                        if task_group.archive_path not in task_group.env_info:
                             raise ValueError(
-                                f"Cannot find {task_group.
+                                f"Cannot find {task_group.archive_path=} in "
                                 "pip-freeze data. Exit."
                             )
 
                         logger.info(
                             f"Now copy wheel file into {task_group.path}."
                         )
-
+                        new_archive_path = (
                             Path(task_group.path)
-                            / Path(task_group.
+                            / Path(task_group.archive_path).name
                         ).as_posix()
-                        shutil.copy(task_group.
-                        logger.info(
+                        shutil.copy(task_group.archive_path, new_archive_path)
+                        logger.info(
+                            f"Copied wheel file to {new_archive_path}."
+                        )
 
-                        task_group.
-                        new_pip_freeze = task_group.
-                        task_group.
-
+                        task_group.archive_path = new_archive_path
+                        new_pip_freeze = task_group.env_info.replace(
+                            task_group.archive_path,
+                            new_archive_path,
                         )
-                        task_group.
+                        task_group.env_info = new_pip_freeze
                         task_group = add_commit_refresh(obj=task_group, db=db)
                         logger.info(
-                            "Updated `
+                            "Updated `archive_path` and `env_info` "
                             "task-group attributes."
                         )
 
                         # Fail if `pip_freeze` includes "github.com", see
                         # https://github.com/fractal-analytics-platform/fractal-server/issues/2142
                         for forbidden_string in FORBIDDEN_DEPENDENCY_STRINGS:
-                            if forbidden_string in task_group.
+                            if forbidden_string in task_group.env_info:
                                 raise ValueError(
                                     "Deactivation and reactivation of task packages "
                                     f"with direct {forbidden_string} dependencies "
```
fractal_server/tasks/v2/local/deactivate_pixi.py (new file)

```diff
@@ -0,0 +1,110 @@
+import logging
+import shutil
+from pathlib import Path
+from tempfile import TemporaryDirectory
+
+from ..utils_background import add_commit_refresh
+from ..utils_background import fail_and_cleanup
+from ..utils_pixi import SOURCE_DIR_NAME
+from fractal_server.app.db import get_sync_db
+from fractal_server.app.models.v2 import TaskGroupActivityV2
+from fractal_server.app.models.v2 import TaskGroupV2
+from fractal_server.app.schemas.v2.task_group import TaskGroupActivityStatusV2
+from fractal_server.logger import reset_logger_handlers
+from fractal_server.logger import set_logger
+from fractal_server.tasks.utils import get_log_path
+from fractal_server.tasks.v2.utils_background import get_current_log
+from fractal_server.utils import get_timestamp
+
+
+def deactivate_local_pixi(
+    *,
+    task_group_activity_id: int,
+    task_group_id: int,
+) -> None:
+    """
+    Deactivate a task group venv.
+
+    This function is run as a background task, therefore exceptions must be
+    handled.
+
+    Arguments:
+        task_group_id:
+        task_group_activity_id:
+    """
+
+    LOGGER_NAME = f"{__name__}.ID{task_group_activity_id}"
+
+    with TemporaryDirectory() as tmpdir:
+        log_file_path = get_log_path(Path(tmpdir))
+        logger = set_logger(
+            logger_name=LOGGER_NAME,
+            log_file_path=log_file_path,
+        )
+
+        with next(get_sync_db()) as db:
+
+            # Get main objects from db
+            activity = db.get(TaskGroupActivityV2, task_group_activity_id)
+            task_group = db.get(TaskGroupV2, task_group_id)
+            if activity is None or task_group is None:
+                # Use `logging` directly
+                logging.error(
+                    "Cannot find database rows with "
+                    f"{task_group_id=} and {task_group_activity_id=}:\n"
+                    f"{task_group=}\n{activity=}. Exit."
+                )
+                return
+
+            # Log some info
+            logger.debug("START")
+
+            for key, value in task_group.model_dump().items():
+                logger.debug(f"task_group.{key}: {value}")
+
+            source_dir = Path(task_group.path, SOURCE_DIR_NAME)
+            # Check that the (local) task_group venv_path does exist
+            if not source_dir.exists():
+                error_msg = f"'{source_dir.as_posix()}' does not exist."
+                logger.error(error_msg)
+                fail_and_cleanup(
+                    task_group=task_group,
+                    task_group_activity=activity,
+                    logger_name=LOGGER_NAME,
+                    log_file_path=log_file_path,
+                    exception=FileNotFoundError(error_msg),
+                    db=db,
+                )
+                return
+
+            try:
+
+                activity.status = TaskGroupActivityStatusV2.ONGOING
+                activity = add_commit_refresh(obj=activity, db=db)
+
+                # Actually mark the task group as non-active
+                logger.info("Now setting `active=False`.")
+                task_group.active = False
+                task_group = add_commit_refresh(obj=task_group, db=db)
+
+                # Proceed with deactivation
+                logger.info(f"Now removing '{source_dir.as_posix()}'.")
+                shutil.rmtree(source_dir)
+                logger.info(f"All good, '{source_dir.as_posix()}' removed.")
+                activity.status = TaskGroupActivityStatusV2.OK
+                activity.log = get_current_log(log_file_path)
+                activity.timestamp_ended = get_timestamp()
+                activity = add_commit_refresh(obj=activity, db=db)
+
+                reset_logger_handlers(logger)
+
+            except Exception as e:
+                fail_and_cleanup(
+                    task_group=task_group,
+                    task_group_activity=activity,
+                    logger_name=LOGGER_NAME,
+                    log_file_path=log_file_path,
+                    exception=e,
+                    db=db,
+                )
+                return
```
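The docstring above notes that `deactivate_local_pixi` runs as a background task. Below is a minimal sketch of how such a function could be scheduled with FastAPI's `BackgroundTasks`; the route path, placeholder activity id, and wiring are hypothetical, since the real endpoint lives in `fractal_server/app/routes/api/v2/task_group_lifecycle.py` and is not shown in this excerpt.

```python
# Hypothetical wiring sketch: the real endpoint is defined in
# fractal_server/app/routes/api/v2/task_group_lifecycle.py (not shown here).
from fastapi import APIRouter, BackgroundTasks

from fractal_server.tasks.v2.local.deactivate_pixi import deactivate_local_pixi

router = APIRouter()


@router.post("/task-group/{task_group_id}/deactivate/")
async def deactivate_task_group(
    task_group_id: int,
    background_tasks: BackgroundTasks,
):
    # In the real code a TaskGroupActivityV2 row is created first and its id
    # is passed along; activity_id=1 here is only a placeholder.
    background_tasks.add_task(
        deactivate_local_pixi,
        task_group_activity_id=1,
        task_group_id=task_group_id,
    )
    return {"status": "submitted"}
```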
fractal_server/tasks/v2/ssh/_utils.py

```diff
@@ -69,16 +69,16 @@ def _copy_wheel_file_ssh(
     *, task_group: TaskGroupV2, fractal_ssh: FractalSSH, logger_name: str
 ) -> str:
     """
-    Handle the situation where `task_group.
-    `task_group.path`, by copying `
+    Handle the situation where `task_group.archive_path` is not part of
+    `task_group.path`, by copying `archive_path` into `path`.
 
     Returns:
-        The new `
+        The new `archive_path`.
     """
     logger = get_logger(logger_name=logger_name)
-    source = task_group.
+    source = task_group.archive_path
     dest = (
-        Path(task_group.path) / Path(task_group.
+        Path(task_group.path) / Path(task_group.archive_path).name
     ).as_posix()
     cmd = f"cp {source} {dest}"
     logger.debug(f"[_copy_wheel_file] START {source=} {dest=}")
```
fractal_server/tasks/v2/ssh/collect.py

```diff
@@ -4,15 +4,15 @@ from pathlib import Path
 from tempfile import TemporaryDirectory
 
 from ....ssh._fabric import SingleUseFractalSSH
-from ..utils_background import _prepare_tasks_metadata
 from ..utils_background import fail_and_cleanup
+from ..utils_background import prepare_tasks_metadata
 from ..utils_database import create_db_tasks_and_update_task_group_sync
 from fractal_server.app.db import get_sync_db
 from fractal_server.app.models.v2 import TaskGroupActivityV2
 from fractal_server.app.models.v2 import TaskGroupV2
+from fractal_server.app.schemas.v2 import FractalUploadedFile
 from fractal_server.app.schemas.v2 import TaskGroupActivityActionV2
 from fractal_server.app.schemas.v2 import TaskGroupActivityStatusV2
-from fractal_server.app.schemas.v2 import WheelFile
 from fractal_server.app.schemas.v2.manifest import ManifestV2
 from fractal_server.logger import reset_logger_handlers
 from fractal_server.logger import set_logger
@@ -38,7 +38,7 @@ def collect_ssh(
     task_group_activity_id: int,
     ssh_config: SSHConfig,
     tasks_base_dir: str,
-    wheel_file:
+    wheel_file: FractalUploadedFile | None = None,
 ) -> None:
     """
     Collect a task package over SSH
@@ -135,25 +135,26 @@ def collect_ssh(
             fractal_ssh.mkdir(folder=script_dir_remote, parents=True)
 
             # Write wheel file locally and send it to remote path,
-            # and set task_group.
+            # and set task_group.archive_path
             if wheel_file is not None:
                 wheel_filename = wheel_file.filename
-
+                archive_path = (
                     Path(task_group.path) / wheel_filename
                 ).as_posix()
-
+                tmp_archive_path = (
                     Path(tmpdir) / wheel_filename
                 ).as_posix()
                 logger.info(
-
+                    "Write wheel-file contents into "
+                    f"{tmp_archive_path}"
                 )
-                with open(
+                with open(tmp_archive_path, "wb") as f:
                     f.write(wheel_file.contents)
                 fractal_ssh.send_file(
-                    local=
-                    remote=
+                    local=tmp_archive_path,
+                    remote=archive_path,
                 )
-                task_group.
+                task_group.archive_path = archive_path
                 task_group = add_commit_refresh(obj=task_group, db=db)
 
             replacements = get_collection_replacements(
@@ -263,7 +264,7 @@ def collect_ssh(
             logger.info("Manifest is a valid ManifestV2")
 
             logger.info("_prepare_tasks_metadata - start")
-            task_list =
+            task_list = prepare_tasks_metadata(
                 package_manifest=pkg_manifest,
                 package_version=task_group.version,
                 package_root=Path(package_root_remote),
@@ -283,15 +284,15 @@ def collect_ssh(
 
             # Update task_group data
             logger.info(
-                "Add
+                "Add env_info, venv_size and venv_file_number "
                 "to TaskGroupV2 - start"
             )
-            task_group.
+            task_group.env_info = pip_freeze_stdout
             task_group.venv_size_in_kB = int(venv_size)
             task_group.venv_file_number = int(venv_file_number)
             task_group = add_commit_refresh(obj=task_group, db=db)
             logger.info(
-                "Add
+                "Add env_info, venv_size and venv_file_number "
                 "to TaskGroupV2 - end"
            )
 
```
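These `collect_ssh` hunks replace the `WheelFile` schema with the more general `FractalUploadedFile`, which, as used above and in `collect_local_pixi`, carries a `filename` and raw `contents` bytes. Below is a minimal sketch of what such a Pydantic model could look like, inferred only from that usage; any field beyond `filename` and `contents` is an assumption.

```python
# Sketch inferred from usage above: only `filename` and `contents` are
# exercised in this diff; other fields of the real schema are unknown here.
from pathlib import Path

from pydantic import BaseModel


class FractalUploadedFileSketch(BaseModel):
    """Uploaded archive (wheel or tar.gz) handed to a background collection."""

    filename: str
    contents: bytes


def load_upload(path: str) -> FractalUploadedFileSketch:
    """Wrap an on-disk archive so it can be passed like `tar_gz_file` above."""
    data = Path(path).read_bytes()
    return FractalUploadedFileSketch(filename=Path(path).name, contents=data)
```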