fractal-server 2.14.15__py3-none-any.whl → 2.15.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fractal_server/__init__.py +1 -1
- fractal_server/app/models/security.py +2 -2
- fractal_server/app/models/user_settings.py +2 -2
- fractal_server/app/models/v2/dataset.py +3 -3
- fractal_server/app/models/v2/history.py +2 -0
- fractal_server/app/models/v2/job.py +6 -6
- fractal_server/app/models/v2/task.py +12 -8
- fractal_server/app/models/v2/task_group.py +19 -7
- fractal_server/app/models/v2/workflowtask.py +6 -6
- fractal_server/app/routes/admin/v2/task_group_lifecycle.py +2 -5
- fractal_server/app/routes/api/v2/__init__.py +6 -0
- fractal_server/app/routes/api/v2/_aux_functions_tasks.py +22 -0
- fractal_server/app/routes/api/v2/history.py +2 -2
- fractal_server/app/routes/api/v2/pre_submission_checks.py +3 -3
- fractal_server/app/routes/api/v2/task_collection.py +8 -18
- fractal_server/app/routes/api/v2/task_collection_custom.py +2 -2
- fractal_server/app/routes/api/v2/task_collection_pixi.py +219 -0
- fractal_server/app/routes/api/v2/task_group.py +3 -0
- fractal_server/app/routes/api/v2/task_group_lifecycle.py +26 -10
- fractal_server/app/runner/executors/slurm_common/_slurm_config.py +10 -0
- fractal_server/app/runner/executors/slurm_common/base_slurm_runner.py +39 -14
- fractal_server/app/runner/executors/slurm_common/get_slurm_config.py +8 -1
- fractal_server/app/runner/executors/slurm_ssh/runner.py +3 -1
- fractal_server/app/runner/v2/runner.py +2 -2
- fractal_server/app/schemas/v2/__init__.py +1 -1
- fractal_server/app/schemas/v2/dumps.py +1 -1
- fractal_server/app/schemas/v2/task_collection.py +1 -1
- fractal_server/app/schemas/v2/task_group.py +7 -5
- fractal_server/config.py +70 -0
- fractal_server/images/status_tools.py +80 -75
- fractal_server/migrations/versions/791ce783d3d8_add_indices.py +41 -0
- fractal_server/migrations/versions/b1e7f7a1ff71_task_group_for_pixi.py +53 -0
- fractal_server/migrations/versions/b3ffb095f973_json_to_jsonb.py +340 -0
- fractal_server/ssh/_fabric.py +29 -0
- fractal_server/tasks/v2/local/__init__.py +3 -0
- fractal_server/tasks/v2/local/_utils.py +4 -3
- fractal_server/tasks/v2/local/collect.py +26 -30
- fractal_server/tasks/v2/local/collect_pixi.py +252 -0
- fractal_server/tasks/v2/local/deactivate.py +39 -46
- fractal_server/tasks/v2/local/deactivate_pixi.py +98 -0
- fractal_server/tasks/v2/local/reactivate.py +12 -23
- fractal_server/tasks/v2/local/reactivate_pixi.py +184 -0
- fractal_server/tasks/v2/ssh/__init__.py +3 -0
- fractal_server/tasks/v2/ssh/_utils.py +50 -9
- fractal_server/tasks/v2/ssh/collect.py +46 -56
- fractal_server/tasks/v2/ssh/collect_pixi.py +315 -0
- fractal_server/tasks/v2/ssh/deactivate.py +54 -67
- fractal_server/tasks/v2/ssh/deactivate_pixi.py +122 -0
- fractal_server/tasks/v2/ssh/reactivate.py +25 -38
- fractal_server/tasks/v2/ssh/reactivate_pixi.py +233 -0
- fractal_server/tasks/v2/templates/pixi_1_extract.sh +40 -0
- fractal_server/tasks/v2/templates/pixi_2_install.sh +52 -0
- fractal_server/tasks/v2/templates/pixi_3_post_install.sh +76 -0
- fractal_server/tasks/v2/utils_background.py +50 -8
- fractal_server/tasks/v2/utils_pixi.py +38 -0
- fractal_server/tasks/v2/utils_templates.py +14 -1
- {fractal_server-2.14.15.dist-info → fractal_server-2.15.0.dist-info}/METADATA +4 -4
- {fractal_server-2.14.15.dist-info → fractal_server-2.15.0.dist-info}/RECORD +61 -47
- {fractal_server-2.14.15.dist-info → fractal_server-2.15.0.dist-info}/LICENSE +0 -0
- {fractal_server-2.14.15.dist-info → fractal_server-2.15.0.dist-info}/WHEEL +0 -0
- {fractal_server-2.14.15.dist-info → fractal_server-2.15.0.dist-info}/entry_points.txt +0 -0
"""
Background task that collects (installs) a pixi-based task package locally.
"""
import json
import shutil
import time
from pathlib import Path
from tempfile import TemporaryDirectory

from ..utils_database import create_db_tasks_and_update_task_group_sync
from ..utils_pixi import parse_collect_stdout
from ..utils_pixi import SOURCE_DIR_NAME
from fractal_server.app.db import get_sync_db
from fractal_server.app.schemas.v2 import FractalUploadedFile
from fractal_server.app.schemas.v2 import TaskGroupActivityActionV2
from fractal_server.app.schemas.v2 import TaskGroupActivityStatusV2
from fractal_server.app.schemas.v2.manifest import ManifestV2
from fractal_server.config import get_settings
from fractal_server.logger import reset_logger_handlers
from fractal_server.logger import set_logger
from fractal_server.syringe import Inject
from fractal_server.tasks.utils import get_log_path
from fractal_server.tasks.v2.local._utils import _customize_and_run_template
from fractal_server.tasks.v2.local._utils import check_task_files_exist
from fractal_server.tasks.v2.utils_background import add_commit_refresh
from fractal_server.tasks.v2.utils_background import fail_and_cleanup
from fractal_server.tasks.v2.utils_background import (
    get_activity_and_task_group,
)
from fractal_server.tasks.v2.utils_background import get_current_log
from fractal_server.tasks.v2.utils_background import prepare_tasks_metadata
from fractal_server.tasks.v2.utils_templates import SCRIPTS_SUBFOLDER
from fractal_server.utils import execute_command_sync
from fractal_server.utils import get_timestamp


def collect_local_pixi(
    *,
    task_group_activity_id: int,
    task_group_id: int,
    tar_gz_file: FractalUploadedFile,
) -> None:
    """
    Collect a pixi task package on the local host.

    Runs as a background task, so all exceptions are handled here
    (via `fail_and_cleanup`) rather than propagated.

    Arguments:
        task_group_activity_id: ID of the `TaskGroupActivityV2` row to update.
        task_group_id: ID of the `TaskGroupV2` row being collected.
        tar_gz_file: In-memory tar.gz archive of the task package.
    """
    settings = Inject(get_settings)

    LOGGER_NAME = f"{__name__}.ID{task_group_activity_id}"

    with TemporaryDirectory() as tmpdir:
        # Per-collection log file, lives only for the duration of this call;
        # its contents are periodically copied into `activity.log`.
        log_file_path = get_log_path(Path(tmpdir))
        logger = set_logger(
            logger_name=LOGGER_NAME,
            log_file_path=log_file_path,
        )

        logger.info("START")
        with next(get_sync_db()) as db:
            db_objects_ok, task_group, activity = get_activity_and_task_group(
                task_group_activity_id=task_group_activity_id,
                task_group_id=task_group_id,
                db=db,
                logger_name=LOGGER_NAME,
            )
            if not db_objects_ok:
                # Rows missing/inconsistent; helper already logged the reason.
                return

            if Path(task_group.path).exists():
                # We handle this before the try/except to avoid the rmtree
                error_msg = f"{task_group.path} already exists."
                logger.error(error_msg)
                fail_and_cleanup(
                    task_group=task_group,
                    task_group_activity=activity,
                    logger_name=LOGGER_NAME,
                    log_file_path=log_file_path,
                    exception=FileExistsError(error_msg),
                    db=db,
                )
                return

            try:
                # Create the task-group folder and write the uploaded archive
                # into it; record `archive_path` in the DB right away.
                Path(task_group.path).mkdir(parents=True)
                logger.info(f"Created {task_group.path}")
                archive_path = Path(
                    task_group.path, tar_gz_file.filename
                ).as_posix()
                logger.info(f"Write tar.gz-file contents into {archive_path}.")
                with open(archive_path, "wb") as f:
                    f.write(tar_gz_file.contents)
                task_group.archive_path = archive_path
                task_group = add_commit_refresh(obj=task_group, db=db)

                # Placeholder replacements shared by the three pixi shell
                # templates (extract / install / post-install).
                common_args = dict(
                    replacements={
                        (
                            "__PIXI_HOME__",
                            settings.pixi.versions[task_group.pixi_version],
                        ),
                        ("__PACKAGE_DIR__", task_group.path),
                        ("__TAR_GZ_PATH__", archive_path),
                        (
                            "__IMPORT_PACKAGE_NAME__",
                            task_group.pkg_name.replace("-", "_"),
                        ),
                        ("__SOURCE_DIR_NAME__", SOURCE_DIR_NAME),
                        ("__FROZEN_OPTION__", ""),
                        (
                            "__TOKIO_WORKER_THREADS__",
                            str(settings.pixi.TOKIO_WORKER_THREADS),
                        ),
                        (
                            "__PIXI_CONCURRENT_SOLVES__",
                            str(settings.pixi.PIXI_CONCURRENT_SOLVES),
                        ),
                        (
                            "__PIXI_CONCURRENT_DOWNLOADS__",
                            str(settings.pixi.PIXI_CONCURRENT_DOWNLOADS),
                        ),
                    },
                    script_dir=Path(
                        task_group.path, SCRIPTS_SUBFOLDER
                    ).as_posix(),
                    prefix=(
                        f"{int(time.time())}_"
                        f"{TaskGroupActivityActionV2.COLLECT}"
                    ),
                    logger_name=LOGGER_NAME,
                )

                activity.status = TaskGroupActivityStatusV2.ONGOING
                activity.log = get_current_log(log_file_path)
                activity = add_commit_refresh(obj=activity, db=db)

                # Run script 1
                _customize_and_run_template(
                    template_filename="pixi_1_extract.sh",
                    **common_args,
                )
                activity.log = get_current_log(log_file_path)
                activity = add_commit_refresh(obj=activity, db=db)

                # Run script 2
                _customize_and_run_template(
                    template_filename="pixi_2_install.sh",
                    **common_args,
                )
                activity.log = get_current_log(log_file_path)
                activity = add_commit_refresh(obj=activity, db=db)

                # Run script 3
                stdout = _customize_and_run_template(
                    template_filename="pixi_3_post_install.sh",
                    **common_args,
                )
                activity.log = get_current_log(log_file_path)
                activity = add_commit_refresh(obj=activity, db=db)

                # Parse stdout
                # (post-install script prints key:value lines consumed here)
                parsed_output = parse_collect_stdout(stdout)
                package_root = parsed_output["package_root"]
                venv_size = parsed_output["venv_size"]
                venv_file_number = parsed_output["venv_file_number"]
                project_python_wrapper = parsed_output[
                    "project_python_wrapper"
                ]

                # Make task folder 755
                source_dir = Path(task_group.path, SOURCE_DIR_NAME).as_posix()
                command = f"chmod 755 {source_dir} -R"
                execute_command_sync(
                    command=command,
                    logger_name=LOGGER_NAME,
                )

                # Read and validate manifest
                # NOTE: we are only supporting the manifest path being relative
                # to the top-level folder
                manifest_path = f"{package_root}/__FRACTAL_MANIFEST__.json"
                with open(manifest_path) as json_data:
                    pkg_manifest_dict = json.load(json_data)
                logger.info(f"loaded {manifest_path=}")
                logger.info("now validating manifest content")
                pkg_manifest = ManifestV2(**pkg_manifest_dict)
                logger.info("validated manifest content")
                activity.log = get_current_log(log_file_path)
                activity = add_commit_refresh(obj=activity, db=db)

                logger.info("_prepare_tasks_metadata - start")
                task_list = prepare_tasks_metadata(
                    package_manifest=pkg_manifest,
                    package_version=task_group.version,
                    package_root=Path(package_root),
                    project_python_wrapper=Path(project_python_wrapper),
                )
                # Fail early if any declared task file is missing on disk.
                check_task_files_exist(task_list=task_list)
                logger.info("_prepare_tasks_metadata - end")
                activity.log = get_current_log(log_file_path)
                activity = add_commit_refresh(obj=activity, db=db)

                logger.info("create_db_tasks_and_update_task_group - start")
                create_db_tasks_and_update_task_group_sync(
                    task_list=task_list,
                    task_group_id=task_group.id,
                    db=db,
                )
                logger.info("create_db_tasks_and_update_task_group - end")

                # Update task_group data
                logger.info(
                    "Add env_info, venv_size and venv_file_number "
                    "to TaskGroupV2 - start"
                )
                with Path(
                    task_group.path,
                    SOURCE_DIR_NAME,
                    "pixi.lock",
                ).open() as f:
                    pixi_lock_contents = f.read()

                # NOTE: see issue 2626 about whether to keep `pixi.lock` files
                # in the database
                task_group.env_info = pixi_lock_contents
                task_group.venv_size_in_kB = int(venv_size)
                task_group.venv_file_number = int(venv_file_number)
                task_group = add_commit_refresh(obj=task_group, db=db)
                logger.info(
                    "Add env_info, venv_size and venv_file_number "
                    "to TaskGroupV2 - end"
                )

                # Finalize (write metadata to DB)
                logger.info("finalising - START")
                activity.status = TaskGroupActivityStatusV2.OK
                activity.timestamp_ended = get_timestamp()
                activity = add_commit_refresh(obj=activity, db=db)
                logger.info("finalising - END")
                logger.info("END")

                reset_logger_handlers(logger)

            except Exception as collection_e:
                # Best-effort removal of the partially-created folder; a
                # failure here must not mask the original collection error.
                try:
                    logger.info(f"Now delete folder {task_group.path}")
                    shutil.rmtree(task_group.path)
                    logger.info(f"Deleted folder {task_group.path}")
                except Exception as rm_e:
                    logger.error(
                        f"Removing folder failed. Original error: {str(rm_e)}"
                    )
                fail_and_cleanup(
                    task_group=task_group,
                    task_group_activity=activity,
                    logger_name=LOGGER_NAME,
                    log_file_path=log_file_path,
                    exception=collection_e,
                    db=db,
                )
@@ -1,4 +1,3 @@
|
|
1
|
-
import logging
|
2
1
|
import shutil
|
3
2
|
import time
|
4
3
|
from pathlib import Path
|
@@ -6,11 +5,10 @@ from tempfile import TemporaryDirectory
|
|
6
5
|
|
7
6
|
from ..utils_background import add_commit_refresh
|
8
7
|
from ..utils_background import fail_and_cleanup
|
8
|
+
from ..utils_background import get_activity_and_task_group
|
9
9
|
from ..utils_templates import get_collection_replacements
|
10
10
|
from ._utils import _customize_and_run_template
|
11
11
|
from fractal_server.app.db import get_sync_db
|
12
|
-
from fractal_server.app.models.v2 import TaskGroupActivityV2
|
13
|
-
from fractal_server.app.models.v2 import TaskGroupV2
|
14
12
|
from fractal_server.app.schemas.v2 import TaskGroupActivityActionV2
|
15
13
|
from fractal_server.app.schemas.v2 import TaskGroupV2OriginEnum
|
16
14
|
from fractal_server.app.schemas.v2.task_group import TaskGroupActivityStatusV2
|
@@ -48,26 +46,17 @@ def deactivate_local(
|
|
48
46
|
log_file_path=log_file_path,
|
49
47
|
)
|
50
48
|
|
49
|
+
logger.debug("START")
|
51
50
|
with next(get_sync_db()) as db:
|
52
|
-
|
53
|
-
|
54
|
-
|
55
|
-
|
56
|
-
|
57
|
-
|
58
|
-
|
59
|
-
"Cannot find database rows with "
|
60
|
-
f"{task_group_id=} and {task_group_activity_id=}:\n"
|
61
|
-
f"{task_group=}\n{activity=}. Exit."
|
62
|
-
)
|
51
|
+
db_objects_ok, task_group, activity = get_activity_and_task_group(
|
52
|
+
task_group_activity_id=task_group_activity_id,
|
53
|
+
task_group_id=task_group_id,
|
54
|
+
db=db,
|
55
|
+
logger_name=LOGGER_NAME,
|
56
|
+
)
|
57
|
+
if not db_objects_ok:
|
63
58
|
return
|
64
59
|
|
65
|
-
# Log some info
|
66
|
-
logger.debug("START")
|
67
|
-
|
68
|
-
for key, value in task_group.model_dump().items():
|
69
|
-
logger.debug(f"task_group.{key}: {value}")
|
70
|
-
|
71
60
|
# Check that the (local) task_group venv_path does exist
|
72
61
|
if not Path(task_group.venv_path).exists():
|
73
62
|
error_msg = f"{task_group.venv_path} does not exist."
|
@@ -87,10 +76,10 @@ def deactivate_local(
|
|
87
76
|
activity.status = TaskGroupActivityStatusV2.ONGOING
|
88
77
|
activity = add_commit_refresh(obj=activity, db=db)
|
89
78
|
|
90
|
-
if task_group.
|
79
|
+
if task_group.env_info is None:
|
91
80
|
logger.warning(
|
92
81
|
"Recreate pip-freeze information, since "
|
93
|
-
f"{task_group.
|
82
|
+
f"{task_group.env_info=}. NOTE: this should only "
|
94
83
|
"happen for task groups created before 2.9.0."
|
95
84
|
)
|
96
85
|
# Prepare replacements for templates
|
@@ -107,7 +96,7 @@ def deactivate_local(
|
|
107
96
|
).as_posix(),
|
108
97
|
prefix=(
|
109
98
|
f"{int(time.time())}_"
|
110
|
-
f"{TaskGroupActivityActionV2.DEACTIVATE}
|
99
|
+
f"{TaskGroupActivityActionV2.DEACTIVATE}"
|
111
100
|
),
|
112
101
|
logger_name=LOGGER_NAME,
|
113
102
|
)
|
@@ -120,7 +109,7 @@ def deactivate_local(
|
|
120
109
|
logger.info("Add pip freeze stdout to TaskGroupV2 - start")
|
121
110
|
activity.log = get_current_log(log_file_path)
|
122
111
|
activity = add_commit_refresh(obj=activity, db=db)
|
123
|
-
task_group.
|
112
|
+
task_group.env_info = pip_freeze_stdout
|
124
113
|
task_group = add_commit_refresh(obj=task_group, db=db)
|
125
114
|
logger.info("Add pip freeze stdout to TaskGroupV2 - end")
|
126
115
|
|
@@ -131,15 +120,15 @@ def deactivate_local(
|
|
131
120
|
f"Handle specific cases for {task_group.origin=}."
|
132
121
|
)
|
133
122
|
|
134
|
-
# Blocking situation: `
|
123
|
+
# Blocking situation: `archive_path` is not set or points
|
135
124
|
# to a missing path
|
136
125
|
if (
|
137
|
-
task_group.
|
138
|
-
or not Path(task_group.
|
126
|
+
task_group.archive_path is None
|
127
|
+
or not Path(task_group.archive_path).exists()
|
139
128
|
):
|
140
129
|
error_msg = (
|
141
130
|
"Invalid wheel path for task group with "
|
142
|
-
f"{task_group_id=}. {task_group.
|
131
|
+
f"{task_group_id=}. {task_group.archive_path=} is "
|
143
132
|
"unset or does not exist."
|
144
133
|
)
|
145
134
|
logger.error(error_msg)
|
@@ -153,48 +142,52 @@ def deactivate_local(
|
|
153
142
|
)
|
154
143
|
return
|
155
144
|
|
156
|
-
# Recoverable situation: `
|
145
|
+
# Recoverable situation: `archive_path` was not yet copied
|
157
146
|
# over to the correct server-side folder
|
158
|
-
|
159
|
-
|
147
|
+
archive_path_parent_dir = Path(
|
148
|
+
task_group.archive_path
|
149
|
+
).parent
|
150
|
+
if archive_path_parent_dir != Path(task_group.path):
|
160
151
|
logger.warning(
|
161
|
-
f"{
|
162
|
-
f"{task_group.path}. NOTE: this should only "
|
152
|
+
f"{archive_path_parent_dir.as_posix()} differs "
|
153
|
+
f"from {task_group.path}. NOTE: this should only "
|
163
154
|
"happen for task groups created before 2.9.0."
|
164
155
|
)
|
165
156
|
|
166
|
-
if task_group.
|
157
|
+
if task_group.archive_path not in task_group.env_info:
|
167
158
|
raise ValueError(
|
168
|
-
f"Cannot find {task_group.
|
159
|
+
f"Cannot find {task_group.archive_path=} in "
|
169
160
|
"pip-freeze data. Exit."
|
170
161
|
)
|
171
162
|
|
172
163
|
logger.info(
|
173
164
|
f"Now copy wheel file into {task_group.path}."
|
174
165
|
)
|
175
|
-
|
166
|
+
new_archive_path = (
|
176
167
|
Path(task_group.path)
|
177
|
-
/ Path(task_group.
|
168
|
+
/ Path(task_group.archive_path).name
|
178
169
|
).as_posix()
|
179
|
-
shutil.copy(task_group.
|
180
|
-
logger.info(
|
170
|
+
shutil.copy(task_group.archive_path, new_archive_path)
|
171
|
+
logger.info(
|
172
|
+
f"Copied wheel file to {new_archive_path}."
|
173
|
+
)
|
181
174
|
|
182
|
-
task_group.
|
183
|
-
new_pip_freeze = task_group.
|
184
|
-
task_group.
|
185
|
-
|
175
|
+
task_group.archive_path = new_archive_path
|
176
|
+
new_pip_freeze = task_group.env_info.replace(
|
177
|
+
task_group.archive_path,
|
178
|
+
new_archive_path,
|
186
179
|
)
|
187
|
-
task_group.
|
180
|
+
task_group.env_info = new_pip_freeze
|
188
181
|
task_group = add_commit_refresh(obj=task_group, db=db)
|
189
182
|
logger.info(
|
190
|
-
"Updated `
|
183
|
+
"Updated `archive_path` and `env_info` "
|
191
184
|
"task-group attributes."
|
192
185
|
)
|
193
186
|
|
194
187
|
# Fail if `pip_freeze` includes "github.com", see
|
195
188
|
# https://github.com/fractal-analytics-platform/fractal-server/issues/2142
|
196
189
|
for forbidden_string in FORBIDDEN_DEPENDENCY_STRINGS:
|
197
|
-
if forbidden_string in task_group.
|
190
|
+
if forbidden_string in task_group.env_info:
|
198
191
|
raise ValueError(
|
199
192
|
"Deactivation and reactivation of task packages "
|
200
193
|
f"with direct {forbidden_string} dependencies "
|
"""
Background task that deactivates a locally-installed pixi task group.
"""
import shutil
from pathlib import Path
from tempfile import TemporaryDirectory

from ..utils_background import add_commit_refresh
from ..utils_background import fail_and_cleanup
from ..utils_background import get_activity_and_task_group
from ..utils_pixi import SOURCE_DIR_NAME
from fractal_server.app.db import get_sync_db
from fractal_server.app.schemas.v2.task_group import TaskGroupActivityStatusV2
from fractal_server.logger import reset_logger_handlers
from fractal_server.logger import set_logger
from fractal_server.tasks.utils import get_log_path
from fractal_server.tasks.v2.utils_background import get_current_log
from fractal_server.utils import get_timestamp


def deactivate_local_pixi(
    *,
    task_group_activity_id: int,
    task_group_id: int,
) -> None:
    """
    Deactivate a pixi task group venv.

    This function is run as a background task, therefore exceptions must be
    handled.

    Arguments:
        task_group_id:
        task_group_activity_id:
    """

    LOGGER_NAME = f"{__name__}.ID{task_group_activity_id}"

    with TemporaryDirectory() as tmpdir:
        # Per-run log file; its contents are copied into `activity.log`.
        log_file_path = get_log_path(Path(tmpdir))
        logger = set_logger(
            logger_name=LOGGER_NAME,
            log_file_path=log_file_path,
        )

        logger.debug("START")
        with next(get_sync_db()) as db:
            db_objects_ok, task_group, activity = get_activity_and_task_group(
                task_group_activity_id=task_group_activity_id,
                task_group_id=task_group_id,
                db=db,
                logger_name=LOGGER_NAME,
            )
            if not db_objects_ok:
                # Rows missing/inconsistent; helper already logged the reason.
                return

            # The pixi source dir is what deactivation removes; if it is
            # already gone there is nothing to deactivate.
            source_dir = Path(task_group.path, SOURCE_DIR_NAME)
            if not source_dir.exists():
                error_msg = f"'{source_dir.as_posix()}' does not exist."
                logger.error(error_msg)
                fail_and_cleanup(
                    task_group=task_group,
                    task_group_activity=activity,
                    logger_name=LOGGER_NAME,
                    log_file_path=log_file_path,
                    exception=FileNotFoundError(error_msg),
                    db=db,
                )
                return

            try:

                activity.status = TaskGroupActivityStatusV2.ONGOING
                activity = add_commit_refresh(obj=activity, db=db)

                # Actually mark the task group as non-active
                logger.info("Now setting `active=False`.")
                task_group.active = False
                task_group = add_commit_refresh(obj=task_group, db=db)

                # Proceed with deactivation
                logger.info(f"Now removing '{source_dir.as_posix()}'.")
                shutil.rmtree(source_dir)
                logger.info(f"All good, '{source_dir.as_posix()}' removed.")
                activity.status = TaskGroupActivityStatusV2.OK
                activity.log = get_current_log(log_file_path)
                activity.timestamp_ended = get_timestamp()
                activity = add_commit_refresh(obj=activity, db=db)

                reset_logger_handlers(logger)

            except Exception as e:
                fail_and_cleanup(
                    task_group=task_group,
                    task_group_activity=activity,
                    logger_name=LOGGER_NAME,
                    log_file_path=log_file_path,
                    exception=e,
                    db=db,
                )
                return
@@ -1,4 +1,3 @@
|
|
1
|
-
import logging
|
2
1
|
import shutil
|
3
2
|
import time
|
4
3
|
from pathlib import Path
|
@@ -6,11 +5,10 @@ from tempfile import TemporaryDirectory
|
|
6
5
|
|
7
6
|
from ..utils_background import add_commit_refresh
|
8
7
|
from ..utils_background import fail_and_cleanup
|
8
|
+
from ..utils_background import get_activity_and_task_group
|
9
9
|
from ..utils_templates import get_collection_replacements
|
10
10
|
from ._utils import _customize_and_run_template
|
11
11
|
from fractal_server.app.db import get_sync_db
|
12
|
-
from fractal_server.app.models.v2 import TaskGroupActivityV2
|
13
|
-
from fractal_server.app.models.v2 import TaskGroupV2
|
14
12
|
from fractal_server.app.schemas.v2 import TaskGroupActivityActionV2
|
15
13
|
from fractal_server.app.schemas.v2.task_group import TaskGroupActivityStatusV2
|
16
14
|
from fractal_server.logger import reset_logger_handlers
|
@@ -49,26 +47,17 @@ def reactivate_local(
|
|
49
47
|
log_file_path=log_file_path,
|
50
48
|
)
|
51
49
|
|
50
|
+
logger.debug("START")
|
52
51
|
with next(get_sync_db()) as db:
|
53
|
-
|
54
|
-
|
55
|
-
|
56
|
-
|
57
|
-
|
58
|
-
|
59
|
-
|
60
|
-
"Cannot find database rows with "
|
61
|
-
f"{task_group_id=} and {task_group_activity_id=}:\n"
|
62
|
-
f"{task_group=}\n{activity=}. Exit."
|
63
|
-
)
|
52
|
+
db_objects_ok, task_group, activity = get_activity_and_task_group(
|
53
|
+
task_group_activity_id=task_group_activity_id,
|
54
|
+
task_group_id=task_group_id,
|
55
|
+
db=db,
|
56
|
+
logger_name=LOGGER_NAME,
|
57
|
+
)
|
58
|
+
if not db_objects_ok:
|
64
59
|
return
|
65
60
|
|
66
|
-
# Log some info
|
67
|
-
logger.debug("START")
|
68
|
-
|
69
|
-
for key, value in task_group.model_dump().items():
|
70
|
-
logger.debug(f"task_group.{key}: {value}")
|
71
|
-
|
72
61
|
# Check that the (local) task_group venv_path does not exist
|
73
62
|
if Path(task_group.venv_path).exists():
|
74
63
|
error_msg = f"{task_group.venv_path} already exists."
|
@@ -95,11 +84,11 @@ def reactivate_local(
|
|
95
84
|
),
|
96
85
|
)
|
97
86
|
with open(f"{tmpdir}/pip_freeze.txt", "w") as f:
|
98
|
-
f.write(task_group.
|
87
|
+
f.write(task_group.env_info)
|
99
88
|
replacements.append(
|
100
89
|
("__PIP_FREEZE_FILE__", f"{tmpdir}/pip_freeze.txt")
|
101
90
|
)
|
102
|
-
# Prepare common arguments for `_customize_and_run_template
|
91
|
+
# Prepare common arguments for `_customize_and_run_template`
|
103
92
|
common_args = dict(
|
104
93
|
replacements=replacements,
|
105
94
|
script_dir=(
|
@@ -107,7 +96,7 @@ def reactivate_local(
|
|
107
96
|
).as_posix(),
|
108
97
|
prefix=(
|
109
98
|
f"{int(time.time())}_"
|
110
|
-
f"{TaskGroupActivityActionV2.REACTIVATE}
|
99
|
+
f"{TaskGroupActivityActionV2.REACTIVATE}"
|
111
100
|
),
|
112
101
|
logger_name=LOGGER_NAME,
|
113
102
|
)
|