fractal-server 2.14.16__py3-none-any.whl → 2.15.0a1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (41)
  1. fractal_server/__init__.py +1 -1
  2. fractal_server/app/models/v2/task_group.py +17 -5
  3. fractal_server/app/routes/admin/v2/task_group_lifecycle.py +2 -2
  4. fractal_server/app/routes/api/v2/__init__.py +6 -0
  5. fractal_server/app/routes/api/v2/task_collection.py +3 -3
  6. fractal_server/app/routes/api/v2/task_collection_custom.py +2 -2
  7. fractal_server/app/routes/api/v2/task_collection_pixi.py +236 -0
  8. fractal_server/app/routes/api/v2/task_group_lifecycle.py +26 -7
  9. fractal_server/app/schemas/v2/__init__.py +2 -1
  10. fractal_server/app/schemas/v2/dumps.py +1 -1
  11. fractal_server/app/schemas/v2/task_collection.py +1 -1
  12. fractal_server/app/schemas/v2/task_group.py +16 -5
  13. fractal_server/config.py +42 -0
  14. fractal_server/migrations/versions/b1e7f7a1ff71_task_group_for_pixi.py +53 -0
  15. fractal_server/ssh/_fabric.py +26 -0
  16. fractal_server/tasks/v2/local/__init__.py +3 -0
  17. fractal_server/tasks/v2/local/_utils.py +7 -2
  18. fractal_server/tasks/v2/local/collect.py +23 -24
  19. fractal_server/tasks/v2/local/collect_pixi.py +234 -0
  20. fractal_server/tasks/v2/local/deactivate.py +36 -39
  21. fractal_server/tasks/v2/local/deactivate_pixi.py +102 -0
  22. fractal_server/tasks/v2/local/reactivate.py +9 -16
  23. fractal_server/tasks/v2/local/reactivate_pixi.py +146 -0
  24. fractal_server/tasks/v2/ssh/__init__.py +3 -0
  25. fractal_server/tasks/v2/ssh/_utils.py +5 -5
  26. fractal_server/tasks/v2/ssh/collect.py +23 -28
  27. fractal_server/tasks/v2/ssh/collect_pixi.py +306 -0
  28. fractal_server/tasks/v2/ssh/deactivate.py +39 -45
  29. fractal_server/tasks/v2/ssh/deactivate_pixi.py +128 -0
  30. fractal_server/tasks/v2/ssh/reactivate.py +8 -15
  31. fractal_server/tasks/v2/ssh/reactivate_pixi.py +108 -0
  32. fractal_server/tasks/v2/templates/pixi_1_extract.sh +40 -0
  33. fractal_server/tasks/v2/templates/pixi_2_install.sh +48 -0
  34. fractal_server/tasks/v2/templates/pixi_3_post_install.sh +80 -0
  35. fractal_server/tasks/v2/utils_background.py +43 -8
  36. fractal_server/tasks/v2/utils_pixi.py +38 -0
  37. {fractal_server-2.14.16.dist-info → fractal_server-2.15.0a1.dist-info}/METADATA +1 -1
  38. {fractal_server-2.14.16.dist-info → fractal_server-2.15.0a1.dist-info}/RECORD +41 -29
  39. {fractal_server-2.14.16.dist-info → fractal_server-2.15.0a1.dist-info}/LICENSE +0 -0
  40. {fractal_server-2.14.16.dist-info → fractal_server-2.15.0a1.dist-info}/WHEEL +0 -0
  41. {fractal_server-2.14.16.dist-info → fractal_server-2.15.0a1.dist-info}/entry_points.txt +0 -0
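
The headline change in 2.15.0a1 is experimental support for pixi-based task packages: new `collect_pixi`/`deactivate_pixi`/`reactivate_pixi` background tasks for both the local and SSH backends, three `pixi_*.sh` templates, a `utils_pixi.py` helper module, a new API route module (`task_collection_pixi.py`), a `pixi` section in `config.py`, and a migration (`b1e7f7a1ff71_task_group_for_pixi.py`) that extends `TaskGroupV2`. The diffs below resolve a pixi installation via `settings.pixi.versions[task_group.pixi_version]` and then call `<pixi_home>/bin/pixi`. The settings model itself is not part of this excerpt; the following is only a minimal sketch of how such a lookup could be shaped (class and field names are assumptions, not the actual `fractal_server.config` API):

from pydantic import BaseModel

class PixiSettingsSketch(BaseModel):
    # Hypothetical model: maps each supported pixi version to its
    # installation directory, e.g. {"0.41.0": "/opt/pixi/0.41.0"}.
    versions: dict[str, str]

# Usage pattern appearing in the diffs below:
#   pixi_home = settings.pixi.versions[task_group.pixi_version]
#   pixi_bin = Path(pixi_home, "bin/pixi").as_posix()
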
fractal_server/tasks/v2/local/reactivate.py
@@ -1,4 +1,3 @@
- import logging
  import shutil
  import time
  from pathlib import Path
@@ -6,11 +5,10 @@ from tempfile import TemporaryDirectory

  from ..utils_background import add_commit_refresh
  from ..utils_background import fail_and_cleanup
+ from ..utils_background import get_activity_and_task_group
  from ..utils_templates import get_collection_replacements
  from ._utils import _customize_and_run_template
  from fractal_server.app.db import get_sync_db
- from fractal_server.app.models.v2 import TaskGroupActivityV2
- from fractal_server.app.models.v2 import TaskGroupV2
  from fractal_server.app.schemas.v2 import TaskGroupActivityActionV2
  from fractal_server.app.schemas.v2.task_group import TaskGroupActivityStatusV2
  from fractal_server.logger import reset_logger_handlers
@@ -50,17 +48,12 @@ def reactivate_local(
  )

  with next(get_sync_db()) as db:
-
- # Get main objects from db
- activity = db.get(TaskGroupActivityV2, task_group_activity_id)
- task_group = db.get(TaskGroupV2, task_group_id)
- if activity is None or task_group is None:
- # Use `logging` directly
- logging.error(
- "Cannot find database rows with "
- f"{task_group_id=} and {task_group_activity_id=}:\n"
- f"{task_group=}\n{activity=}. Exit."
- )
+ success, task_group, activity = get_activity_and_task_group(
+ task_group_activity_id=task_group_activity_id,
+ task_group_id=task_group_id,
+ db=db,
+ )
+ if not success:
  return

  # Log some info
@@ -95,11 +88,11 @@ def reactivate_local(
  ),
  )
  with open(f"{tmpdir}/pip_freeze.txt", "w") as f:
- f.write(task_group.pip_freeze)
+ f.write(task_group.env_info)
  replacements.append(
  ("__PIP_FREEZE_FILE__", f"{tmpdir}/pip_freeze.txt")
  )
- # Prepare common arguments for `_customize_and_run_template``
+ # Prepare common arguments for `_customize_and_run_template`
  common_args = dict(
  replacements=replacements,
  script_dir=(
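
Several lifecycle tasks in this release replace the copy-pasted database lookup removed above with a shared `get_activity_and_task_group` helper in `utils_background.py`. The helper itself is outside this excerpt; reconstructed from the code it replaces and from its call sites, it plausibly looks like the sketch below (signature inferred, actual implementation may differ):

import logging

from fractal_server.app.models.v2 import TaskGroupActivityV2
from fractal_server.app.models.v2 import TaskGroupV2


def get_activity_and_task_group(
    *,
    task_group_activity_id: int,
    task_group_id: int,
    db,
) -> tuple[bool, TaskGroupV2 | None, TaskGroupActivityV2 | None]:
    # Fetch both rows; if either is missing, log (as the old inline code did)
    # and report failure so that the caller can simply `return`.
    activity = db.get(TaskGroupActivityV2, task_group_activity_id)
    task_group = db.get(TaskGroupV2, task_group_id)
    if activity is None or task_group is None:
        logging.error(
            "Cannot find database rows with "
            f"{task_group_id=} and {task_group_activity_id=}:\n"
            f"{task_group=}\n{activity=}. Exit."
        )
        return False, None, None
    return True, task_group, activity
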
fractal_server/tasks/v2/local/reactivate_pixi.py (new file)
@@ -0,0 +1,146 @@
+ import shlex
+ import shutil
+ import subprocess # nosec
+ from pathlib import Path
+ from tempfile import TemporaryDirectory
+
+ from ..utils_background import add_commit_refresh
+ from ..utils_background import fail_and_cleanup
+ from ..utils_background import get_activity_and_task_group
+ from ..utils_pixi import SOURCE_DIR_NAME
+ from fractal_server.app.db import get_sync_db
+ from fractal_server.app.schemas.v2.task_group import TaskGroupActivityStatusV2
+ from fractal_server.config import get_settings
+ from fractal_server.logger import reset_logger_handlers
+ from fractal_server.logger import set_logger
+ from fractal_server.syringe import Inject
+ from fractal_server.tasks.utils import get_log_path
+ from fractal_server.tasks.v2.utils_background import get_current_log
+ from fractal_server.utils import get_timestamp
+
+
+ def reactivate_local_pixi(
+ *,
+ task_group_activity_id: int,
+ task_group_id: int,
+ ) -> None:
+ """
+ Reactivate a task group venv.
+
+ This function is run as a background task, therefore exceptions must be
+ handled.
+
+ Arguments:
+ task_group_id:
+ task_group_activity_id:
+ """
+
+ LOGGER_NAME = f"{__name__}.ID{task_group_activity_id}"
+
+ with TemporaryDirectory() as tmpdir:
+ log_file_path = get_log_path(Path(tmpdir))
+ logger = set_logger(
+ logger_name=LOGGER_NAME,
+ log_file_path=log_file_path,
+ )
+ with next(get_sync_db()) as db:
+ success, task_group, activity = get_activity_and_task_group(
+ task_group_activity_id=task_group_activity_id,
+ task_group_id=task_group_id,
+ db=db,
+ )
+ if not success:
+ return
+
+ # Log some info
+ logger.debug("START")
+
+ for key, value in task_group.model_dump().items():
+ logger.debug(f"task_group.{key}: {value}")
+
+ source_dir = Path(task_group.path, SOURCE_DIR_NAME).as_posix()
+ if Path(source_dir).exists():
+ error_msg = f"{source_dir} already exists."
+ logger.error(error_msg)
+ fail_and_cleanup(
+ task_group=task_group,
+ task_group_activity=activity,
+ logger_name=LOGGER_NAME,
+ log_file_path=log_file_path,
+ exception=FileExistsError(error_msg),
+ db=db,
+ )
+ return
+
+ try:
+ activity.status = TaskGroupActivityStatusV2.ONGOING
+ activity = add_commit_refresh(obj=activity, db=db)
+
+ logger.debug("start - writing pixi lock")
+ with open(f"{task_group.path}/pixi.lock", "w") as f:
+ f.write(task_group.env_info)
+ logger.debug("end - writing pixi lock")
+
+ subprocess.run( # nosec
+ shlex.split(
+ f"tar xz -f {task_group.archive_path} "
+ f"{Path(task_group.archive_path).name}"
+ ),
+ encoding="utf-8",
+ cwd=task_group.path,
+ )
+
+ subprocess.run( # nosec
+ shlex.split(
+ f"mv {Path(task_group.archive_path).name} {source_dir}"
+ ),
+ encoding="utf-8",
+ cwd=task_group.path,
+ )
+
+ settings = Inject(get_settings)
+ pixi_home = settings.pixi.versions[task_group.pixi_version]
+ pixi_bin = Path(pixi_home, "bin/pixi").as_posix()
+
+ logger.debug("start - pixi install")
+ subprocess.run( # nosec
+ shlex.split(
+ f"{pixi_bin} install "
+ f"--manifest-path {source_dir}/pyproject.toml --frozen"
+ ),
+ encoding="utf-8",
+ cwd=task_group.path,
+ )
+ logger.debug("end - pixi install")
+
+ activity.log = get_current_log(log_file_path)
+ activity.status = TaskGroupActivityStatusV2.OK
+ activity.timestamp_ended = get_timestamp()
+ activity = add_commit_refresh(obj=activity, db=db)
+ task_group.active = True
+ task_group = add_commit_refresh(obj=task_group, db=db)
+ logger.debug("END")
+
+ reset_logger_handlers(logger)
+
+ except Exception as reactivate_e:
+ # Delete corrupted source_dir
+ try:
+ logger.info(f"Now delete folder {source_dir}")
+ shutil.rmtree(source_dir)
+ logger.info(f"Deleted folder {source_dir}")
+ except Exception as rm_e:
+ logger.error(
+ "Removing folder failed.\n"
+ f"Original error:\n{str(rm_e)}"
+ )
+
+ fail_and_cleanup(
+ task_group=task_group,
+ task_group_activity=activity,
+ logger_name=LOGGER_NAME,
+ log_file_path=log_file_path,
+ exception=reactivate_e,
+ db=db,
+ )
+ return
fractal_server/tasks/v2/ssh/__init__.py
@@ -1,3 +1,6 @@
  from .collect import collect_ssh # noqa
+ from .collect_pixi import collect_ssh_pixi # noqa
  from .deactivate import deactivate_ssh # noqa
+ from .deactivate_pixi import deactivate_ssh_pixi # noqa
  from .reactivate import reactivate_ssh # noqa
+ from .reactivate_pixi import reactivate_ssh_pixi # noqa
fractal_server/tasks/v2/ssh/_utils.py
@@ -69,16 +69,16 @@ def _copy_wheel_file_ssh(
  *, task_group: TaskGroupV2, fractal_ssh: FractalSSH, logger_name: str
  ) -> str:
  """
- Handle the situation where `task_group.wheel_path` is not part of
- `task_group.path`, by copying `wheel_path` into `path`.
+ Handle the situation where `task_group.archive_path` is not part of
+ `task_group.path`, by copying `archive_path` into `path`.

  Returns:
- The new `wheel_path`.
+ The new `archive_path`.
  """
  logger = get_logger(logger_name=logger_name)
- source = task_group.wheel_path
+ source = task_group.archive_path
  dest = (
- Path(task_group.path) / Path(task_group.wheel_path).name
+ Path(task_group.path) / Path(task_group.archive_path).name
  ).as_posix()
  cmd = f"cp {source} {dest}"
  logger.debug(f"[_copy_wheel_file] START {source=} {dest=}")
fractal_server/tasks/v2/ssh/collect.py
@@ -1,18 +1,16 @@
- import logging
  import time
  from pathlib import Path
  from tempfile import TemporaryDirectory

  from ....ssh._fabric import SingleUseFractalSSH
- from ..utils_background import _prepare_tasks_metadata
  from ..utils_background import fail_and_cleanup
+ from ..utils_background import get_activity_and_task_group
+ from ..utils_background import prepare_tasks_metadata
  from ..utils_database import create_db_tasks_and_update_task_group_sync
  from fractal_server.app.db import get_sync_db
- from fractal_server.app.models.v2 import TaskGroupActivityV2
- from fractal_server.app.models.v2 import TaskGroupV2
+ from fractal_server.app.schemas.v2 import FractalUploadedFile
  from fractal_server.app.schemas.v2 import TaskGroupActivityActionV2
  from fractal_server.app.schemas.v2 import TaskGroupActivityStatusV2
- from fractal_server.app.schemas.v2 import WheelFile
  from fractal_server.app.schemas.v2.manifest import ManifestV2
  from fractal_server.logger import reset_logger_handlers
  from fractal_server.logger import set_logger
@@ -38,7 +36,7 @@ def collect_ssh(
  task_group_activity_id: int,
  ssh_config: SSHConfig,
  tasks_base_dir: str,
- wheel_file: WheelFile | None = None,
+ wheel_file: FractalUploadedFile | None = None,
  ) -> None:
  """
  Collect a task package over SSH
@@ -76,16 +74,12 @@ def collect_ssh(
  ) as fractal_ssh:

  with next(get_sync_db()) as db:
- # Get main objects from db
- activity = db.get(TaskGroupActivityV2, task_group_activity_id)
- task_group = db.get(TaskGroupV2, task_group_id)
- if activity is None or task_group is None:
- # Use `logging` directly
- logging.error(
- "Cannot find database rows with "
- f"{task_group_id=} and {task_group_activity_id=}:\n"
- f"{task_group=}\n{activity=}. Exit."
- )
+ success, task_group, activity = get_activity_and_task_group(
+ task_group_activity_id=task_group_activity_id,
+ task_group_id=task_group_id,
+ db=db,
+ )
+ if not success:
  return

  # Log some info
@@ -135,25 +129,26 @@ def collect_ssh(
  fractal_ssh.mkdir(folder=script_dir_remote, parents=True)

  # Write wheel file locally and send it to remote path,
- # and set task_group.wheel_path
+ # and set task_group.archive_path
  if wheel_file is not None:
  wheel_filename = wheel_file.filename
- wheel_path = (
+ archive_path = (
  Path(task_group.path) / wheel_filename
  ).as_posix()
- tmp_wheel_path = (
+ tmp_archive_path = (
  Path(tmpdir) / wheel_filename
  ).as_posix()
  logger.info(
- f"Write wheel-file contents into {tmp_wheel_path}"
+ "Write wheel-file contents into "
+ f"{tmp_archive_path}"
  )
- with open(tmp_wheel_path, "wb") as f:
+ with open(tmp_archive_path, "wb") as f:
  f.write(wheel_file.contents)
  fractal_ssh.send_file(
- local=tmp_wheel_path,
- remote=wheel_path,
+ local=tmp_archive_path,
+ remote=archive_path,
  )
- task_group.wheel_path = wheel_path
+ task_group.archive_path = archive_path
  task_group = add_commit_refresh(obj=task_group, db=db)

  replacements = get_collection_replacements(
@@ -263,7 +258,7 @@ def collect_ssh(
  logger.info("Manifest is a valid ManifestV2")

  logger.info("_prepare_tasks_metadata - start")
- task_list = _prepare_tasks_metadata(
+ task_list = prepare_tasks_metadata(
  package_manifest=pkg_manifest,
  package_version=task_group.version,
  package_root=Path(package_root_remote),
@@ -283,15 +278,15 @@ def collect_ssh(

  # Update task_group data
  logger.info(
- "Add pip_freeze, venv_size and venv_file_number "
+ "Add env_info, venv_size and venv_file_number "
  "to TaskGroupV2 - start"
  )
- task_group.pip_freeze = pip_freeze_stdout
+ task_group.env_info = pip_freeze_stdout
  task_group.venv_size_in_kB = int(venv_size)
  task_group.venv_file_number = int(venv_file_number)
  task_group = add_commit_refresh(obj=task_group, db=db)
  logger.info(
- "Add pip_freeze, venv_size and venv_file_number "
+ "Add env_info, venv_size and venv_file_number "
  "to TaskGroupV2 - end"
  )
fractal_server/tasks/v2/ssh/collect_pixi.py (new file)
@@ -0,0 +1,306 @@
+ import time
+ from pathlib import Path
+ from tempfile import TemporaryDirectory
+
+ from ....ssh._fabric import SingleUseFractalSSH
+ from ..utils_background import fail_and_cleanup
+ from ..utils_background import get_activity_and_task_group
+ from ..utils_background import prepare_tasks_metadata
+ from ..utils_database import create_db_tasks_and_update_task_group_sync
+ from ..utils_pixi import parse_collect_stdout
+ from ..utils_pixi import SOURCE_DIR_NAME
+ from fractal_server.app.db import get_sync_db
+ from fractal_server.app.schemas.v2 import FractalUploadedFile
+ from fractal_server.app.schemas.v2 import TaskGroupActivityActionV2
+ from fractal_server.app.schemas.v2 import TaskGroupActivityStatusV2
+ from fractal_server.app.schemas.v2.manifest import ManifestV2
+ from fractal_server.config import get_settings
+ from fractal_server.logger import reset_logger_handlers
+ from fractal_server.logger import set_logger
+ from fractal_server.ssh._fabric import SSHConfig
+ from fractal_server.syringe import Inject
+ from fractal_server.tasks.v2.ssh._utils import _customize_and_run_template
+ from fractal_server.tasks.v2.utils_background import add_commit_refresh
+ from fractal_server.tasks.v2.utils_background import get_current_log
+ from fractal_server.tasks.v2.utils_templates import SCRIPTS_SUBFOLDER
+ from fractal_server.utils import get_timestamp
+
+
+ def collect_ssh_pixi(
+ *,
+ task_group_id: int,
+ task_group_activity_id: int,
+ ssh_config: SSHConfig,
+ tasks_base_dir: str,
+ tar_gz_file: FractalUploadedFile,
+ ) -> None:
+ """
+ Collect a task package over SSH
+
+ This function runs as a background task, therefore exceptions must be
+ handled.
+
+ NOTE: since this function is sync, it runs within a thread - due to
+ starlette/fastapi handling of background tasks (see
+ https://github.com/encode/starlette/blob/master/starlette/background.py).
+
+
+ Arguments:
+ task_group_id:
+ task_group_activity_id:
+ ssh_config:
+ tasks_base_dir:
+ Only used as a `safe_root` in `remove_dir`, and typically set to
+ `user_settings.ssh_tasks_dir`.
+ tar_gz_file:
+ """
+
+ LOGGER_NAME = f"{__name__}.ID{task_group_activity_id}"
+
+ # Work within a temporary folder, where also logs will be placed
+ with TemporaryDirectory() as tmpdir:
+ log_file_path = Path(tmpdir) / "log"
+ logger = set_logger(
+ logger_name=LOGGER_NAME,
+ log_file_path=log_file_path,
+ )
+ with SingleUseFractalSSH(
+ ssh_config=ssh_config,
+ logger_name=LOGGER_NAME,
+ ) as fractal_ssh:
+
+ with next(get_sync_db()) as db:
+ success, task_group, activity = get_activity_and_task_group(
+ task_group_activity_id=task_group_activity_id,
+ task_group_id=task_group_id,
+ db=db,
+ )
+ if not success:
+ return
+
+ # Log some info
+ logger.info("START")
+ for key, value in task_group.model_dump().items():
+ logger.debug(f"task_group.{key}: {value}")
+
+ # Check that SSH connection works
+ try:
+ fractal_ssh.check_connection()
+ except Exception as e:
+ logger.error("Cannot establish SSH connection.")
+ fail_and_cleanup(
+ task_group=task_group,
+ task_group_activity=activity,
+ logger_name=LOGGER_NAME,
+ log_file_path=log_file_path,
+ exception=e,
+ db=db,
+ )
+ return
+
+ try:
+
+ # Check that the (remote) task_group path does not exist
+ if fractal_ssh.remote_exists(task_group.path):
+ error_msg = f"{task_group.path} already exists."
+ logger.error(error_msg)
+ fail_and_cleanup(
+ task_group=task_group,
+ task_group_activity=activity,
+ logger_name=LOGGER_NAME,
+ log_file_path=log_file_path,
+ exception=FileExistsError(error_msg),
+ db=db,
+ )
+ return
+
+ # Create remote `task_group.path` and `script_dir_remote`
+ # folders (note that because of `parents=True` we are in
+ # the `no error if existing, make parent directories as
+ # needed` scenario for `mkdir`)
+ script_dir_remote = (
+ Path(task_group.path) / SCRIPTS_SUBFOLDER
+ ).as_posix()
+ fractal_ssh.mkdir(folder=task_group.path, parents=True)
+ fractal_ssh.mkdir(folder=script_dir_remote, parents=True)
+
+ # Write tar.gz file locally and send it to remote path,
+ # and set task_group.archive_path
+ tar_gz_filename = tar_gz_file.filename
+ archive_path = (
+ Path(task_group.path) / tar_gz_filename
+ ).as_posix()
+ tmp_archive_path = (
+ Path(tmpdir) / tar_gz_filename
+ ).as_posix()
+ logger.info(
+ f"Write tar.gz-file contents into {tmp_archive_path}"
+ )
+ with open(tmp_archive_path, "wb") as f:
+ f.write(tar_gz_file.contents)
+ fractal_ssh.send_file(
+ local=tmp_archive_path,
+ remote=archive_path,
+ )
+ task_group.archive_path = archive_path
+ task_group = add_commit_refresh(obj=task_group, db=db)
+
+ # Set `pixi_bin`
+ settings = Inject(get_settings)
+ pixi_home = settings.pixi.versions[task_group.pixi_version]
+
+ replacements = {
+ ("__PIXI_HOME__", pixi_home),
+ ("__PACKAGE_DIR__", task_group.path),
+ ("__TAR_GZ_PATH__", archive_path),
+ (
+ "__IMPORT_PACKAGE_NAME__",
+ task_group.pkg_name.replace("-", "_"),
+ ),
+ ("__SOURCE_DIR_NAME__", SOURCE_DIR_NAME),
+ }
+
+ logger.info("installing - START")
+
+ # Set status to ONGOING and refresh logs
+ activity.status = TaskGroupActivityStatusV2.ONGOING
+ activity.log = get_current_log(log_file_path)
+ activity = add_commit_refresh(obj=activity, db=db)
+
+ common_args = dict(
+ script_dir_local=(
+ Path(tmpdir) / SCRIPTS_SUBFOLDER
+ ).as_posix(),
+ script_dir_remote=script_dir_remote,
+ prefix=(
+ f"{int(time.time())}_"
+ f"{TaskGroupActivityActionV2.COLLECT}"
+ ),
+ fractal_ssh=fractal_ssh,
+ logger_name=LOGGER_NAME,
+ )
+
+ # Run the three pixi-related scripts
+ _customize_and_run_template(
+ template_filename="pixi_1_extract.sh",
+ replacements=replacements,
+ **common_args,
+ )
+ activity.log = get_current_log(log_file_path)
+ activity = add_commit_refresh(obj=activity, db=db)
+
+ _customize_and_run_template(
+ template_filename="pixi_2_install.sh",
+ replacements=replacements,
+ **common_args,
+ )
+ activity.log = get_current_log(log_file_path)
+ activity = add_commit_refresh(obj=activity, db=db)
+
+ stdout = _customize_and_run_template(
+ template_filename="pixi_3_post_install.sh",
+ replacements=replacements,
+ **common_args,
+ )
+ activity.log = get_current_log(log_file_path)
+ activity = add_commit_refresh(obj=activity, db=db)
+
+ # Parse stdout
+ parsed_output = parse_collect_stdout(stdout)
+ package_root_remote = parsed_output["package_root"]
+ venv_size = parsed_output["venv_size"]
+ venv_file_number = parsed_output["venv_file_number"]
+ project_python_wrapper = parsed_output[
+ "project_python_wrapper"
+ ]
+
+ # Read and validate remote manifest file
+ manifest_path_remote = (
+ f"{package_root_remote}/__FRACTAL_MANIFEST__.json"
+ )
+ pkg_manifest_dict = fractal_ssh.read_remote_json_file(
+ manifest_path_remote
+ )
+ logger.info(f"Loaded {manifest_path_remote=}")
+ pkg_manifest = ManifestV2(**pkg_manifest_dict)
+ logger.info("Manifest is a valid ManifestV2")
+
+ logger.info("_prepare_tasks_metadata - start")
+ task_list = prepare_tasks_metadata(
+ package_manifest=pkg_manifest,
+ package_version=task_group.version,
+ package_root=Path(package_root_remote),
+ project_python_wrapper=Path(project_python_wrapper),
+ )
+ logger.info("_prepare_tasks_metadata - end")
+
+ logger.info(
+ "create_db_tasks_and_update_task_group - " "start"
+ )
+ create_db_tasks_and_update_task_group_sync(
+ task_list=task_list,
+ task_group_id=task_group.id,
+ db=db,
+ )
+ logger.info("create_db_tasks_and_update_task_group - end")
+
+ # NOTE: see issue 2626 about whether to keep `pixi.lock`
+ # files in the database
+ remote_pixi_lock_file = Path(
+ task_group.path,
+ SOURCE_DIR_NAME,
+ "pixi.lock",
+ ).as_posix()
+ pixi_lock_contents = fractal_ssh.read_remote_text_file(
+ remote_pixi_lock_file
+ )
+
+ # Update task_group data
+ logger.info(
+ "Add env_info, venv_size and venv_file_number "
+ "to TaskGroupV2 - start"
+ )
+ task_group.env_info = pixi_lock_contents
+ task_group.venv_size_in_kB = int(venv_size)
+ task_group.venv_file_number = int(venv_file_number)
+ task_group = add_commit_refresh(obj=task_group, db=db)
+ logger.info(
+ "Add env_info, venv_size and venv_file_number "
+ "to TaskGroupV2 - end"
+ )
+
+ # Finalize (write metadata to DB)
+ logger.info("finalising - START")
+ activity.status = TaskGroupActivityStatusV2.OK
+ activity.timestamp_ended = get_timestamp()
+ activity = add_commit_refresh(obj=activity, db=db)
+ logger.info("finalising - END")
+ logger.info("END")
+ reset_logger_handlers(logger)
+
+ except Exception as collection_e:
+ # Delete corrupted package dir
+ try:
+ logger.info(
+ f"Now delete remote folder {task_group.path}"
+ )
+ fractal_ssh.remove_folder(
+ folder=task_group.path,
+ safe_root=tasks_base_dir,
+ )
+ logger.info(
+ f"Deleted remoted folder {task_group.path}"
+ )
+ except Exception as e_rm:
+ logger.error(
+ "Removing folder failed. "
+ f"Original error:\n{str(e_rm)}"
+ )
+ fail_and_cleanup(
+ task_group=task_group,
+ task_group_activity=activity,
+ log_file_path=log_file_path,
+ logger_name=LOGGER_NAME,
+ exception=collection_e,
+ db=db,
+ )
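
The `pixi_3_post_install.sh` template is expected to print the package root, environment size, file count, and the path of a project-Python wrapper, which `parse_collect_stdout` (in the new `utils_pixi.py`, not included in this excerpt) turns into a dict. A rough sketch of what that helper might look like, given only the four keys consumed above (the marker format and the `SOURCE_DIR_NAME` value are assumptions):

# Hypothetical reconstruction of fractal_server/tasks/v2/utils_pixi.py,
# inferred from the call sites above; the real constants and parsing differ.
SOURCE_DIR_NAME = "source_dir"  # assumed name of the extracted-source folder


def parse_collect_stdout(stdout: str) -> dict[str, str]:
    # Pick out the four values that pixi_3_post_install.sh is expected to
    # print, assuming one "key=value" line per item (illustrative format).
    keys = (
        "package_root",
        "venv_size",
        "venv_file_number",
        "project_python_wrapper",
    )
    output: dict[str, str] = {}
    for line in stdout.splitlines():
        for key in keys:
            if line.startswith(f"{key}="):
                output[key] = line.split("=", 1)[1].strip()
    return output
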