fractal-server 2.15.0a3__py3-none-any.whl → 2.15.0a4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (29)
  1. fractal_server/__init__.py +1 -1
  2. fractal_server/app/routes/admin/v2/task_group_lifecycle.py +0 -3
  3. fractal_server/app/routes/api/v2/task_collection.py +0 -2
  4. fractal_server/app/routes/api/v2/task_collection_pixi.py +1 -12
  5. fractal_server/app/routes/api/v2/task_group_lifecycle.py +0 -3
  6. fractal_server/app/schemas/v2/__init__.py +0 -1
  7. fractal_server/app/schemas/v2/task_group.py +0 -9
  8. fractal_server/tasks/v2/local/_utils.py +1 -5
  9. fractal_server/tasks/v2/local/collect.py +5 -8
  10. fractal_server/tasks/v2/local/collect_pixi.py +8 -13
  11. fractal_server/tasks/v2/local/deactivate.py +5 -9
  12. fractal_server/tasks/v2/local/deactivate_pixi.py +4 -10
  13. fractal_server/tasks/v2/local/reactivate.py +5 -9
  14. fractal_server/tasks/v2/local/reactivate_pixi.py +8 -14
  15. fractal_server/tasks/v2/ssh/_utils.py +45 -4
  16. fractal_server/tasks/v2/ssh/collect.py +32 -37
  17. fractal_server/tasks/v2/ssh/collect_pixi.py +34 -45
  18. fractal_server/tasks/v2/ssh/deactivate.py +21 -28
  19. fractal_server/tasks/v2/ssh/deactivate_pixi.py +20 -28
  20. fractal_server/tasks/v2/ssh/reactivate.py +23 -29
  21. fractal_server/tasks/v2/ssh/reactivate_pixi.py +144 -38
  22. fractal_server/tasks/v2/templates/pixi_2_install.sh +3 -8
  23. fractal_server/tasks/v2/utils_background.py +7 -0
  24. fractal_server/tasks/v2/utils_templates.py +14 -1
  25. {fractal_server-2.15.0a3.dist-info → fractal_server-2.15.0a4.dist-info}/METADATA +1 -1
  26. {fractal_server-2.15.0a3.dist-info → fractal_server-2.15.0a4.dist-info}/RECORD +29 -29
  27. {fractal_server-2.15.0a3.dist-info → fractal_server-2.15.0a4.dist-info}/LICENSE +0 -0
  28. {fractal_server-2.15.0a3.dist-info → fractal_server-2.15.0a4.dist-info}/WHEEL +0 -0
  29. {fractal_server-2.15.0a3.dist-info → fractal_server-2.15.0a4.dist-info}/entry_points.txt +0 -0
@@ -1 +1 @@
1
- __VERSION__ = "2.15.0a3"
1
+ __VERSION__ = "2.15.0a4"
@@ -2,7 +2,6 @@ from fastapi import APIRouter
2
2
  from fastapi import BackgroundTasks
3
3
  from fastapi import Depends
4
4
  from fastapi import HTTPException
5
- from fastapi import Request
6
5
  from fastapi import Response
7
6
  from fastapi import status
8
7
 
@@ -51,7 +50,6 @@ async def deactivate_task_group(
51
50
  task_group_id: int,
52
51
  background_tasks: BackgroundTasks,
53
52
  response: Response,
54
- request: Request,
55
53
  superuser: UserOAuth = Depends(current_active_superuser),
56
54
  db: AsyncSession = Depends(get_async_db),
57
55
  ) -> TaskGroupReadV2:
@@ -157,7 +155,6 @@ async def reactivate_task_group(
157
155
  task_group_id: int,
158
156
  background_tasks: BackgroundTasks,
159
157
  response: Response,
160
- request: Request,
161
158
  superuser: UserOAuth = Depends(current_active_superuser),
162
159
  db: AsyncSession = Depends(get_async_db),
163
160
  ) -> TaskGroupReadV2:
@@ -7,7 +7,6 @@ from fastapi import Depends
7
7
  from fastapi import File
8
8
  from fastapi import Form
9
9
  from fastapi import HTTPException
10
- from fastapi import Request
11
10
  from fastapi import Response
12
11
  from fastapi import status
13
12
  from fastapi import UploadFile
@@ -151,7 +150,6 @@ def parse_request_data(
151
150
  response_model=TaskGroupActivityV2Read,
152
151
  )
153
152
  async def collect_tasks_pip(
154
- request: Request,
155
153
  response: Response,
156
154
  background_tasks: BackgroundTasks,
157
155
  request_data: CollectionRequestData = Depends(parse_request_data),
@@ -6,11 +6,9 @@ from fastapi import BackgroundTasks
6
6
  from fastapi import Depends
7
7
  from fastapi import Form
8
8
  from fastapi import HTTPException
9
- from fastapi import Request
10
9
  from fastapi import Response
11
10
  from fastapi import status
12
11
  from fastapi import UploadFile
13
- from pydantic import ValidationError
14
12
  from sqlmodel import select
15
13
 
16
14
  from fractal_server.app.db import AsyncSession
@@ -35,7 +33,6 @@ from fractal_server.app.schemas.v2 import FractalUploadedFile
35
33
  from fractal_server.app.schemas.v2 import TaskGroupActivityActionV2
36
34
  from fractal_server.app.schemas.v2 import TaskGroupActivityStatusV2
37
35
  from fractal_server.app.schemas.v2 import TaskGroupActivityV2Read
38
- from fractal_server.app.schemas.v2 import TaskGroupCreateV2StrictPixi
39
36
  from fractal_server.app.schemas.v2.task_group import TaskGroupV2OriginEnum
40
37
  from fractal_server.config import get_settings
41
38
  from fractal_server.logger import set_logger
@@ -81,7 +78,6 @@ def validate_pkgname_and_version(filename: str) -> tuple[str, str]:
81
78
  response_model=TaskGroupActivityV2Read,
82
79
  )
83
80
  async def collect_task_pixi(
84
- request: Request,
85
81
  response: Response,
86
82
  background_tasks: BackgroundTasks,
87
83
  file: UploadFile,
@@ -147,13 +143,6 @@ async def collect_task_pixi(
147
143
  version=version,
148
144
  path=task_group_path,
149
145
  )
150
- try:
151
- TaskGroupCreateV2StrictPixi(**task_group_attrs)
152
- except ValidationError as e:
153
- raise HTTPException(
154
- status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
155
- detail=f"Invalid task-group object. Original error: {e}",
156
- )
157
146
 
158
147
  await _verify_non_duplication_user_constraint(
159
148
  user_id=user.id,
@@ -168,7 +157,7 @@ async def collect_task_pixi(
168
157
  db=db,
169
158
  )
170
159
 
171
- # FIXME: to be removed with issue #2634
160
+ # NOTE: to be removed with issue #2634
172
161
  stm = select(TaskGroupV2).where(TaskGroupV2.path == task_group_path)
173
162
  res = await db.execute(stm)
174
163
  for conflicting_task_group in res.scalars().all():
@@ -2,7 +2,6 @@ from fastapi import APIRouter
2
2
  from fastapi import BackgroundTasks
3
3
  from fastapi import Depends
4
4
  from fastapi import HTTPException
5
- from fastapi import Request
6
5
  from fastapi import Response
7
6
  from fastapi import status
8
7
 
@@ -48,7 +47,6 @@ async def deactivate_task_group(
48
47
  task_group_id: int,
49
48
  background_tasks: BackgroundTasks,
50
49
  response: Response,
51
- request: Request,
52
50
  user: UserOAuth = Depends(current_active_user),
53
51
  db: AsyncSession = Depends(get_async_db),
54
52
  ) -> TaskGroupReadV2:
@@ -168,7 +166,6 @@ async def reactivate_task_group(
168
166
  task_group_id: int,
169
167
  background_tasks: BackgroundTasks,
170
168
  response: Response,
171
- request: Request,
172
169
  user: UserOAuth = Depends(current_active_user),
173
170
  db: AsyncSession = Depends(get_async_db),
174
171
  ) -> TaskGroupReadV2:
@@ -41,7 +41,6 @@ from .task_group import TaskGroupActivityStatusV2 # noqa F401
41
41
  from .task_group import TaskGroupActivityV2Read # noqa F401
42
42
  from .task_group import TaskGroupCreateV2 # noqa F401
43
43
  from .task_group import TaskGroupCreateV2Strict # noqa F401
44
- from .task_group import TaskGroupCreateV2StrictPixi # noqa F401
45
44
  from .task_group import TaskGroupReadV2 # noqa F401
46
45
  from .task_group import TaskGroupUpdateV2 # noqa F401
47
46
  from .task_group import TaskGroupV2OriginEnum # noqa F401
@@ -62,15 +62,6 @@ class TaskGroupCreateV2Strict(TaskGroupCreateV2):
62
62
  python_version: NonEmptyStr
63
63
 
64
64
 
65
- class TaskGroupCreateV2StrictPixi(TaskGroupCreateV2):
66
- """
67
- A strict version of TaskGroupCreateV2, to be used for pixi task collection.
68
- """
69
-
70
- path: AbsolutePathStr
71
- pixi_version: NonEmptyStr
72
-
73
-
74
65
  class TaskGroupReadV2(BaseModel):
75
66
  id: int
76
67
  task_list: list[TaskReadV2]
@@ -31,7 +31,7 @@ def _customize_and_run_template(
31
31
  f"Invalid {template_filename=} (it must end with '.sh')."
32
32
  )
33
33
 
34
- script_filename = f"{prefix}{template_filename}"
34
+ script_filename = f"{prefix}_{template_filename}"
35
35
  script_path_local = Path(script_dir) / script_filename
36
36
  # Read template
37
37
  customize_template(
@@ -50,10 +50,6 @@ def check_task_files_exist(task_list: list[TaskCreateV2]) -> None:
50
50
  """
51
51
  Check that the modules listed in task commands point to existing files.
52
52
 
53
- Note: commands may be like `/one/python /another/task.py` or
54
- `/one/pixi [...] /another/task.py`, and in both cases `split()[-1]`
55
- returns `/another/task.py`.
56
-
57
53
  Args:
58
54
  task_list:
59
55
  """
@@ -67,20 +67,17 @@ def collect_local(
67
67
  log_file_path=log_file_path,
68
68
  )
69
69
 
70
+ logger.info("START")
70
71
  with next(get_sync_db()) as db:
71
- success, task_group, activity = get_activity_and_task_group(
72
+ db_objects_ok, task_group, activity = get_activity_and_task_group(
72
73
  task_group_activity_id=task_group_activity_id,
73
74
  task_group_id=task_group_id,
74
75
  db=db,
76
+ logger_name=LOGGER_NAME,
75
77
  )
76
- if not success:
78
+ if not db_objects_ok:
77
79
  return
78
80
 
79
- # Log some info
80
- logger.info("START")
81
- for key, value in task_group.model_dump().items():
82
- logger.debug(f"task_group.{key}: {value}")
83
-
84
81
  # Check that the (local) task_group path does exist
85
82
  if Path(task_group.path).exists():
86
83
  error_msg = f"{task_group.path} already exists."
@@ -130,7 +127,7 @@ def collect_local(
130
127
  ).as_posix(),
131
128
  prefix=(
132
129
  f"{int(time.time())}_"
133
- f"{TaskGroupActivityActionV2.COLLECT}_"
130
+ f"{TaskGroupActivityActionV2.COLLECT}"
134
131
  ),
135
132
  logger_name=LOGGER_NAME,
136
133
  )
@@ -47,22 +47,19 @@ def collect_local_pixi(
47
47
  log_file_path=log_file_path,
48
48
  )
49
49
 
50
+ logger.info("START")
50
51
  with next(get_sync_db()) as db:
51
- success, task_group, activity = get_activity_and_task_group(
52
+ db_objects_ok, task_group, activity = get_activity_and_task_group(
52
53
  task_group_activity_id=task_group_activity_id,
53
54
  task_group_id=task_group_id,
54
55
  db=db,
56
+ logger_name=LOGGER_NAME,
55
57
  )
56
- if not success:
58
+ if not db_objects_ok:
57
59
  return
58
60
 
59
- logger.info("START")
60
- for key, value in task_group.model_dump(
61
- exclude={"env_info"}
62
- ).items():
63
- logger.debug(f"task_group.{key}: {value}")
64
-
65
61
  if Path(task_group.path).exists():
62
+ # We handle this before the try/except to avoid the rmtree
66
63
  error_msg = f"{task_group.path} already exists."
67
64
  logger.error(error_msg)
68
65
  fail_and_cleanup(
@@ -100,13 +97,14 @@ def collect_local_pixi(
100
97
  task_group.pkg_name.replace("-", "_"),
101
98
  ),
102
99
  ("__SOURCE_DIR_NAME__", SOURCE_DIR_NAME),
100
+ ("__FROZEN_OPTION__", ""),
103
101
  },
104
102
  script_dir=Path(
105
103
  task_group.path, SCRIPTS_SUBFOLDER
106
104
  ).as_posix(),
107
105
  prefix=(
108
106
  f"{int(time.time())}_"
109
- f"{TaskGroupActivityActionV2.COLLECT}_"
107
+ f"{TaskGroupActivityActionV2.COLLECT}"
110
108
  ),
111
109
  logger_name=LOGGER_NAME,
112
110
  )
@@ -215,17 +213,14 @@ def collect_local_pixi(
215
213
  reset_logger_handlers(logger)
216
214
 
217
215
  except Exception as collection_e:
218
- # Delete corrupted package dir
219
216
  try:
220
217
  logger.info(f"Now delete folder {task_group.path}")
221
218
  shutil.rmtree(task_group.path)
222
219
  logger.info(f"Deleted folder {task_group.path}")
223
220
  except Exception as rm_e:
224
221
  logger.error(
225
- "Removing folder failed.\n"
226
- f"Original error:\n{str(rm_e)}"
222
+ f"Removing folder failed. Original error: {str(rm_e)}"
227
223
  )
228
-
229
224
  fail_and_cleanup(
230
225
  task_group=task_group,
231
226
  task_group_activity=activity,
@@ -46,21 +46,17 @@ def deactivate_local(
46
46
  log_file_path=log_file_path,
47
47
  )
48
48
 
49
+ logger.debug("START")
49
50
  with next(get_sync_db()) as db:
50
- success, task_group, activity = get_activity_and_task_group(
51
+ db_objects_ok, task_group, activity = get_activity_and_task_group(
51
52
  task_group_activity_id=task_group_activity_id,
52
53
  task_group_id=task_group_id,
53
54
  db=db,
55
+ logger_name=LOGGER_NAME,
54
56
  )
55
- if not success:
57
+ if not db_objects_ok:
56
58
  return
57
59
 
58
- # Log some info
59
- logger.debug("START")
60
-
61
- for key, value in task_group.model_dump().items():
62
- logger.debug(f"task_group.{key}: {value}")
63
-
64
60
  # Check that the (local) task_group venv_path does exist
65
61
  if not Path(task_group.venv_path).exists():
66
62
  error_msg = f"{task_group.venv_path} does not exist."
@@ -100,7 +96,7 @@ def deactivate_local(
100
96
  ).as_posix(),
101
97
  prefix=(
102
98
  f"{int(time.time())}_"
103
- f"{TaskGroupActivityActionV2.DEACTIVATE}_"
99
+ f"{TaskGroupActivityActionV2.DEACTIVATE}"
104
100
  ),
105
101
  logger_name=LOGGER_NAME,
106
102
  )
@@ -40,23 +40,17 @@ def deactivate_local_pixi(
40
40
  log_file_path=log_file_path,
41
41
  )
42
42
 
43
+ logger.debug("START")
43
44
  with next(get_sync_db()) as db:
44
- success, task_group, activity = get_activity_and_task_group(
45
+ db_objects_ok, task_group, activity = get_activity_and_task_group(
45
46
  task_group_activity_id=task_group_activity_id,
46
47
  task_group_id=task_group_id,
47
48
  db=db,
49
+ logger_name=LOGGER_NAME,
48
50
  )
49
- if not success:
51
+ if not db_objects_ok:
50
52
  return
51
53
 
52
- # Log some info
53
- logger.debug("START")
54
-
55
- for key, value in task_group.model_dump(
56
- exclude={"env_info"}
57
- ).items():
58
- logger.debug(f"task_group.{key}: {value}")
59
-
60
54
  source_dir = Path(task_group.path, SOURCE_DIR_NAME)
61
55
  if not source_dir.exists():
62
56
  error_msg = f"'{source_dir.as_posix()}' does not exist."
@@ -47,21 +47,17 @@ def reactivate_local(
47
47
  log_file_path=log_file_path,
48
48
  )
49
49
 
50
+ logger.debug("START")
50
51
  with next(get_sync_db()) as db:
51
- success, task_group, activity = get_activity_and_task_group(
52
+ db_objects_ok, task_group, activity = get_activity_and_task_group(
52
53
  task_group_activity_id=task_group_activity_id,
53
54
  task_group_id=task_group_id,
54
55
  db=db,
56
+ logger_name=LOGGER_NAME,
55
57
  )
56
- if not success:
58
+ if not db_objects_ok:
57
59
  return
58
60
 
59
- # Log some info
60
- logger.debug("START")
61
-
62
- for key, value in task_group.model_dump().items():
63
- logger.debug(f"task_group.{key}: {value}")
64
-
65
61
  # Check that the (local) task_group venv_path does not exist
66
62
  if Path(task_group.venv_path).exists():
67
63
  error_msg = f"{task_group.venv_path} already exists."
@@ -100,7 +96,7 @@ def reactivate_local(
100
96
  ).as_posix(),
101
97
  prefix=(
102
98
  f"{int(time.time())}_"
103
- f"{TaskGroupActivityActionV2.REACTIVATE}_"
99
+ f"{TaskGroupActivityActionV2.REACTIVATE}"
104
100
  ),
105
101
  logger_name=LOGGER_NAME,
106
102
  )
@@ -45,23 +45,17 @@ def reactivate_local_pixi(
45
45
  logger_name=LOGGER_NAME,
46
46
  log_file_path=log_file_path,
47
47
  )
48
+ logger.debug("START")
48
49
  with next(get_sync_db()) as db:
49
- success, task_group, activity = get_activity_and_task_group(
50
+ db_objects_ok, task_group, activity = get_activity_and_task_group(
50
51
  task_group_activity_id=task_group_activity_id,
51
52
  task_group_id=task_group_id,
52
53
  db=db,
54
+ logger_name=LOGGER_NAME,
53
55
  )
54
- if not success:
56
+ if not db_objects_ok:
55
57
  return
56
58
 
57
- # Log some info
58
- logger.debug("START")
59
-
60
- for key, value in task_group.model_dump(
61
- exclude={"env_info"}
62
- ).items():
63
- logger.debug(f"task_group.{key}: {value}")
64
-
65
59
  source_dir = Path(task_group.path, SOURCE_DIR_NAME).as_posix()
66
60
  if Path(source_dir).exists():
67
61
  error_msg = f"{source_dir} already exists."
@@ -94,7 +88,7 @@ def reactivate_local_pixi(
94
88
  task_group.pkg_name.replace("-", "_"),
95
89
  ),
96
90
  ("__SOURCE_DIR_NAME__", SOURCE_DIR_NAME),
97
- ("__FROZEN_OPTION__", "true"),
91
+ ("__FROZEN_OPTION__", "--frozen"),
98
92
  },
99
93
  script_dir=Path(
100
94
  task_group.path, SCRIPTS_SUBFOLDER
@@ -128,7 +122,7 @@ def reactivate_local_pixi(
128
122
  activity.log = get_current_log(log_file_path)
129
123
  activity = add_commit_refresh(obj=activity, db=db)
130
124
 
131
- # Run script 3
125
+ # Run script 3 - post-install
132
126
  _customize_and_run_template(
133
127
  template_filename="pixi_3_post_install.sh",
134
128
  **common_args,
@@ -154,8 +148,8 @@ def reactivate_local_pixi(
154
148
  logger.info(f"Deleted folder {source_dir}")
155
149
  except Exception as rm_e:
156
150
  logger.error(
157
- "Removing folder failed.\n"
158
- f"Original error:\n{str(rm_e)}"
151
+ "Removing folder failed. "
152
+ f"Original error: {str(rm_e)}"
159
153
  )
160
154
 
161
155
  fail_and_cleanup(
@@ -1,6 +1,10 @@
1
1
  import os
2
2
  from pathlib import Path
3
3
 
4
+ from sqlalchemy.ext.asyncio import AsyncSession
5
+
6
+ from ..utils_background import fail_and_cleanup
7
+ from fractal_server.app.models.v2 import TaskGroupActivityV2
4
8
  from fractal_server.app.models.v2 import TaskGroupV2
5
9
  from fractal_server.logger import get_logger
6
10
  from fractal_server.ssh._fabric import FractalSSH
@@ -10,7 +14,7 @@ from fractal_server.tasks.v2.utils_templates import customize_template
10
14
  def _customize_and_run_template(
11
15
  *,
12
16
  template_filename: str,
13
- replacements: list[tuple[str, str]],
17
+ replacements: set[tuple[str, str]],
14
18
  script_dir_local: str,
15
19
  prefix: str,
16
20
  fractal_ssh: FractalSSH,
@@ -66,7 +70,10 @@ def _customize_and_run_template(
66
70
 
67
71
 
68
72
  def _copy_wheel_file_ssh(
69
- *, task_group: TaskGroupV2, fractal_ssh: FractalSSH, logger_name: str
73
+ *,
74
+ task_group: TaskGroupV2,
75
+ fractal_ssh: FractalSSH,
76
+ logger_name: str,
70
77
  ) -> str:
71
78
  """
72
79
  Handle the situation where `task_group.archive_path` is not part of
@@ -81,7 +88,41 @@ def _copy_wheel_file_ssh(
81
88
  Path(task_group.path) / Path(task_group.archive_path).name
82
89
  ).as_posix()
83
90
  cmd = f"cp {source} {dest}"
84
- logger.debug(f"[_copy_wheel_file] START {source=} {dest=}")
91
+ logger.debug(f"[_copy_wheel_file_ssh] START {source=} {dest=}")
85
92
  fractal_ssh.run_command(cmd=cmd)
86
- logger.debug(f"[_copy_wheel_file] END {source=} {dest=}")
93
+ logger.debug(f"[_copy_wheel_file_ssh] END {source=} {dest=}")
87
94
  return dest
95
+
96
+
97
+ def check_ssh_or_fail_and_cleanup(
98
+ *,
99
+ fractal_ssh: FractalSSH,
100
+ task_group: TaskGroupV2,
101
+ task_group_activity: TaskGroupActivityV2,
102
+ logger_name: str,
103
+ log_file_path: Path,
104
+ db: AsyncSession,
105
+ ) -> bool:
106
+ """
107
+ Check SSH connection.
108
+
109
+ Returns:
110
+ Whether SSH connection is OK.
111
+ """
112
+ try:
113
+ fractal_ssh.check_connection()
114
+ return True
115
+ except Exception as e:
116
+ logger = get_logger(logger_name=logger_name)
117
+ logger.error(
118
+ "Cannot establish SSH connection. " f"Original error: {str(e)}"
119
+ )
120
+ fail_and_cleanup(
121
+ task_group=task_group,
122
+ task_group_activity=task_group_activity,
123
+ logger_name=logger_name,
124
+ log_file_path=log_file_path,
125
+ exception=e,
126
+ db=db,
127
+ )
128
+ return False
@@ -2,11 +2,11 @@ import time
2
2
  from pathlib import Path
3
3
  from tempfile import TemporaryDirectory
4
4
 
5
- from ....ssh._fabric import SingleUseFractalSSH
6
5
  from ..utils_background import fail_and_cleanup
7
6
  from ..utils_background import get_activity_and_task_group
8
7
  from ..utils_background import prepare_tasks_metadata
9
8
  from ..utils_database import create_db_tasks_and_update_task_group_sync
9
+ from ._utils import check_ssh_or_fail_and_cleanup
10
10
  from fractal_server.app.db import get_sync_db
11
11
  from fractal_server.app.schemas.v2 import FractalUploadedFile
12
12
  from fractal_server.app.schemas.v2 import TaskGroupActivityActionV2
@@ -14,6 +14,7 @@ from fractal_server.app.schemas.v2 import TaskGroupActivityStatusV2
14
14
  from fractal_server.app.schemas.v2.manifest import ManifestV2
15
15
  from fractal_server.logger import reset_logger_handlers
16
16
  from fractal_server.logger import set_logger
17
+ from fractal_server.ssh._fabric import SingleUseFractalSSH
17
18
  from fractal_server.ssh._fabric import SSHConfig
18
19
  from fractal_server.tasks.v2.ssh._utils import _customize_and_run_template
19
20
  from fractal_server.tasks.v2.utils_background import add_commit_refresh
@@ -68,43 +69,39 @@ def collect_ssh(
68
69
  logger_name=LOGGER_NAME,
69
70
  log_file_path=log_file_path,
70
71
  )
71
- with SingleUseFractalSSH(
72
- ssh_config=ssh_config,
73
- logger_name=LOGGER_NAME,
74
- ) as fractal_ssh:
75
-
76
- with next(get_sync_db()) as db:
77
- success, task_group, activity = get_activity_and_task_group(
78
- task_group_activity_id=task_group_activity_id,
79
- task_group_id=task_group_id,
80
- db=db,
81
- )
82
- if not success:
83
- return
84
-
85
- # Log some info
86
- logger.info("START")
87
- for key, value in task_group.model_dump().items():
88
- logger.debug(f"task_group.{key}: {value}")
89
-
90
- # Check that SSH connection works
72
+ logger.info("START")
73
+ with next(get_sync_db()) as db:
74
+ db_objects_ok, task_group, activity = get_activity_and_task_group(
75
+ task_group_activity_id=task_group_activity_id,
76
+ task_group_id=task_group_id,
77
+ db=db,
78
+ logger_name=LOGGER_NAME,
79
+ )
80
+ if not db_objects_ok:
81
+ return
82
+
83
+ with SingleUseFractalSSH(
84
+ ssh_config=ssh_config,
85
+ logger_name=LOGGER_NAME,
86
+ ) as fractal_ssh:
87
+
91
88
  try:
92
- fractal_ssh.check_connection()
93
- except Exception as e:
94
- logger.error("Cannot establish SSH connection.")
95
- fail_and_cleanup(
89
+ # Check SSH connection
90
+ ssh_ok = check_ssh_or_fail_and_cleanup(
91
+ fractal_ssh=fractal_ssh,
96
92
  task_group=task_group,
97
93
  task_group_activity=activity,
98
94
  logger_name=LOGGER_NAME,
99
95
  log_file_path=log_file_path,
100
- exception=e,
101
96
  db=db,
102
97
  )
103
- return
104
-
105
- try:
98
+ if not ssh_ok:
99
+ return
106
100
 
107
101
  # Check that the (remote) task_group path does not exist
102
+ # NOTE: this is not part of the try/except below, in order
103
+ # to avoid removing the existing folder (as part of the
104
+ # exception-handling).
108
105
  if fractal_ssh.remote_exists(task_group.path):
109
106
  error_msg = f"{task_group.path} already exists."
110
107
  logger.error(error_msg)
@@ -119,9 +116,7 @@ def collect_ssh(
119
116
  return
120
117
 
121
118
  # Create remote `task_group.path` and `script_dir_remote`
122
- # folders (note that because of `parents=True` we are in
123
- # the `no error if existing, make parent directories as
124
- # needed` scenario for `mkdir`)
119
+ # folders
125
120
  script_dir_remote = (
126
121
  Path(task_group.path) / SCRIPTS_SUBFOLDER
127
122
  ).as_posix()
@@ -139,8 +134,7 @@ def collect_ssh(
139
134
  Path(tmpdir) / wheel_filename
140
135
  ).as_posix()
141
136
  logger.info(
142
- "Write wheel-file contents into "
143
- f"{tmp_archive_path}"
137
+ f"Write wheel file into {tmp_archive_path}"
144
138
  )
145
139
  with open(tmp_archive_path, "wb") as f:
146
140
  f.write(wheel_file.contents)
@@ -161,8 +155,8 @@ def collect_ssh(
161
155
  # Prepare common arguments for _customize_and_run_template
162
156
  common_args = dict(
163
157
  replacements=replacements,
164
- script_dir_local=(
165
- Path(tmpdir) / SCRIPTS_SUBFOLDER
158
+ script_dir_local=Path(
159
+ tmpdir, SCRIPTS_SUBFOLDER
166
160
  ).as_posix(),
167
161
  script_dir_remote=script_dir_remote,
168
162
  prefix=(
@@ -187,6 +181,7 @@ def collect_ssh(
187
181
  )
188
182
  activity.log = get_current_log(log_file_path)
189
183
  activity = add_commit_refresh(obj=activity, db=db)
184
+
190
185
  # Run script 2
191
186
  stdout = _customize_and_run_template(
192
187
  template_filename="2_pip_install.sh",
@@ -315,7 +310,7 @@ def collect_ssh(
315
310
  except Exception as e_rm:
316
311
  logger.error(
317
312
  "Removing folder failed. "
318
- f"Original error:\n{str(e_rm)}"
313
+ f"Original error: {str(e_rm)}"
319
314
  )
320
315
  fail_and_cleanup(
321
316
  task_group=task_group,