fractal-server 2.14.15__py3-none-any.whl → 2.15.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (61)
  1. fractal_server/__init__.py +1 -1
  2. fractal_server/app/models/security.py +2 -2
  3. fractal_server/app/models/user_settings.py +2 -2
  4. fractal_server/app/models/v2/dataset.py +3 -3
  5. fractal_server/app/models/v2/history.py +2 -0
  6. fractal_server/app/models/v2/job.py +6 -6
  7. fractal_server/app/models/v2/task.py +12 -8
  8. fractal_server/app/models/v2/task_group.py +19 -7
  9. fractal_server/app/models/v2/workflowtask.py +6 -6
  10. fractal_server/app/routes/admin/v2/task_group_lifecycle.py +2 -5
  11. fractal_server/app/routes/api/v2/__init__.py +6 -0
  12. fractal_server/app/routes/api/v2/_aux_functions_tasks.py +22 -0
  13. fractal_server/app/routes/api/v2/history.py +2 -2
  14. fractal_server/app/routes/api/v2/pre_submission_checks.py +3 -3
  15. fractal_server/app/routes/api/v2/task_collection.py +8 -18
  16. fractal_server/app/routes/api/v2/task_collection_custom.py +2 -2
  17. fractal_server/app/routes/api/v2/task_collection_pixi.py +219 -0
  18. fractal_server/app/routes/api/v2/task_group.py +3 -0
  19. fractal_server/app/routes/api/v2/task_group_lifecycle.py +26 -10
  20. fractal_server/app/runner/executors/slurm_common/_slurm_config.py +10 -0
  21. fractal_server/app/runner/executors/slurm_common/base_slurm_runner.py +39 -14
  22. fractal_server/app/runner/executors/slurm_common/get_slurm_config.py +8 -1
  23. fractal_server/app/runner/executors/slurm_ssh/runner.py +3 -1
  24. fractal_server/app/runner/v2/runner.py +2 -2
  25. fractal_server/app/schemas/v2/__init__.py +1 -1
  26. fractal_server/app/schemas/v2/dumps.py +1 -1
  27. fractal_server/app/schemas/v2/task_collection.py +1 -1
  28. fractal_server/app/schemas/v2/task_group.py +7 -5
  29. fractal_server/config.py +70 -0
  30. fractal_server/images/status_tools.py +80 -75
  31. fractal_server/migrations/versions/791ce783d3d8_add_indices.py +41 -0
  32. fractal_server/migrations/versions/b1e7f7a1ff71_task_group_for_pixi.py +53 -0
  33. fractal_server/migrations/versions/b3ffb095f973_json_to_jsonb.py +340 -0
  34. fractal_server/ssh/_fabric.py +29 -0
  35. fractal_server/tasks/v2/local/__init__.py +3 -0
  36. fractal_server/tasks/v2/local/_utils.py +4 -3
  37. fractal_server/tasks/v2/local/collect.py +26 -30
  38. fractal_server/tasks/v2/local/collect_pixi.py +252 -0
  39. fractal_server/tasks/v2/local/deactivate.py +39 -46
  40. fractal_server/tasks/v2/local/deactivate_pixi.py +98 -0
  41. fractal_server/tasks/v2/local/reactivate.py +12 -23
  42. fractal_server/tasks/v2/local/reactivate_pixi.py +184 -0
  43. fractal_server/tasks/v2/ssh/__init__.py +3 -0
  44. fractal_server/tasks/v2/ssh/_utils.py +50 -9
  45. fractal_server/tasks/v2/ssh/collect.py +46 -56
  46. fractal_server/tasks/v2/ssh/collect_pixi.py +315 -0
  47. fractal_server/tasks/v2/ssh/deactivate.py +54 -67
  48. fractal_server/tasks/v2/ssh/deactivate_pixi.py +122 -0
  49. fractal_server/tasks/v2/ssh/reactivate.py +25 -38
  50. fractal_server/tasks/v2/ssh/reactivate_pixi.py +233 -0
  51. fractal_server/tasks/v2/templates/pixi_1_extract.sh +40 -0
  52. fractal_server/tasks/v2/templates/pixi_2_install.sh +52 -0
  53. fractal_server/tasks/v2/templates/pixi_3_post_install.sh +76 -0
  54. fractal_server/tasks/v2/utils_background.py +50 -8
  55. fractal_server/tasks/v2/utils_pixi.py +38 -0
  56. fractal_server/tasks/v2/utils_templates.py +14 -1
  57. {fractal_server-2.14.15.dist-info → fractal_server-2.15.0.dist-info}/METADATA +4 -4
  58. {fractal_server-2.14.15.dist-info → fractal_server-2.15.0.dist-info}/RECORD +61 -47
  59. {fractal_server-2.14.15.dist-info → fractal_server-2.15.0.dist-info}/LICENSE +0 -0
  60. {fractal_server-2.14.15.dist-info → fractal_server-2.15.0.dist-info}/WHEEL +0 -0
  61. {fractal_server-2.14.15.dist-info → fractal_server-2.15.0.dist-info}/entry_points.txt +0 -0
fractal_server/app/routes/api/v2/task_collection_pixi.py (new file)
@@ -0,0 +1,219 @@
+ import os
+ from pathlib import Path
+
+ from fastapi import APIRouter
+ from fastapi import BackgroundTasks
+ from fastapi import Depends
+ from fastapi import Form
+ from fastapi import HTTPException
+ from fastapi import Response
+ from fastapi import status
+ from fastapi import UploadFile
+
+ from fractal_server.app.db import AsyncSession
+ from fractal_server.app.db import get_async_db
+ from fractal_server.app.models import UserOAuth
+ from fractal_server.app.models.v2 import TaskGroupActivityV2
+ from fractal_server.app.models.v2 import TaskGroupV2
+ from fractal_server.app.routes.api.v2._aux_functions_tasks import (
+     _get_valid_user_group_id,
+ )
+ from fractal_server.app.routes.api.v2._aux_functions_tasks import (
+     _verify_non_duplication_group_constraint,
+ )
+ from fractal_server.app.routes.api.v2._aux_functions_tasks import (
+     _verify_non_duplication_group_path,
+ )
+ from fractal_server.app.routes.api.v2._aux_functions_tasks import (
+     _verify_non_duplication_user_constraint,
+ )
+ from fractal_server.app.routes.auth import current_active_verified_user
+ from fractal_server.app.routes.aux.validate_user_settings import (
+     validate_user_settings,
+ )
+ from fractal_server.app.schemas.v2 import FractalUploadedFile
+ from fractal_server.app.schemas.v2 import TaskGroupActivityActionV2
+ from fractal_server.app.schemas.v2 import TaskGroupActivityStatusV2
+ from fractal_server.app.schemas.v2 import TaskGroupActivityV2Read
+ from fractal_server.app.schemas.v2.task_group import TaskGroupV2OriginEnum
+ from fractal_server.config import get_settings
+ from fractal_server.logger import set_logger
+ from fractal_server.ssh._fabric import SSHConfig
+ from fractal_server.syringe import Inject
+ from fractal_server.tasks.v2.local import collect_local_pixi
+ from fractal_server.tasks.v2.ssh import collect_ssh_pixi
+ from fractal_server.tasks.v2.utils_package_names import normalize_package_name
+ from fractal_server.types import NonEmptyStr
+
+
+ router = APIRouter()
+
+ logger = set_logger(__name__)
+
+
+ def validate_pkgname_and_version(filename: str) -> tuple[str, str]:
+     if not filename.endswith(".tar.gz"):
+         raise HTTPException(
+             status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+             detail=f"{filename=} does not end with '.tar.gz'.",
+         )
+     filename_splitted = filename.split("-")
+     if len(filename_splitted) != 2:
+         raise HTTPException(
+             status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+             detail=(
+                 f"Invalid filename: '{filename}' must contain a single `-` "
+                 "character, separating the package name from the version "
+                 "(expected format: 'pkg_name-version')."
+             ),
+         )
+
+     pkg_name = filename_splitted[0]
+     version = filename.removeprefix(f"{pkg_name}-").removesuffix(".tar.gz")
+
+     return normalize_package_name(pkg_name), version
+
+
+ @router.post(
+     "/collect/pixi/",
+     status_code=202,
+     response_model=TaskGroupActivityV2Read,
+ )
+ async def collect_task_pixi(
+     response: Response,
+     background_tasks: BackgroundTasks,
+     file: UploadFile,
+     pixi_version: NonEmptyStr | None = Form(None),
+     private: bool = False,
+     user_group_id: int | None = None,
+     user: UserOAuth = Depends(current_active_verified_user),
+     db: AsyncSession = Depends(get_async_db),
+ ) -> TaskGroupActivityV2Read:
+
+     settings = Inject(get_settings)
+     # Check if Pixi is available
+     if settings.pixi is None:
+         raise HTTPException(
+             status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+             detail="Pixi task collection is not available.",
+         )
+     # Check if provided Pixi version is available. Use default if not provided
+     if pixi_version is None:
+         pixi_version = settings.pixi.default_version
+     else:
+         if pixi_version not in settings.pixi.versions:
+             raise HTTPException(
+                 status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+                 detail=(
+                     f"Pixi version {pixi_version} is not available. Available "
+                     f"versions: {list(settings.pixi.versions.keys())}"
+                 ),
+             )
+
+     pkg_name, version = validate_pkgname_and_version(file.filename)
+     tar_gz_content = await file.read()
+     tar_gz_file = FractalUploadedFile(
+         filename=file.filename,
+         contents=tar_gz_content,
+     )
+
+     user_group_id = await _get_valid_user_group_id(
+         user_group_id=user_group_id,
+         private=private,
+         user_id=user.id,
+         db=db,
+     )
+
+     user_settings = await validate_user_settings(
+         user=user, backend=settings.FRACTAL_RUNNER_BACKEND, db=db
+     )
+
+     if settings.FRACTAL_RUNNER_BACKEND == "slurm_ssh":
+         base_tasks_path = user_settings.ssh_tasks_dir
+     else:
+         base_tasks_path = settings.FRACTAL_TASKS_DIR.as_posix()
+     task_group_path = (
+         Path(base_tasks_path) / str(user.id) / pkg_name / version
+     ).as_posix()
+
+     task_group_attrs = dict(
+         user_id=user.id,
+         user_group_id=user_group_id,
+         origin=TaskGroupV2OriginEnum.PIXI,
+         pixi_version=pixi_version,
+         pkg_name=pkg_name,
+         version=version,
+         path=task_group_path,
+     )
+
+     await _verify_non_duplication_user_constraint(
+         user_id=user.id,
+         pkg_name=task_group_attrs["pkg_name"],
+         version=task_group_attrs["version"],
+         db=db,
+     )
+     await _verify_non_duplication_group_constraint(
+         user_group_id=task_group_attrs["user_group_id"],
+         pkg_name=task_group_attrs["pkg_name"],
+         version=task_group_attrs["version"],
+         db=db,
+     )
+     await _verify_non_duplication_group_path(
+         path=task_group_attrs["path"],
+         db=db,
+     )
+
+     if settings.FRACTAL_RUNNER_BACKEND != "slurm_ssh":
+         if Path(task_group_path).exists():
+             raise HTTPException(
+                 status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+                 detail=f"{task_group_path} already exists.",
+             )
+
+     task_group = TaskGroupV2(**task_group_attrs)
+     db.add(task_group)
+     await db.commit()
+     await db.refresh(task_group)
+     db.expunge(task_group)
+
+     task_group_activity = TaskGroupActivityV2(
+         user_id=task_group.user_id,
+         taskgroupv2_id=task_group.id,
+         status=TaskGroupActivityStatusV2.PENDING,
+         action=TaskGroupActivityActionV2.COLLECT,
+         pkg_name=task_group.pkg_name,
+         version=task_group.version,
+     )
+     db.add(task_group_activity)
+     await db.commit()
+     await db.refresh(task_group_activity)
+
+     if settings.FRACTAL_RUNNER_BACKEND == "slurm_ssh":
+         ssh_config = SSHConfig(
+             user=user_settings.ssh_username,
+             host=user_settings.ssh_host,
+             key_path=user_settings.ssh_private_key_path,
+         )
+
+         background_tasks.add_task(
+             collect_ssh_pixi,
+             task_group_id=task_group.id,
+             task_group_activity_id=task_group_activity.id,
+             ssh_config=ssh_config,
+             tasks_base_dir=user_settings.ssh_tasks_dir,
+             tar_gz_file=tar_gz_file,
+         )
+     else:
+         background_tasks.add_task(
+             collect_local_pixi,
+             task_group_id=task_group.id,
+             task_group_activity_id=task_group_activity.id,
+             tar_gz_file=tar_gz_file,
+         )
+     logger.info(
+         "Task-collection endpoint: start background collection "
+         "and return task_group_activity. "
+         f"Current pid is {os.getpid()}. "
+     )
+     response.status_code = status.HTTP_202_ACCEPTED
+     return task_group_activity
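The new `task_collection_pixi.py` module above adds a `POST /collect/pixi/` route: it accepts an uploaded `<pkg_name>-<version>.tar.gz` archive (the filename must contain exactly one `-`), an optional `pixi_version` form field, and returns a pending `TaskGroupActivityV2Read` while the actual collection runs in a background task. A minimal client sketch follows; it assumes the router is mounted under the same `/api/v2/task` prefix as the existing pip task-collection endpoint, and the server URL, token, and pixi version are placeholders.

```python
# Hedged client sketch for the new pixi task-collection endpoint.
# Assumptions: the route is exposed at /api/v2/task/collect/pixi/ and
# SERVER_URL / TOKEN / the pixi version stand in for real values.
import httpx

SERVER_URL = "http://localhost:8000"
TOKEN = "<bearer-token>"

with open("my_package-1.2.3.tar.gz", "rb") as f:
    resp = httpx.post(
        f"{SERVER_URL}/api/v2/task/collect/pixi/",
        headers={"Authorization": f"Bearer {TOKEN}"},
        # Filename must be "<pkg_name>-<version>.tar.gz", with a single "-"
        files={"file": ("my_package-1.2.3.tar.gz", f, "application/gzip")},
        # Optional; must match one of the pixi versions configured server-side
        data={"pixi_version": "0.47.0"},
    )
resp.raise_for_status()  # expect 202 Accepted
activity = resp.json()   # TaskGroupActivityV2Read with status "pending"
print(activity["id"], activity["status"])
```

A client would then poll the returned activity through the existing task-group activity endpoints until it leaves the pending/ongoing states.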
fractal_server/app/routes/api/v2/task_group.py
@@ -12,6 +12,7 @@ from pydantic.types import AwareDatetime
  from sqlmodel import or_
  from sqlmodel import select

+ from ._aux_functions_task_lifecycle import check_no_ongoing_activity
  from ._aux_functions_tasks import _get_task_group_full_access
  from ._aux_functions_tasks import _get_task_group_read_access
  from ._aux_functions_tasks import _verify_non_duplication_group_constraint
@@ -216,6 +217,8 @@ async def delete_task_group(
          db=db,
      )

+     await check_no_ongoing_activity(task_group_id=task_group_id, db=db)
+
      stm = select(WorkflowTaskV2).where(
          WorkflowTaskV2.task_id.in_({task.id for task in task_group.task_list})
      )
fractal_server/app/routes/api/v2/task_group_lifecycle.py
@@ -2,7 +2,6 @@ from fastapi import APIRouter
  from fastapi import BackgroundTasks
  from fastapi import Depends
  from fastapi import HTTPException
- from fastapi import Request
  from fastapi import Response
  from fastapi import status

@@ -25,9 +24,13 @@ from fractal_server.logger import set_logger
  from fractal_server.ssh._fabric import SSHConfig
  from fractal_server.syringe import Inject
  from fractal_server.tasks.v2.local import deactivate_local
+ from fractal_server.tasks.v2.local import deactivate_local_pixi
  from fractal_server.tasks.v2.local import reactivate_local
+ from fractal_server.tasks.v2.local import reactivate_local_pixi
  from fractal_server.tasks.v2.ssh import deactivate_ssh
+ from fractal_server.tasks.v2.ssh import deactivate_ssh_pixi
  from fractal_server.tasks.v2.ssh import reactivate_ssh
+ from fractal_server.tasks.v2.ssh import reactivate_ssh_pixi
  from fractal_server.utils import get_timestamp

  router = APIRouter()
@@ -44,7 +47,6 @@ async def deactivate_task_group(
      task_group_id: int,
      background_tasks: BackgroundTasks,
      response: Response,
-     request: Request,
      user: UserOAuth = Depends(current_active_user),
      db: AsyncSession = Depends(get_async_db),
  ) -> TaskGroupReadV2:
@@ -125,9 +127,12 @@ async def deactivate_task_group(
              host=user_settings.ssh_host,
              key_path=user_settings.ssh_private_key_path,
          )
-
+         if task_group.origin == TaskGroupV2OriginEnum.PIXI:
+             deactivate_function = deactivate_ssh_pixi
+         else:
+             deactivate_function = deactivate_ssh
          background_tasks.add_task(
-             deactivate_ssh,
+             deactivate_function,
              task_group_id=task_group.id,
              task_group_activity_id=task_group_activity.id,
              ssh_config=ssh_config,
@@ -135,8 +140,12 @@
          )

      else:
+         if task_group.origin == TaskGroupV2OriginEnum.PIXI:
+             deactivate_function = deactivate_local_pixi
+         else:
+             deactivate_function = deactivate_local
          background_tasks.add_task(
-             deactivate_local,
+             deactivate_function,
              task_group_id=task_group.id,
              task_group_activity_id=task_group_activity.id,
          )
@@ -157,7 +166,6 @@ async def reactivate_task_group(
      task_group_id: int,
      background_tasks: BackgroundTasks,
      response: Response,
-     request: Request,
      user: UserOAuth = Depends(current_active_user),
      db: AsyncSession = Depends(get_async_db),
  ) -> TaskGroupReadV2:
@@ -210,12 +218,12 @@
          response.status_code = status.HTTP_202_ACCEPTED
          return task_group_activity

-     if task_group.pip_freeze is None:
+     if task_group.env_info is None:
          raise HTTPException(
              status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
              detail=(
                  "Cannot reactivate a task group with "
-                 f"{task_group.pip_freeze=}."
+                 f"{task_group.env_info=}."
              ),
          )

@@ -247,8 +255,12 @@
              key_path=user_settings.ssh_private_key_path,
          )

+         if task_group.origin == TaskGroupV2OriginEnum.PIXI:
+             reactivate_function = reactivate_ssh_pixi
+         else:
+             reactivate_function = reactivate_ssh
          background_tasks.add_task(
-             reactivate_ssh,
+             reactivate_function,
              task_group_id=task_group.id,
              task_group_activity_id=task_group_activity.id,
              ssh_config=ssh_config,
@@ -256,8 +268,12 @@
          )

      else:
+         if task_group.origin == TaskGroupV2OriginEnum.PIXI:
+             reactivate_function = reactivate_local_pixi
+         else:
+             reactivate_function = reactivate_local
          background_tasks.add_task(
-             reactivate_local,
+             reactivate_function,
              task_group_id=task_group.id,
              task_group_activity_id=task_group_activity.id,
          )
fractal_server/app/runner/executors/slurm_common/_slurm_config.py
@@ -48,6 +48,8 @@ class _SlurmConfigSet(BaseModel):
          constraint:
          gres:
          time:
+         exclude:
+         nodelist:
          account:
          extra_lines:
      """
@@ -59,6 +61,8 @@ class _SlurmConfigSet(BaseModel):
      mem: int | str | None = None
      constraint: str | None = None
      gres: str | None = None
+     exclude: str | None = None
+     nodelist: str | None = None
      time: str | None = None
      account: str | None = None
      extra_lines: list[str] | None = None
@@ -227,6 +231,8 @@ class SlurmConfig(BaseModel):
          account: Corresponds to SLURM option.
          gpus: Corresponds to SLURM option.
          time: Corresponds to SLURM option (WARNING: not fully supported).
+         nodelist: Corresponds to SLURM option.
+         exclude: Corresponds to SLURM option.
          prefix: Prefix of configuration lines in SLURM submission scripts.
          shebang_line: Shebang line for SLURM submission scripts.
          extra_lines: Additional lines to include in SLURM submission scripts.
@@ -268,6 +274,8 @@ class SlurmConfig(BaseModel):
      gpus: str | None = None
      time: str | None = None
      account: str | None = None
+     nodelist: str | None = None
+     exclude: str | None = None

      # Free-field attribute for extra lines to be added to the SLURM job
      # preamble
@@ -361,6 +369,8 @@ class SlurmConfig(BaseModel):
              "gpus",
              "time",
              "account",
+             "exclude",
+             "nodelist",
          ]:
              value = getattr(self, key)
              if value is not None:
fractal_server/app/runner/executors/slurm_common/base_slurm_runner.py
@@ -182,33 +182,53 @@ class BaseSlurmRunner(BaseRunner):
      def _mkdir_remote_folder(self, folder: str) -> None:
          raise NotImplementedError("Implement in child class.")

-     def _submit_single_sbatch(
+     def _enrich_slurm_config(
          self,
-         *,
-         base_command: str,
-         slurm_job: SlurmJob,
          slurm_config: SlurmConfig,
-     ) -> str:
-         logger.debug("[_submit_single_sbatch] START")
+     ) -> SlurmConfig:
+         """
+         Return an enriched `SlurmConfig` object

-         # Include SLURM account in `slurm_config`. Note: we make this change
-         # here, rather than exposing a new argument of `get_slurm_config`,
-         # because it's a backend-specific argument while `get_slurm_config` has
-         # a generic interface.
+         Include `self.account` and `self.common_script_lines` into a
+         `SlurmConfig` object. Extracting this logic into an independent
+         class method is useful to fix issue #2659 (which was due to
+         performing this same operation multiple times rather than once).
+
+         Args:
+             slurm_config: The original `SlurmConfig` object.
+
+         Returns:
+             A new, up-to-date, `SlurmConfig` object.
+         """
+
+         new_slurm_config = slurm_config.model_copy()
+
+         # Include SLURM account in `slurm_config`.
          if self.slurm_account is not None:
-             slurm_config.account = self.slurm_account
+             new_slurm_config.account = self.slurm_account

          # Include common_script_lines in extra_lines
          if len(self.common_script_lines) > 0:
              logger.debug(
                  f"Add {self.common_script_lines} to "
-                 f"{slurm_config.extra_lines=}."
+                 f"{new_slurm_config.extra_lines=}."
              )
-             current_extra_lines = slurm_config.extra_lines or []
-             slurm_config.extra_lines = (
+             current_extra_lines = new_slurm_config.extra_lines or []
+             new_slurm_config.extra_lines = (
                  current_extra_lines + self.common_script_lines
              )

+         return new_slurm_config
+
+     def _submit_single_sbatch(
+         self,
+         *,
+         base_command: str,
+         slurm_job: SlurmJob,
+         slurm_config: SlurmConfig,
+     ) -> str:
+         logger.debug("[_submit_single_sbatch] START")
+
          for task in slurm_job.tasks:
              # Write input file
              if self.slurm_runner_type == "ssh":
@@ -508,6 +528,9 @@ class BaseSlurmRunner(BaseRunner):
          user_id: int,
      ) -> tuple[Any, Exception]:
          logger.debug("[submit] START")
+
+         config = self._enrich_slurm_config(config)
+
          try:
              workdir_local = task_files.wftask_subfolder_local
              workdir_remote = task_files.wftask_subfolder_remote
@@ -649,6 +672,8 @@ class BaseSlurmRunner(BaseRunner):
              input images, while for compound tasks these can differ.
          """

+         config = self._enrich_slurm_config(config)
+
          logger.debug(f"[multisubmit] START, {len(list_parameters)=}")
          try:
              if self.is_shutdown():
fractal_server/app/runner/executors/slurm_common/get_slurm_config.py
@@ -125,7 +125,14 @@ def get_slurm_config_internal(
              )
              logger.error(error_msg)
              raise SlurmConfigError(error_msg)
-         for key in ["time", "gres", "gpus", "constraint"]:
+         for key in [
+             "time",
+             "gres",
+             "gpus",
+             "constraint",
+             "nodelist",
+             "exclude",
+         ]:
              value = wftask_meta.get(key, None)
              if value is not None:
                  slurm_dict[key] = value
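With this change, `nodelist` and `exclude` join the set of per-task SLURM options that are copied from a workflow task's `meta` attribute into the SLURM configuration. A purely illustrative sketch of such a `meta` dictionary is shown below; the values are example SLURM expressions chosen for illustration, not taken from this diff, and how `meta` is populated is outside the scope of these hunks.

```python
# Illustrative only: these keys are the ones the loop above copies from a
# workflow task's `meta` attribute into `slurm_dict`; the values are example
# SLURM node-list / time expressions.
wftask_meta = {
    "time": "01:00:00",
    "constraint": "gpu",
    "nodelist": "node[001-004]",   # new in 2.15.0: restrict jobs to these nodes
    "exclude": "node042,node043",  # new in 2.15.0: never schedule on these nodes
}
```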
fractal_server/app/runner/executors/slurm_ssh/runner.py
@@ -40,8 +40,10 @@ class SlurmSSHRunner(BaseSlurmRunner):
          self.fractal_ssh = fractal_ssh
          logger.warning(self.fractal_ssh)

-         settings = Inject(get_settings)
+         # Check SSH connection and try to recover from a closed-socket error
+         self.fractal_ssh.check_connection()

+         settings = Inject(get_settings)
          super().__init__(
              slurm_runner_type="ssh",
              root_dir_local=root_dir_local,
fractal_server/app/runner/v2/runner.py
@@ -32,7 +32,7 @@ from fractal_server.app.schemas.v2 import TaskDumpV2
  from fractal_server.app.schemas.v2 import TaskGroupDumpV2
  from fractal_server.app.schemas.v2 import TaskType
  from fractal_server.images import SingleImage
- from fractal_server.images.status_tools import enrich_images_sync
+ from fractal_server.images.status_tools import enrich_images_unsorted_sync
  from fractal_server.images.status_tools import IMAGE_STATUS_KEY
  from fractal_server.images.tools import filter_image_list
  from fractal_server.images.tools import find_image_by_zarr_url
@@ -147,7 +147,7 @@ def execute_tasks_v2(

          if ind_wftask == 0 and ENRICH_IMAGES_WITH_STATUS:
              # FIXME: Could this be done on `type_filtered_images`?
-             tmp_images = enrich_images_sync(
+             tmp_images = enrich_images_unsorted_sync(
                  images=tmp_images,
                  dataset_id=dataset.id,
                  workflowtask_id=wftask.id,
fractal_server/app/schemas/v2/__init__.py
@@ -33,9 +33,9 @@ from .task import TaskImportV2Legacy # noqa F401
  from .task import TaskReadV2  # noqa F401
  from .task import TaskType  # noqa F401
  from .task import TaskUpdateV2  # noqa F401
+ from .task_collection import FractalUploadedFile  # noqa F401
  from .task_collection import TaskCollectCustomV2  # noqa F401
  from .task_collection import TaskCollectPipV2  # noqa F401
- from .task_collection import WheelFile  # noqa F401
  from .task_group import TaskGroupActivityActionV2  # noqa F401
  from .task_group import TaskGroupActivityStatusV2  # noqa F401
  from .task_group import TaskGroupActivityV2Read  # noqa F401
fractal_server/app/schemas/v2/dumps.py
@@ -86,4 +86,4 @@ class TaskGroupDumpV2(BaseModel):

      path: str | None = None
      venv_path: str | None = None
-     wheel_path: str | None = None
+     archive_path: str | None = None
fractal_server/app/schemas/v2/task_collection.py
@@ -12,7 +12,7 @@ from fractal_server.types import DictStrStr
  from fractal_server.types import NonEmptyStr


- class WheelFile(BaseModel):
+ class FractalUploadedFile(BaseModel):
      """
      Model for data sent from the endpoint to the background task.
      """
fractal_server/app/schemas/v2/task_group.py
@@ -16,6 +16,7 @@ from fractal_server.types import NonEmptyStr
  class TaskGroupV2OriginEnum(StrEnum):
      PYPI = "pypi"
      WHEELFILE = "wheel-file"
+     PIXI = "pixi"
      OTHER = "other"


@@ -41,11 +42,12 @@ class TaskGroupCreateV2(BaseModel):
      pkg_name: str
      version: str | None = None
      python_version: NonEmptyStr = None
+     pixi_version: NonEmptyStr = None
      path: AbsolutePathStr = None
      venv_path: AbsolutePathStr = None
-     wheel_path: AbsolutePathStr = None
+     archive_path: AbsolutePathStr = None
      pip_extras: NonEmptyStr = None
-     pip_freeze: str | None = None
+     env_info: str | None = None
      pinned_package_versions: DictStrStr = Field(default_factory=dict)


@@ -55,8 +57,8 @@ class TaskGroupCreateV2Strict(TaskGroupCreateV2):
      """

      path: AbsolutePathStr
-     venv_path: AbsolutePathStr
      version: NonEmptyStr
+     venv_path: AbsolutePathStr
      python_version: NonEmptyStr


@@ -71,10 +73,10 @@ class TaskGroupReadV2(BaseModel):
      pkg_name: str
      version: str | None = None
      python_version: str | None = None
+     pixi_version: str | None = None
      path: str | None = None
      venv_path: str | None = None
-     wheel_path: str | None = None
-     pip_freeze: str | None = None
+     archive_path: str | None = None
      pip_extras: str | None = None
      pinned_package_versions: dict[str, str] = Field(default_factory=dict)