fractal-server 2.14.16__py3-none-any.whl → 2.15.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (54)
  1. fractal_server/__init__.py +1 -1
  2. fractal_server/app/models/security.py +2 -2
  3. fractal_server/app/models/user_settings.py +2 -2
  4. fractal_server/app/models/v2/dataset.py +3 -3
  5. fractal_server/app/models/v2/job.py +6 -6
  6. fractal_server/app/models/v2/task.py +12 -8
  7. fractal_server/app/models/v2/task_group.py +19 -7
  8. fractal_server/app/models/v2/workflowtask.py +6 -6
  9. fractal_server/app/routes/admin/v2/task_group_lifecycle.py +2 -5
  10. fractal_server/app/routes/api/v2/__init__.py +6 -0
  11. fractal_server/app/routes/api/v2/_aux_functions_tasks.py +22 -0
  12. fractal_server/app/routes/api/v2/task_collection.py +8 -18
  13. fractal_server/app/routes/api/v2/task_collection_custom.py +2 -2
  14. fractal_server/app/routes/api/v2/task_collection_pixi.py +219 -0
  15. fractal_server/app/routes/api/v2/task_group.py +3 -0
  16. fractal_server/app/routes/api/v2/task_group_lifecycle.py +26 -10
  17. fractal_server/app/runner/executors/slurm_common/_slurm_config.py +10 -0
  18. fractal_server/app/runner/executors/slurm_common/base_slurm_runner.py +39 -14
  19. fractal_server/app/runner/executors/slurm_common/get_slurm_config.py +8 -1
  20. fractal_server/app/schemas/v2/__init__.py +1 -1
  21. fractal_server/app/schemas/v2/dumps.py +1 -1
  22. fractal_server/app/schemas/v2/task_collection.py +1 -1
  23. fractal_server/app/schemas/v2/task_group.py +7 -5
  24. fractal_server/config.py +70 -0
  25. fractal_server/migrations/versions/b1e7f7a1ff71_task_group_for_pixi.py +53 -0
  26. fractal_server/migrations/versions/b3ffb095f973_json_to_jsonb.py +340 -0
  27. fractal_server/ssh/_fabric.py +26 -0
  28. fractal_server/tasks/v2/local/__init__.py +3 -0
  29. fractal_server/tasks/v2/local/_utils.py +4 -3
  30. fractal_server/tasks/v2/local/collect.py +26 -30
  31. fractal_server/tasks/v2/local/collect_pixi.py +252 -0
  32. fractal_server/tasks/v2/local/deactivate.py +39 -46
  33. fractal_server/tasks/v2/local/deactivate_pixi.py +98 -0
  34. fractal_server/tasks/v2/local/reactivate.py +12 -23
  35. fractal_server/tasks/v2/local/reactivate_pixi.py +184 -0
  36. fractal_server/tasks/v2/ssh/__init__.py +3 -0
  37. fractal_server/tasks/v2/ssh/_utils.py +50 -9
  38. fractal_server/tasks/v2/ssh/collect.py +46 -56
  39. fractal_server/tasks/v2/ssh/collect_pixi.py +315 -0
  40. fractal_server/tasks/v2/ssh/deactivate.py +54 -67
  41. fractal_server/tasks/v2/ssh/deactivate_pixi.py +122 -0
  42. fractal_server/tasks/v2/ssh/reactivate.py +25 -38
  43. fractal_server/tasks/v2/ssh/reactivate_pixi.py +233 -0
  44. fractal_server/tasks/v2/templates/pixi_1_extract.sh +40 -0
  45. fractal_server/tasks/v2/templates/pixi_2_install.sh +52 -0
  46. fractal_server/tasks/v2/templates/pixi_3_post_install.sh +76 -0
  47. fractal_server/tasks/v2/utils_background.py +50 -8
  48. fractal_server/tasks/v2/utils_pixi.py +38 -0
  49. fractal_server/tasks/v2/utils_templates.py +14 -1
  50. {fractal_server-2.14.16.dist-info → fractal_server-2.15.0.dist-info}/METADATA +1 -1
  51. {fractal_server-2.14.16.dist-info → fractal_server-2.15.0.dist-info}/RECORD +54 -41
  52. {fractal_server-2.14.16.dist-info → fractal_server-2.15.0.dist-info}/LICENSE +0 -0
  53. {fractal_server-2.14.16.dist-info → fractal_server-2.15.0.dist-info}/WHEEL +0 -0
  54. {fractal_server-2.14.16.dist-info → fractal_server-2.15.0.dist-info}/entry_points.txt +0 -0
@@ -2,7 +2,6 @@ from fastapi import APIRouter
 from fastapi import BackgroundTasks
 from fastapi import Depends
 from fastapi import HTTPException
-from fastapi import Request
 from fastapi import Response
 from fastapi import status
 
@@ -25,9 +24,13 @@ from fractal_server.logger import set_logger
 from fractal_server.ssh._fabric import SSHConfig
 from fractal_server.syringe import Inject
 from fractal_server.tasks.v2.local import deactivate_local
+from fractal_server.tasks.v2.local import deactivate_local_pixi
 from fractal_server.tasks.v2.local import reactivate_local
+from fractal_server.tasks.v2.local import reactivate_local_pixi
 from fractal_server.tasks.v2.ssh import deactivate_ssh
+from fractal_server.tasks.v2.ssh import deactivate_ssh_pixi
 from fractal_server.tasks.v2.ssh import reactivate_ssh
+from fractal_server.tasks.v2.ssh import reactivate_ssh_pixi
 from fractal_server.utils import get_timestamp
 
 router = APIRouter()
@@ -44,7 +47,6 @@ async def deactivate_task_group(
     task_group_id: int,
     background_tasks: BackgroundTasks,
     response: Response,
-    request: Request,
     user: UserOAuth = Depends(current_active_user),
     db: AsyncSession = Depends(get_async_db),
 ) -> TaskGroupReadV2:
@@ -125,9 +127,12 @@ async def deactivate_task_group(
             host=user_settings.ssh_host,
             key_path=user_settings.ssh_private_key_path,
         )
-
+        if task_group.origin == TaskGroupV2OriginEnum.PIXI:
+            deactivate_function = deactivate_ssh_pixi
+        else:
+            deactivate_function = deactivate_ssh
         background_tasks.add_task(
-            deactivate_ssh,
+            deactivate_function,
             task_group_id=task_group.id,
             task_group_activity_id=task_group_activity.id,
             ssh_config=ssh_config,
@@ -135,8 +140,12 @@
         )
 
     else:
+        if task_group.origin == TaskGroupV2OriginEnum.PIXI:
+            deactivate_function = deactivate_local_pixi
+        else:
+            deactivate_function = deactivate_local
         background_tasks.add_task(
-            deactivate_local,
+            deactivate_function,
             task_group_id=task_group.id,
             task_group_activity_id=task_group_activity.id,
         )
@@ -157,7 +166,6 @@ async def reactivate_task_group(
     task_group_id: int,
     background_tasks: BackgroundTasks,
    response: Response,
-    request: Request,
     user: UserOAuth = Depends(current_active_user),
     db: AsyncSession = Depends(get_async_db),
 ) -> TaskGroupReadV2:
@@ -210,12 +218,12 @@
         response.status_code = status.HTTP_202_ACCEPTED
         return task_group_activity
 
-    if task_group.pip_freeze is None:
+    if task_group.env_info is None:
         raise HTTPException(
             status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
             detail=(
                 "Cannot reactivate a task group with "
-                f"{task_group.pip_freeze=}."
+                f"{task_group.env_info=}."
             ),
         )
 
@@ -247,8 +255,12 @@
             key_path=user_settings.ssh_private_key_path,
         )
 
+        if task_group.origin == TaskGroupV2OriginEnum.PIXI:
+            reactivate_function = reactivate_ssh_pixi
+        else:
+            reactivate_function = reactivate_ssh
         background_tasks.add_task(
-            reactivate_ssh,
+            reactivate_function,
             task_group_id=task_group.id,
             task_group_activity_id=task_group_activity.id,
             ssh_config=ssh_config,
@@ -256,8 +268,12 @@
         )
 
     else:
+        if task_group.origin == TaskGroupV2OriginEnum.PIXI:
+            reactivate_function = reactivate_local_pixi
+        else:
+            reactivate_function = reactivate_local
         background_tasks.add_task(
-            reactivate_local,
+            reactivate_function,
             task_group_id=task_group.id,
             task_group_activity_id=task_group_activity.id,
         )
@@ -48,6 +48,8 @@ class _SlurmConfigSet(BaseModel):
         constraint:
         gres:
         time:
+        exclude:
+        nodelist:
         account:
         extra_lines:
     """
@@ -59,6 +61,8 @@ class _SlurmConfigSet(BaseModel):
     mem: int | str | None = None
     constraint: str | None = None
     gres: str | None = None
+    exclude: str | None = None
+    nodelist: str | None = None
     time: str | None = None
     account: str | None = None
     extra_lines: list[str] | None = None
@@ -227,6 +231,8 @@ class SlurmConfig(BaseModel):
         account: Corresponds to SLURM option.
         gpus: Corresponds to SLURM option.
         time: Corresponds to SLURM option (WARNING: not fully supported).
+        nodelist: Corresponds to SLURM option.
+        exclude: Corresponds to SLURM option.
         prefix: Prefix of configuration lines in SLURM submission scripts.
         shebang_line: Shebang line for SLURM submission scripts.
         extra_lines: Additional lines to include in SLURM submission scripts.
@@ -268,6 +274,8 @@ class SlurmConfig(BaseModel):
     gpus: str | None = None
     time: str | None = None
     account: str | None = None
+    nodelist: str | None = None
+    exclude: str | None = None
 
     # Free-field attribute for extra lines to be added to the SLURM job
     # preamble
@@ -361,6 +369,8 @@ class SlurmConfig(BaseModel):
             "gpus",
             "time",
             "account",
+            "exclude",
+            "nodelist",
         ]:
             value = getattr(self, key)
             if value is not None:
@@ -182,33 +182,53 @@ class BaseSlurmRunner(BaseRunner):
     def _mkdir_remote_folder(self, folder: str) -> None:
         raise NotImplementedError("Implement in child class.")
 
-    def _submit_single_sbatch(
+    def _enrich_slurm_config(
         self,
-        *,
-        base_command: str,
-        slurm_job: SlurmJob,
         slurm_config: SlurmConfig,
-    ) -> str:
-        logger.debug("[_submit_single_sbatch] START")
+    ) -> SlurmConfig:
+        """
+        Return an enriched `SlurmConfig` object
 
-        # Include SLURM account in `slurm_config`. Note: we make this change
-        # here, rather than exposing a new argument of `get_slurm_config`,
-        # because it's a backend-specific argument while `get_slurm_config` has
-        # a generic interface.
+        Include `self.account` and `self.common_script_lines` into a
+        `SlurmConfig` object. Extracting this logic into an independent
+        class method is useful to fix issue #2659 (which was due to
+        performing this same operation multiple times rather than once).
+
+        Args:
+            slurm_config: The original `SlurmConfig` object.
+
+        Returns:
+            A new, up-to-date, `SlurmConfig` object.
+        """
+
+        new_slurm_config = slurm_config.model_copy()
+
+        # Include SLURM account in `slurm_config`.
         if self.slurm_account is not None:
-            slurm_config.account = self.slurm_account
+            new_slurm_config.account = self.slurm_account
 
         # Include common_script_lines in extra_lines
         if len(self.common_script_lines) > 0:
             logger.debug(
                 f"Add {self.common_script_lines} to "
-                f"{slurm_config.extra_lines=}."
+                f"{new_slurm_config.extra_lines=}."
             )
-            current_extra_lines = slurm_config.extra_lines or []
-            slurm_config.extra_lines = (
+            current_extra_lines = new_slurm_config.extra_lines or []
+            new_slurm_config.extra_lines = (
                 current_extra_lines + self.common_script_lines
             )
 
+        return new_slurm_config
+
+    def _submit_single_sbatch(
+        self,
+        *,
+        base_command: str,
+        slurm_job: SlurmJob,
+        slurm_config: SlurmConfig,
+    ) -> str:
+        logger.debug("[_submit_single_sbatch] START")
+
         for task in slurm_job.tasks:
             # Write input file
             if self.slurm_runner_type == "ssh":
@@ -508,6 +528,9 @@
         user_id: int,
     ) -> tuple[Any, Exception]:
         logger.debug("[submit] START")
+
+        config = self._enrich_slurm_config(config)
+
         try:
             workdir_local = task_files.wftask_subfolder_local
             workdir_remote = task_files.wftask_subfolder_remote
@@ -649,6 +672,8 @@
             input images, while for compound tasks these can differ.
         """
 
+        config = self._enrich_slurm_config(config)
+
         logger.debug(f"[multisubmit] START, {len(list_parameters)=}")
         try:
             if self.is_shutdown():
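Note (illustrative, not part of the diff): the point of `_enrich_slurm_config` returning a `model_copy()` is that enrichment no longer mutates the caller's object, so running it more than once cannot append `common_script_lines` repeatedly (the issue #2659 mentioned in the docstring above). A minimal sketch of the difference, with a hypothetical `Cfg` model standing in for `SlurmConfig`:

from pydantic import BaseModel

COMMON_LINES = ["#SBATCH --constraint=ssd"]


class Cfg(BaseModel):
    # Hypothetical stand-in for SlurmConfig
    extra_lines: list[str] | None = None


def enrich_in_place(cfg: Cfg) -> Cfg:
    # Old pattern: mutates the caller's object
    cfg.extra_lines = (cfg.extra_lines or []) + COMMON_LINES
    return cfg


def enrich_copy(cfg: Cfg) -> Cfg:
    # New pattern: work on a copy, leave the original untouched
    new = cfg.model_copy()
    new.extra_lines = (new.extra_lines or []) + COMMON_LINES
    return new


cfg = Cfg()
enrich_in_place(cfg)
enrich_in_place(cfg)
print(cfg.extra_lines)  # duplicated: two copies of COMMON_LINES

cfg = Cfg()
print(enrich_copy(cfg).extra_lines)  # exactly one copy of COMMON_LINES
print(cfg.extra_lines)  # None: the original was never modified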
@@ -125,7 +125,14 @@ def get_slurm_config_internal(
         )
         logger.error(error_msg)
         raise SlurmConfigError(error_msg)
-    for key in ["time", "gres", "gpus", "constraint"]:
+    for key in [
+        "time",
+        "gres",
+        "gpus",
+        "constraint",
+        "nodelist",
+        "exclude",
+    ]:
         value = wftask_meta.get(key, None)
         if value is not None:
             slurm_dict[key] = value
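Note (illustrative, not part of the diff): with the extended key list, a workflow task can now steer SLURM node selection through its `meta` dictionary. The values below are made up; the loop simply copies any of the listed keys that are present.

# Hypothetical task meta, using SLURM's own option names
wftask_meta = {
    "mem": "16G",
    "nodelist": "gpu-node[01-02]",  # run only on these nodes
    "exclude": "gpu-node03",  # never schedule on this node
}

slurm_dict: dict = {}
for key in ["time", "gres", "gpus", "constraint", "nodelist", "exclude"]:
    value = wftask_meta.get(key, None)
    if value is not None:
        slurm_dict[key] = value

print(slurm_dict)
# {'nodelist': 'gpu-node[01-02]', 'exclude': 'gpu-node03'}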
@@ -33,9 +33,9 @@ from .task import TaskImportV2Legacy  # noqa F401
 from .task import TaskReadV2  # noqa F401
 from .task import TaskType  # noqa F401
 from .task import TaskUpdateV2  # noqa F401
+from .task_collection import FractalUploadedFile  # noqa F401
 from .task_collection import TaskCollectCustomV2  # noqa F401
 from .task_collection import TaskCollectPipV2  # noqa F401
-from .task_collection import WheelFile  # noqa F401
 from .task_group import TaskGroupActivityActionV2  # noqa F401
 from .task_group import TaskGroupActivityStatusV2  # noqa F401
 from .task_group import TaskGroupActivityV2Read  # noqa F401
@@ -86,4 +86,4 @@ class TaskGroupDumpV2(BaseModel):
 
     path: str | None = None
     venv_path: str | None = None
-    wheel_path: str | None = None
+    archive_path: str | None = None
@@ -12,7 +12,7 @@ from fractal_server.types import DictStrStr
 from fractal_server.types import NonEmptyStr
 
 
-class WheelFile(BaseModel):
+class FractalUploadedFile(BaseModel):
     """
     Model for data sent from the endpoint to the background task.
     """
@@ -16,6 +16,7 @@ from fractal_server.types import NonEmptyStr
 class TaskGroupV2OriginEnum(StrEnum):
     PYPI = "pypi"
     WHEELFILE = "wheel-file"
+    PIXI = "pixi"
     OTHER = "other"
 
 
@@ -41,11 +42,12 @@ class TaskGroupCreateV2(BaseModel):
     pkg_name: str
     version: str | None = None
     python_version: NonEmptyStr = None
+    pixi_version: NonEmptyStr = None
     path: AbsolutePathStr = None
     venv_path: AbsolutePathStr = None
-    wheel_path: AbsolutePathStr = None
+    archive_path: AbsolutePathStr = None
     pip_extras: NonEmptyStr = None
-    pip_freeze: str | None = None
+    env_info: str | None = None
     pinned_package_versions: DictStrStr = Field(default_factory=dict)
 
 
@@ -55,8 +57,8 @@ class TaskGroupCreateV2Strict(TaskGroupCreateV2):
     """
 
     path: AbsolutePathStr
-    venv_path: AbsolutePathStr
     version: NonEmptyStr
+    venv_path: AbsolutePathStr
     python_version: NonEmptyStr
 
 
@@ -71,10 +73,10 @@ class TaskGroupReadV2(BaseModel):
     pkg_name: str
     version: str | None = None
     python_version: str | None = None
+    pixi_version: str | None = None
     path: str | None = None
     venv_path: str | None = None
-    wheel_path: str | None = None
-    pip_freeze: str | None = None
+    archive_path: str | None = None
     pip_extras: str | None = None
     pinned_package_versions: dict[str, str] = Field(default_factory=dict)
 
fractal_server/config.py CHANGED
@@ -11,6 +11,7 @@
 # <exact-lab.it> under contract with Liberali Lab from the Friedrich Miescher
 # Institute for Biomedical Research and Pelkmans Lab from the University of
 # Zurich.
+import json
 import logging
 import shutil
 import sys
@@ -34,6 +35,7 @@ from sqlalchemy.engine import URL
 
 import fractal_server
 from fractal_server.types import AbsolutePathStr
+from fractal_server.types import DictStrStr
 
 
 class MailSettings(BaseModel):
@@ -62,6 +64,63 @@ class MailSettings(BaseModel):
     use_login: bool
 
 
+class PixiSettings(BaseModel):
+    """
+    Configuration for Pixi task collection
+
+    See https://pixi.sh/latest/reference/cli/pixi/install/#config-options for
+    `pixi install` concurrency options.
+    See https://docs.rs/tokio/latest/tokio/#cpu-bound-tasks-and-blocking-code
+    for `tokio` configuration.
+
+    versions:
+        Available `pixi` versions and their `PIXI_HOME` folders.
+    default_version:
+        Default `pixi` version to use for task collection - must be one
+        of `versions` keys.
+    PIXI_CONCURRENT_SOLVES:
+        Value of `--concurrent-solves` for `pixi install`.
+    PIXI_CONCURRENT_DOWNLOADS:
+        Value of `--concurrent-downloads for `pixi install`.
+    TOKIO_WORKER_THREADS:
+        From tokio docs, "The core threads are where all asynchronous code
+        runs, and Tokio will by default spawn one for each CPU core. You can
+        use the environment variable TOKIO_WORKER_THREADS to override the
+        default value."
+    """
+
+    versions: DictStrStr
+    default_version: str
+
+    PIXI_CONCURRENT_SOLVES: int = 4
+    PIXI_CONCURRENT_DOWNLOADS: int = 4
+    TOKIO_WORKER_THREADS: int = 2
+
+    @model_validator(mode="after")
+    def check_pixi_settings(self):
+
+        if self.default_version not in self.versions:
+            raise ValueError(
+                f"Default version '{self.default_version}' not in "
+                f"available version {list(self.versions.keys())}."
+            )
+
+        pixi_base_dir = Path(self.versions[self.default_version]).parent
+
+        for key, value in self.versions.items():
+
+            pixi_path = Path(value)
+
+            if pixi_path.parent != pixi_base_dir:
+                raise ValueError(
+                    f"{pixi_path=} is not located within the {pixi_base_dir=}."
+                )
+            if pixi_path.name != key:
+                raise ValueError(f"{pixi_path.name=} is not equal to {key=}")
+
+        return self
+
+
 class FractalConfigurationError(RuntimeError):
     pass
 
@@ -513,6 +572,17 @@ class Settings(BaseSettings):
     FRACTAL_VIEWER_AUTHORIZATION_SCHEME is set to "users-folders".
     """
 
+    FRACTAL_PIXI_CONFIG_FILE: Path | None = None
+
+    pixi: PixiSettings | None = None
+
+    @model_validator(mode="after")
+    def populate_pixi_settings(self):
+        if self.FRACTAL_PIXI_CONFIG_FILE is not None:
+            with self.FRACTAL_PIXI_CONFIG_FILE.open("r") as f:
+                self.pixi = PixiSettings(**json.load(f))
+        return self
+
     ###########################################################################
     # SMTP SERVICE
     ###########################################################################
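Note (illustrative, not part of the diff): `FRACTAL_PIXI_CONFIG_FILE` is expected to point at a JSON file that `PixiSettings` can validate, i.e. every path shares one base directory and its folder name equals its version key. The paths and version numbers below are hypothetical.

import json
from pathlib import Path

pixi_config = {
    "default_version": "0.47.0",
    "versions": {
        # folder name must equal the key, and all folders share one parent
        "0.47.0": "/common/pixi/0.47.0",
        "0.41.1": "/common/pixi/0.41.1",
    },
    # Optional overrides; defaults are shown in the class above
    "PIXI_CONCURRENT_SOLVES": 4,
    "PIXI_CONCURRENT_DOWNLOADS": 4,
    "TOKIO_WORKER_THREADS": 2,
}

config_file = Path("/tmp/pixi_config.json")  # hypothetical location
config_file.write_text(json.dumps(pixi_config, indent=2))
# Setting FRACTAL_PIXI_CONFIG_FILE to this path makes
# `populate_pixi_settings` load it into `Settings.pixi` at startup.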
@@ -0,0 +1,53 @@
+"""Task group for pixi
+
+Revision ID: b1e7f7a1ff71
+Revises: 791ce783d3d8
+Create Date: 2025-05-29 16:31:17.565973
+
+"""
+import sqlalchemy as sa
+import sqlmodel
+from alembic import op
+
+
+# revision identifiers, used by Alembic.
+revision = "b1e7f7a1ff71"
+down_revision = "791ce783d3d8"
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+    with op.batch_alter_table("taskgroupv2", schema=None) as batch_op:
+        batch_op.add_column(
+            sa.Column(
+                "pixi_version",
+                sqlmodel.sql.sqltypes.AutoString(),
+                nullable=True,
+            )
+        )
+        batch_op.alter_column(
+            "wheel_path",
+            nullable=True,
+            new_column_name="archive_path",
+        )
+        batch_op.alter_column(
+            "pip_freeze",
+            nullable=True,
+            new_column_name="env_info",
+        )
+
+
+def downgrade() -> None:
+    with op.batch_alter_table("taskgroupv2", schema=None) as batch_op:
+        batch_op.alter_column(
+            "archive_path",
+            nullable=True,
+            new_column_name="wheel_path",
+        )
+        batch_op.alter_column(
+            "env_info",
+            nullable=True,
+            new_column_name="pip_freeze",
+        )
+        batch_op.drop_column("pixi_version")
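Note (illustrative, not part of the diff): this revision can be applied or reverted with the standard Alembic API. The "alembic.ini" location is an assumption about the deployment layout, and fractal-server installations normally run migrations through their own tooling rather than calling Alembic directly.

from alembic import command
from alembic.config import Config

cfg = Config("alembic.ini")  # assumed path to the Alembic configuration

# Apply: adds the nullable taskgroupv2.pixi_version column and renames
# wheel_path -> archive_path and pip_freeze -> env_info.
command.upgrade(cfg, "b1e7f7a1ff71")

# Revert to the previous revision if needed.
command.downgrade(cfg, "791ce783d3d8")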