fractal-server 2.15.0a4__py3-none-any.whl → 2.15.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (29)
  1. fractal_server/__init__.py +1 -1
  2. fractal_server/app/models/security.py +2 -2
  3. fractal_server/app/models/user_settings.py +2 -2
  4. fractal_server/app/models/v2/dataset.py +3 -3
  5. fractal_server/app/models/v2/job.py +6 -6
  6. fractal_server/app/models/v2/task.py +5 -4
  7. fractal_server/app/models/v2/task_group.py +2 -2
  8. fractal_server/app/models/v2/workflowtask.py +5 -4
  9. fractal_server/app/routes/api/v2/_aux_functions_tasks.py +22 -0
  10. fractal_server/app/routes/api/v2/task_collection.py +5 -13
  11. fractal_server/app/routes/api/v2/task_collection_pixi.py +7 -13
  12. fractal_server/app/routes/api/v2/task_group.py +3 -0
  13. fractal_server/app/runner/executors/slurm_common/_slurm_config.py +10 -0
  14. fractal_server/app/runner/executors/slurm_common/base_slurm_runner.py +39 -14
  15. fractal_server/app/runner/executors/slurm_common/get_slurm_config.py +8 -1
  16. fractal_server/config.py +29 -1
  17. fractal_server/migrations/versions/b3ffb095f973_json_to_jsonb.py +264 -0
  18. fractal_server/tasks/v2/local/collect_pixi.py +21 -0
  19. fractal_server/tasks/v2/local/reactivate_pixi.py +21 -0
  20. fractal_server/tasks/v2/ssh/collect_pixi.py +22 -2
  21. fractal_server/tasks/v2/ssh/reactivate_pixi.py +20 -3
  22. fractal_server/tasks/v2/templates/pixi_1_extract.sh +1 -1
  23. fractal_server/tasks/v2/templates/pixi_2_install.sh +11 -2
  24. fractal_server/tasks/v2/templates/pixi_3_post_install.sh +1 -5
  25. {fractal_server-2.15.0a4.dist-info → fractal_server-2.15.1.dist-info}/METADATA +1 -1
  26. {fractal_server-2.15.0a4.dist-info → fractal_server-2.15.1.dist-info}/RECORD +29 -28
  27. {fractal_server-2.15.0a4.dist-info → fractal_server-2.15.1.dist-info}/LICENSE +0 -0
  28. {fractal_server-2.15.0a4.dist-info → fractal_server-2.15.1.dist-info}/WHEEL +0 -0
  29. {fractal_server-2.15.0a4.dist-info → fractal_server-2.15.1.dist-info}/entry_points.txt +0 -0
fractal_server/__init__.py CHANGED
@@ -1 +1 @@
- __VERSION__ = "2.15.0a4"
+ __VERSION__ = "2.15.1"
fractal_server/app/models/security.py CHANGED
@@ -15,8 +15,8 @@ from typing import Optional
  from pydantic import ConfigDict
  from pydantic import EmailStr
  from sqlalchemy import Column
+ from sqlalchemy.dialects.postgresql import JSONB
  from sqlalchemy.types import DateTime
- from sqlalchemy.types import JSON
  from sqlmodel import Field
  from sqlmodel import Relationship
  from sqlmodel import SQLModel
@@ -124,5 +124,5 @@ class UserGroup(SQLModel, table=True):
          sa_column=Column(DateTime(timezone=True), nullable=False),
      )
      viewer_paths: list[str] = Field(
-         sa_column=Column(JSON, server_default="[]", nullable=False)
+         sa_column=Column(JSONB, server_default="[]", nullable=False)
      )
fractal_server/app/models/user_settings.py CHANGED
@@ -1,5 +1,5 @@
  from sqlalchemy import Column
- from sqlalchemy.types import JSON
+ from sqlalchemy.dialects.postgresql import JSONB
  from sqlmodel import Field
  from sqlmodel import SQLModel

@@ -25,7 +25,7 @@ class UserSettings(SQLModel, table=True):

      id: int | None = Field(default=None, primary_key=True)
      slurm_accounts: list[str] = Field(
-         sa_column=Column(JSON, server_default="[]", nullable=False)
+         sa_column=Column(JSONB, server_default="[]", nullable=False)
      )
      ssh_host: str | None = None
      ssh_username: str | None = None
fractal_server/app/models/v2/dataset.py CHANGED
@@ -3,8 +3,8 @@ from typing import Any

  from pydantic import ConfigDict
  from sqlalchemy import Column
+ from sqlalchemy.dialects.postgresql import JSONB
  from sqlalchemy.types import DateTime
- from sqlalchemy.types import JSON
  from sqlmodel import Field
  from sqlmodel import Relationship
  from sqlmodel import SQLModel
@@ -24,7 +24,7 @@ class DatasetV2(SQLModel, table=True):
      )

      history: list[dict[str, Any]] = Field(
-         sa_column=Column(JSON, server_default="[]", nullable=False)
+         sa_column=Column(JSONB, server_default="[]", nullable=False)
      )

      timestamp_created: datetime = Field(
@@ -34,7 +34,7 @@ class DatasetV2(SQLModel, table=True):

      zarr_dir: str
      images: list[dict[str, Any]] = Field(
-         sa_column=Column(JSON, server_default="[]", nullable=False)
+         sa_column=Column(JSONB, server_default="[]", nullable=False)
      )

      @property
fractal_server/app/models/v2/job.py CHANGED
@@ -3,8 +3,8 @@ from typing import Any

  from pydantic import ConfigDict
  from sqlalchemy import Column
+ from sqlalchemy.dialects.postgresql import JSONB
  from sqlalchemy.types import DateTime
- from sqlalchemy.types import JSON
  from sqlmodel import Field
  from sqlmodel import SQLModel

@@ -31,13 +31,13 @@ class JobV2(SQLModel, table=True):
      slurm_account: str | None = None

      dataset_dump: dict[str, Any] = Field(
-         sa_column=Column(JSON, nullable=False)
+         sa_column=Column(JSONB, nullable=False)
      )
      workflow_dump: dict[str, Any] = Field(
-         sa_column=Column(JSON, nullable=False)
+         sa_column=Column(JSONB, nullable=False)
      )
      project_dump: dict[str, Any] = Field(
-         sa_column=Column(JSON, nullable=False)
+         sa_column=Column(JSONB, nullable=False)
      )

      worker_init: str | None = None
@@ -57,8 +57,8 @@ class JobV2(SQLModel, table=True):
      log: str | None = None

      attribute_filters: AttributeFilters = Field(
-         sa_column=Column(JSON, nullable=False, server_default="{}")
+         sa_column=Column(JSONB, nullable=False, server_default="{}")
      )
      type_filters: dict[str, bool] = Field(
-         sa_column=Column(JSON, nullable=False, server_default="{}")
+         sa_column=Column(JSONB, nullable=False, server_default="{}")
      )
fractal_server/app/models/v2/task.py CHANGED
@@ -1,7 +1,8 @@
  from typing import Any

  from sqlalchemy import Column
- from sqlalchemy.types import JSON
+ from sqlalchemy.dialects.postgresql import JSON
+ from sqlalchemy.dialects.postgresql import JSONB
  from sqlmodel import Field
  from sqlmodel import SQLModel

@@ -33,8 +34,8 @@ class TaskV2(SQLModel, table=True):
      docs_info: str | None = None
      docs_link: str | None = None

-     input_types: dict[str, bool] = Field(sa_column=Column(JSON), default={})
-     output_types: dict[str, bool] = Field(sa_column=Column(JSON), default={})
+     input_types: dict[str, bool] = Field(sa_column=Column(JSONB), default={})
+     output_types: dict[str, bool] = Field(sa_column=Column(JSONB), default={})

      taskgroupv2_id: int = Field(foreign_key="taskgroupv2.id")

@@ -42,5 +43,5 @@ class TaskV2(SQLModel, table=True):
      modality: str | None = None
      authors: str | None = None
      tags: list[str] = Field(
-         sa_column=Column(JSON, server_default="[]", nullable=False)
+         sa_column=Column(JSONB, server_default="[]", nullable=False)
      )
fractal_server/app/models/v2/task_group.py CHANGED
@@ -2,8 +2,8 @@ from datetime import datetime
  from datetime import timezone

  from sqlalchemy import Column
+ from sqlalchemy.dialects.postgresql import JSONB
  from sqlalchemy.types import DateTime
- from sqlalchemy.types import JSON
  from sqlmodel import Field
  from sqlmodel import Relationship
  from sqlmodel import SQLModel
@@ -35,7 +35,7 @@ class TaskGroupV2(SQLModel, table=True):
      pip_extras: str | None = None
      pinned_package_versions: dict[str, str] = Field(
          sa_column=Column(
-             JSON,
+             JSONB,
              server_default="{}",
              default={},
              nullable=True,
fractal_server/app/models/v2/workflowtask.py CHANGED
@@ -2,7 +2,8 @@ from typing import Any

  from pydantic import ConfigDict
  from sqlalchemy import Column
- from sqlalchemy.types import JSON
+ from sqlalchemy.dialects.postgresql import JSON
+ from sqlalchemy.dialects.postgresql import JSONB
  from sqlmodel import Field
  from sqlmodel import Relationship
  from sqlmodel import SQLModel
@@ -24,14 +25,14 @@ class WorkflowTaskV2(SQLModel, table=True):
          sa_column=Column(JSON), default=None
      )
      args_parallel: dict[str, Any] | None = Field(
-         sa_column=Column(JSON), default=None
+         sa_column=Column(JSONB), default=None
      )
      args_non_parallel: dict[str, Any] | None = Field(
-         sa_column=Column(JSON), default=None
+         sa_column=Column(JSONB), default=None
      )

      type_filters: dict[str, bool] = Field(
-         sa_column=Column(JSON, nullable=False, server_default="{}")
+         sa_column=Column(JSONB, nullable=False, server_default="{}")
      )

      # Task
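All of the model changes above follow one pattern: the column type passed to `sa_column` switches from generic `JSON` to the PostgreSQL-specific `JSONB`. For reference, a minimal standalone sketch of that pattern (the `ExampleTable` model below is illustrative only, not part of the package):

from sqlalchemy import Column
from sqlalchemy.dialects.postgresql import JSONB
from sqlmodel import Field, SQLModel


class ExampleTable(SQLModel, table=True):
    id: int | None = Field(default=None, primary_key=True)
    # Stored as PostgreSQL JSONB (binary, indexable) rather than plain JSON text
    tags: list[str] = Field(
        sa_column=Column(JSONB, server_default="[]", nullable=False)
    )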
fractal_server/app/routes/api/v2/_aux_functions_tasks.py CHANGED
@@ -333,6 +333,28 @@ async def _verify_non_duplication_group_constraint(
      )


+ async def _verify_non_duplication_group_path(
+     path: str | None,
+     db: AsyncSession,
+ ) -> None:
+     """
+     Verify uniqueness of non-`None` `TaskGroupV2.path`
+     """
+     if path is None:
+         return
+     stm = select(TaskGroupV2.id).where(TaskGroupV2.path == path)
+     res = await db.execute(stm)
+     duplicate_ids = res.scalars().all()
+     if duplicate_ids:
+         raise HTTPException(
+             status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+             detail=(
+                 f"Other TaskGroups already have {path=}: "
+                 f"{sorted(duplicate_ids)}."
+             ),
+         )
+
+
  async def _add_warnings_to_workflow_tasks(
      wftask_list: list[WorkflowTaskV2], user_id: int, db: AsyncSession
  ) -> list[dict[str, Any]]:
fractal_server/app/routes/api/v2/task_collection.py CHANGED
@@ -13,7 +13,6 @@ from fastapi import UploadFile
  from pydantic import BaseModel
  from pydantic import model_validator
  from pydantic import ValidationError
- from sqlmodel import select

  from .....config import get_settings
  from .....logger import reset_logger_handlers
@@ -31,6 +30,7 @@ from ...aux.validate_user_settings import validate_user_settings
  from ._aux_functions_task_lifecycle import get_package_version_from_pypi
  from ._aux_functions_tasks import _get_valid_user_group_id
  from ._aux_functions_tasks import _verify_non_duplication_group_constraint
+ from ._aux_functions_tasks import _verify_non_duplication_group_path
  from ._aux_functions_tasks import _verify_non_duplication_user_constraint
  from fractal_server.app.models import UserOAuth
  from fractal_server.app.models.v2 import TaskGroupActivityV2
@@ -291,18 +291,10 @@ async def collect_tasks_pip(
          version=task_group_attrs["version"],
          db=db,
      )
-
-     # Verify that task-group path is unique
-     stm = select(TaskGroupV2).where(TaskGroupV2.path == task_group_path)
-     res = await db.execute(stm)
-     for conflicting_task_group in res.scalars().all():
-         raise HTTPException(
-             status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
-             detail=(
-                 f"Another task-group already has path={task_group_path}.\n"
-                 f"{conflicting_task_group=}"
-             ),
-         )
+     await _verify_non_duplication_group_path(
+         path=task_group_attrs["path"],
+         db=db,
+     )

      # On-disk checks

fractal_server/app/routes/api/v2/task_collection_pixi.py CHANGED
@@ -9,7 +9,6 @@ from fastapi import HTTPException
  from fastapi import Response
  from fastapi import status
  from fastapi import UploadFile
- from sqlmodel import select

  from fractal_server.app.db import AsyncSession
  from fractal_server.app.db import get_async_db
@@ -22,6 +21,9 @@ from fractal_server.app.routes.api.v2._aux_functions_tasks import (
  from fractal_server.app.routes.api.v2._aux_functions_tasks import (
      _verify_non_duplication_group_constraint,
  )
+ from fractal_server.app.routes.api.v2._aux_functions_tasks import (
+     _verify_non_duplication_group_path,
+ )
  from fractal_server.app.routes.api.v2._aux_functions_tasks import (
      _verify_non_duplication_user_constraint,
  )
@@ -156,18 +158,10 @@ async def collect_task_pixi(
          version=task_group_attrs["version"],
          db=db,
      )
-
-     # NOTE: to be removed with issue #2634
-     stm = select(TaskGroupV2).where(TaskGroupV2.path == task_group_path)
-     res = await db.execute(stm)
-     for conflicting_task_group in res.scalars().all():
-         raise HTTPException(
-             status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
-             detail=(
-                 f"Another task-group already has path={task_group_path}.\n"
-                 f"{conflicting_task_group=}"
-             ),
-         )
+     await _verify_non_duplication_group_path(
+         path=task_group_attrs["path"],
+         db=db,
+     )

      if settings.FRACTAL_RUNNER_BACKEND != "slurm_ssh":
          if Path(task_group_path).exists():
fractal_server/app/routes/api/v2/task_group.py CHANGED
@@ -12,6 +12,7 @@ from pydantic.types import AwareDatetime
  from sqlmodel import or_
  from sqlmodel import select

+ from ._aux_functions_task_lifecycle import check_no_ongoing_activity
  from ._aux_functions_tasks import _get_task_group_full_access
  from ._aux_functions_tasks import _get_task_group_read_access
  from ._aux_functions_tasks import _verify_non_duplication_group_constraint
@@ -216,6 +217,8 @@ async def delete_task_group(
          db=db,
      )

+     await check_no_ongoing_activity(task_group_id=task_group_id, db=db)
+
      stm = select(WorkflowTaskV2).where(
          WorkflowTaskV2.task_id.in_({task.id for task in task_group.task_list})
      )
fractal_server/app/runner/executors/slurm_common/_slurm_config.py CHANGED
@@ -48,6 +48,8 @@ class _SlurmConfigSet(BaseModel):
          constraint:
          gres:
          time:
+         exclude:
+         nodelist:
          account:
          extra_lines:
      """
@@ -59,6 +61,8 @@ class _SlurmConfigSet(BaseModel):
      mem: int | str | None = None
      constraint: str | None = None
      gres: str | None = None
+     exclude: str | None = None
+     nodelist: str | None = None
      time: str | None = None
      account: str | None = None
      extra_lines: list[str] | None = None
@@ -227,6 +231,8 @@ class SlurmConfig(BaseModel):
          account: Corresponds to SLURM option.
          gpus: Corresponds to SLURM option.
          time: Corresponds to SLURM option (WARNING: not fully supported).
+         nodelist: Corresponds to SLURM option.
+         exclude: Corresponds to SLURM option.
          prefix: Prefix of configuration lines in SLURM submission scripts.
          shebang_line: Shebang line for SLURM submission scripts.
          extra_lines: Additional lines to include in SLURM submission scripts.
@@ -268,6 +274,8 @@ class SlurmConfig(BaseModel):
      gpus: str | None = None
      time: str | None = None
      account: str | None = None
+     nodelist: str | None = None
+     exclude: str | None = None

      # Free-field attribute for extra lines to be added to the SLURM job
      # preamble
@@ -361,6 +369,8 @@ class SlurmConfig(BaseModel):
          "gpus",
          "time",
          "account",
+         "exclude",
+         "nodelist",
      ]:
          value = getattr(self, key)
          if value is not None:
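The new `nodelist` and `exclude` attributes enter the same key loop that already turns `gpus`, `time`, and `account` into submission-script preamble lines. The exact rendering used by `SlurmConfig` is not shown in this diff; a rough sketch, under the assumption that each non-None field becomes a `#SBATCH --<option>=<value>` line (both `--nodelist` and `--exclude` are standard sbatch options):

def sbatch_preamble_sketch(config) -> list[str]:
    # Assumed rendering: one "#SBATCH --<option>=<value>" line per non-None field
    lines = []
    for key in ["gpus", "time", "account", "exclude", "nodelist"]:
        value = getattr(config, key, None)
        if value is not None:
            lines.append(f"#SBATCH --{key}={value}")
    return lines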
fractal_server/app/runner/executors/slurm_common/base_slurm_runner.py CHANGED
@@ -182,33 +182,53 @@ class BaseSlurmRunner(BaseRunner):
      def _mkdir_remote_folder(self, folder: str) -> None:
          raise NotImplementedError("Implement in child class.")

-     def _submit_single_sbatch(
+     def _enrich_slurm_config(
          self,
-         *,
-         base_command: str,
-         slurm_job: SlurmJob,
          slurm_config: SlurmConfig,
-     ) -> str:
-         logger.debug("[_submit_single_sbatch] START")
+     ) -> SlurmConfig:
+         """
+         Return an enriched `SlurmConfig` object

-         # Include SLURM account in `slurm_config`. Note: we make this change
-         # here, rather than exposing a new argument of `get_slurm_config`,
-         # because it's a backend-specific argument while `get_slurm_config` has
-         # a generic interface.
+         Include `self.account` and `self.common_script_lines` into a
+         `SlurmConfig` object. Extracting this logic into an independent
+         class method is useful to fix issue #2659 (which was due to
+         performing this same operation multiple times rather than once).
+
+         Args:
+             slurm_config: The original `SlurmConfig` object.
+
+         Returns:
+             A new, up-to-date, `SlurmConfig` object.
+         """
+
+         new_slurm_config = slurm_config.model_copy()
+
+         # Include SLURM account in `slurm_config`.
          if self.slurm_account is not None:
-             slurm_config.account = self.slurm_account
+             new_slurm_config.account = self.slurm_account

          # Include common_script_lines in extra_lines
          if len(self.common_script_lines) > 0:
              logger.debug(
                  f"Add {self.common_script_lines} to "
-                 f"{slurm_config.extra_lines=}."
+                 f"{new_slurm_config.extra_lines=}."
              )
-             current_extra_lines = slurm_config.extra_lines or []
-             slurm_config.extra_lines = (
+             current_extra_lines = new_slurm_config.extra_lines or []
+             new_slurm_config.extra_lines = (
                  current_extra_lines + self.common_script_lines
              )

+         return new_slurm_config
+
+     def _submit_single_sbatch(
+         self,
+         *,
+         base_command: str,
+         slurm_job: SlurmJob,
+         slurm_config: SlurmConfig,
+     ) -> str:
+         logger.debug("[_submit_single_sbatch] START")
+
          for task in slurm_job.tasks:
              # Write input file
              if self.slurm_runner_type == "ssh":
@@ -508,6 +528,9 @@ class BaseSlurmRunner(BaseRunner):
          user_id: int,
      ) -> tuple[Any, Exception]:
          logger.debug("[submit] START")
+
+         config = self._enrich_slurm_config(config)
+
          try:
              workdir_local = task_files.wftask_subfolder_local
              workdir_remote = task_files.wftask_subfolder_remote
@@ -649,6 +672,8 @@ class BaseSlurmRunner(BaseRunner):
          input images, while for compound tasks these can differ.
          """

+         config = self._enrich_slurm_config(config)
+
          logger.debug(f"[multisubmit] START, {len(list_parameters)=}")
          try:
              if self.is_shutdown():
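The refactor matters because both `submit` and `multisubmit` now call the enrichment step: mutating a shared `SlurmConfig` in place would append `common_script_lines` once per call, which is the duplication described for issue #2659. A small self-contained illustration of the copy-then-extend pattern (the names below are simplified and are not the package's own classes):

from pydantic import BaseModel


class TinyConfig(BaseModel):
    extra_lines: list[str] = []


COMMON_SCRIPT_LINES = ["#SBATCH --exclusive"]


def enrich(config: TinyConfig) -> TinyConfig:
    # Work on a copy, so the caller's object is never mutated
    new_config = config.model_copy()
    new_config.extra_lines = (new_config.extra_lines or []) + COMMON_SCRIPT_LINES
    return new_config


config = TinyConfig()
first = enrich(config)
second = enrich(config)
# The shared lines appear exactly once, no matter how often `enrich` runs
assert first.extra_lines == second.extra_lines == COMMON_SCRIPT_LINES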
fractal_server/app/runner/executors/slurm_common/get_slurm_config.py CHANGED
@@ -125,7 +125,14 @@ def get_slurm_config_internal(
          )
          logger.error(error_msg)
          raise SlurmConfigError(error_msg)
-     for key in ["time", "gres", "gpus", "constraint"]:
+     for key in [
+         "time",
+         "gres",
+         "gpus",
+         "constraint",
+         "nodelist",
+         "exclude",
+     ]:
          value = wftask_meta.get(key, None)
          if value is not None:
              slurm_dict[key] = value
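In practice this means a task can now pin or avoid specific nodes through the same per-task SLURM metadata that already carries `time`, `gres`, `gpus`, and `constraint`. A small illustrative value for that metadata dict (the surrounding structure of `wftask_meta` is assumed, and the node names are placeholders):

wftask_meta = {
    "time": "01:00:00",
    "gres": "gpu:1",
    "nodelist": "node001,node002",  # run only on these nodes
    "exclude": "node042",           # never schedule on this node
}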
fractal_server/config.py CHANGED
@@ -65,8 +65,36 @@ class MailSettings(BaseModel):


  class PixiSettings(BaseModel):
-     default_version: str
+     """
+     Configuration for Pixi task collection
+
+     See https://pixi.sh/latest/reference/cli/pixi/install/#config-options for
+     `pixi install` concurrency options.
+     See https://docs.rs/tokio/latest/tokio/#cpu-bound-tasks-and-blocking-code
+     for `tokio` configuration.
+
+     versions:
+         Available `pixi` versions and their `PIXI_HOME` folders.
+     default_version:
+         Default `pixi` version to use for task collection - must be one
+         of `versions` keys.
+     PIXI_CONCURRENT_SOLVES:
+         Value of `--concurrent-solves` for `pixi install`.
+     PIXI_CONCURRENT_DOWNLOADS:
+         Value of `--concurrent-downloads for `pixi install`.
+     TOKIO_WORKER_THREADS:
+         From tokio docs, "The core threads are where all asynchronous code
+         runs, and Tokio will by default spawn one for each CPU core. You can
+         use the environment variable TOKIO_WORKER_THREADS to override the
+         default value."
+     """
+
      versions: DictStrStr
+     default_version: str
+
+     PIXI_CONCURRENT_SOLVES: int = 4
+     PIXI_CONCURRENT_DOWNLOADS: int = 4
+     TOKIO_WORKER_THREADS: int = 2

      @model_validator(mode="after")
      def check_pixi_settings(self):
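A minimal sketch of constructing the extended settings object directly (the pixi version string and folder below are illustrative; in a deployment these values come from the server configuration, and `default_version` must match one of the `versions` keys):

from fractal_server.config import PixiSettings

pixi_settings = PixiSettings(
    versions={"0.47.0": "/common/pixi/0.47.0"},
    default_version="0.47.0",
    PIXI_CONCURRENT_SOLVES=8,
    PIXI_CONCURRENT_DOWNLOADS=8,
    TOKIO_WORKER_THREADS=4,
)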
fractal_server/migrations/versions/b3ffb095f973_json_to_jsonb.py ADDED
@@ -0,0 +1,264 @@
+ """JSON to JSONB
+
+ Revision ID: b3ffb095f973
+ Revises: b1e7f7a1ff71
+ Create Date: 2025-06-19 10:12:06.699107
+
+ """
+ import sqlalchemy as sa
+ from alembic import op
+ from sqlalchemy.dialects import postgresql
+
+ # revision identifiers, used by Alembic.
+ revision = "b3ffb095f973"
+ down_revision = "b1e7f7a1ff71"
+ branch_labels = None
+ depends_on = None
+
+
+ def upgrade() -> None:
+     # ### commands auto generated by Alembic - please adjust! ###
+     with op.batch_alter_table("datasetv2", schema=None) as batch_op:
+         batch_op.alter_column(
+             "history",
+             existing_type=postgresql.JSON(astext_type=sa.Text()),
+             type_=postgresql.JSONB(astext_type=sa.Text()),
+             existing_nullable=False,
+             existing_server_default=sa.text("'[]'::json"),
+         )
+         batch_op.alter_column(
+             "images",
+             existing_type=postgresql.JSON(astext_type=sa.Text()),
+             type_=postgresql.JSONB(astext_type=sa.Text()),
+             existing_nullable=False,
+             existing_server_default=sa.text("'[]'::json"),
+         )
+
+     with op.batch_alter_table("jobv2", schema=None) as batch_op:
+         batch_op.alter_column(
+             "dataset_dump",
+             existing_type=postgresql.JSON(astext_type=sa.Text()),
+             type_=postgresql.JSONB(astext_type=sa.Text()),
+             existing_nullable=False,
+         )
+         batch_op.alter_column(
+             "workflow_dump",
+             existing_type=postgresql.JSON(astext_type=sa.Text()),
+             type_=postgresql.JSONB(astext_type=sa.Text()),
+             existing_nullable=False,
+         )
+         batch_op.alter_column(
+             "project_dump",
+             existing_type=postgresql.JSON(astext_type=sa.Text()),
+             type_=postgresql.JSONB(astext_type=sa.Text()),
+             existing_nullable=False,
+         )
+         batch_op.alter_column(
+             "attribute_filters",
+             existing_type=postgresql.JSON(astext_type=sa.Text()),
+             type_=postgresql.JSONB(astext_type=sa.Text()),
+             existing_nullable=False,
+             existing_server_default=sa.text("'{}'::json"),
+         )
+         batch_op.alter_column(
+             "type_filters",
+             existing_type=postgresql.JSON(astext_type=sa.Text()),
+             type_=postgresql.JSONB(astext_type=sa.Text()),
+             existing_nullable=False,
+             existing_server_default=sa.text("'{}'::json"),
+         )
+
+     with op.batch_alter_table("taskgroupv2", schema=None) as batch_op:
+         batch_op.alter_column(
+             "pinned_package_versions",
+             existing_type=postgresql.JSON(astext_type=sa.Text()),
+             type_=postgresql.JSONB(astext_type=sa.Text()),
+             existing_nullable=True,
+             existing_server_default=sa.text("'{}'::json"),
+         )
+
+     with op.batch_alter_table("taskv2", schema=None) as batch_op:
+         batch_op.alter_column(
+             "input_types",
+             existing_type=postgresql.JSON(astext_type=sa.Text()),
+             type_=postgresql.JSONB(astext_type=sa.Text()),
+             existing_nullable=True,
+         )
+         batch_op.alter_column(
+             "output_types",
+             existing_type=postgresql.JSON(astext_type=sa.Text()),
+             type_=postgresql.JSONB(astext_type=sa.Text()),
+             existing_nullable=True,
+         )
+         batch_op.alter_column(
+             "tags",
+             existing_type=postgresql.JSON(astext_type=sa.Text()),
+             type_=postgresql.JSONB(astext_type=sa.Text()),
+             existing_nullable=False,
+             existing_server_default=sa.text("'[]'::json"),
+         )
+
+     with op.batch_alter_table("user_settings", schema=None) as batch_op:
+         batch_op.alter_column(
+             "slurm_accounts",
+             existing_type=postgresql.JSON(astext_type=sa.Text()),
+             type_=postgresql.JSONB(astext_type=sa.Text()),
+             existing_nullable=False,
+             existing_server_default=sa.text("'[]'::json"),
+         )
+
+     with op.batch_alter_table("usergroup", schema=None) as batch_op:
+         batch_op.alter_column(
+             "viewer_paths",
+             existing_type=postgresql.JSON(astext_type=sa.Text()),
+             type_=postgresql.JSONB(astext_type=sa.Text()),
+             existing_nullable=False,
+             existing_server_default=sa.text("'[]'::json"),
+         )
+
+     with op.batch_alter_table("workflowtaskv2", schema=None) as batch_op:
+         batch_op.alter_column(
+             "args_parallel",
+             existing_type=postgresql.JSON(astext_type=sa.Text()),
+             type_=postgresql.JSONB(astext_type=sa.Text()),
+             existing_nullable=True,
+         )
+         batch_op.alter_column(
+             "args_non_parallel",
+             existing_type=postgresql.JSON(astext_type=sa.Text()),
+             type_=postgresql.JSONB(astext_type=sa.Text()),
+             existing_nullable=True,
+         )
+         batch_op.alter_column(
+             "type_filters",
+             existing_type=postgresql.JSON(astext_type=sa.Text()),
+             type_=postgresql.JSONB(astext_type=sa.Text()),
+             existing_nullable=False,
+             existing_server_default=sa.text("'{}'::json"),
+         )
+
+     # ### end Alembic commands ###
+
+
+ def downgrade() -> None:
+     # ### commands auto generated by Alembic - please adjust! ###
+     with op.batch_alter_table("workflowtaskv2", schema=None) as batch_op:
+         batch_op.alter_column(
+             "type_filters",
+             existing_type=postgresql.JSONB(astext_type=sa.Text()),
+             type_=postgresql.JSON(astext_type=sa.Text()),
+             existing_nullable=False,
+             existing_server_default=sa.text("'{}'::json"),
+         )
+         batch_op.alter_column(
+             "args_non_parallel",
+             existing_type=postgresql.JSONB(astext_type=sa.Text()),
+             type_=postgresql.JSON(astext_type=sa.Text()),
+             existing_nullable=True,
+         )
+         batch_op.alter_column(
+             "args_parallel",
+             existing_type=postgresql.JSONB(astext_type=sa.Text()),
+             type_=postgresql.JSON(astext_type=sa.Text()),
+             existing_nullable=True,
+         )
+
+     with op.batch_alter_table("usergroup", schema=None) as batch_op:
+         batch_op.alter_column(
+             "viewer_paths",
+             existing_type=postgresql.JSONB(astext_type=sa.Text()),
+             type_=postgresql.JSON(astext_type=sa.Text()),
+             existing_nullable=False,
+             existing_server_default=sa.text("'[]'::json"),
+         )
+
+     with op.batch_alter_table("user_settings", schema=None) as batch_op:
+         batch_op.alter_column(
+             "slurm_accounts",
+             existing_type=postgresql.JSONB(astext_type=sa.Text()),
+             type_=postgresql.JSON(astext_type=sa.Text()),
+             existing_nullable=False,
+             existing_server_default=sa.text("'[]'::json"),
+         )
+
+     with op.batch_alter_table("taskv2", schema=None) as batch_op:
+         batch_op.alter_column(
+             "tags",
+             existing_type=postgresql.JSONB(astext_type=sa.Text()),
+             type_=postgresql.JSON(astext_type=sa.Text()),
+             existing_nullable=False,
+             existing_server_default=sa.text("'[]'::json"),
+         )
+         batch_op.alter_column(
+             "output_types",
+             existing_type=postgresql.JSONB(astext_type=sa.Text()),
+             type_=postgresql.JSON(astext_type=sa.Text()),
+             existing_nullable=True,
+         )
+         batch_op.alter_column(
+             "input_types",
+             existing_type=postgresql.JSONB(astext_type=sa.Text()),
+             type_=postgresql.JSON(astext_type=sa.Text()),
+             existing_nullable=True,
+         )
+
+     with op.batch_alter_table("taskgroupv2", schema=None) as batch_op:
+         batch_op.alter_column(
+             "pinned_package_versions",
+             existing_type=postgresql.JSONB(astext_type=sa.Text()),
+             type_=postgresql.JSON(astext_type=sa.Text()),
+             existing_nullable=True,
+             existing_server_default=sa.text("'{}'::json"),
+         )
+
+     with op.batch_alter_table("jobv2", schema=None) as batch_op:
+         batch_op.alter_column(
+             "type_filters",
+             existing_type=postgresql.JSONB(astext_type=sa.Text()),
+             type_=postgresql.JSON(astext_type=sa.Text()),
+             existing_nullable=False,
+             existing_server_default=sa.text("'{}'::json"),
+         )
+         batch_op.alter_column(
+             "attribute_filters",
+             existing_type=postgresql.JSONB(astext_type=sa.Text()),
+             type_=postgresql.JSON(astext_type=sa.Text()),
+             existing_nullable=False,
+             existing_server_default=sa.text("'{}'::json"),
+         )
+         batch_op.alter_column(
+             "project_dump",
+             existing_type=postgresql.JSONB(astext_type=sa.Text()),
+             type_=postgresql.JSON(astext_type=sa.Text()),
+             existing_nullable=False,
+         )
+         batch_op.alter_column(
+             "workflow_dump",
+             existing_type=postgresql.JSONB(astext_type=sa.Text()),
+             type_=postgresql.JSON(astext_type=sa.Text()),
+             existing_nullable=False,
+         )
+         batch_op.alter_column(
+             "dataset_dump",
+             existing_type=postgresql.JSONB(astext_type=sa.Text()),
+             type_=postgresql.JSON(astext_type=sa.Text()),
+             existing_nullable=False,
+         )
+
+     with op.batch_alter_table("datasetv2", schema=None) as batch_op:
+         batch_op.alter_column(
+             "images",
+             existing_type=postgresql.JSONB(astext_type=sa.Text()),
+             type_=postgresql.JSON(astext_type=sa.Text()),
+             existing_nullable=False,
+             existing_server_default=sa.text("'[]'::json"),
+         )
+         batch_op.alter_column(
+             "history",
+             existing_type=postgresql.JSONB(astext_type=sa.Text()),
+             type_=postgresql.JSON(astext_type=sa.Text()),
+             existing_nullable=False,
+             existing_server_default=sa.text("'[]'::json"),
+         )
+
+     # ### end Alembic commands ###
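The revision graph now ends at b3ffb095f973 (on top of b1e7f7a1ff71). A hedged sketch of driving this migration through the Alembic Python API, assuming the `alembic.ini` shipped in the wheel (see RECORD) can be used as-is; deployments normally run migrations through fractal-server's own tooling:

from alembic import command
from alembic.config import Config

alembic_cfg = Config("fractal_server/alembic.ini")  # path as listed in RECORD
command.upgrade(alembic_cfg, "b3ffb095f973")     # apply the JSON -> JSONB change
# command.downgrade(alembic_cfg, "b1e7f7a1ff71") # revert to plain JSON columns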
fractal_server/tasks/v2/local/collect_pixi.py CHANGED
@@ -27,6 +27,7 @@ from fractal_server.tasks.v2.utils_background import (
  from fractal_server.tasks.v2.utils_background import get_current_log
  from fractal_server.tasks.v2.utils_background import prepare_tasks_metadata
  from fractal_server.tasks.v2.utils_templates import SCRIPTS_SUBFOLDER
+ from fractal_server.utils import execute_command_sync
  from fractal_server.utils import get_timestamp


@@ -98,6 +99,18 @@ def collect_local_pixi(
              ),
              ("__SOURCE_DIR_NAME__", SOURCE_DIR_NAME),
              ("__FROZEN_OPTION__", ""),
+             (
+                 "__TOKIO_WORKER_THREADS__",
+                 str(settings.pixi.TOKIO_WORKER_THREADS),
+             ),
+             (
+                 "__PIXI_CONCURRENT_SOLVES__",
+                 str(settings.pixi.PIXI_CONCURRENT_SOLVES),
+             ),
+             (
+                 "__PIXI_CONCURRENT_DOWNLOADS__",
+                 str(settings.pixi.PIXI_CONCURRENT_DOWNLOADS),
+             ),
          },
          script_dir=Path(
              task_group.path, SCRIPTS_SUBFOLDER
@@ -146,6 +159,14 @@ def collect_local_pixi(
              "project_python_wrapper"
          ]

+         # Make task folder 755
+         source_dir = Path(task_group.path, SOURCE_DIR_NAME).as_posix()
+         command = f"chmod 755 {source_dir} -R"
+         execute_command_sync(
+             command=command,
+             logger_name=LOGGER_NAME,
+         )
+
          # Read and validate manifest
          # NOTE: we are only supporting the manifest path being relative
          # to the top-level folder
fractal_server/tasks/v2/local/reactivate_pixi.py CHANGED
@@ -18,6 +18,7 @@ from fractal_server.tasks.utils import get_log_path
  from fractal_server.tasks.v2.local._utils import _customize_and_run_template
  from fractal_server.tasks.v2.utils_background import get_current_log
  from fractal_server.tasks.v2.utils_templates import SCRIPTS_SUBFOLDER
+ from fractal_server.utils import execute_command_sync
  from fractal_server.utils import get_timestamp


@@ -89,6 +90,18 @@ def reactivate_local_pixi(
              ),
              ("__SOURCE_DIR_NAME__", SOURCE_DIR_NAME),
              ("__FROZEN_OPTION__", "--frozen"),
+             (
+                 "__TOKIO_WORKER_THREADS__",
+                 str(settings.pixi.TOKIO_WORKER_THREADS),
+             ),
+             (
+                 "__PIXI_CONCURRENT_SOLVES__",
+                 str(settings.pixi.PIXI_CONCURRENT_SOLVES),
+             ),
+             (
+                 "__PIXI_CONCURRENT_DOWNLOADS__",
+                 str(settings.pixi.PIXI_CONCURRENT_DOWNLOADS),
+             ),
          },
          script_dir=Path(
              task_group.path, SCRIPTS_SUBFOLDER
@@ -130,6 +143,14 @@ def reactivate_local_pixi(
          activity.log = get_current_log(log_file_path)
          activity = add_commit_refresh(obj=activity, db=db)

+         # Make task folder 755
+         source_dir = Path(task_group.path, SOURCE_DIR_NAME).as_posix()
+         command = f"chmod 755 {source_dir} -R"
+         execute_command_sync(
+             command=command,
+             logger_name=LOGGER_NAME,
+         )
+
          activity.log = get_current_log(log_file_path)
          activity.status = TaskGroupActivityStatusV2.OK
          activity.timestamp_ended = get_timestamp()
fractal_server/tasks/v2/ssh/collect_pixi.py CHANGED
@@ -147,6 +147,18 @@ def collect_ssh_pixi(
              ),
              ("__SOURCE_DIR_NAME__", SOURCE_DIR_NAME),
              ("__FROZEN_OPTION__", ""),
+             (
+                 "__TOKIO_WORKER_THREADS__",
+                 str(settings.pixi.TOKIO_WORKER_THREADS),
+             ),
+             (
+                 "__PIXI_CONCURRENT_SOLVES__",
+                 str(settings.pixi.PIXI_CONCURRENT_SOLVES),
+             ),
+             (
+                 "__PIXI_CONCURRENT_DOWNLOADS__",
+                 str(settings.pixi.PIXI_CONCURRENT_DOWNLOADS),
+             ),
          }

          logger.info("installing - START")
@@ -170,19 +182,21 @@ def collect_ssh_pixi(
          )

          # Run the three pixi-related scripts
-         _customize_and_run_template(
+         stdout = _customize_and_run_template(
              template_filename="pixi_1_extract.sh",
              replacements=replacements,
              **common_args,
          )
+         logger.debug(f"STDOUT: {stdout}")
          activity.log = get_current_log(log_file_path)
          activity = add_commit_refresh(obj=activity, db=db)

-         _customize_and_run_template(
+         stdout = _customize_and_run_template(
              template_filename="pixi_2_install.sh",
              replacements=replacements,
              **common_args,
          )
+         logger.debug(f"STDOUT: {stdout}")
          activity.log = get_current_log(log_file_path)
          activity = add_commit_refresh(obj=activity, db=db)

@@ -191,6 +205,7 @@ def collect_ssh_pixi(
              replacements=replacements,
              **common_args,
          )
+         logger.debug(f"STDOUT: {stdout}")
          activity.log = get_current_log(log_file_path)
          activity = add_commit_refresh(obj=activity, db=db)

@@ -203,6 +218,11 @@ def collect_ssh_pixi(
              "project_python_wrapper"
          ]

+         source_dir = Path(
+             task_group.path, SOURCE_DIR_NAME
+         ).as_posix()
+         fractal_ssh.run_command(cmd=f"chmod 755 {source_dir} -R")
+
          # Read and validate remote manifest file
          manifest_path_remote = (
              f"{package_root_remote}/__FRACTAL_MANIFEST__.json"
fractal_server/tasks/v2/ssh/reactivate_pixi.py CHANGED
@@ -114,6 +114,18 @@ def reactivate_ssh_pixi(
              ),
              ("__SOURCE_DIR_NAME__", SOURCE_DIR_NAME),
              ("__FROZEN_OPTION__", "--frozen"),
+             (
+                 "__TOKIO_WORKER_THREADS__",
+                 str(settings.pixi.TOKIO_WORKER_THREADS),
+             ),
+             (
+                 "__PIXI_CONCURRENT_SOLVES__",
+                 str(settings.pixi.PIXI_CONCURRENT_SOLVES),
+             ),
+             (
+                 "__PIXI_CONCURRENT_DOWNLOADS__",
+                 str(settings.pixi.PIXI_CONCURRENT_DOWNLOADS),
+             ),
          }

          logger.info("installing - START")
@@ -140,11 +152,12 @@ def reactivate_ssh_pixi(
          )

          # Run script 1 - extract tar.gz into `source_dir`
-         _customize_and_run_template(
+         stdout = _customize_and_run_template(
              template_filename="pixi_1_extract.sh",
              replacements=replacements,
              **common_args,
          )
+         logger.debug(f"STDOUT: {stdout}")
          activity.log = get_current_log(log_file_path)
          activity = add_commit_refresh(obj=activity, db=db)

@@ -164,23 +177,27 @@ def reactivate_ssh_pixi(
          )

          # Run script 2 - run pixi-install command
-         _customize_and_run_template(
+         stdout = _customize_and_run_template(
              template_filename="pixi_2_install.sh",
              replacements=replacements,
              **common_args,
          )
+         logger.debug(f"STDOUT: {stdout}")
          activity.log = get_current_log(log_file_path)
          activity = add_commit_refresh(obj=activity, db=db)

          # Run script 3 - post-install
-         _customize_and_run_template(
+         stdout = _customize_and_run_template(
              template_filename="pixi_3_post_install.sh",
              replacements=replacements,
              **common_args,
          )
+         logger.debug(f"STDOUT: {stdout}")
          activity.log = get_current_log(log_file_path)
          activity = add_commit_refresh(obj=activity, db=db)

+         fractal_ssh.run_command(cmd=f"chmod 755 {source_dir} -R")
+
          # Finalize (write metadata to DB)
          activity.status = TaskGroupActivityStatusV2.OK
          activity.timestamp_ended = get_timestamp()
fractal_server/tasks/v2/templates/pixi_1_extract.sh CHANGED
@@ -2,7 +2,7 @@ set -e

  write_log(){
      TIMESTAMP=$(date -u +"%Y-%m-%dT%H:%M:%SZ")
-     echo "[collect-task-pixi, ${TIMESTAMP}] ${1}"
+     echo "[extract-tar-gz-pixi, ${TIMESTAMP}] ${1}"
  }

  # Replacements
fractal_server/tasks/v2/templates/pixi_2_install.sh CHANGED
@@ -2,7 +2,7 @@ set -e

  write_log(){
      TIMESTAMP=$(date -u +"%Y-%m-%dT%H:%M:%SZ")
-     echo "[collect-task-pixi, ${TIMESTAMP}] ${1}"
+     echo "[install-tasks-pixi, ${TIMESTAMP}] ${1}"
  }

  # Replacements
@@ -10,6 +10,9 @@ PIXI_HOME="__PIXI_HOME__"
  PACKAGE_DIR="__PACKAGE_DIR__"
  SOURCE_DIR_NAME="__SOURCE_DIR_NAME__"
  FROZEN_OPTION="__FROZEN_OPTION__"
+ TOKIO_WORKER_THREADS="__TOKIO_WORKER_THREADS__"
+ PIXI_CONCURRENT_SOLVES="__PIXI_CONCURRENT_SOLVES__"
+ PIXI_CONCURRENT_DOWNLOADS="__PIXI_CONCURRENT_DOWNLOADS__"

  # Strip trailing `/` from `PACKAGE_DIR`
  PIXI_HOME=${PIXI_HOME%/}
@@ -24,16 +27,22 @@ PYPROJECT_TOML="${SOURCE_DIR}/pyproject.toml"
  export PIXI_HOME="${PIXI_HOME}"
  export PIXI_CACHE_DIR="${PIXI_HOME}/cache"
  export RATTLER_AUTH_FILE="${PIXI_HOME}/credentials.json"
+ export TOKIO_WORKER_THREADS="${TOKIO_WORKER_THREADS}"

  TIME_START=$(date +%s)

+ write_log "Hostname: $(hostname)"
+
  cd "${PACKAGE_DIR}"
  write_log "Changed working directory to ${PACKAGE_DIR}"

  # -----------------------------------------------------------------------------

  write_log "START '${PIXI_EXECUTABLE} install ${FROZEN_OPTION} --manifest-path ${PYPROJECT_TOML}'"
- ${PIXI_EXECUTABLE} install ${FROZEN_OPTION} --manifest-path "${PYPROJECT_TOML}"
+ ${PIXI_EXECUTABLE} install \
+     --concurrent-solves "${PIXI_CONCURRENT_SOLVES}" \
+     --concurrent-downloads "${PIXI_CONCURRENT_DOWNLOADS}" \
+     ${FROZEN_OPTION} --manifest-path "${PYPROJECT_TOML}"
  write_log "END '${PIXI_EXECUTABLE} install ${FROZEN_OPTION} --manifest-path ${PYPROJECT_TOML}'"
  echo

fractal_server/tasks/v2/templates/pixi_3_post_install.sh CHANGED
@@ -2,7 +2,7 @@ set -e

  write_log(){
      TIMESTAMP=$(date -u +"%Y-%m-%dT%H:%M:%SZ")
-     echo "[collect-task-pixi, ${TIMESTAMP}] ${1}"
+     echo "[after-install-pixi, ${TIMESTAMP}] ${1}"
  }

  # Replacements
@@ -70,10 +70,6 @@ write_log "Disk usage: ${ENV_DISK_USAGE}"
  write_log "Number of files: ${ENV_FILE_NUMBER}"
  echo

- write_log "START chmod 755 ${SOURCE_DIR} -R"
- chmod 755 "${SOURCE_DIR}" -R
- write_log "END chmod 755 ${SOURCE_DIR} -R"
-
  TIME_END=$(date +%s)
  write_log "Elapsed: $((TIME_END - TIME_START)) seconds"
  write_log "All ok, exit."
{fractal_server-2.15.0a4.dist-info → fractal_server-2.15.1.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.3
  Name: fractal-server
- Version: 2.15.0a4
+ Version: 2.15.1
  Summary: Backend component of the Fractal analytics platform
  License: BSD-3-Clause
  Author: Tommaso Comparin
{fractal_server-2.15.0a4.dist-info → fractal_server-2.15.1.dist-info}/RECORD CHANGED
@@ -1,4 +1,4 @@
- fractal_server/__init__.py,sha256=o7k56yCVsIjpLW3Sy9jgFDRLcOWV7ruqsNi1y_EeYx0,25
+ fractal_server/__init__.py,sha256=6A0nKt1r_WHhSOgKEGX5ueD-wRkklKFCyvI9s88a16s,23
  fractal_server/__main__.py,sha256=rkM8xjY1KeS3l63irB8yCrlVobR-73uDapC4wvrIlxI,6957
  fractal_server/alembic.ini,sha256=MWwi7GzjzawI9cCAK1LW7NxIBQDUqD12-ptJoq5JpP0,3153
  fractal_server/app/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -6,18 +6,18 @@ fractal_server/app/db/__init__.py,sha256=EFzcf6iKemWlOSRj4vtDT63hAE9HBYWh4abYOdD
  fractal_server/app/models/__init__.py,sha256=xJWiGAwpXmCpnFMC4c_HTqoUCzMOXrakoGLUH_uMvdA,415
  fractal_server/app/models/linkusergroup.py,sha256=3KkkE4QIUAlTrBAZs_tVy0pGvAxUAq6yOEjflct_z2M,678
  fractal_server/app/models/linkuserproject.py,sha256=hvaxh3Lkiy2uUCwB8gvn8RorCpvxSSdzWdCS_U1GL7g,315
- fractal_server/app/models/security.py,sha256=PVZ3nTZO3TYpOTLiMARNy2mHAET49i6nE7bKxn1H-vQ,3836
- fractal_server/app/models/user_settings.py,sha256=RxzRBGLHF_wc5csrTeHGUSV77Md_X0Lf-SnYVOsEWHc,1263
+ fractal_server/app/models/security.py,sha256=NfR0I4dRbOEmCWOKeEHyFO-uqhSJ11dS0B6yWtZRqs4,3852
+ fractal_server/app/models/user_settings.py,sha256=WdnrLOP2w8Nqh_3K-4-b-8a7XEC9ILrE6SfbYoTk-7Y,1279
  fractal_server/app/models/v2/__init__.py,sha256=vjHwek7-IXmaZZL9VF0nD30YL9ca4wNc8P4RXJK_kDc,832
  fractal_server/app/models/v2/accounting.py,sha256=i-2TsjqyuclxFQ21C-TeDoss7ZBTRuXdzIJfVr2UxwE,1081
- fractal_server/app/models/v2/dataset.py,sha256=B_bPnYCSLRFN-vBIOc5nJ31JTruQPxLda9mqpPIJmGk,1209
+ fractal_server/app/models/v2/dataset.py,sha256=P_zy4dPQAqrCALQ6737VkAFk1SvcgYjnslGUZhPI8sc,1226
  fractal_server/app/models/v2/history.py,sha256=CBN2WVg9vW5pHU1RP8TkB_nnJrwnuifCcxgnd53UtEE,2163
- fractal_server/app/models/v2/job.py,sha256=LfpwAedMVcA_6Ne0Rr4g3tt0asAQkWz3LSPm7IwZhYc,1978
+ fractal_server/app/models/v2/job.py,sha256=e3Un_rUgWC-KazGLDQqy17NQK_2ZsL3EmEmDAky_bN0,1998
  fractal_server/app/models/v2/project.py,sha256=RmU5BQR4HD6xifRndUhvPBy30wntml-giBRoEysdWXw,755
- fractal_server/app/models/v2/task.py,sha256=P7nsS5mCmVyzr4WtcjoiedesqkWvkHA2cQPsMbQt-7o,1427
- fractal_server/app/models/v2/task_group.py,sha256=Q78ommMWEG2Sqvg2Y8ICgYA_aGH-N7LdLbnmnDl1l1M,3841
+ fractal_server/app/models/v2/task.py,sha256=iBIQB8POQE5MyKvLZhw7jZWlBhbrThzCDzRTcgiAczQ,1493
+ fractal_server/app/models/v2/task_group.py,sha256=1cn14RKKOOCCjh42iaT-HyuRrRpCPcYhWRrlMK-Enwc,3857
  fractal_server/app/models/v2/workflow.py,sha256=wuK9SV1TXrlYcieYLYj5fGvV3K3bW7g9jCM1uv9HHjA,1058
- fractal_server/app/models/v2/workflowtask.py,sha256=tph237DXitOnzSv88rk9qgN2VmlI1smWS5fNYHR8jMo,1200
+ fractal_server/app/models/v2/workflowtask.py,sha256=qkTc-hcFLpJUVsEUbnDq2BJL0qg9jagy2doZeusF1ek,1266
  fractal_server/app/routes/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  fractal_server/app/routes/admin/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  fractal_server/app/routes/admin/v2/__init__.py,sha256=_5lqb6-M8-fZqE1HRMep6pAFYRUKMxrvbZOKs-RXWkw,933
@@ -34,7 +34,7 @@ fractal_server/app/routes/api/v2/_aux_functions.py,sha256=P5exwdiNm0ZxtoGw4wxvm_
  fractal_server/app/routes/api/v2/_aux_functions_history.py,sha256=Z23xwvBaVEEQ5B-JsWZJpjj4_QqoXqHYONztnbAH6gw,4425
  fractal_server/app/routes/api/v2/_aux_functions_task_lifecycle.py,sha256=GpKfw9yj01LmOAuNMTOreU1PFkCKpjK5oCt7_wp35-A,6741
  fractal_server/app/routes/api/v2/_aux_functions_task_version_update.py,sha256=WLDOYCnb6fnS5avKflyx6yN24Vo1n5kJk5ZyiKbzb8Y,1175
- fractal_server/app/routes/api/v2/_aux_functions_tasks.py,sha256=MFYnyNPBACSHXTDLXe6cSennnpmlpajN84iivOOMW7Y,11599
+ fractal_server/app/routes/api/v2/_aux_functions_tasks.py,sha256=MNty8CBnTMPSAKE5gMT7tCY8QWpCQyhft_shq12hHpA,12208
  fractal_server/app/routes/api/v2/_aux_task_group_disambiguation.py,sha256=8x1_q9FyCzItnPmdSdLQuwUTy4B9xCsXscp97_lJcpM,4635
  fractal_server/app/routes/api/v2/dataset.py,sha256=6u4MFqJ3YZ0Zq6Xx8CRMrTPKW55ZaR63Uno21DqFr4Q,8889
  fractal_server/app/routes/api/v2/history.py,sha256=BHBZYFSF5lw-YYOl0OVV5yEZPMxiqjH72_KwR66EtaE,15495
@@ -45,10 +45,10 @@ fractal_server/app/routes/api/v2/project.py,sha256=ldMEyjtwGpX2teu85sCNWaubDFlw-
  fractal_server/app/routes/api/v2/status_legacy.py,sha256=ZckHeBy8y21cyQ_OLY-VmkapzMhd3g9ae-qg-r4-uVo,6317
  fractal_server/app/routes/api/v2/submit.py,sha256=_BDkWtFdo8-p7kZ0Oxaidei04MfuBeaEsWtwJaKZQ_Y,8781
  fractal_server/app/routes/api/v2/task.py,sha256=ptS47XtxnHzk9bPNZV24Wfroo5sP19RE0-LsfX0ZvOc,7018
- fractal_server/app/routes/api/v2/task_collection.py,sha256=9_HWI2BhHfJKZlljmxN3e02IsMrKntLGfJkFl5nboDw,12549
+ fractal_server/app/routes/api/v2/task_collection.py,sha256=UcS7tb9RjiDimeI-iWwD0wqnXYQEdEZT56PnPa0zC9Q,12233
  fractal_server/app/routes/api/v2/task_collection_custom.py,sha256=3EZAzTVlt3wrHAuwxfcYo7LpHefLCcQUctZuolJOQHE,6728
- fractal_server/app/routes/api/v2/task_collection_pixi.py,sha256=M0axsUWkq6842p9tHQ9Fs2WX8Og6v8vKSiQ4thMLyOs,7792
- fractal_server/app/routes/api/v2/task_group.py,sha256=M96VoKcLqOpZlY0RWnsHza8jN0dzAWK9lxw87Om3Fbw,9063
+ fractal_server/app/routes/api/v2/task_collection_pixi.py,sha256=LS5xOYRRvI25TyvPWR9anxQt3emTfuV610zUVKc7eJU,7518
+ fractal_server/app/routes/api/v2/task_group.py,sha256=Wmp5Rt6NQm8_EbdJyi3XOkTXxJTTd4MNIy0ja6K-ifA,9205
  fractal_server/app/routes/api/v2/task_group_lifecycle.py,sha256=-uS_z8E3__t_twEqhZOzcEcAxZsgnpg-c7Ya9RF3_bs,9998
  fractal_server/app/routes/api/v2/task_version_update.py,sha256=o8W_C0I84X0u8gAMnCvi8ChiVAKrb5WzUBuJLSuujCA,8235
  fractal_server/app/routes/api/v2/workflow.py,sha256=gwMtpfUY_JiTv5_R_q1I9WNkp6nTqEVtYx8jWNJRxcU,10227
@@ -80,9 +80,9 @@ fractal_server/app/runner/executors/local/runner.py,sha256=DZK_oVxjIewyo7tjB7HvT
  fractal_server/app/runner/executors/slurm_common/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  fractal_server/app/runner/executors/slurm_common/_batching.py,sha256=gbHZIxt90GjUwhB9_UInwVqpX-KdxRQMDeXzUagdL3U,8816
  fractal_server/app/runner/executors/slurm_common/_job_states.py,sha256=nuV-Zba38kDrRESOVB3gaGbrSPZc4q7YGichQaeqTW0,238
- fractal_server/app/runner/executors/slurm_common/_slurm_config.py,sha256=Zv2l_6X1EfSHGRqcBMj2dbai_kP8hfuMfh-WoIUj0tY,15646
- fractal_server/app/runner/executors/slurm_common/base_slurm_runner.py,sha256=2F2zgg3DJKAJ5LecFAzMSGLFmsMiM4lMk4Kh9It35F4,35626
- fractal_server/app/runner/executors/slurm_common/get_slurm_config.py,sha256=VJNryceLzF5_fx9_lS1nGq85EW8rOQ0KrgtMATcfdQc,7271
+ fractal_server/app/runner/executors/slurm_common/_slurm_config.py,sha256=U9BONnnwn8eDqDevwUtFSBcvIsxvNgDHirhcQGJ9t9E,15947
+ fractal_server/app/runner/executors/slurm_common/base_slurm_runner.py,sha256=iXRlTuPvBqqq_I-WYF2IRocPEayMCBxCSJkn_eop6t4,36226
+ fractal_server/app/runner/executors/slurm_common/get_slurm_config.py,sha256=jhoFHauWJm55bIC_v7pFylbK8WgcRJemGu2OjUiRbpQ,7377
  fractal_server/app/runner/executors/slurm_common/remote.py,sha256=xWnI6WktHR_7cxUme72ztIeBb4osnbZNu6J2azWn9K8,3765
  fractal_server/app/runner/executors/slurm_common/slurm_job_task_models.py,sha256=K4SdJOKsUWzDlnkb8Ug_UmTx6nBMsTqn9_oKqwE4XDI,3520
  fractal_server/app/runner/executors/slurm_ssh/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -129,7 +129,7 @@ fractal_server/app/schemas/v2/workflowtask.py,sha256=6eweAMyziwaoMT-7R1fVJYunIeZ
  fractal_server/app/security/__init__.py,sha256=oJ8RVglpOvWPQY4RokiE2YA72Nqo42dZEjywWTt8xr8,14032
  fractal_server/app/security/signup_email.py,sha256=Xd6QYxcdmg0PHpDwmUE8XQmPcOj3Xjy5oROcIMhmltM,1472
  fractal_server/app/user_settings.py,sha256=OP1yiYKtPadxwM51_Q0hdPk3z90TCN4z1BLpQsXyWiU,1316
- fractal_server/config.py,sha256=T9TbGbKrQqY2jGEGyKW78mVPPYajfypzWk7YxDdFIxg,27205
+ fractal_server/config.py,sha256=JvFF2nbXOKI6WPKv2UwvJmT4loStBytdG8EU6qZckY8,28259
  fractal_server/data_migrations/2_14_10.py,sha256=gMRR5QB0SDv0ToEiXVLg1VrHprM_Ii-9O1Kg-ZF-YhY,1599
  fractal_server/data_migrations/README.md,sha256=_3AEFvDg9YkybDqCLlFPdDmGJvr6Tw7HRI14aZ3LOIw,398
  fractal_server/data_migrations/tools.py,sha256=LeMeASwYGtEqd-3wOLle6WARdTGAimoyMmRbbJl-hAM,572
@@ -171,6 +171,7 @@ fractal_server/migrations/versions/a7f4d6137b53_add_workflow_dump_to_applyworkfl
  fractal_server/migrations/versions/af1ef1c83c9b_add_accounting_tables.py,sha256=BftudWuSGvKGBzIL5AMb3yWkgTAuaKPBGsYcOzp_gLQ,1899
  fractal_server/migrations/versions/af8673379a5c_drop_old_filter_columns.py,sha256=9sLd0F7nO5chHHm7RZ4wBA-9bvWomS-av_odKwODADM,1551
  fractal_server/migrations/versions/b1e7f7a1ff71_task_group_for_pixi.py,sha256=loDrqBB-9U3vqLKePEeJy4gK4EuPs_1F345mdrnoCt0,1293
+ fractal_server/migrations/versions/b3ffb095f973_json_to_jsonb.py,sha256=Q01lPlBNQgi3hpoUquWj2QUEF7cTsyQ7uikUhWunzWY,10035
  fractal_server/migrations/versions/c90a7c76e996_job_id_in_history_run.py,sha256=Y1cPwmFOZ4mx3v2XZM6adgu8u0L0VD_R4ADURyMb2ro,1102
  fractal_server/migrations/versions/d256a7379ab8_taskgroup_activity_and_venv_info_to_.py,sha256=HN3_Pk8G81SzdYjg4K1RZAyjKSlsZGvcYE2nWOUbwxQ,3861
  fractal_server/migrations/versions/d4fe3708d309_make_applyworkflow_workflow_dump_non_.py,sha256=6cHEZFuTXiQg9yu32Y3RH1XAl71av141WQ6UMbiITIg,949
@@ -193,28 +194,28 @@ fractal_server/tasks/v2/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG
  fractal_server/tasks/v2/local/__init__.py,sha256=S842wRersYKBKjc7xbmj0ov8b5i1YuCHa2f_yYuxcaI,312
  fractal_server/tasks/v2/local/_utils.py,sha256=p2KJ4BvEwJxahICpzbvzrc5-ciLCFnLXWPCwdNGi-3Q,2495
  fractal_server/tasks/v2/local/collect.py,sha256=MQncScKbWv3g9lrjF8WOhzuEoTEOOgS02RqOJno5csI,11897
- fractal_server/tasks/v2/local/collect_pixi.py,sha256=mINENdTZW4VJOU1JHzE1a_Dszt5fTlyK7caTENUHxy4,9839
+ fractal_server/tasks/v2/local/collect_pixi.py,sha256=i24MS7yxV0_sHkZJ8rd148n8TGqCPo6Zob5LPks3odk,10753
  fractal_server/tasks/v2/local/deactivate.py,sha256=LoEs2TUoHQOq3JfxufW6zroXD-Xx_b-hLtdigEBi1JU,9732
  fractal_server/tasks/v2/local/deactivate_pixi.py,sha256=_ycvnLIZ8zUFB3fZbCzDlNudh-SSetl4UkyFrClCcUU,3494
  fractal_server/tasks/v2/local/reactivate.py,sha256=Q43DOadNeFyyfgNP67lUqaXmZsS6onv67XwxH_-5ANA,5756
- fractal_server/tasks/v2/local/reactivate_pixi.py,sha256=bZLYkzhTC3z9bqb_Bm68Ts0yhQO_1qgEyBgT6k2_d00,6406
+ fractal_server/tasks/v2/local/reactivate_pixi.py,sha256=wF_3gcMWO_8ArJFo4iYh-51LDZDF_1OuYYHrY9eUSL8,7320
  fractal_server/tasks/v2/ssh/__init__.py,sha256=vX5aIM9Hbn2T_cIP_LrZ5ekRqJzYm_GSfp-4Iv7kqeI,300
  fractal_server/tasks/v2/ssh/_utils.py,sha256=ktVH7psQSAhh353fVUe-BwiBZHzTdgXnR-Xv_vfuX0Y,3857
  fractal_server/tasks/v2/ssh/collect.py,sha256=M9gFD1h9Q1Z-BFQq65dI0vFs6HPCkKQzOkxaLddmChY,14334
- fractal_server/tasks/v2/ssh/collect_pixi.py,sha256=QjjXxEPvhFWFwOSOcc43tefcLFwew9hwEs-DYzjXSyo,12865
+ fractal_server/tasks/v2/ssh/collect_pixi.py,sha256=q68GSL9yvezqOEc-zt4Ko1d7ZhTlJugoGspRA8JPiJw,13801
  fractal_server/tasks/v2/ssh/deactivate.py,sha256=XAIy84cLT9MSTMiN67U-wfOjxMm5s7lmrGwhW0qp7BU,12439
  fractal_server/tasks/v2/ssh/deactivate_pixi.py,sha256=K0yK_NPUqhFMj6cp6G_0Kfn0Yo7oQux4kT5dFPulnos,4748
  fractal_server/tasks/v2/ssh/reactivate.py,sha256=NJIgMNFKaXMhbvK0iZOsMwMtsms6Boj9f8N4L01X9Bo,8271
- fractal_server/tasks/v2/ssh/reactivate_pixi.py,sha256=0xURTkdGgUehKTAImFMwA6LNA6uv2W2us4PSYKdf9Eg,8968
+ fractal_server/tasks/v2/ssh/reactivate_pixi.py,sha256=o86FNB8pX9Sv_ROrCOJQqxIfDUhJqzzmo3JKsSfCNlA,9784
  fractal_server/tasks/v2/templates/1_create_venv.sh,sha256=PK0jdHKtQpda1zULebBaVPORt4t6V17wa4N1ohcj5ac,548
  fractal_server/tasks/v2/templates/2_pip_install.sh,sha256=jMJPQJXHKznO6fxOOXtFXKPdCmTf1VLLWj_JL_ZdKxo,1644
  fractal_server/tasks/v2/templates/3_pip_freeze.sh,sha256=JldREScEBI4cD_qjfX4UK7V4aI-FnX9ZvVNxgpSOBFc,168
  fractal_server/tasks/v2/templates/4_pip_show.sh,sha256=qm1vPy6AkKhWDjCJGXS8LqCLYO3KsAyRK325ZsFcF6U,1747
  fractal_server/tasks/v2/templates/5_get_venv_size_and_file_number.sh,sha256=q-6ZUvA6w6FDVEoSd9O63LaJ9tKZc7qAFH72SGPrd_k,284
  fractal_server/tasks/v2/templates/6_pip_install_from_freeze.sh,sha256=A2y8RngEjAcRhG-_owA6P7tAdrS_AszFuGXnaeMV8u0,1122
- fractal_server/tasks/v2/templates/pixi_1_extract.sh,sha256=1Z6sd_fTzqQkOfbFswaPZBNLUyv-OrS4euGlcoi8ces,1097
- fractal_server/tasks/v2/templates/pixi_2_install.sh,sha256=GsPWkcYGyUWV5ZifoiSenUXLMeNr3MlhMXF86npi1B0,1233
- fractal_server/tasks/v2/templates/pixi_3_post_install.sh,sha256=SfXl0l4wY1ygr-DErbaXrgg24UFU7wsz1O8zgbHV0NE,2618
+ fractal_server/tasks/v2/templates/pixi_1_extract.sh,sha256=Jdy5OyKo2jxe_qIDB9Zi4a0FL0cMBysxvBPHlUrARQM,1099
+ fractal_server/tasks/v2/templates/pixi_2_install.sh,sha256=h6-M101Q1AdAfZNZyPfSUc8AlZ-uS84Hae4vJdDSglY,1601
+ fractal_server/tasks/v2/templates/pixi_3_post_install.sh,sha256=99J8KXkNeQk9utuEtUxfAZS6VCThC32X7I7HAp2gdTU,2501
  fractal_server/tasks/v2/utils_background.py,sha256=_4wGETgZ3JdnJXLYKSI0Lns8LwokJL-NEzUOK5SxCJU,4811
  fractal_server/tasks/v2/utils_database.py,sha256=yi7793Uue32O59OBVUgomO42oUrVKdSKXoShBUNDdK0,1807
  fractal_server/tasks/v2/utils_package_names.py,sha256=RDg__xrvQs4ieeVzmVdMcEh95vGQYrv9Hfal-5EDBM8,2393
@@ -229,8 +230,8 @@ fractal_server/types/validators/_workflow_task_arguments_validators.py,sha256=HL
  fractal_server/urls.py,sha256=QjIKAC1a46bCdiPMu3AlpgFbcv6a4l3ABcd5xz190Og,471
  fractal_server/utils.py,sha256=Vn35lApt1T1J8nc09sAVqd10Cy0sa3dLipcljI-hkuk,2185
  fractal_server/zip_tools.py,sha256=tqz_8f-vQ9OBRW-4OQfO6xxY-YInHTyHmZxU7U4PqZo,4885
- fractal_server-2.15.0a4.dist-info/LICENSE,sha256=QKAharUuhxL58kSoLizKJeZE3mTCBnX6ucmz8W0lxlk,1576
- fractal_server-2.15.0a4.dist-info/METADATA,sha256=govoJDhpiMa5wRwenQpKksSPISsCvuvcM7zNf00LjtQ,4245
- fractal_server-2.15.0a4.dist-info/WHEEL,sha256=7dDg4QLnNKTvwIDR9Ac8jJaAmBC_owJrckbC0jjThyA,88
- fractal_server-2.15.0a4.dist-info/entry_points.txt,sha256=8tV2kynvFkjnhbtDnxAqImL6HMVKsopgGfew0DOp5UY,58
- fractal_server-2.15.0a4.dist-info/RECORD,,
+ fractal_server-2.15.1.dist-info/LICENSE,sha256=QKAharUuhxL58kSoLizKJeZE3mTCBnX6ucmz8W0lxlk,1576
+ fractal_server-2.15.1.dist-info/METADATA,sha256=kxk3S8JxO5PKXtG0HSpS2jLbnQ0JMF-WHZjrqHxPatY,4243
+ fractal_server-2.15.1.dist-info/WHEEL,sha256=7dDg4QLnNKTvwIDR9Ac8jJaAmBC_owJrckbC0jjThyA,88
+ fractal_server-2.15.1.dist-info/entry_points.txt,sha256=8tV2kynvFkjnhbtDnxAqImL6HMVKsopgGfew0DOp5UY,58
+ fractal_server-2.15.1.dist-info/RECORD,,