fractal-server 2.14.16__py3-none-any.whl → 2.15.0__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package, as published to a supported registry. It is provided for informational purposes only and reflects the packages exactly as they appear in their public registries.
- fractal_server/__init__.py +1 -1
- fractal_server/app/models/security.py +2 -2
- fractal_server/app/models/user_settings.py +2 -2
- fractal_server/app/models/v2/dataset.py +3 -3
- fractal_server/app/models/v2/job.py +6 -6
- fractal_server/app/models/v2/task.py +12 -8
- fractal_server/app/models/v2/task_group.py +19 -7
- fractal_server/app/models/v2/workflowtask.py +6 -6
- fractal_server/app/routes/admin/v2/task_group_lifecycle.py +2 -5
- fractal_server/app/routes/api/v2/__init__.py +6 -0
- fractal_server/app/routes/api/v2/_aux_functions_tasks.py +22 -0
- fractal_server/app/routes/api/v2/task_collection.py +8 -18
- fractal_server/app/routes/api/v2/task_collection_custom.py +2 -2
- fractal_server/app/routes/api/v2/task_collection_pixi.py +219 -0
- fractal_server/app/routes/api/v2/task_group.py +3 -0
- fractal_server/app/routes/api/v2/task_group_lifecycle.py +26 -10
- fractal_server/app/runner/executors/slurm_common/_slurm_config.py +10 -0
- fractal_server/app/runner/executors/slurm_common/base_slurm_runner.py +39 -14
- fractal_server/app/runner/executors/slurm_common/get_slurm_config.py +8 -1
- fractal_server/app/schemas/v2/__init__.py +1 -1
- fractal_server/app/schemas/v2/dumps.py +1 -1
- fractal_server/app/schemas/v2/task_collection.py +1 -1
- fractal_server/app/schemas/v2/task_group.py +7 -5
- fractal_server/config.py +70 -0
- fractal_server/migrations/versions/b1e7f7a1ff71_task_group_for_pixi.py +53 -0
- fractal_server/migrations/versions/b3ffb095f973_json_to_jsonb.py +340 -0
- fractal_server/ssh/_fabric.py +26 -0
- fractal_server/tasks/v2/local/__init__.py +3 -0
- fractal_server/tasks/v2/local/_utils.py +4 -3
- fractal_server/tasks/v2/local/collect.py +26 -30
- fractal_server/tasks/v2/local/collect_pixi.py +252 -0
- fractal_server/tasks/v2/local/deactivate.py +39 -46
- fractal_server/tasks/v2/local/deactivate_pixi.py +98 -0
- fractal_server/tasks/v2/local/reactivate.py +12 -23
- fractal_server/tasks/v2/local/reactivate_pixi.py +184 -0
- fractal_server/tasks/v2/ssh/__init__.py +3 -0
- fractal_server/tasks/v2/ssh/_utils.py +50 -9
- fractal_server/tasks/v2/ssh/collect.py +46 -56
- fractal_server/tasks/v2/ssh/collect_pixi.py +315 -0
- fractal_server/tasks/v2/ssh/deactivate.py +54 -67
- fractal_server/tasks/v2/ssh/deactivate_pixi.py +122 -0
- fractal_server/tasks/v2/ssh/reactivate.py +25 -38
- fractal_server/tasks/v2/ssh/reactivate_pixi.py +233 -0
- fractal_server/tasks/v2/templates/pixi_1_extract.sh +40 -0
- fractal_server/tasks/v2/templates/pixi_2_install.sh +52 -0
- fractal_server/tasks/v2/templates/pixi_3_post_install.sh +76 -0
- fractal_server/tasks/v2/utils_background.py +50 -8
- fractal_server/tasks/v2/utils_pixi.py +38 -0
- fractal_server/tasks/v2/utils_templates.py +14 -1
- {fractal_server-2.14.16.dist-info → fractal_server-2.15.0.dist-info}/METADATA +1 -1
- {fractal_server-2.14.16.dist-info → fractal_server-2.15.0.dist-info}/RECORD +54 -41
- {fractal_server-2.14.16.dist-info → fractal_server-2.15.0.dist-info}/LICENSE +0 -0
- {fractal_server-2.14.16.dist-info → fractal_server-2.15.0.dist-info}/WHEEL +0 -0
- {fractal_server-2.14.16.dist-info → fractal_server-2.15.0.dist-info}/entry_points.txt +0 -0
fractal_server/__init__.py
CHANGED
@@ -1 +1 @@
-__VERSION__ = "2.14.16"
+__VERSION__ = "2.15.0"
fractal_server/app/models/security.py
CHANGED
@@ -15,8 +15,8 @@ from typing import Optional
 from pydantic import ConfigDict
 from pydantic import EmailStr
 from sqlalchemy import Column
+from sqlalchemy.dialects.postgresql import JSONB
 from sqlalchemy.types import DateTime
-from sqlalchemy.types import JSON
 from sqlmodel import Field
 from sqlmodel import Relationship
 from sqlmodel import SQLModel
@@ -124,5 +124,5 @@ class UserGroup(SQLModel, table=True):
         sa_column=Column(DateTime(timezone=True), nullable=False),
     )
     viewer_paths: list[str] = Field(
-        sa_column=Column(JSON, server_default="[]", nullable=False)
+        sa_column=Column(JSONB, server_default="[]", nullable=False)
     )
fractal_server/app/models/user_settings.py
CHANGED
@@ -1,5 +1,5 @@
 from sqlalchemy import Column
-from sqlalchemy.types import JSON
+from sqlalchemy.dialects.postgresql import JSONB
 from sqlmodel import Field
 from sqlmodel import SQLModel

@@ -25,7 +25,7 @@ class UserSettings(SQLModel, table=True):

     id: int | None = Field(default=None, primary_key=True)
     slurm_accounts: list[str] = Field(
-        sa_column=Column(JSON, server_default="[]", nullable=False)
+        sa_column=Column(JSONB, server_default="[]", nullable=False)
     )
     ssh_host: str | None = None
     ssh_username: str | None = None
fractal_server/app/models/v2/dataset.py
CHANGED
@@ -3,8 +3,8 @@ from typing import Any

 from pydantic import ConfigDict
 from sqlalchemy import Column
+from sqlalchemy.dialects.postgresql import JSONB
 from sqlalchemy.types import DateTime
-from sqlalchemy.types import JSON
 from sqlmodel import Field
 from sqlmodel import Relationship
 from sqlmodel import SQLModel
@@ -24,7 +24,7 @@ class DatasetV2(SQLModel, table=True):
     )

     history: list[dict[str, Any]] = Field(
-        sa_column=Column(JSON, server_default="[]", nullable=False)
+        sa_column=Column(JSONB, server_default="[]", nullable=False)
     )

     timestamp_created: datetime = Field(
@@ -34,7 +34,7 @@

     zarr_dir: str
     images: list[dict[str, Any]] = Field(
-        sa_column=Column(JSON, server_default="[]", nullable=False)
+        sa_column=Column(JSONB, server_default="[]", nullable=False)
     )

     @property
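One practical effect of the JSON-to-JSONB switch in the columns above: JSONB supports PostgreSQL-native containment operators and GIN indexing, which plain JSON does not. A minimal illustrative sketch (not part of this diff; the query and the zarr_url value are invented):

```python
# Illustrative only: JSONB columns expose the containment operator (@>)
# through SQLAlchemy's `.contains(...)` comparator.
from sqlalchemy import select

from fractal_server.app.models.v2 import DatasetV2

# Find datasets whose `images` list contains an entry with this zarr_url
# (the value is a made-up example).
stm = select(DatasetV2.id).where(
    DatasetV2.images.contains([{"zarr_url": "/data/plate.zarr/B/03/0"}])
)
```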
fractal_server/app/models/v2/job.py
CHANGED
@@ -3,8 +3,8 @@ from typing import Any

 from pydantic import ConfigDict
 from sqlalchemy import Column
+from sqlalchemy.dialects.postgresql import JSONB
 from sqlalchemy.types import DateTime
-from sqlalchemy.types import JSON
 from sqlmodel import Field
 from sqlmodel import SQLModel

@@ -31,13 +31,13 @@ class JobV2(SQLModel, table=True):
     slurm_account: str | None = None

     dataset_dump: dict[str, Any] = Field(
-        sa_column=Column(JSON, nullable=False)
+        sa_column=Column(JSONB, nullable=False)
     )
     workflow_dump: dict[str, Any] = Field(
-        sa_column=Column(JSON, nullable=False)
+        sa_column=Column(JSONB, nullable=False)
     )
     project_dump: dict[str, Any] = Field(
-        sa_column=Column(JSON, nullable=False)
+        sa_column=Column(JSONB, nullable=False)
     )

     worker_init: str | None = None
@@ -57,8 +57,8 @@
     log: str | None = None

     attribute_filters: AttributeFilters = Field(
-        sa_column=Column(JSON, nullable=False, server_default="{}")
+        sa_column=Column(JSONB, nullable=False, server_default="{}")
     )
     type_filters: dict[str, bool] = Field(
-        sa_column=Column(JSON, nullable=False, server_default="{}")
+        sa_column=Column(JSONB, nullable=False, server_default="{}")
     )
fractal_server/app/models/v2/task.py
CHANGED
@@ -1,7 +1,7 @@
 from typing import Any

 from sqlalchemy import Column
-from sqlalchemy.types import JSON
+from sqlalchemy.dialects.postgresql import JSONB
 from sqlmodel import Field
 from sqlmodel import SQLModel

@@ -16,25 +16,29 @@ class TaskV2(SQLModel, table=True):
     source: str | None = None

     meta_non_parallel: dict[str, Any] = Field(
-        sa_column=Column(JSON, server_default="{}", default={}, nullable=False)
+        sa_column=Column(
+            JSONB, server_default="{}", default={}, nullable=False
+        )
     )
     meta_parallel: dict[str, Any] = Field(
-        sa_column=Column(JSON, server_default="{}", default={}, nullable=False)
+        sa_column=Column(
+            JSONB, server_default="{}", default={}, nullable=False
+        )
     )

     version: str | None = None
     args_schema_non_parallel: dict[str, Any] | None = Field(
-        sa_column=Column(JSON), default=None
+        sa_column=Column(JSONB), default=None
     )
     args_schema_parallel: dict[str, Any] | None = Field(
-        sa_column=Column(JSON), default=None
+        sa_column=Column(JSONB), default=None
     )
     args_schema_version: str | None = None
     docs_info: str | None = None
     docs_link: str | None = None

-    input_types: dict[str, bool] = Field(sa_column=Column(JSON), default={})
-    output_types: dict[str, bool] = Field(sa_column=Column(JSON), default={})
+    input_types: dict[str, bool] = Field(sa_column=Column(JSONB), default={})
+    output_types: dict[str, bool] = Field(sa_column=Column(JSONB), default={})

     taskgroupv2_id: int = Field(foreign_key="taskgroupv2.id")

@@ -42,5 +46,5 @@ class TaskV2(SQLModel, table=True):
     modality: str | None = None
     authors: str | None = None
     tags: list[str] = Field(
-        sa_column=Column(JSON, server_default="[]", nullable=False)
+        sa_column=Column(JSONB, server_default="[]", nullable=False)
     )
fractal_server/app/models/v2/task_group.py
CHANGED
@@ -2,8 +2,8 @@ from datetime import datetime
 from datetime import timezone

 from sqlalchemy import Column
+from sqlalchemy.dialects.postgresql import JSONB
 from sqlalchemy.types import DateTime
-from sqlalchemy.types import JSON
 from sqlmodel import Field
 from sqlmodel import Relationship
 from sqlmodel import SQLModel
@@ -29,18 +29,19 @@ class TaskGroupV2(SQLModel, table=True):
     pkg_name: str
     version: str | None = None
     python_version: str | None = None
+    pixi_version: str | None = None
     path: str | None = None
-    wheel_path: str | None = None
+    archive_path: str | None = None
     pip_extras: str | None = None
     pinned_package_versions: dict[str, str] = Field(
         sa_column=Column(
-            JSON,
+            JSONB,
             server_default="{}",
             default={},
             nullable=True,
         ),
     )
-    pip_freeze: str | None = None
+    env_info: str | None = None
     venv_path: str | None = None
     venv_size_in_kB: int | None = None
     venv_file_number: int | None = None
@@ -66,15 +67,20 @@
         """
         Prepare string to be used in `python -m pip install`.
         """
+        if self.origin == "pixi":
+            raise ValueError(
+                f"Cannot call 'pip_install_string' if {self.origin=}."
+            )
+
         extras = f"[{self.pip_extras}]" if self.pip_extras is not None else ""

-        if self.wheel_path is not None:
-            return f"{self.wheel_path}{extras}"
+        if self.archive_path is not None:
+            return f"{self.archive_path}{extras}"
         else:
             if self.version is None:
                 raise ValueError(
                     "Cannot run `pip_install_string` with "
-                    f"{self.pkg_name=}, {self.wheel_path=}, {self.version=}."
+                    f"{self.pkg_name=}, {self.archive_path=}, {self.version=}."
                 )
             return f"{self.pkg_name}{extras}=={self.version}"

@@ -83,6 +89,12 @@ class TaskGroupV2(SQLModel, table=True):
         """
         Prepare string to be used in `python -m pip install`.
         """
+        if self.origin == "pixi":
+            raise ValueError(
+                "Cannot call 'pinned_package_versions_string' if "
+                f"{self.origin=}."
+            )
+
         if self.pinned_package_versions is None:
             return ""
         output = " ".join(
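A quick sketch of how the renamed fields behave in `pip_install_string` (the values below are invented, and the sketch assumes `pip_install_string` is exposed as a property, which is not visible in these hunks):

```python
from fractal_server.app.models.v2 import TaskGroupV2

tg = TaskGroupV2(
    user_id=1, pkg_name="fractal-tasks-core", version="1.5.0", origin="pypi"
)
print(tg.pip_install_string)  # "fractal-tasks-core==1.5.0"

# With an uploaded archive, `archive_path` (formerly `wheel_path`) wins:
tg.archive_path = "/tmp/fractal_tasks_core-1.5.0-py3-none-any.whl"
tg.pip_extras = "fractal-tasks"
print(tg.pip_install_string)
# "/tmp/fractal_tasks_core-1.5.0-py3-none-any.whl[fractal-tasks]"

# Pixi task groups are not pip-installable, so the new guard raises:
tg.origin = "pixi"
tg.pip_install_string  # ValueError: Cannot call 'pip_install_string' ...
```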
fractal_server/app/models/v2/workflowtask.py
CHANGED
@@ -2,7 +2,7 @@ from typing import Any

 from pydantic import ConfigDict
 from sqlalchemy import Column
-from sqlalchemy.types import JSON
+from sqlalchemy.dialects.postgresql import JSONB
 from sqlmodel import Field
 from sqlmodel import Relationship
 from sqlmodel import SQLModel
@@ -18,20 +18,20 @@ class WorkflowTaskV2(SQLModel, table=True):
     workflow_id: int = Field(foreign_key="workflowv2.id", ondelete="CASCADE")
     order: int | None = None
     meta_parallel: dict[str, Any] | None = Field(
-        sa_column=Column(JSON), default=None
+        sa_column=Column(JSONB), default=None
     )
     meta_non_parallel: dict[str, Any] | None = Field(
-        sa_column=Column(JSON), default=None
+        sa_column=Column(JSONB), default=None
     )
     args_parallel: dict[str, Any] | None = Field(
-        sa_column=Column(JSON), default=None
+        sa_column=Column(JSONB), default=None
     )
     args_non_parallel: dict[str, Any] | None = Field(
-        sa_column=Column(JSON), default=None
+        sa_column=Column(JSONB), default=None
     )

     type_filters: dict[str, bool] = Field(
-        sa_column=Column(JSON, nullable=False, server_default="{}")
+        sa_column=Column(JSONB, nullable=False, server_default="{}")
     )

     # Task
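The column-type changes above are paired with the new migration `fractal_server/migrations/versions/b3ffb095f973_json_to_jsonb.py` (+340 lines, not shown in this section). As a rough, hedged sketch of the kind of Alembic step such a migration performs per column (the table/column names here are examples, not the migration's actual contents):

```python
import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects import postgresql


def upgrade() -> None:
    # Cast a JSON column in place; `postgresql_using` tells PostgreSQL
    # how to convert existing rows.
    op.alter_column(
        "datasetv2",
        "images",
        existing_type=sa.JSON(),
        type_=postgresql.JSONB(),
        postgresql_using="images::jsonb",
        existing_nullable=False,
    )


def downgrade() -> None:
    op.alter_column(
        "datasetv2",
        "images",
        existing_type=postgresql.JSONB(),
        type_=sa.JSON(),
        postgresql_using="images::json",
        existing_nullable=False,
    )
```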
fractal_server/app/routes/admin/v2/task_group_lifecycle.py
CHANGED
@@ -2,7 +2,6 @@ from fastapi import APIRouter
 from fastapi import BackgroundTasks
 from fastapi import Depends
 from fastapi import HTTPException
-from fastapi import Request
 from fastapi import Response
 from fastapi import status

@@ -51,7 +50,6 @@ async def deactivate_task_group(
     task_group_id: int,
     background_tasks: BackgroundTasks,
     response: Response,
-    request: Request,
     superuser: UserOAuth = Depends(current_active_superuser),
     db: AsyncSession = Depends(get_async_db),
 ) -> TaskGroupReadV2:
@@ -157,7 +155,6 @@ async def reactivate_task_group(
     task_group_id: int,
     background_tasks: BackgroundTasks,
     response: Response,
-    request: Request,
     superuser: UserOAuth = Depends(current_active_superuser),
     db: AsyncSession = Depends(get_async_db),
 ) -> TaskGroupReadV2:
@@ -207,12 +204,12 @@
         response.status_code = status.HTTP_202_ACCEPTED
         return task_group_activity

-    if task_group.pip_freeze is None:
+    if task_group.env_info is None:
         raise HTTPException(
             status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
             detail=(
                 "Cannot reactivate a task group with "
-                f"{task_group.pip_freeze=}."
+                f"{task_group.env_info=}."
             ),
         )

fractal_server/app/routes/api/v2/__init__.py
CHANGED
@@ -14,6 +14,7 @@ from .submit import router as submit_job_router_v2
 from .task import router as task_router_v2
 from .task_collection import router as task_collection_router_v2
 from .task_collection_custom import router as task_collection_router_v2_custom
+from .task_collection_pixi import router as task_collection_pixi_router_v2
 from .task_group import router as task_group_router_v2
 from .task_group_lifecycle import router as task_group_lifecycle_router_v2
 from .task_version_update import router as task_version_update_router_v2
@@ -49,6 +50,11 @@ router_api_v2.include_router(
     prefix="/task",
     tags=["V2 Task Lifecycle"],
 )
+router_api_v2.include_router(
+    task_collection_pixi_router_v2,
+    prefix="/task",
+    tags=["V2 Task Lifecycle"],
+)
 router_api_v2.include_router(
     task_group_lifecycle_router_v2,
     prefix="/task-group",
fractal_server/app/routes/api/v2/_aux_functions_tasks.py
CHANGED
@@ -333,6 +333,28 @@ async def _verify_non_duplication_group_constraint(
     )


+async def _verify_non_duplication_group_path(
+    path: str | None,
+    db: AsyncSession,
+) -> None:
+    """
+    Verify uniqueness of non-`None` `TaskGroupV2.path`
+    """
+    if path is None:
+        return
+    stm = select(TaskGroupV2.id).where(TaskGroupV2.path == path)
+    res = await db.execute(stm)
+    duplicate_ids = res.scalars().all()
+    if duplicate_ids:
+        raise HTTPException(
+            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+            detail=(
+                f"Other TaskGroups already have {path=}: "
+                f"{sorted(duplicate_ids)}."
+            ),
+        )
+
+
 async def _add_warnings_to_workflow_tasks(
     wftask_list: list[WorkflowTaskV2], user_id: int, db: AsyncSession
 ) -> list[dict[str, Any]]:
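A small test-style sketch of the new helper's behavior, using mocks instead of a real database (it assumes fractal-server 2.15.0 is importable; everything else is stubbed):

```python
import asyncio
from unittest.mock import AsyncMock, MagicMock

from fastapi import HTTPException

from fractal_server.app.routes.api.v2._aux_functions_tasks import (
    _verify_non_duplication_group_path,
)


async def main() -> None:
    db = AsyncMock()

    # path=None short-circuits without touching the database.
    await _verify_non_duplication_group_path(path=None, db=db)
    db.execute.assert_not_called()

    # Simulate two existing task groups that already use the path.
    result = MagicMock()
    result.scalars.return_value.all.return_value = [2, 1]
    db.execute.return_value = result
    try:
        await _verify_non_duplication_group_path(path="/tasks/1/pkg/1.0", db=db)
    except HTTPException as exc:
        print(exc.status_code, exc.detail)  # 422, with sorted duplicate IDs


asyncio.run(main())
```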
fractal_server/app/routes/api/v2/task_collection.py
CHANGED
@@ -7,14 +7,12 @@ from fastapi import Depends
 from fastapi import File
 from fastapi import Form
 from fastapi import HTTPException
-from fastapi import Request
 from fastapi import Response
 from fastapi import status
 from fastapi import UploadFile
 from pydantic import BaseModel
 from pydantic import model_validator
 from pydantic import ValidationError
-from sqlmodel import select

 from .....config import get_settings
 from .....logger import reset_logger_handlers
@@ -23,15 +21,16 @@ from .....syringe import Inject
 from ....db import AsyncSession
 from ....db import get_async_db
 from ....models.v2 import TaskGroupV2
+from ....schemas.v2 import FractalUploadedFile
 from ....schemas.v2 import TaskCollectPipV2
 from ....schemas.v2 import TaskGroupActivityStatusV2
 from ....schemas.v2 import TaskGroupActivityV2Read
 from ....schemas.v2 import TaskGroupCreateV2Strict
-from ....schemas.v2 import WheelFile
 from ...aux.validate_user_settings import validate_user_settings
 from ._aux_functions_task_lifecycle import get_package_version_from_pypi
 from ._aux_functions_tasks import _get_valid_user_group_id
 from ._aux_functions_tasks import _verify_non_duplication_group_constraint
+from ._aux_functions_tasks import _verify_non_duplication_group_path
 from ._aux_functions_tasks import _verify_non_duplication_user_constraint
 from fractal_server.app.models import UserOAuth
 from fractal_server.app.models.v2 import TaskGroupActivityV2
@@ -151,7 +150,6 @@ def parse_request_data(
     response_model=TaskGroupActivityV2Read,
 )
 async def collect_tasks_pip(
-    request: Request,
     response: Response,
     background_tasks: BackgroundTasks,
     request_data: CollectionRequestData = Depends(parse_request_data),
@@ -208,13 +206,13 @@
     # Initialize wheel_file_content as None
     wheel_file = None

-    # Set pkg_name, version, origin and wheel_path
+    # Set pkg_name, version, origin and archive_path
     if request_data.origin == TaskGroupV2OriginEnum.WHEELFILE:
         try:
             wheel_filename = request_data.file.filename
             wheel_info = _parse_wheel_filename(wheel_filename)
             wheel_file_content = await request_data.file.read()
-            wheel_file = WheelFile(
+            wheel_file = FractalUploadedFile(
                 filename=wheel_filename,
                 contents=wheel_file_content,
             )
@@ -293,18 +291,10 @@
         version=task_group_attrs["version"],
         db=db,
     )
-    stm = select(TaskGroupV2).where(
-        TaskGroupV2.path == task_group_attrs["path"]
-    )
-    res = await db.execute(stm)
-    for conflicting_task_group in res.scalars().all():
-        raise HTTPException(
-            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
-            detail=(
-                f"Another task-group already has path={task_group_path}.\n"
-                f"{conflicting_task_group=}"
-            ),
-        )
+    await _verify_non_duplication_group_path(
+        path=task_group_attrs["path"],
+        db=db,
+    )

     # On-disk checks

fractal_server/app/routes/api/v2/task_collection_custom.py
CHANGED
@@ -26,7 +26,7 @@ from fractal_server.logger import set_logger
 from fractal_server.string_tools import validate_cmd
 from fractal_server.syringe import Inject
 from fractal_server.tasks.v2.utils_background import (
-    _prepare_tasks_metadata,
+    prepare_tasks_metadata,
 )
 from fractal_server.tasks.v2.utils_database import (
     create_db_tasks_and_update_task_group_async,
@@ -138,7 +138,7 @@ async def collect_task_custom(
     else:
         package_root = Path(task_collect.package_root)

-    task_list: list[TaskCreateV2] = _prepare_tasks_metadata(
+    task_list: list[TaskCreateV2] = prepare_tasks_metadata(
         package_manifest=task_collect.manifest,
         python_bin=Path(task_collect.python_interpreter),
         package_root=package_root,
fractal_server/app/routes/api/v2/task_collection_pixi.py
ADDED
@@ -0,0 +1,219 @@
+import os
+from pathlib import Path
+
+from fastapi import APIRouter
+from fastapi import BackgroundTasks
+from fastapi import Depends
+from fastapi import Form
+from fastapi import HTTPException
+from fastapi import Response
+from fastapi import status
+from fastapi import UploadFile
+
+from fractal_server.app.db import AsyncSession
+from fractal_server.app.db import get_async_db
+from fractal_server.app.models import UserOAuth
+from fractal_server.app.models.v2 import TaskGroupActivityV2
+from fractal_server.app.models.v2 import TaskGroupV2
+from fractal_server.app.routes.api.v2._aux_functions_tasks import (
+    _get_valid_user_group_id,
+)
+from fractal_server.app.routes.api.v2._aux_functions_tasks import (
+    _verify_non_duplication_group_constraint,
+)
+from fractal_server.app.routes.api.v2._aux_functions_tasks import (
+    _verify_non_duplication_group_path,
+)
+from fractal_server.app.routes.api.v2._aux_functions_tasks import (
+    _verify_non_duplication_user_constraint,
+)
+from fractal_server.app.routes.auth import current_active_verified_user
+from fractal_server.app.routes.aux.validate_user_settings import (
+    validate_user_settings,
+)
+from fractal_server.app.schemas.v2 import FractalUploadedFile
+from fractal_server.app.schemas.v2 import TaskGroupActivityActionV2
+from fractal_server.app.schemas.v2 import TaskGroupActivityStatusV2
+from fractal_server.app.schemas.v2 import TaskGroupActivityV2Read
+from fractal_server.app.schemas.v2.task_group import TaskGroupV2OriginEnum
+from fractal_server.config import get_settings
+from fractal_server.logger import set_logger
+from fractal_server.ssh._fabric import SSHConfig
+from fractal_server.syringe import Inject
+from fractal_server.tasks.v2.local import collect_local_pixi
+from fractal_server.tasks.v2.ssh import collect_ssh_pixi
+from fractal_server.tasks.v2.utils_package_names import normalize_package_name
+from fractal_server.types import NonEmptyStr
+
+
+router = APIRouter()
+
+logger = set_logger(__name__)
+
+
+def validate_pkgname_and_version(filename: str) -> tuple[str, str]:
+    if not filename.endswith(".tar.gz"):
+        raise HTTPException(
+            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+            detail=f"{filename=} does not end with '.tar.gz'.",
+        )
+    filename_splitted = filename.split("-")
+    if len(filename_splitted) != 2:
+        raise HTTPException(
+            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+            detail=(
+                f"Invalid filename: '{filename}' must contain a single `-` "
+                "character, separating the package name from the version "
+                "(expected format: 'pkg_name-version')."
+            ),
+        )
+
+    pkg_name = filename_splitted[0]
+    version = filename.removeprefix(f"{pkg_name}-").removesuffix(".tar.gz")
+
+    return normalize_package_name(pkg_name), version
+
+
+@router.post(
+    "/collect/pixi/",
+    status_code=202,
+    response_model=TaskGroupActivityV2Read,
+)
+async def collect_task_pixi(
+    response: Response,
+    background_tasks: BackgroundTasks,
+    file: UploadFile,
+    pixi_version: NonEmptyStr | None = Form(None),
+    private: bool = False,
+    user_group_id: int | None = None,
+    user: UserOAuth = Depends(current_active_verified_user),
+    db: AsyncSession = Depends(get_async_db),
+) -> TaskGroupActivityV2Read:
+
+    settings = Inject(get_settings)
+    # Check if Pixi is available
+    if settings.pixi is None:
+        raise HTTPException(
+            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+            detail="Pixi task collection is not available.",
+        )
+    # Check if provided Pixi version is available. Use default if not provided
+    if pixi_version is None:
+        pixi_version = settings.pixi.default_version
+    else:
+        if pixi_version not in settings.pixi.versions:
+            raise HTTPException(
+                status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+                detail=(
+                    f"Pixi version {pixi_version} is not available. Available "
+                    f"versions: {list(settings.pixi.versions.keys())}"
+                ),
+            )
+
+    pkg_name, version = validate_pkgname_and_version(file.filename)
+    tar_gz_content = await file.read()
+    tar_gz_file = FractalUploadedFile(
+        filename=file.filename,
+        contents=tar_gz_content,
+    )
+
+    user_group_id = await _get_valid_user_group_id(
+        user_group_id=user_group_id,
+        private=private,
+        user_id=user.id,
+        db=db,
+    )
+
+    user_settings = await validate_user_settings(
+        user=user, backend=settings.FRACTAL_RUNNER_BACKEND, db=db
+    )
+
+    if settings.FRACTAL_RUNNER_BACKEND == "slurm_ssh":
+        base_tasks_path = user_settings.ssh_tasks_dir
+    else:
+        base_tasks_path = settings.FRACTAL_TASKS_DIR.as_posix()
+    task_group_path = (
+        Path(base_tasks_path) / str(user.id) / pkg_name / version
+    ).as_posix()
+
+    task_group_attrs = dict(
+        user_id=user.id,
+        user_group_id=user_group_id,
+        origin=TaskGroupV2OriginEnum.PIXI,
+        pixi_version=pixi_version,
+        pkg_name=pkg_name,
+        version=version,
+        path=task_group_path,
+    )
+
+    await _verify_non_duplication_user_constraint(
+        user_id=user.id,
+        pkg_name=task_group_attrs["pkg_name"],
+        version=task_group_attrs["version"],
+        db=db,
+    )
+    await _verify_non_duplication_group_constraint(
+        user_group_id=task_group_attrs["user_group_id"],
+        pkg_name=task_group_attrs["pkg_name"],
+        version=task_group_attrs["version"],
+        db=db,
+    )
+    await _verify_non_duplication_group_path(
+        path=task_group_attrs["path"],
+        db=db,
+    )
+
+    if settings.FRACTAL_RUNNER_BACKEND != "slurm_ssh":
+        if Path(task_group_path).exists():
+            raise HTTPException(
+                status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+                detail=f"{task_group_path} already exists.",
+            )
+
+    task_group = TaskGroupV2(**task_group_attrs)
+    db.add(task_group)
+    await db.commit()
+    await db.refresh(task_group)
+    db.expunge(task_group)
+
+    task_group_activity = TaskGroupActivityV2(
+        user_id=task_group.user_id,
+        taskgroupv2_id=task_group.id,
+        status=TaskGroupActivityStatusV2.PENDING,
+        action=TaskGroupActivityActionV2.COLLECT,
+        pkg_name=task_group.pkg_name,
+        version=task_group.version,
+    )
+    db.add(task_group_activity)
+    await db.commit()
+    await db.refresh(task_group_activity)
+
+    if settings.FRACTAL_RUNNER_BACKEND == "slurm_ssh":
+        ssh_config = SSHConfig(
+            user=user_settings.ssh_username,
+            host=user_settings.ssh_host,
+            key_path=user_settings.ssh_private_key_path,
+        )
+
+        background_tasks.add_task(
+            collect_ssh_pixi,
+            task_group_id=task_group.id,
+            task_group_activity_id=task_group_activity.id,
+            ssh_config=ssh_config,
+            tasks_base_dir=user_settings.ssh_tasks_dir,
+            tar_gz_file=tar_gz_file,
+        )
+    else:
+        background_tasks.add_task(
+            collect_local_pixi,
+            task_group_id=task_group.id,
+            task_group_activity_id=task_group_activity.id,
+            tar_gz_file=tar_gz_file,
+        )
+    logger.info(
+        "Task-collection endpoint: start background collection "
+        "and return task_group_activity. "
+        f"Current pid is {os.getpid()}. "
+    )
+    response.status_code = status.HTTP_202_ACCEPTED
+    return task_group_activity
fractal_server/app/routes/api/v2/task_group.py
CHANGED
@@ -12,6 +12,7 @@ from pydantic.types import AwareDatetime
 from sqlmodel import or_
 from sqlmodel import select

+from ._aux_functions_task_lifecycle import check_no_ongoing_activity
 from ._aux_functions_tasks import _get_task_group_full_access
 from ._aux_functions_tasks import _get_task_group_read_access
 from ._aux_functions_tasks import _verify_non_duplication_group_constraint
@@ -216,6 +217,8 @@ async def delete_task_group(
         db=db,
     )

+    await check_no_ongoing_activity(task_group_id=task_group_id, db=db)
+
     stm = select(WorkflowTaskV2).where(
         WorkflowTaskV2.task_id.in_({task.id for task in task_group.task_list})
     )