fractal-server 2.14.15__py3-none-any.whl → 2.15.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fractal_server/__init__.py +1 -1
- fractal_server/app/models/security.py +2 -2
- fractal_server/app/models/user_settings.py +2 -2
- fractal_server/app/models/v2/dataset.py +3 -3
- fractal_server/app/models/v2/history.py +2 -0
- fractal_server/app/models/v2/job.py +6 -6
- fractal_server/app/models/v2/task.py +12 -8
- fractal_server/app/models/v2/task_group.py +19 -7
- fractal_server/app/models/v2/workflowtask.py +6 -6
- fractal_server/app/routes/admin/v2/task_group_lifecycle.py +2 -5
- fractal_server/app/routes/api/v2/__init__.py +6 -0
- fractal_server/app/routes/api/v2/_aux_functions_tasks.py +22 -0
- fractal_server/app/routes/api/v2/history.py +2 -2
- fractal_server/app/routes/api/v2/pre_submission_checks.py +3 -3
- fractal_server/app/routes/api/v2/task_collection.py +8 -18
- fractal_server/app/routes/api/v2/task_collection_custom.py +2 -2
- fractal_server/app/routes/api/v2/task_collection_pixi.py +219 -0
- fractal_server/app/routes/api/v2/task_group.py +3 -0
- fractal_server/app/routes/api/v2/task_group_lifecycle.py +26 -10
- fractal_server/app/runner/executors/slurm_common/_slurm_config.py +10 -0
- fractal_server/app/runner/executors/slurm_common/base_slurm_runner.py +39 -14
- fractal_server/app/runner/executors/slurm_common/get_slurm_config.py +8 -1
- fractal_server/app/runner/executors/slurm_ssh/runner.py +3 -1
- fractal_server/app/runner/v2/runner.py +2 -2
- fractal_server/app/schemas/v2/__init__.py +1 -1
- fractal_server/app/schemas/v2/dumps.py +1 -1
- fractal_server/app/schemas/v2/task_collection.py +1 -1
- fractal_server/app/schemas/v2/task_group.py +7 -5
- fractal_server/config.py +70 -0
- fractal_server/images/status_tools.py +80 -75
- fractal_server/migrations/versions/791ce783d3d8_add_indices.py +41 -0
- fractal_server/migrations/versions/b1e7f7a1ff71_task_group_for_pixi.py +53 -0
- fractal_server/migrations/versions/b3ffb095f973_json_to_jsonb.py +340 -0
- fractal_server/ssh/_fabric.py +29 -0
- fractal_server/tasks/v2/local/__init__.py +3 -0
- fractal_server/tasks/v2/local/_utils.py +4 -3
- fractal_server/tasks/v2/local/collect.py +26 -30
- fractal_server/tasks/v2/local/collect_pixi.py +252 -0
- fractal_server/tasks/v2/local/deactivate.py +39 -46
- fractal_server/tasks/v2/local/deactivate_pixi.py +98 -0
- fractal_server/tasks/v2/local/reactivate.py +12 -23
- fractal_server/tasks/v2/local/reactivate_pixi.py +184 -0
- fractal_server/tasks/v2/ssh/__init__.py +3 -0
- fractal_server/tasks/v2/ssh/_utils.py +50 -9
- fractal_server/tasks/v2/ssh/collect.py +46 -56
- fractal_server/tasks/v2/ssh/collect_pixi.py +315 -0
- fractal_server/tasks/v2/ssh/deactivate.py +54 -67
- fractal_server/tasks/v2/ssh/deactivate_pixi.py +122 -0
- fractal_server/tasks/v2/ssh/reactivate.py +25 -38
- fractal_server/tasks/v2/ssh/reactivate_pixi.py +233 -0
- fractal_server/tasks/v2/templates/pixi_1_extract.sh +40 -0
- fractal_server/tasks/v2/templates/pixi_2_install.sh +52 -0
- fractal_server/tasks/v2/templates/pixi_3_post_install.sh +76 -0
- fractal_server/tasks/v2/utils_background.py +50 -8
- fractal_server/tasks/v2/utils_pixi.py +38 -0
- fractal_server/tasks/v2/utils_templates.py +14 -1
- {fractal_server-2.14.15.dist-info → fractal_server-2.15.0.dist-info}/METADATA +4 -4
- {fractal_server-2.14.15.dist-info → fractal_server-2.15.0.dist-info}/RECORD +61 -47
- {fractal_server-2.14.15.dist-info → fractal_server-2.15.0.dist-info}/LICENSE +0 -0
- {fractal_server-2.14.15.dist-info → fractal_server-2.15.0.dist-info}/WHEEL +0 -0
- {fractal_server-2.14.15.dist-info → fractal_server-2.15.0.dist-info}/entry_points.txt +0 -0
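Most of the model changes below are a single mechanical swap, JSON → JSONB, backed by a data migration (b3ffb095f973_json_to_jsonb.py, whose full contents are not shown in this diff). A minimal sketch of what one such Alembic column alteration could look like — the table and column names here are illustrative assumptions, not the actual migration body:

    import sqlalchemy as sa
    from alembic import op
    from sqlalchemy.dialects.postgresql import JSONB

    def upgrade() -> None:
        # Cast the existing JSON column to JSONB in place;
        # `postgresql_using` supplies the explicit cast expression.
        op.alter_column(
            "datasetv2",
            "history",
            existing_type=sa.JSON(),
            type_=JSONB(),
            existing_nullable=False,
            postgresql_using="history::jsonb",
        )

    def downgrade() -> None:
        op.alter_column(
            "datasetv2",
            "history",
            existing_type=JSONB(),
            type_=sa.JSON(),
            existing_nullable=False,
            postgresql_using="history::json",
        )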
fractal_server/__init__.py
CHANGED
@@ -1 +1 @@
-__VERSION__ = "2.14.15"
+__VERSION__ = "2.15.0"
fractal_server/app/models/security.py
CHANGED
@@ -15,8 +15,8 @@ from typing import Optional
 from pydantic import ConfigDict
 from pydantic import EmailStr
 from sqlalchemy import Column
+from sqlalchemy.dialects.postgresql import JSONB
 from sqlalchemy.types import DateTime
-from sqlalchemy.types import JSON
 from sqlmodel import Field
 from sqlmodel import Relationship
 from sqlmodel import SQLModel
@@ -124,5 +124,5 @@ class UserGroup(SQLModel, table=True):
         sa_column=Column(DateTime(timezone=True), nullable=False),
     )
     viewer_paths: list[str] = Field(
-        sa_column=Column(JSON, server_default="[]", nullable=False)
+        sa_column=Column(JSONB, server_default="[]", nullable=False)
     )
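The same substitution repeats in every model file below: each `sa_column=Column(JSON, ...)` becomes `Column(JSONB, ...)`. A self-contained sketch of the new pattern (the model name is illustrative):

    from sqlalchemy import Column
    from sqlalchemy.dialects.postgresql import JSONB
    from sqlmodel import Field, SQLModel

    class ExampleModel(SQLModel, table=True):
        id: int | None = Field(default=None, primary_key=True)
        # Stored as PostgreSQL JSONB; the server_default covers rows
        # inserted outside the ORM.
        tags: list[str] = Field(
            sa_column=Column(JSONB, server_default="[]", nullable=False)
        )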
fractal_server/app/models/user_settings.py
CHANGED
@@ -1,5 +1,5 @@
 from sqlalchemy import Column
-from sqlalchemy.types import JSON
+from sqlalchemy.dialects.postgresql import JSONB
 from sqlmodel import Field
 from sqlmodel import SQLModel
 
@@ -25,7 +25,7 @@ class UserSettings(SQLModel, table=True):
 
     id: int | None = Field(default=None, primary_key=True)
     slurm_accounts: list[str] = Field(
-        sa_column=Column(JSON, server_default="[]", nullable=False)
+        sa_column=Column(JSONB, server_default="[]", nullable=False)
     )
     ssh_host: str | None = None
     ssh_username: str | None = None
fractal_server/app/models/v2/dataset.py
CHANGED
@@ -3,8 +3,8 @@ from typing import Any
 
 from pydantic import ConfigDict
 from sqlalchemy import Column
+from sqlalchemy.dialects.postgresql import JSONB
 from sqlalchemy.types import DateTime
-from sqlalchemy.types import JSON
 from sqlmodel import Field
 from sqlmodel import Relationship
 from sqlmodel import SQLModel
@@ -24,7 +24,7 @@ class DatasetV2(SQLModel, table=True):
     )
 
     history: list[dict[str, Any]] = Field(
-        sa_column=Column(JSON, server_default="[]", nullable=False)
+        sa_column=Column(JSONB, server_default="[]", nullable=False)
     )
 
     timestamp_created: datetime = Field(
@@ -34,7 +34,7 @@ class DatasetV2(SQLModel, table=True):
 
     zarr_dir: str
     images: list[dict[str, Any]] = Field(
-        sa_column=Column(JSON, server_default="[]", nullable=False)
+        sa_column=Column(JSONB, server_default="[]", nullable=False)
     )
 
     @property
fractal_server/app/models/v2/history.py
CHANGED
@@ -65,11 +65,13 @@ class HistoryImageCache(SQLModel, table=True):
         primary_key=True,
         foreign_key="datasetv2.id",
         ondelete="CASCADE",
+        index=True,
     )
     workflowtask_id: int = Field(
         primary_key=True,
         foreign_key="workflowtaskv2.id",
         ondelete="CASCADE",
+        index=True,
     )
 
     latest_history_unit_id: int = Field(
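The two new `index=True` flags imply matching database indices; the file list above includes 791ce783d3d8_add_indices.py, whose contents are not shown here. A plausible sketch of such a migration — the index names are assumptions based on SQLAlchemy's default `ix_<table>_<column>` naming:

    from alembic import op

    def upgrade() -> None:
        # Speed up HistoryImageCache lookups by dataset and workflowtask.
        op.create_index(
            op.f("ix_historyimagecache_dataset_id"),
            "historyimagecache",
            ["dataset_id"],
        )
        op.create_index(
            op.f("ix_historyimagecache_workflowtask_id"),
            "historyimagecache",
            ["workflowtask_id"],
        )

    def downgrade() -> None:
        op.drop_index(
            op.f("ix_historyimagecache_workflowtask_id"),
            table_name="historyimagecache",
        )
        op.drop_index(
            op.f("ix_historyimagecache_dataset_id"),
            table_name="historyimagecache",
        )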
fractal_server/app/models/v2/job.py
CHANGED
@@ -3,8 +3,8 @@ from typing import Any
 
 from pydantic import ConfigDict
 from sqlalchemy import Column
+from sqlalchemy.dialects.postgresql import JSONB
 from sqlalchemy.types import DateTime
-from sqlalchemy.types import JSON
 from sqlmodel import Field
 from sqlmodel import SQLModel
 
@@ -31,13 +31,13 @@ class JobV2(SQLModel, table=True):
     slurm_account: str | None = None
 
     dataset_dump: dict[str, Any] = Field(
-        sa_column=Column(JSON, nullable=False)
+        sa_column=Column(JSONB, nullable=False)
     )
     workflow_dump: dict[str, Any] = Field(
-        sa_column=Column(JSON, nullable=False)
+        sa_column=Column(JSONB, nullable=False)
     )
     project_dump: dict[str, Any] = Field(
-        sa_column=Column(JSON, nullable=False)
+        sa_column=Column(JSONB, nullable=False)
     )
 
     worker_init: str | None = None
@@ -57,8 +57,8 @@ class JobV2(SQLModel, table=True):
     log: str | None = None
 
     attribute_filters: AttributeFilters = Field(
-        sa_column=Column(JSON, nullable=False, server_default="{}")
+        sa_column=Column(JSONB, nullable=False, server_default="{}")
     )
     type_filters: dict[str, bool] = Field(
-        sa_column=Column(JSON, nullable=False, server_default="{}")
+        sa_column=Column(JSONB, nullable=False, server_default="{}")
     )
fractal_server/app/models/v2/task.py
CHANGED
@@ -1,7 +1,7 @@
 from typing import Any
 
 from sqlalchemy import Column
-from sqlalchemy.types import JSON
+from sqlalchemy.dialects.postgresql import JSONB
 from sqlmodel import Field
 from sqlmodel import SQLModel
 
@@ -16,25 +16,29 @@ class TaskV2(SQLModel, table=True):
     source: str | None = None
 
     meta_non_parallel: dict[str, Any] = Field(
-        sa_column=Column(JSON, server_default="{}", default={}, nullable=False)
+        sa_column=Column(
+            JSONB, server_default="{}", default={}, nullable=False
+        )
     )
     meta_parallel: dict[str, Any] = Field(
-        sa_column=Column(JSON, server_default="{}", default={}, nullable=False)
+        sa_column=Column(
+            JSONB, server_default="{}", default={}, nullable=False
+        )
     )
 
     version: str | None = None
     args_schema_non_parallel: dict[str, Any] | None = Field(
-        sa_column=Column(JSON), default=None
+        sa_column=Column(JSONB), default=None
     )
     args_schema_parallel: dict[str, Any] | None = Field(
-        sa_column=Column(JSON), default=None
+        sa_column=Column(JSONB), default=None
     )
     args_schema_version: str | None = None
     docs_info: str | None = None
     docs_link: str | None = None
 
-    input_types: dict[str, bool] = Field(sa_column=Column(JSON), default={})
-    output_types: dict[str, bool] = Field(sa_column=Column(JSON), default={})
+    input_types: dict[str, bool] = Field(sa_column=Column(JSONB), default={})
+    output_types: dict[str, bool] = Field(sa_column=Column(JSONB), default={})
 
     taskgroupv2_id: int = Field(foreign_key="taskgroupv2.id")
 
@@ -42,5 +46,5 @@ class TaskV2(SQLModel, table=True):
     modality: str | None = None
     authors: str | None = None
     tags: list[str] = Field(
-        sa_column=Column(JSON, server_default="[]", nullable=False)
+        sa_column=Column(JSONB, server_default="[]", nullable=False)
    )
fractal_server/app/models/v2/task_group.py
CHANGED
@@ -2,8 +2,8 @@ from datetime import datetime
 from datetime import timezone
 
 from sqlalchemy import Column
+from sqlalchemy.dialects.postgresql import JSONB
 from sqlalchemy.types import DateTime
-from sqlalchemy.types import JSON
 from sqlmodel import Field
 from sqlmodel import Relationship
 from sqlmodel import SQLModel
@@ -29,18 +29,19 @@ class TaskGroupV2(SQLModel, table=True):
     pkg_name: str
     version: str | None = None
     python_version: str | None = None
+    pixi_version: str | None = None
     path: str | None = None
-    wheel_path: str | None = None
+    archive_path: str | None = None
     pip_extras: str | None = None
     pinned_package_versions: dict[str, str] = Field(
         sa_column=Column(
-            JSON,
+            JSONB,
             server_default="{}",
            default={},
            nullable=True,
         ),
     )
-    pip_freeze: str | None = None
+    env_info: str | None = None
     venv_path: str | None = None
     venv_size_in_kB: int | None = None
     venv_file_number: int | None = None
@@ -66,15 +67,20 @@ class TaskGroupV2(SQLModel, table=True):
         """
         Prepare string to be used in `python -m pip install`.
         """
+        if self.origin == "pixi":
+            raise ValueError(
+                f"Cannot call 'pip_install_string' if {self.origin=}."
+            )
+
         extras = f"[{self.pip_extras}]" if self.pip_extras is not None else ""
 
-        if self.wheel_path is not None:
-            return f"{self.wheel_path}{extras}"
+        if self.archive_path is not None:
+            return f"{self.archive_path}{extras}"
         else:
             if self.version is None:
                 raise ValueError(
                     "Cannot run `pip_install_string` with "
-                    f"{self.pkg_name=}, {self.wheel_path=}, {self.version=}."
+                    f"{self.pkg_name=}, {self.archive_path=}, {self.version=}."
                 )
             return f"{self.pkg_name}{extras}=={self.version}"
 
@@ -83,6 +89,12 @@ class TaskGroupV2(SQLModel, table=True):
         """
         Prepare string to be used in `python -m pip install`.
         """
+        if self.origin == "pixi":
+            raise ValueError(
+                "Cannot call 'pinned_package_versions_string' if "
+                f"{self.origin=}."
+            )
+
         if self.pinned_package_versions is None:
             return ""
         output = " ".join(
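Illustrative behavior of the renamed fields and the new pixi guard — the values below are made up, and the constructor arguments are simplified relative to the real model:

    # Hypothetical values; TaskGroupV2 as defined above.
    tg = TaskGroupV2(pkg_name="my-tasks", version="1.0.0", origin="pypi")
    assert tg.pip_install_string == "my-tasks==1.0.0"

    tg.pip_extras = "extra1"
    assert tg.pip_install_string == "my-tasks[extra1]==1.0.0"

    # A local archive (formerly `wheel_path`) takes precedence over
    # the `name==version` form.
    tg.archive_path = "/somewhere/my_tasks-1.0.0-py3-none-any.whl"
    assert tg.pip_install_string == (
        "/somewhere/my_tasks-1.0.0-py3-none-any.whl[extra1]"
    )

    # pixi-based task groups are not pip-installable.
    tg.origin = "pixi"
    # tg.pip_install_string  # now raises ValueError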
fractal_server/app/models/v2/workflowtask.py
CHANGED
@@ -2,7 +2,7 @@ from typing import Any
 
 from pydantic import ConfigDict
 from sqlalchemy import Column
-from sqlalchemy.types import JSON
+from sqlalchemy.dialects.postgresql import JSONB
 from sqlmodel import Field
 from sqlmodel import Relationship
 from sqlmodel import SQLModel
@@ -18,20 +18,20 @@ class WorkflowTaskV2(SQLModel, table=True):
     workflow_id: int = Field(foreign_key="workflowv2.id", ondelete="CASCADE")
     order: int | None = None
     meta_parallel: dict[str, Any] | None = Field(
-        sa_column=Column(JSON), default=None
+        sa_column=Column(JSONB), default=None
     )
     meta_non_parallel: dict[str, Any] | None = Field(
-        sa_column=Column(JSON), default=None
+        sa_column=Column(JSONB), default=None
     )
     args_parallel: dict[str, Any] | None = Field(
-        sa_column=Column(JSON), default=None
+        sa_column=Column(JSONB), default=None
     )
     args_non_parallel: dict[str, Any] | None = Field(
-        sa_column=Column(JSON), default=None
+        sa_column=Column(JSONB), default=None
     )
 
     type_filters: dict[str, bool] = Field(
-        sa_column=Column(JSON, nullable=False, server_default="{}")
+        sa_column=Column(JSONB, nullable=False, server_default="{}")
     )
 
     # Task
fractal_server/app/routes/admin/v2/task_group_lifecycle.py
CHANGED
@@ -2,7 +2,6 @@ from fastapi import APIRouter
 from fastapi import BackgroundTasks
 from fastapi import Depends
 from fastapi import HTTPException
-from fastapi import Request
 from fastapi import Response
 from fastapi import status
 
@@ -51,7 +50,6 @@ async def deactivate_task_group(
     task_group_id: int,
     background_tasks: BackgroundTasks,
     response: Response,
-    request: Request,
     superuser: UserOAuth = Depends(current_active_superuser),
     db: AsyncSession = Depends(get_async_db),
 ) -> TaskGroupReadV2:
@@ -157,7 +155,6 @@ async def reactivate_task_group(
     task_group_id: int,
     background_tasks: BackgroundTasks,
     response: Response,
-    request: Request,
     superuser: UserOAuth = Depends(current_active_superuser),
     db: AsyncSession = Depends(get_async_db),
 ) -> TaskGroupReadV2:
@@ -207,12 +204,12 @@ async def reactivate_task_group(
     response.status_code = status.HTTP_202_ACCEPTED
     return task_group_activity
 
-    if task_group.pip_freeze is None:
+    if task_group.env_info is None:
         raise HTTPException(
             status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
             detail=(
                 "Cannot reactivate a task group with "
-                f"{task_group.pip_freeze=}."
+                f"{task_group.env_info=}."
             ),
         )
 
fractal_server/app/routes/api/v2/__init__.py
CHANGED
@@ -14,6 +14,7 @@ from .submit import router as submit_job_router_v2
 from .task import router as task_router_v2
 from .task_collection import router as task_collection_router_v2
 from .task_collection_custom import router as task_collection_router_v2_custom
+from .task_collection_pixi import router as task_collection_pixi_router_v2
 from .task_group import router as task_group_router_v2
 from .task_group_lifecycle import router as task_group_lifecycle_router_v2
 from .task_version_update import router as task_version_update_router_v2
@@ -49,6 +50,11 @@ router_api_v2.include_router(
     prefix="/task",
     tags=["V2 Task Lifecycle"],
 )
+router_api_v2.include_router(
+    task_collection_pixi_router_v2,
+    prefix="/task",
+    tags=["V2 Task Lifecycle"],
+)
 router_api_v2.include_router(
     task_group_lifecycle_router_v2,
     prefix="/task-group",
fractal_server/app/routes/api/v2/_aux_functions_tasks.py
CHANGED
@@ -333,6 +333,28 @@ async def _verify_non_duplication_group_constraint(
     )
 
 
+async def _verify_non_duplication_group_path(
+    path: str | None,
+    db: AsyncSession,
+) -> None:
+    """
+    Verify uniqueness of non-`None` `TaskGroupV2.path`
+    """
+    if path is None:
+        return
+    stm = select(TaskGroupV2.id).where(TaskGroupV2.path == path)
+    res = await db.execute(stm)
+    duplicate_ids = res.scalars().all()
+    if duplicate_ids:
+        raise HTTPException(
+            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+            detail=(
+                f"Other TaskGroups already have {path=}: "
+                f"{sorted(duplicate_ids)}."
+            ),
+        )
+
+
 async def _add_warnings_to_workflow_tasks(
     wftask_list: list[WorkflowTaskV2], user_id: int, db: AsyncSession
 ) -> list[dict[str, Any]]:
fractal_server/app/routes/api/v2/history.py
CHANGED
@@ -34,7 +34,7 @@ from fractal_server.app.schemas.v2 import HistoryUnitRead
 from fractal_server.app.schemas.v2 import HistoryUnitStatus
 from fractal_server.app.schemas.v2 import HistoryUnitStatusWithUnset
 from fractal_server.app.schemas.v2 import ImageLogsRequest
-from fractal_server.images.status_tools import enrich_images_async
+from fractal_server.images.status_tools import enrich_images_unsorted_async
 from fractal_server.images.status_tools import IMAGE_STATUS_KEY
 from fractal_server.images.tools import aggregate_attributes
 from fractal_server.images.tools import aggregate_types
@@ -334,7 +334,7 @@ async def get_history_images(
     types = aggregate_types(type_filtered_images)
 
     # (3) Enrich images with status attribute
-    type_filtered_images_with_status = await enrich_images_async(
+    type_filtered_images_with_status = await enrich_images_unsorted_async(
         dataset_id=dataset_id,
         workflowtask_id=workflowtask_id,
         images=type_filtered_images,
fractal_server/app/routes/api/v2/pre_submission_checks.py
CHANGED
@@ -14,7 +14,7 @@ from fractal_server.app.models import UserOAuth
 from fractal_server.app.routes.auth import current_active_user
 from fractal_server.app.schemas.v2 import HistoryUnitStatus
 from fractal_server.app.schemas.v2 import TaskType
-from fractal_server.images.status_tools import enrich_images_async
+from fractal_server.images.status_tools import enrich_images_unsorted_async
 from fractal_server.images.status_tools import IMAGE_STATUS_KEY
 from fractal_server.images.tools import aggregate_types
 from fractal_server.images.tools import filter_image_list
@@ -46,7 +46,7 @@ async def verify_unique_types(
         filtered_images = dataset.images
     else:
         if IMAGE_STATUS_KEY in query.attribute_filters.keys():
-            images = await enrich_images_async(
+            images = await enrich_images_unsorted_async(
                 dataset_id=dataset_id,
                 workflowtask_id=workflowtask_id,
                 images=dataset.images,
@@ -134,7 +134,7 @@ async def check_non_processed_images(
         attribute_filters=filters.attribute_filters,
     )
 
-    filtered_images_with_status = await enrich_images_async(
+    filtered_images_with_status = await enrich_images_unsorted_async(
         dataset_id=dataset_id,
         workflowtask_id=previous_wft.id,
         images=filtered_images,
fractal_server/app/routes/api/v2/task_collection.py
CHANGED
@@ -7,14 +7,12 @@ from fastapi import Depends
 from fastapi import File
 from fastapi import Form
 from fastapi import HTTPException
-from fastapi import Request
 from fastapi import Response
 from fastapi import status
 from fastapi import UploadFile
 from pydantic import BaseModel
 from pydantic import model_validator
 from pydantic import ValidationError
-from sqlmodel import select
 
 from .....config import get_settings
 from .....logger import reset_logger_handlers
@@ -23,15 +21,16 @@ from .....syringe import Inject
 from ....db import AsyncSession
 from ....db import get_async_db
 from ....models.v2 import TaskGroupV2
+from ....schemas.v2 import FractalUploadedFile
 from ....schemas.v2 import TaskCollectPipV2
 from ....schemas.v2 import TaskGroupActivityStatusV2
 from ....schemas.v2 import TaskGroupActivityV2Read
 from ....schemas.v2 import TaskGroupCreateV2Strict
-from ....schemas.v2 import WheelFile
 from ...aux.validate_user_settings import validate_user_settings
 from ._aux_functions_task_lifecycle import get_package_version_from_pypi
 from ._aux_functions_tasks import _get_valid_user_group_id
 from ._aux_functions_tasks import _verify_non_duplication_group_constraint
+from ._aux_functions_tasks import _verify_non_duplication_group_path
 from ._aux_functions_tasks import _verify_non_duplication_user_constraint
 from fractal_server.app.models import UserOAuth
 from fractal_server.app.models.v2 import TaskGroupActivityV2
@@ -151,7 +150,6 @@ def parse_request_data(
     response_model=TaskGroupActivityV2Read,
 )
 async def collect_tasks_pip(
-    request: Request,
     response: Response,
     background_tasks: BackgroundTasks,
     request_data: CollectionRequestData = Depends(parse_request_data),
@@ -208,13 +206,13 @@ async def collect_tasks_pip(
     # Initialize wheel_file_content as None
     wheel_file = None
 
-    # Set pkg_name, version, origin and wheel_path
+    # Set pkg_name, version, origin and archive_path
     if request_data.origin == TaskGroupV2OriginEnum.WHEELFILE:
         try:
             wheel_filename = request_data.file.filename
             wheel_info = _parse_wheel_filename(wheel_filename)
             wheel_file_content = await request_data.file.read()
-            wheel_file = WheelFile(
+            wheel_file = FractalUploadedFile(
                 filename=wheel_filename,
                 contents=wheel_file_content,
            )
@@ -293,18 +291,10 @@ async def collect_tasks_pip(
         version=task_group_attrs["version"],
         db=db,
     )
-
-    # Verify that task-group path is unique
-    stm = select(TaskGroupV2).where(TaskGroupV2.path == task_group_path)
-    res = await db.execute(stm)
-    for conflicting_task_group in res.scalars().all():
-        raise HTTPException(
-            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
-            detail=(
-                f"Another task-group already has path={task_group_path}.\n"
-                f"{conflicting_task_group=}"
-            ),
-        )
+    await _verify_non_duplication_group_path(
+        path=task_group_attrs["path"],
+        db=db,
+    )
 
     # On-disk checks
 
fractal_server/app/routes/api/v2/task_collection_custom.py
CHANGED
@@ -26,7 +26,7 @@ from fractal_server.logger import set_logger
 from fractal_server.string_tools import validate_cmd
 from fractal_server.syringe import Inject
 from fractal_server.tasks.v2.utils_background import (
-    _prepare_tasks_metadata,
+    prepare_tasks_metadata,
 )
 from fractal_server.tasks.v2.utils_database import (
     create_db_tasks_and_update_task_group_async,
@@ -138,7 +138,7 @@ async def collect_task_custom(
     else:
         package_root = Path(task_collect.package_root)
 
-    task_list: list[TaskCreateV2] = _prepare_tasks_metadata(
+    task_list: list[TaskCreateV2] = prepare_tasks_metadata(
         package_manifest=task_collect.manifest,
         python_bin=Path(task_collect.python_interpreter),
         package_root=package_root,