fractal-server 2.13.1__py3-none-any.whl → 2.14.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fractal_server/__init__.py +1 -1
- fractal_server/__main__.py +3 -1
- fractal_server/app/models/linkusergroup.py +6 -2
- fractal_server/app/models/v2/__init__.py +7 -1
- fractal_server/app/models/v2/dataset.py +1 -11
- fractal_server/app/models/v2/history.py +78 -0
- fractal_server/app/models/v2/job.py +10 -3
- fractal_server/app/models/v2/task_group.py +2 -2
- fractal_server/app/models/v2/workflow.py +1 -1
- fractal_server/app/models/v2/workflowtask.py +1 -1
- fractal_server/app/routes/admin/v2/accounting.py +18 -28
- fractal_server/app/routes/admin/v2/task.py +1 -1
- fractal_server/app/routes/admin/v2/task_group.py +0 -17
- fractal_server/app/routes/api/__init__.py +1 -1
- fractal_server/app/routes/api/v2/__init__.py +8 -2
- fractal_server/app/routes/api/v2/_aux_functions.py +66 -0
- fractal_server/app/routes/api/v2/_aux_functions_history.py +166 -0
- fractal_server/app/routes/api/v2/dataset.py +0 -17
- fractal_server/app/routes/api/v2/history.py +544 -0
- fractal_server/app/routes/api/v2/images.py +31 -43
- fractal_server/app/routes/api/v2/job.py +30 -0
- fractal_server/app/routes/api/v2/project.py +1 -53
- fractal_server/app/routes/api/v2/{status.py → status_legacy.py} +6 -6
- fractal_server/app/routes/api/v2/submit.py +16 -14
- fractal_server/app/routes/api/v2/task.py +3 -10
- fractal_server/app/routes/api/v2/task_collection_custom.py +4 -9
- fractal_server/app/routes/api/v2/task_group.py +0 -17
- fractal_server/app/routes/api/v2/verify_image_types.py +61 -0
- fractal_server/app/routes/api/v2/workflow.py +28 -69
- fractal_server/app/routes/api/v2/workflowtask.py +53 -50
- fractal_server/app/routes/auth/group.py +0 -16
- fractal_server/app/routes/auth/oauth.py +5 -3
- fractal_server/app/routes/pagination.py +47 -0
- fractal_server/app/runner/components.py +0 -3
- fractal_server/app/runner/compress_folder.py +57 -29
- fractal_server/app/runner/exceptions.py +4 -0
- fractal_server/app/runner/executors/base_runner.py +157 -0
- fractal_server/app/runner/{v2/_local/_local_config.py → executors/local/get_local_config.py} +7 -9
- fractal_server/app/runner/executors/local/runner.py +248 -0
- fractal_server/app/runner/executors/{slurm → slurm_common}/_batching.py +1 -1
- fractal_server/app/runner/executors/{slurm → slurm_common}/_slurm_config.py +9 -7
- fractal_server/app/runner/executors/slurm_common/base_slurm_runner.py +868 -0
- fractal_server/app/runner/{v2/_slurm_common → executors/slurm_common}/get_slurm_config.py +48 -17
- fractal_server/app/runner/executors/{slurm → slurm_common}/remote.py +36 -47
- fractal_server/app/runner/executors/slurm_common/slurm_job_task_models.py +134 -0
- fractal_server/app/runner/executors/slurm_ssh/runner.py +268 -0
- fractal_server/app/runner/executors/slurm_sudo/__init__.py +0 -0
- fractal_server/app/runner/executors/{slurm/sudo → slurm_sudo}/_subprocess_run_as_user.py +2 -83
- fractal_server/app/runner/executors/slurm_sudo/runner.py +193 -0
- fractal_server/app/runner/extract_archive.py +1 -3
- fractal_server/app/runner/task_files.py +134 -87
- fractal_server/app/runner/v2/__init__.py +0 -399
- fractal_server/app/runner/v2/_local.py +88 -0
- fractal_server/app/runner/v2/{_slurm_ssh/__init__.py → _slurm_ssh.py} +20 -19
- fractal_server/app/runner/v2/{_slurm_sudo/__init__.py → _slurm_sudo.py} +17 -15
- fractal_server/app/runner/v2/db_tools.py +119 -0
- fractal_server/app/runner/v2/runner.py +206 -95
- fractal_server/app/runner/v2/runner_functions.py +488 -187
- fractal_server/app/runner/v2/runner_functions_low_level.py +40 -43
- fractal_server/app/runner/v2/submit_workflow.py +358 -0
- fractal_server/app/runner/v2/task_interface.py +31 -0
- fractal_server/app/schemas/_validators.py +13 -24
- fractal_server/app/schemas/user.py +10 -7
- fractal_server/app/schemas/user_settings.py +9 -21
- fractal_server/app/schemas/v2/__init__.py +9 -1
- fractal_server/app/schemas/v2/dataset.py +12 -94
- fractal_server/app/schemas/v2/dumps.py +26 -9
- fractal_server/app/schemas/v2/history.py +80 -0
- fractal_server/app/schemas/v2/job.py +15 -8
- fractal_server/app/schemas/v2/manifest.py +14 -7
- fractal_server/app/schemas/v2/project.py +9 -7
- fractal_server/app/schemas/v2/status_legacy.py +35 -0
- fractal_server/app/schemas/v2/task.py +72 -77
- fractal_server/app/schemas/v2/task_collection.py +14 -32
- fractal_server/app/schemas/v2/task_group.py +10 -9
- fractal_server/app/schemas/v2/workflow.py +10 -11
- fractal_server/app/schemas/v2/workflowtask.py +2 -21
- fractal_server/app/security/__init__.py +3 -3
- fractal_server/app/security/signup_email.py +2 -2
- fractal_server/config.py +41 -46
- fractal_server/images/tools.py +23 -0
- fractal_server/migrations/versions/47351f8c7ebc_drop_dataset_filters.py +50 -0
- fractal_server/migrations/versions/9db60297b8b2_set_ondelete.py +250 -0
- fractal_server/migrations/versions/c90a7c76e996_job_id_in_history_run.py +41 -0
- fractal_server/migrations/versions/e81103413827_add_job_type_filters.py +36 -0
- fractal_server/migrations/versions/f37aceb45062_make_historyunit_logfile_required.py +39 -0
- fractal_server/migrations/versions/fbce16ff4e47_new_history_items.py +120 -0
- fractal_server/ssh/_fabric.py +28 -14
- fractal_server/tasks/v2/local/collect.py +2 -2
- fractal_server/tasks/v2/ssh/collect.py +2 -2
- fractal_server/tasks/v2/templates/2_pip_install.sh +1 -1
- fractal_server/tasks/v2/templates/4_pip_show.sh +1 -1
- fractal_server/tasks/v2/utils_background.py +0 -19
- fractal_server/tasks/v2/utils_database.py +30 -17
- fractal_server/tasks/v2/utils_templates.py +6 -0
- {fractal_server-2.13.1.dist-info → fractal_server-2.14.0.dist-info}/METADATA +4 -4
- {fractal_server-2.13.1.dist-info → fractal_server-2.14.0.dist-info}/RECORD +106 -96
- {fractal_server-2.13.1.dist-info → fractal_server-2.14.0.dist-info}/WHEEL +1 -1
- fractal_server/app/runner/executors/slurm/ssh/_executor_wait_thread.py +0 -126
- fractal_server/app/runner/executors/slurm/ssh/_slurm_job.py +0 -116
- fractal_server/app/runner/executors/slurm/ssh/executor.py +0 -1386
- fractal_server/app/runner/executors/slurm/sudo/_check_jobs_status.py +0 -71
- fractal_server/app/runner/executors/slurm/sudo/_executor_wait_thread.py +0 -130
- fractal_server/app/runner/executors/slurm/sudo/executor.py +0 -1281
- fractal_server/app/runner/v2/_local/__init__.py +0 -132
- fractal_server/app/runner/v2/_local/_submit_setup.py +0 -52
- fractal_server/app/runner/v2/_local/executor.py +0 -100
- fractal_server/app/runner/v2/_slurm_ssh/_submit_setup.py +0 -83
- fractal_server/app/runner/v2/_slurm_sudo/_submit_setup.py +0 -83
- fractal_server/app/runner/v2/handle_failed_job.py +0 -59
- fractal_server/app/schemas/v2/status.py +0 -16
- /fractal_server/app/{runner/executors/slurm → history}/__init__.py +0 -0
- /fractal_server/app/runner/executors/{slurm/ssh → local}/__init__.py +0 -0
- /fractal_server/app/runner/executors/{slurm/sudo → slurm_common}/__init__.py +0 -0
- /fractal_server/app/runner/executors/{_job_states.py → slurm_common/_job_states.py} +0 -0
- /fractal_server/app/runner/executors/{slurm → slurm_common}/utils_executors.py +0 -0
- /fractal_server/app/runner/{v2/_slurm_common → executors/slurm_ssh}/__init__.py +0 -0
- {fractal_server-2.13.1.dist-info → fractal_server-2.14.0.dist-info}/LICENSE +0 -0
- {fractal_server-2.13.1.dist-info → fractal_server-2.14.0.dist-info}/entry_points.txt +0 -0
fractal_server/__init__.py
CHANGED
```diff
@@ -1 +1 @@
-__VERSION__ = "2.13.1"
+__VERSION__ = "2.14.0"
```
fractal_server/__main__.py
CHANGED
```diff
@@ -123,7 +123,9 @@ def set_db(skip_init_data: bool = False):
     asyncio.run(
         _create_first_user(
             email=settings.FRACTAL_DEFAULT_ADMIN_EMAIL,
-            password=settings.FRACTAL_DEFAULT_ADMIN_PASSWORD,
+            password=(
+                settings.FRACTAL_DEFAULT_ADMIN_PASSWORD.get_secret_value()
+            ),
             username=settings.FRACTAL_DEFAULT_ADMIN_USERNAME,
             is_superuser=True,
             is_verified=True,
```
fractal_server/app/models/linkusergroup.py
CHANGED
```diff
@@ -13,8 +13,12 @@ class LinkUserGroup(SQLModel, table=True):
     Crossing table between User and UserGroup
     """
 
-    group_id: int = Field(foreign_key="usergroup.id", primary_key=True)
-    user_id: int = Field(foreign_key="user_oauth.id", primary_key=True)
+    group_id: int = Field(
+        foreign_key="usergroup.id", primary_key=True, ondelete="CASCADE"
+    )
+    user_id: int = Field(
+        foreign_key="user_oauth.id", primary_key=True, ondelete="CASCADE"
+    )
 
     timestamp_created: datetime = Field(
         default_factory=get_timestamp,
```
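`ondelete` is the SQLModel `Field()` argument (available since SQLModel 0.0.21) that attaches an `ON DELETE` rule to the generated foreign-key constraint; this release uses it throughout the models below. A minimal sketch, independent of this package, of what `ondelete="CASCADE"` produces:

```python
from typing import Optional

from sqlmodel import Field, SQLModel


class Parent(SQLModel, table=True):
    id: Optional[int] = Field(default=None, primary_key=True)


class Child(SQLModel, table=True):
    id: Optional[int] = Field(default=None, primary_key=True)
    # The emitted DDL includes:
    #   FOREIGN KEY(parent_id) REFERENCES parent (id) ON DELETE CASCADE
    # so deleting a Parent row also deletes its Child rows at the DB level.
    parent_id: int = Field(foreign_key="parent.id", ondelete="CASCADE")
```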
fractal_server/app/models/v2/__init__.py
CHANGED
```diff
@@ -5,6 +5,9 @@ from ..linkuserproject import LinkUserProjectV2
 from .accounting import AccountingRecord
 from .accounting import AccountingRecordSlurm
 from .dataset import DatasetV2
+from .history import HistoryImageCache
+from .history import HistoryRun
+from .history import HistoryUnit
 from .job import JobV2
 from .project import ProjectV2
 from .task import TaskV2
@@ -23,6 +26,9 @@ __all__ = [
     "TaskGroupV2",
     "TaskGroupActivityV2",
     "TaskV2",
-    "WorkflowTaskV2",
     "WorkflowV2",
+    "WorkflowTaskV2",
+    "HistoryRun",
+    "HistoryUnit",
+    "HistoryImageCache",
 ]
```
fractal_server/app/models/v2/dataset.py
CHANGED
```diff
@@ -11,7 +11,6 @@ from sqlmodel import Relationship
 from sqlmodel import SQLModel
 
 from ....utils import get_timestamp
-from fractal_server.images.models import AttributeFiltersType
 
 
 class DatasetV2(SQLModel, table=True):
@@ -20,7 +19,7 @@ class DatasetV2(SQLModel, table=True):
     id: Optional[int] = Field(default=None, primary_key=True)
     name: str
 
-    project_id: int = Field(foreign_key="projectv2.id")
+    project_id: int = Field(foreign_key="projectv2.id", ondelete="CASCADE")
     project: "ProjectV2" = Relationship(  # noqa: F821
         sa_relationship_kwargs=dict(lazy="selectin"),
     )
@@ -34,20 +33,11 @@ class DatasetV2(SQLModel, table=True):
         sa_column=Column(DateTime(timezone=True), nullable=False),
     )
 
-    # New in V2
-
     zarr_dir: str
     images: list[dict[str, Any]] = Field(
         sa_column=Column(JSON, server_default="[]", nullable=False)
     )
 
-    type_filters: dict[str, bool] = Field(
-        sa_column=Column(JSON, nullable=False, server_default="{}")
-    )
-    attribute_filters: AttributeFiltersType = Field(
-        sa_column=Column(JSON, nullable=False, server_default="{}")
-    )
-
     @property
     def image_zarr_urls(self) -> list[str]:
         return [image["zarr_url"] for image in self.images]
```
fractal_server/app/models/v2/history.py
ADDED
```diff
@@ -0,0 +1,78 @@
+from datetime import datetime
+from typing import Any
+from typing import Optional
+
+from pydantic import ConfigDict
+from sqlalchemy import Column
+from sqlalchemy import String
+from sqlalchemy.dialects.postgresql import ARRAY
+from sqlalchemy.dialects.postgresql import JSONB
+from sqlalchemy.types import DateTime
+from sqlmodel import Field
+from sqlmodel import SQLModel
+
+from ....utils import get_timestamp
+
+
+class HistoryRun(SQLModel, table=True):
+    model_config = ConfigDict(arbitrary_types_allowed=True)
+
+    id: Optional[int] = Field(default=None, primary_key=True)
+    dataset_id: int = Field(
+        foreign_key="datasetv2.id",
+        ondelete="CASCADE",
+    )
+    workflowtask_id: Optional[int] = Field(
+        foreign_key="workflowtaskv2.id",
+        default=None,
+        ondelete="SET NULL",
+    )
+    job_id: int = Field(foreign_key="jobv2.id")
+
+    workflowtask_dump: dict[str, Any] = Field(
+        sa_column=Column(JSONB, nullable=False),
+    )
+    task_group_dump: dict[str, Any] = Field(
+        sa_column=Column(JSONB, nullable=False),
+    )
+
+    timestamp_started: datetime = Field(
+        sa_column=Column(DateTime(timezone=True), nullable=False),
+        default_factory=get_timestamp,
+    )
+    status: str
+    num_available_images: int
+
+
+class HistoryUnit(SQLModel, table=True):
+    id: Optional[int] = Field(default=None, primary_key=True)
+    history_run_id: int = Field(
+        foreign_key="historyrun.id",
+        ondelete="CASCADE",
+    )
+
+    logfile: str
+    status: str
+    zarr_urls: list[str] = Field(
+        sa_column=Column(ARRAY(String)),
+        default_factory=list,
+    )
+
+
+class HistoryImageCache(SQLModel, table=True):
+    zarr_url: str = Field(primary_key=True)
+    dataset_id: int = Field(
+        primary_key=True,
+        foreign_key="datasetv2.id",
+        ondelete="CASCADE",
+    )
+    workflowtask_id: int = Field(
+        primary_key=True,
+        foreign_key="workflowtaskv2.id",
+        ondelete="CASCADE",
+    )
+
+    latest_history_unit_id: int = Field(
+        foreign_key="historyunit.id",
+        ondelete="CASCADE",
+    )
```
fractal_server/app/models/v2/job.py
CHANGED
```diff
@@ -18,11 +18,15 @@ class JobV2(SQLModel, table=True):
     model_config = ConfigDict(arbitrary_types_allowed=True)
 
     id: Optional[int] = Field(default=None, primary_key=True)
-    project_id: Optional[int] = Field(foreign_key="projectv2.id", default=None)
+    project_id: Optional[int] = Field(
+        foreign_key="projectv2.id", default=None, ondelete="SET NULL"
+    )
     workflow_id: Optional[int] = Field(
-        foreign_key="workflowv2.id", default=None
+        foreign_key="workflowv2.id", default=None, ondelete="SET NULL"
+    )
+    dataset_id: Optional[int] = Field(
+        foreign_key="datasetv2.id", default=None, ondelete="SET NULL"
     )
-    dataset_id: Optional[int] = Field(foreign_key="datasetv2.id", default=None)
 
     user_email: str = Field(nullable=False)
     slurm_account: Optional[str] = None
@@ -56,3 +60,6 @@ class JobV2(SQLModel, table=True):
     attribute_filters: AttributeFiltersType = Field(
         sa_column=Column(JSON, nullable=False, server_default="{}")
     )
+    type_filters: dict[str, bool] = Field(
+        sa_column=Column(JSON, nullable=False, server_default="{}")
+    )
```
fractal_server/app/models/v2/task_group.py
CHANGED
```diff
@@ -23,7 +23,7 @@ class TaskGroupV2(SQLModel, table=True):
 
     user_id: int = Field(foreign_key="user_oauth.id")
     user_group_id: Optional[int] = Field(
-        foreign_key="usergroup.id", default=None
+        foreign_key="usergroup.id", default=None, ondelete="SET NULL"
     )
 
     origin: str
@@ -100,7 +100,7 @@ class TaskGroupActivityV2(SQLModel, table=True):
     id: Optional[int] = Field(default=None, primary_key=True)
     user_id: int = Field(foreign_key="user_oauth.id")
     taskgroupv2_id: Optional[int] = Field(
-        default=None, foreign_key="taskgroupv2.id"
+        default=None, foreign_key="taskgroupv2.id", ondelete="SET NULL"
     )
     timestamp_started: datetime = Field(
         default_factory=get_timestamp,
```
fractal_server/app/models/v2/workflow.py
CHANGED
```diff
@@ -16,7 +16,7 @@ class WorkflowV2(SQLModel, table=True):
 
     id: Optional[int] = Field(default=None, primary_key=True)
     name: str
-    project_id: int = Field(foreign_key="projectv2.id")
+    project_id: int = Field(foreign_key="projectv2.id", ondelete="CASCADE")
     project: "ProjectV2" = Relationship(  # noqa: F821
         sa_relationship_kwargs=dict(lazy="selectin"),
     )
```
fractal_server/app/models/v2/workflowtask.py
CHANGED
```diff
@@ -16,7 +16,7 @@ class WorkflowTaskV2(SQLModel, table=True):
 
     id: Optional[int] = Field(default=None, primary_key=True)
 
-    workflow_id: int = Field(foreign_key="workflowv2.id")
+    workflow_id: int = Field(foreign_key="workflowv2.id", ondelete="CASCADE")
     order: Optional[int] = None
     meta_parallel: Optional[dict[str, Any]] = Field(
         sa_column=Column(JSON), default=None
```
fractal_server/app/routes/admin/v2/accounting.py
CHANGED
```diff
@@ -3,9 +3,6 @@ from typing import Optional
 
 from fastapi import APIRouter
 from fastapi import Depends
-from fastapi import HTTPException
-from fastapi import Query
-from fastapi import status
 from fastapi.responses import JSONResponse
 from pydantic import BaseModel
 from pydantic.types import AwareDatetime
@@ -18,6 +15,9 @@ from fractal_server.app.models import UserOAuth
 from fractal_server.app.models.v2 import AccountingRecord
 from fractal_server.app.models.v2 import AccountingRecordSlurm
 from fractal_server.app.routes.auth import current_active_superuser
+from fractal_server.app.routes.pagination import get_pagination_params
+from fractal_server.app.routes.pagination import PaginationRequest
+from fractal_server.app.routes.pagination import PaginationResponse
 from fractal_server.app.schemas.v2 import AccountingRecordRead
 
 
@@ -27,32 +27,19 @@ class AccountingQuery(BaseModel):
     timestamp_max: Optional[AwareDatetime] = None
 
 
-class AccountingPage(BaseModel):
-    total_count: int
-    page_size: int
-    current_page: int
-    records: list[AccountingRecordRead]
-
-
 router = APIRouter()
 
 
-@router.post("/", response_model=AccountingPage)
+@router.post("/", response_model=PaginationResponse[AccountingRecordRead])
 async def query_accounting(
     query: AccountingQuery,
-    # pagination
-    page: int = Query(default=1, ge=1),
-    page_size: Optional[int] = Query(default=None, ge=1),
-    # dependencies
+    # Dependencies
+    pagination: PaginationRequest = Depends(get_pagination_params),
     superuser: UserOAuth = Depends(current_active_superuser),
     db: AsyncSession = Depends(get_async_db),
-) -> AccountingPage:
-
-    if page_size is None and page > 1:
-        raise HTTPException(
-            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
-            detail=(f"Invalid pagination parameters: {page=}, {page_size=}."),
-        )
+) -> PaginationResponse[AccountingRecordRead]:
+    page = pagination.page
+    page_size = pagination.page_size
 
     stm = select(AccountingRecord).order_by(AccountingRecord.id)
     stm_count = select(func.count(AccountingRecord.id))
@@ -69,20 +56,23 @@ async def query_accounting(
     stm_count = stm_count.where(
         AccountingRecord.timestamp <= query.timestamp_max
     )
+
+    res_total_count = await db.execute(stm_count)
+    total_count = res_total_count.scalar()
+
     if page_size is not None:
         stm = stm.offset((page - 1) * page_size).limit(page_size)
+    else:
+        page_size = total_count
 
     res = await db.execute(stm)
     records = res.scalars().all()
-    res_total_count = await db.execute(stm_count)
-    total_count = res_total_count.scalar()
 
-
-    return AccountingPage(
+    return PaginationResponse[AccountingRecordRead](
         total_count=total_count,
-        page_size=page_size,
+        page_size=page_size,
         current_page=page,
-        records=[record.model_dump() for record in records],
+        items=[record.model_dump() for record in records],
     )
 
 
```
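The new `fractal_server/app/routes/pagination.py` module (+47 lines) does not appear in this excerpt, but its shape can be inferred from the usage above. A plausible sketch (the real module may differ in details such as where validation happens):

```python
from typing import Generic, Optional, TypeVar

from fastapi import HTTPException, Query, status
from pydantic import BaseModel

T = TypeVar("T")


class PaginationRequest(BaseModel):
    page: int
    page_size: Optional[int]


class PaginationResponse(BaseModel, Generic[T]):
    total_count: int
    page_size: int
    current_page: int
    items: list[T]


def get_pagination_params(
    page: int = Query(default=1, ge=1),
    page_size: Optional[int] = Query(default=None, ge=1),
) -> PaginationRequest:
    # Same guard the old inline implementation enforced: requesting a page
    # beyond the first makes no sense without an explicit page size.
    if page_size is None and page > 1:
        raise HTTPException(
            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
            detail=f"Invalid pagination parameters: {page=}, {page_size=}.",
        )
    return PaginationRequest(page=page, page_size=page_size)
```

Moving these models into a shared module lets every paginated endpoint reuse one response envelope (`total_count`, `page_size`, `current_page`, `items`) instead of ad-hoc classes like the removed `AccountingPage`.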
fractal_server/app/routes/admin/v2/task.py
CHANGED
```diff
@@ -68,7 +68,7 @@ async def query_tasks(
     db: AsyncSession = Depends(get_async_db),
 ) -> list[TaskV2Info]:
     """
-    Query `TaskV2` table and get informations about related items
+    Query `TaskV2` table and get information about related items
     (WorkflowV2s and ProjectV2s)
 
     Args:
```
fractal_server/app/routes/admin/v2/task_group.py
CHANGED
```diff
@@ -192,23 +192,6 @@ async def delete_task_group(
             detail=f"TaskV2 {workflow_tasks[0].task_id} is still in use",
         )
 
-    # Cascade operations: set foreign-keys to null for TaskGroupActivityV2
-    # which are in relationship with the current TaskGroupV2
-    logger.debug("Start of cascade operations on TaskGroupActivityV2.")
-    stm = select(TaskGroupActivityV2).where(
-        TaskGroupActivityV2.taskgroupv2_id == task_group_id
-    )
-    res = await db.execute(stm)
-    task_group_activity_list = res.scalars().all()
-    for task_group_activity in task_group_activity_list:
-        logger.debug(
-            f"Setting TaskGroupActivityV2[{task_group_activity.id}]"
-            ".taskgroupv2_id to None."
-        )
-        task_group_activity.taskgroupv2_id = None
-        db.add(task_group_activity)
-    logger.debug("End of cascade operations on TaskGroupActivityV2.")
-
     await db.delete(task_group)
     await db.commit()
```
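This removal pairs with the `9db60297b8b2_set_ondelete.py` migration listed above: the Python-level loop that nulled out `taskgroupv2_id` is replaced by a database-level `ON DELETE SET NULL` rule on the foreign key. A hedged sketch of what such an Alembic migration plausibly contains (the constraint name here is an assumption, not taken from the actual migration file):

```python
from alembic import op


def upgrade() -> None:
    # Recreate the FK with ON DELETE SET NULL (constraint name assumed).
    with op.batch_alter_table("taskgroupactivityv2") as batch_op:
        batch_op.drop_constraint(
            "fk_taskgroupactivityv2_taskgroupv2_id_taskgroupv2",
            type_="foreignkey",
        )
        batch_op.create_foreign_key(
            "fk_taskgroupactivityv2_taskgroupv2_id_taskgroupv2",
            "taskgroupv2",
            ["taskgroupv2_id"],
            ["id"],
            ondelete="SET NULL",
        )
```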
fractal_server/app/routes/api/v2/__init__.py
CHANGED
```diff
@@ -4,16 +4,18 @@
 from fastapi import APIRouter
 
 from .dataset import router as dataset_router_v2
+from .history import router as history_router_v2
 from .images import router as images_routes_v2
 from .job import router as job_router_v2
 from .project import router as project_router_v2
-from .status import router as status_router_v2
+from .status_legacy import router as status_legacy_router_v2
 from .submit import router as submit_job_router_v2
 from .task import router as task_router_v2
 from .task_collection import router as task_collection_router_v2
 from .task_collection_custom import router as task_collection_router_v2_custom
 from .task_group import router as task_group_router_v2
 from .task_group_lifecycle import router as task_group_lifecycle_router_v2
+from .verify_image_types import router as verify_image_types_router
 from .workflow import router as workflow_router_v2
 from .workflow_import import router as workflow_import_router_v2
 from .workflowtask import router as workflowtask_router_v2
@@ -24,10 +26,15 @@ from fractal_server.syringe import Inject
 router_api_v2 = APIRouter()
 
 router_api_v2.include_router(dataset_router_v2, tags=["V2 Dataset"])
+router_api_v2.include_router(verify_image_types_router, tags=["V2 Job"])
 router_api_v2.include_router(job_router_v2, tags=["V2 Job"])
 router_api_v2.include_router(images_routes_v2, tags=["V2 Images"])
 router_api_v2.include_router(project_router_v2, tags=["V2 Project"])
 router_api_v2.include_router(submit_job_router_v2, tags=["V2 Job"])
+router_api_v2.include_router(history_router_v2, tags=["V2 History"])
+router_api_v2.include_router(
+    status_legacy_router_v2, tags=["V2 Status Legacy"]
+)
 
 
 settings = Inject(get_settings)
@@ -56,4 +63,3 @@ router_api_v2.include_router(
     workflow_import_router_v2, tags=["V2 Workflow Import"]
 )
 router_api_v2.include_router(workflowtask_router_v2, tags=["V2 WorkflowTask"])
-router_api_v2.include_router(status_router_v2, tags=["V2 Status"])
```
fractal_server/app/routes/api/v2/_aux_functions.py
CHANGED
```diff
@@ -417,3 +417,69 @@ async def clean_app_job_list_v2(
         if job.status == JobStatusTypeV2.SUBMITTED
     ]
     return submitted_job_ids
+
+
+async def _get_dataset_or_404(
+    *,
+    dataset_id: int,
+    db: AsyncSession,
+) -> DatasetV2:
+    """
+    Get a dataset or raise 404.
+
+    Args:
+        dataset_id:
+        db:
+    """
+    ds = await db.get(DatasetV2, dataset_id)
+    if ds is None:
+        raise HTTPException(
+            status_code=status.HTTP_404_NOT_FOUND,
+            detail=f"Dataset {dataset_id} not found.",
+        )
+    else:
+        return ds
+
+
+async def _get_workflow_or_404(
+    *,
+    workflow_id: int,
+    db: AsyncSession,
+) -> WorkflowV2:
+    """
+    Get a workflow or raise 404.
+
+    Args:
+        workflow_id:
+        db:
+    """
+    wf = await db.get(WorkflowV2, workflow_id)
+    if wf is None:
+        raise HTTPException(
+            status_code=status.HTTP_404_NOT_FOUND,
+            detail=f"Workflow {workflow_id} not found.",
+        )
+    else:
+        return wf
+
+
+async def _get_workflowtask_or_404(
+    *,
+    workflowtask_id: int,
+    db: AsyncSession,
+) -> WorkflowTaskV2:
+    """
+    Get a workflow task or raise 404.
+
+    Args:
+        workflowtask_id:
+        db:
+    """
+    wftask = await db.get(WorkflowTaskV2, workflowtask_id)
+    if wftask is None:
+        raise HTTPException(
+            status_code=status.HTTP_404_NOT_FOUND,
+            detail=f"WorkflowTask {workflowtask_id} not found.",
+        )
+    else:
+        return wftask
```
fractal_server/app/routes/api/v2/_aux_functions_history.py
ADDED
```diff
@@ -0,0 +1,166 @@
+from pathlib import Path
+from typing import Literal
+
+from fastapi import HTTPException
+from fastapi import status
+
+from fractal_server.app.db import AsyncSession
+from fractal_server.app.models import WorkflowTaskV2
+from fractal_server.app.models.v2 import DatasetV2
+from fractal_server.app.models.v2 import HistoryRun
+from fractal_server.app.models.v2 import HistoryUnit
+from fractal_server.app.models.v2 import WorkflowV2
+from fractal_server.app.routes.api.v2._aux_functions import _get_dataset_or_404
+from fractal_server.app.routes.api.v2._aux_functions import (
+    _get_project_check_owner,
+)
+from fractal_server.app.routes.api.v2._aux_functions import (
+    _get_workflow_or_404,
+)
+from fractal_server.app.routes.api.v2._aux_functions import (
+    _get_workflowtask_or_404,
+)
+from fractal_server.logger import set_logger
+
+
+logger = set_logger(__name__)
+
+
+async def get_history_unit_or_404(
+    *, history_unit_id: int, db: AsyncSession
+) -> HistoryUnit:
+    """
+    Get an existing HistoryUnit or raise a 404.
+
+    Arguments:
+        history_unit_id: The `HistoryUnit` id
+        db: An asynchronous db session
+    """
+    history_unit = await db.get(HistoryUnit, history_unit_id)
+    if history_unit is None:
+        raise HTTPException(
+            status_code=status.HTTP_404_NOT_FOUND,
+            detail=f"HistoryUnit {history_unit_id} not found",
+        )
+    return history_unit
+
+
+async def get_history_run_or_404(
+    *, history_run_id: int, db: AsyncSession
+) -> HistoryRun:
+    """
+    Get an existing HistoryRun or raise a 404.
+
+    Arguments:
+        history_run_id:
+        db:
+    """
+    history_run = await db.get(HistoryRun, history_run_id)
+    if history_run is None:
+        raise HTTPException(
+            status_code=status.HTTP_404_NOT_FOUND,
+            detail=f"HistoryRun {history_run_id} not found",
+        )
+    return history_run
+
+
+def read_log_file(
+    *,
+    logfile: str | None,
+    wftask: WorkflowTaskV2,
+    dataset_id: int,
+):
+    if logfile is None or not Path(logfile).exists():
+        logger.debug(
+            f"Logs for task '{wftask.task.name}' in dataset "
+            f"{dataset_id} are not available ({logfile=})."
+        )
+        return (
+            f"Logs for task '{wftask.task.name}' in dataset "
+            f"{dataset_id} are not available."
+        )
+
+    try:
+        with open(logfile, "r") as f:
+            return f.read()
+    except Exception as e:
+        return (
+            f"Error while retrieving logs for task '{wftask.task.name}' "
+            f"in dataset {dataset_id}. Original error: {str(e)}."
+        )
+
+
+async def _verify_workflow_and_dataset_access(
+    *,
+    project_id: int,
+    workflow_id: int,
+    dataset_id: int,
+    user_id: int,
+    db: AsyncSession,
+) -> dict[Literal["dataset", "workflow"], DatasetV2 | WorkflowV2]:
+    """
+    Verify user access to a dataset/workflow pair.
+
+    Args:
+        dataset_id:
+        workflow_task_id:
+        user_id:
+        db:
+    """
+    await _get_project_check_owner(
+        project_id=project_id,
+        user_id=user_id,
+        db=db,
+    )
+    workflow = await _get_workflow_or_404(
+        workflow_id=workflow_id,
+        db=db,
+    )
+    if workflow.project_id != project_id:
+        raise HTTPException(
+            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+            detail="Workflow does not belong to expected project.",
+        )
+    dataset = await _get_dataset_or_404(
+        dataset_id=dataset_id,
+        db=db,
+    )
+    if dataset.project_id != project_id:
+        raise HTTPException(
+            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+            detail="Dataset does not belong to expected project.",
+        )
+
+    return dict(dataset=dataset, workflow=workflow)
+
+
+async def get_wftask_check_owner(
+    *,
+    project_id: int,
+    dataset_id: int,
+    workflowtask_id: int,
+    user_id: int,
+    db: AsyncSession,
+) -> WorkflowTaskV2:
+    """
+    Verify user access for the history of this dataset and workflowtask.
+
+    Args:
+        project_id:
+        dataset_id:
+        workflow_task_id:
+        user_id:
+        db:
+    """
+    wftask = await _get_workflowtask_or_404(
+        workflowtask_id=workflowtask_id,
+        db=db,
+    )
+    await _verify_workflow_and_dataset_access(
+        project_id=project_id,
+        dataset_id=dataset_id,
+        workflow_id=wftask.workflow_id,
+        user_id=user_id,
+        db=db,
+    )
+    return wftask
```
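A hypothetical endpoint (not part of this diff; the real consumers are the new `history.py` routes) showing how these helpers compose inside a route handler:

```python
from fastapi import APIRouter, Depends

from fractal_server.app.db import AsyncSession, get_async_db
from fractal_server.app.models import UserOAuth
from fractal_server.app.routes.auth import current_active_user
from fractal_server.app.routes.api.v2._aux_functions_history import (
    get_wftask_check_owner,
)

router = APIRouter()


@router.get("/project/{project_id}/status/example/")
async def example_history_endpoint(
    project_id: int,
    dataset_id: int,
    workflowtask_id: int,
    user: UserOAuth = Depends(current_active_user),
    db: AsyncSession = Depends(get_async_db),
) -> dict:
    # One call verifies project ownership plus dataset/workflow/workflowtask
    # consistency, raising 404/422 on any mismatch.
    wftask = await get_wftask_check_owner(
        project_id=project_id,
        dataset_id=dataset_id,
        workflowtask_id=workflowtask_id,
        user_id=user.id,
        db=db,
    )
    return dict(workflowtask_id=wftask.id)
```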