fractal-server 2.13.0__py3-none-any.whl → 2.14.0a0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fractal_server/__init__.py +1 -1
- fractal_server/app/history/__init__.py +4 -0
- fractal_server/app/history/image_updates.py +142 -0
- fractal_server/app/history/status_enum.py +16 -0
- fractal_server/app/models/v2/__init__.py +9 -1
- fractal_server/app/models/v2/accounting.py +35 -0
- fractal_server/app/models/v2/history.py +53 -0
- fractal_server/app/routes/admin/v2/__init__.py +4 -0
- fractal_server/app/routes/admin/v2/accounting.py +108 -0
- fractal_server/app/routes/admin/v2/impersonate.py +35 -0
- fractal_server/app/routes/admin/v2/job.py +5 -13
- fractal_server/app/routes/admin/v2/task_group.py +4 -12
- fractal_server/app/routes/api/v2/__init__.py +2 -2
- fractal_server/app/routes/api/v2/_aux_functions.py +78 -0
- fractal_server/app/routes/api/v2/_aux_functions_task_lifecycle.py +3 -3
- fractal_server/app/routes/api/v2/dataset.py +12 -9
- fractal_server/app/routes/api/v2/history.py +247 -0
- fractal_server/app/routes/api/v2/submit.py +1 -0
- fractal_server/app/routes/api/v2/task_group.py +2 -5
- fractal_server/app/routes/api/v2/workflow.py +18 -3
- fractal_server/app/routes/api/v2/workflowtask.py +22 -0
- fractal_server/app/routes/aux/__init__.py +0 -20
- fractal_server/app/runner/executors/base_runner.py +114 -0
- fractal_server/app/runner/{v2/_local → executors/local}/_local_config.py +3 -3
- fractal_server/app/runner/executors/local/_submit_setup.py +54 -0
- fractal_server/app/runner/executors/local/runner.py +200 -0
- fractal_server/app/runner/executors/{slurm → slurm_common}/_batching.py +1 -1
- fractal_server/app/runner/executors/{slurm → slurm_common}/_slurm_config.py +3 -3
- fractal_server/app/runner/{v2/_slurm_ssh → executors/slurm_common}/_submit_setup.py +13 -12
- fractal_server/app/runner/{v2/_slurm_common → executors/slurm_common}/get_slurm_config.py +9 -15
- fractal_server/app/runner/executors/{slurm/ssh → slurm_ssh}/_executor_wait_thread.py +1 -1
- fractal_server/app/runner/executors/{slurm/ssh → slurm_ssh}/_slurm_job.py +1 -1
- fractal_server/app/runner/executors/{slurm/ssh → slurm_ssh}/executor.py +13 -14
- fractal_server/app/runner/executors/{slurm/sudo → slurm_sudo}/_check_jobs_status.py +11 -9
- fractal_server/app/runner/executors/{slurm/sudo → slurm_sudo}/_executor_wait_thread.py +3 -3
- fractal_server/app/runner/executors/{slurm/sudo → slurm_sudo}/_subprocess_run_as_user.py +2 -68
- fractal_server/app/runner/executors/slurm_sudo/runner.py +632 -0
- fractal_server/app/runner/task_files.py +70 -96
- fractal_server/app/runner/v2/__init__.py +9 -19
- fractal_server/app/runner/v2/_local.py +84 -0
- fractal_server/app/runner/v2/{_slurm_ssh/__init__.py → _slurm_ssh.py} +12 -13
- fractal_server/app/runner/v2/{_slurm_sudo/__init__.py → _slurm_sudo.py} +12 -12
- fractal_server/app/runner/v2/runner.py +106 -31
- fractal_server/app/runner/v2/runner_functions.py +88 -64
- fractal_server/app/runner/v2/runner_functions_low_level.py +20 -20
- fractal_server/app/schemas/v2/__init__.py +1 -0
- fractal_server/app/schemas/v2/accounting.py +18 -0
- fractal_server/app/schemas/v2/dataset.py +0 -17
- fractal_server/app/schemas/v2/history.py +23 -0
- fractal_server/config.py +58 -52
- fractal_server/migrations/versions/8223fcef886c_image_status.py +63 -0
- fractal_server/migrations/versions/87cd72a537a2_add_historyitem_table.py +68 -0
- fractal_server/migrations/versions/af1ef1c83c9b_add_accounting_tables.py +57 -0
- fractal_server/tasks/v2/utils_background.py +1 -1
- {fractal_server-2.13.0.dist-info → fractal_server-2.14.0a0.dist-info}/METADATA +1 -1
- {fractal_server-2.13.0.dist-info → fractal_server-2.14.0a0.dist-info}/RECORD +66 -55
- fractal_server/app/routes/api/v2/status.py +0 -168
- fractal_server/app/runner/executors/slurm/sudo/executor.py +0 -1281
- fractal_server/app/runner/v2/_local/__init__.py +0 -129
- fractal_server/app/runner/v2/_local/_submit_setup.py +0 -52
- fractal_server/app/runner/v2/_local/executor.py +0 -100
- fractal_server/app/runner/v2/_slurm_sudo/_submit_setup.py +0 -83
- fractal_server/app/runner/v2/handle_failed_job.py +0 -59
- /fractal_server/app/runner/executors/{slurm → local}/__init__.py +0 -0
- /fractal_server/app/runner/executors/{slurm/ssh → slurm_common}/__init__.py +0 -0
- /fractal_server/app/runner/executors/{_job_states.py → slurm_common/_job_states.py} +0 -0
- /fractal_server/app/runner/executors/{slurm → slurm_common}/remote.py +0 -0
- /fractal_server/app/runner/executors/{slurm → slurm_common}/utils_executors.py +0 -0
- /fractal_server/app/runner/executors/{slurm/sudo → slurm_ssh}/__init__.py +0 -0
- /fractal_server/app/runner/{v2/_slurm_common → executors/slurm_sudo}/__init__.py +0 -0
- {fractal_server-2.13.0.dist-info → fractal_server-2.14.0a0.dist-info}/LICENSE +0 -0
- {fractal_server-2.13.0.dist-info → fractal_server-2.14.0a0.dist-info}/WHEEL +0 -0
- {fractal_server-2.13.0.dist-info → fractal_server-2.14.0a0.dist-info}/entry_points.txt +0 -0
fractal_server/__init__.py
CHANGED
```diff
@@ -1 +1 @@
-__VERSION__ = "2.13.0"
+__VERSION__ = "2.14.0a0"
```
fractal_server/app/history/image_updates.py
ADDED
```diff
@@ -0,0 +1,142 @@
+from typing import Optional
+
+from sqlalchemy.orm import Session
+from sqlalchemy.orm.attributes import flag_modified
+from sqlmodel import select
+
+from fractal_server.app.db import get_sync_db
+from fractal_server.app.history.status_enum import HistoryItemImageStatus
+from fractal_server.app.models.v2 import HistoryItemV2
+from fractal_server.app.models.v2 import ImageStatus
+from fractal_server.logger import set_logger
+
+logger = set_logger(__name__)
+
+
+def _update_single_image_status(
+    *,
+    zarr_url: str,
+    workflowtask_id: int,
+    dataset_id: int,
+    status: HistoryItemImageStatus,
+    db: Session,
+    commit: bool = True,
+    logfile: Optional[str] = None,
+) -> None:
+    image_status = db.get(
+        ImageStatus,
+        (
+            zarr_url,
+            workflowtask_id,
+            dataset_id,
+        ),
+    )
+    if image_status is None:
+        raise RuntimeError("This should have not happened")
+    image_status.status = status
+    if logfile is not None:
+        image_status.logfile = logfile
+    db.add(image_status)
+    if commit:
+        db.commit()
+
+
+def update_single_image(
+    *,
+    history_item_id: int,
+    zarr_url: str,
+    status: HistoryItemImageStatus,
+) -> None:
+
+    logger.debug(
+        f"[update_single_image] {history_item_id=}, {status=}, {zarr_url=}"
+    )
+
+    # Note: thanks to `with_for_update`, a lock is acquired and kept
+    # until `db.commit()`
+    with next(get_sync_db()) as db:
+        stm = (
+            select(HistoryItemV2)
+            .where(HistoryItemV2.id == history_item_id)
+            .with_for_update(nowait=False)
+        )
+        history_item = db.execute(stm).scalar_one()
+        history_item.images[zarr_url] = status
+        flag_modified(history_item, "images")
+        db.commit()
+
+        _update_single_image_status(
+            zarr_url=zarr_url,
+            dataset_id=history_item.dataset_id,
+            workflowtask_id=history_item.workflowtask_id,
+            commit=True,
+            status=status,
+            db=db,
+        )
+
+
+def update_single_image_logfile(
+    *,
+    history_item_id: int,
+    zarr_url: str,
+    logfile: str,
+) -> None:
+
+    logger.debug(
+        f"[update_single_image_logfile] {history_item_id=}, {logfile=}, {zarr_url=}"
+    )
+
+    with next(get_sync_db()) as db:
+        history_item = db.get(HistoryItemV2, history_item_id)
+        image_status = db.get(
+            ImageStatus,
+            (
+                zarr_url,
+                history_item.workflowtask_id,
+                history_item.dataset_id,
+            ),
+        )
+        if image_status is None:
+            raise RuntimeError("This should have not happened")
+        image_status.logfile = logfile
+        db.merge(image_status)
+        db.commit()
+
+
+def update_all_images(
+    *,
+    history_item_id: int,
+    status: HistoryItemImageStatus,
+    logfile: Optional[str] = None,
+) -> None:
+
+    logger.debug(f"[update_all_images] {history_item_id=}, {status=}")
+
+    # Note: thanks to `with_for_update`, a lock is acquired and kept
+    # until `db.commit()`
+    stm = (
+        select(HistoryItemV2)
+        .where(HistoryItemV2.id == history_item_id)
+        .with_for_update(nowait=False)
+    )
+    with next(get_sync_db()) as db:
+        history_item = db.execute(stm).scalar_one()
+        new_images = {
+            zarr_url: status for zarr_url in history_item.images.keys()
+        }
+        history_item.images = new_images
+        flag_modified(history_item, "images")
+        db.commit()
+
+        # FIXME: Make this a bulk edit, if possible
+        for ind, zarr_url in enumerate(history_item.images.keys()):
+            _update_single_image_status(
+                zarr_url=zarr_url,
+                dataset_id=history_item.dataset_id,
+                workflowtask_id=history_item.workflowtask_id,
+                commit=False,
+                status=status,
+                logfile=logfile,
+                db=db,
+            )
+        db.commit()
```
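For orientation, the sketch below shows how a runner could invoke these helpers once a task finishes on one image. It is a minimal usage example, not taken from this diff: the `history_item_id` and `zarr_url` values are hypothetical, and a real runner would get them from its job context.

```python
from fractal_server.app.history.image_updates import update_single_image
from fractal_server.app.history.status_enum import HistoryItemImageStatus

# Hypothetical ID and path; mark the image as successfully processed.
update_single_image(
    history_item_id=42,
    zarr_url="/data/my_plate.zarr/B/03/0",
    status=HistoryItemImageStatus.DONE,
)
```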
fractal_server/app/history/status_enum.py
ADDED
```diff
@@ -0,0 +1,16 @@
+from enum import Enum
+
+
+class HistoryItemImageStatus(str, Enum):
+    """
+    Available image-status values within a `HistoryItemV2`
+
+    Attributes:
+        SUBMITTED:
+        DONE:
+        FAILED:
+    """
+
+    SUBMITTED = "submitted"
+    DONE = "done"
+    FAILED = "failed"
```
fractal_server/app/models/v2/__init__.py
CHANGED
```diff
@@ -2,7 +2,11 @@
 v2 `models` module
 """
 from ..linkuserproject import LinkUserProjectV2
+from .accounting import AccountingRecord
+from .accounting import AccountingRecordSlurm
 from .dataset import DatasetV2
+from .history import HistoryItemV2
+from .history import ImageStatus
 from .job import JobV2
 from .project import ProjectV2
 from .task import TaskV2
@@ -12,6 +16,8 @@ from .workflow import WorkflowV2
 from .workflowtask import WorkflowTaskV2
 
 __all__ = [
+    "AccountingRecord",
+    "AccountingRecordSlurm",
     "LinkUserProjectV2",
     "DatasetV2",
     "JobV2",
@@ -19,6 +25,8 @@ __all__ = [
     "TaskGroupV2",
     "TaskGroupActivityV2",
     "TaskV2",
-    "WorkflowTaskV2",
     "WorkflowV2",
+    "WorkflowTaskV2",
+    "HistoryItemV2",
+    "ImageStatus",
 ]
```
fractal_server/app/models/v2/accounting.py
ADDED
```diff
@@ -0,0 +1,35 @@
+from datetime import datetime
+from typing import Optional
+
+from sqlalchemy import Column
+from sqlalchemy import Integer
+from sqlalchemy.dialects.postgresql import ARRAY
+from sqlalchemy.types import DateTime
+from sqlmodel import Field
+from sqlmodel import SQLModel
+
+from ....utils import get_timestamp
+
+
+class AccountingRecord(SQLModel, table=True):
+    id: Optional[int] = Field(default=None, primary_key=True)
+    user_id: int = Field(foreign_key="user_oauth.id", nullable=False)
+    timestamp: datetime = Field(
+        default_factory=get_timestamp,
+        sa_column=Column(DateTime(timezone=True), nullable=False),
+    )
+    num_tasks: int
+    num_new_images: int
+
+
+class AccountingRecordSlurm(SQLModel, table=True):
+    id: Optional[int] = Field(default=None, primary_key=True)
+    user_id: int = Field(foreign_key="user_oauth.id", nullable=False)
+    timestamp: datetime = Field(
+        default_factory=get_timestamp,
+        sa_column=Column(DateTime(timezone=True), nullable=False),
+    )
+    slurm_job_ids: list[int] = Field(
+        default_factory=list,
+        sa_column=Column(ARRAY(Integer)),
+    )
```
fractal_server/app/models/v2/history.py
ADDED
```diff
@@ -0,0 +1,53 @@
+from datetime import datetime
+from typing import Any
+from typing import Optional
+
+from pydantic import ConfigDict
+from sqlalchemy import Column
+from sqlalchemy.dialects.postgresql import JSONB
+from sqlalchemy.types import DateTime
+from sqlmodel import Field
+from sqlmodel import SQLModel
+
+from ....utils import get_timestamp
+
+
+class HistoryItemV2(SQLModel, table=True):
+    model_config = ConfigDict(arbitrary_types_allowed=True)
+
+    id: Optional[int] = Field(default=None, primary_key=True)
+    dataset_id: int = Field(foreign_key="datasetv2.id")
+    workflowtask_id: Optional[int] = Field(
+        foreign_key="workflowtaskv2.id",
+        default=None,
+    )
+    timestamp_started: datetime = Field(
+        default_factory=get_timestamp,
+        sa_column=Column(
+            DateTime(timezone=True),
+            nullable=False,
+        ),
+    )
+    workflowtask_dump: dict[str, Any] = Field(
+        sa_column=Column(JSONB, nullable=False)
+    )
+    task_group_dump: dict[str, Any] = Field(
+        sa_column=Column(JSONB, nullable=False)
+    )
+    parameters_hash: str
+    num_available_images: int
+    num_current_images: int
+    images: dict[str, str] = Field(sa_column=Column(JSONB, nullable=False))
+
+
+class ImageStatus(SQLModel, table=True):
+
+    zarr_url: str = Field(primary_key=True)
+    workflowtask_id: int = Field(
+        primary_key=True, foreign_key="workflowtaskv2.id"
+    )
+    dataset_id: int = Field(primary_key=True, foreign_key="datasetv2.id")
+
+    parameters_hash: str
+    status: str
+    logfile: str
```
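Note that `ImageStatus` uses the composite primary key `(zarr_url, workflowtask_id, dataset_id)`, so lookups pass a tuple, exactly as `image_updates.py` above does. A minimal sketch with invented values:

```python
from fractal_server.app.db import get_sync_db
from fractal_server.app.models.v2 import ImageStatus

with next(get_sync_db()) as db:
    # Composite key: (zarr_url, workflowtask_id, dataset_id).
    image_status = db.get(ImageStatus, ("/data/my_plate.zarr/B/03/0", 7, 3))
```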
fractal_server/app/routes/admin/v2/__init__.py
CHANGED
```diff
@@ -3,6 +3,8 @@
 """
 from fastapi import APIRouter
 
+from .accounting import router as accounting_router
+from .impersonate import router as impersonate_router
 from .job import router as job_router
 from .project import router as project_router
 from .task import router as task_router
@@ -11,6 +13,7 @@ from .task_group_lifecycle import router as task_group_lifecycle_router
 
 router_admin_v2 = APIRouter()
 
+router_admin_v2.include_router(accounting_router, prefix="/accounting")
 router_admin_v2.include_router(job_router, prefix="/job")
 router_admin_v2.include_router(project_router, prefix="/project")
 router_admin_v2.include_router(task_router, prefix="/task")
@@ -18,3 +21,4 @@ router_admin_v2.include_router(task_group_router, prefix="/task-group")
 router_admin_v2.include_router(
     task_group_lifecycle_router, prefix="/task-group"
 )
+router_admin_v2.include_router(impersonate_router, prefix="/impersonate")
```
fractal_server/app/routes/admin/v2/accounting.py
ADDED
```diff
@@ -0,0 +1,108 @@
+from itertools import chain
+from typing import Optional
+
+from fastapi import APIRouter
+from fastapi import Depends
+from fastapi import HTTPException
+from fastapi import Query
+from fastapi import status
+from fastapi.responses import JSONResponse
+from pydantic import BaseModel
+from pydantic.types import AwareDatetime
+from sqlmodel import func
+from sqlmodel import select
+
+from fractal_server.app.db import AsyncSession
+from fractal_server.app.db import get_async_db
+from fractal_server.app.models import UserOAuth
+from fractal_server.app.models.v2 import AccountingRecord
+from fractal_server.app.models.v2 import AccountingRecordSlurm
+from fractal_server.app.routes.auth import current_active_superuser
+from fractal_server.app.schemas.v2 import AccountingRecordRead
+
+
+class AccountingQuery(BaseModel):
+    user_id: Optional[int] = None
+    timestamp_min: Optional[AwareDatetime] = None
+    timestamp_max: Optional[AwareDatetime] = None
+
+
+class AccountingPage(BaseModel):
+    total_count: int
+    page_size: int
+    current_page: int
+    records: list[AccountingRecordRead]
+
+
+router = APIRouter()
+
+
+@router.post("/", response_model=AccountingPage)
+async def query_accounting(
+    query: AccountingQuery,
+    # pagination
+    page: int = Query(default=1, ge=1),
+    page_size: Optional[int] = Query(default=None, ge=1),
+    # dependencies
+    superuser: UserOAuth = Depends(current_active_superuser),
+    db: AsyncSession = Depends(get_async_db),
+) -> AccountingPage:
+
+    if page_size is None and page > 1:
+        raise HTTPException(
+            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+            detail=(f"Invalid pagination parameters: {page=}, {page_size=}."),
+        )
+
+    stm = select(AccountingRecord).order_by(AccountingRecord.id)
+    stm_count = select(func.count(AccountingRecord.id))
+    if query.user_id is not None:
+        stm = stm.where(AccountingRecord.user_id == query.user_id)
+        stm_count = stm_count.where(AccountingRecord.user_id == query.user_id)
+    if query.timestamp_min is not None:
+        stm = stm.where(AccountingRecord.timestamp >= query.timestamp_min)
+        stm_count = stm_count.where(
+            AccountingRecord.timestamp >= query.timestamp_min
+        )
+    if query.timestamp_max is not None:
+        stm = stm.where(AccountingRecord.timestamp <= query.timestamp_max)
+        stm_count = stm_count.where(
+            AccountingRecord.timestamp <= query.timestamp_max
+        )
+    if page_size is not None:
+        stm = stm.offset((page - 1) * page_size).limit(page_size)
+
+    res = await db.execute(stm)
+    records = res.scalars().all()
+    res_total_count = await db.execute(stm_count)
+    total_count = res_total_count.scalar()
+
+    actual_page_size = page_size or len(records)
+    return AccountingPage(
+        total_count=total_count,
+        page_size=actual_page_size,
+        current_page=page,
+        records=[record.model_dump() for record in records],
+    )
+
+
+@router.post("/slurm/")
+async def query_accounting_slurm(
+    query: AccountingQuery,
+    # dependencies
+    superuser: UserOAuth = Depends(current_active_superuser),
+    db: AsyncSession = Depends(get_async_db),
+) -> JSONResponse:
+
+    stm = select(AccountingRecordSlurm.slurm_job_ids)
+    if query.user_id is not None:
+        stm = stm.where(AccountingRecordSlurm.user_id == query.user_id)
+    if query.timestamp_min is not None:
+        stm = stm.where(AccountingRecordSlurm.timestamp >= query.timestamp_min)
+    if query.timestamp_max is not None:
+        stm = stm.where(AccountingRecordSlurm.timestamp <= query.timestamp_max)
+
+    res = await db.execute(stm)
+    nested_slurm_job_ids = res.scalars().all()
+    aggregated_slurm_job_ids = list(chain(*nested_slurm_job_ids))
+    return JSONResponse(content=aggregated_slurm_job_ids, status_code=200)
```
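Assuming the admin router keeps its usual `/admin/v2` mount point, a superuser could exercise the paginated endpoint roughly as below; the server URL, token, and `httpx` client are assumptions, not part of this diff.

```python
import httpx

# Hypothetical deployment URL and superuser token.
response = httpx.post(
    "http://localhost:8000/admin/v2/accounting/",
    params={"page": 1, "page_size": 50},
    json={"user_id": 2, "timestamp_min": "2025-01-01T00:00:00+00:00"},
    headers={"Authorization": "Bearer <superuser-token>"},
)
page = response.json()
# -> {"total_count": ..., "page_size": 50, "current_page": 1, "records": [...]}
```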
fractal_server/app/routes/admin/v2/impersonate.py
ADDED
```diff
@@ -0,0 +1,35 @@
+from fastapi import APIRouter
+from fastapi import Depends
+from fastapi.responses import JSONResponse
+from fastapi_users.authentication import JWTStrategy
+
+from fractal_server.app.db import AsyncSession
+from fractal_server.app.db import get_async_db
+from fractal_server.app.models import UserOAuth
+from fractal_server.app.routes.auth import current_active_superuser
+from fractal_server.app.routes.auth._aux_auth import _user_or_404
+from fractal_server.config import get_settings
+from fractal_server.syringe import Inject
+
+router = APIRouter()
+
+
+@router.get("/{user_id}/")
+async def impersonate_user(
+    user_id: int,
+    superuser: UserOAuth = Depends(current_active_superuser),
+    db: AsyncSession = Depends(get_async_db),
+) -> JSONResponse:
+    user = await _user_or_404(user_id, db)
+
+    settings = Inject(get_settings)
+    jwt_strategy = JWTStrategy(
+        secret=settings.JWT_SECRET_KEY,  # type: ignore
+        lifetime_seconds=7200,  # 2 hours
+    )
+    token = await jwt_strategy.write_token(user)
+
+    return JSONResponse(
+        content={"access_token": token, "token_type": "bearer"},
+        status_code=200,
+    )
```
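The endpoint returns a standard bearer token, so impersonation is a two-step flow. A hedged sketch follows; the server URL, the `/admin/v2` mount point, and the `/auth/current-user/` path are assumptions about the surrounding app, not part of this diff.

```python
import httpx

BASE = "http://localhost:8000"  # hypothetical deployment

# Step 1 (as superuser): mint a 2-hour token for user 5.
r = httpx.get(
    f"{BASE}/admin/v2/impersonate/5/",
    headers={"Authorization": "Bearer <superuser-token>"},
)
token = r.json()["access_token"]

# Step 2: act as that user with the impersonation token.
me = httpx.get(
    f"{BASE}/auth/current-user/",
    headers={"Authorization": f"Bearer {token}"},
)
```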
fractal_server/app/routes/admin/v2/job.py
CHANGED
```diff
@@ -1,4 +1,3 @@
-from datetime import datetime
 from pathlib import Path
 from typing import Optional
 
@@ -8,6 +7,7 @@ from fastapi import HTTPException
 from fastapi import Response
 from fastapi import status
 from fastapi.responses import StreamingResponse
+from pydantic.types import AwareDatetime
 from sqlmodel import select
 
 from fractal_server.app.db import AsyncSession
@@ -16,7 +16,6 @@ from fractal_server.app.models import UserOAuth
 from fractal_server.app.models.v2 import JobV2
 from fractal_server.app.models.v2 import ProjectV2
 from fractal_server.app.routes.auth import current_active_superuser
-from fractal_server.app.routes.aux import _raise_if_naive_datetime
 from fractal_server.app.routes.aux._job import _write_shutdown_file
 from fractal_server.app.routes.aux._runner import _check_shutdown_is_supported
 from fractal_server.app.runner.filenames import WORKFLOW_LOG_FILENAME
@@ -37,10 +36,10 @@ async def view_job(
     dataset_id: Optional[int] = None,
     workflow_id: Optional[int] = None,
     status: Optional[JobStatusTypeV2] = None,
-    start_timestamp_min: Optional[datetime] = None,
-    start_timestamp_max: Optional[datetime] = None,
-    end_timestamp_min: Optional[datetime] = None,
-    end_timestamp_max: Optional[datetime] = None,
+    start_timestamp_min: Optional[AwareDatetime] = None,
+    start_timestamp_max: Optional[AwareDatetime] = None,
+    end_timestamp_min: Optional[AwareDatetime] = None,
+    end_timestamp_max: Optional[AwareDatetime] = None,
     log: bool = True,
     user: UserOAuth = Depends(current_active_superuser),
     db: AsyncSession = Depends(get_async_db),
@@ -67,13 +66,6 @@ async def view_job(
     `job.log` is set to `None`.
     """
 
-    _raise_if_naive_datetime(
-        start_timestamp_min,
-        start_timestamp_max,
-        end_timestamp_min,
-        end_timestamp_max,
-    )
-
     stm = select(JobV2)
 
     if id is not None:
```
fractal_server/app/routes/admin/v2/task_group.py
CHANGED
```diff
@@ -1,4 +1,3 @@
-from datetime import datetime
 from typing import Optional
 
 from fastapi import APIRouter
@@ -6,6 +5,7 @@ from fastapi import Depends
 from fastapi import HTTPException
 from fastapi import Response
 from fastapi import status
+from pydantic.types import AwareDatetime
 from sqlalchemy.sql.operators import is_
 from sqlalchemy.sql.operators import is_not
 from sqlmodel import select
@@ -20,7 +20,6 @@ from fractal_server.app.routes.auth import current_active_superuser
 from fractal_server.app.routes.auth._aux_auth import (
     _verify_user_belongs_to_group,
 )
-from fractal_server.app.routes.aux import _raise_if_naive_datetime
 from fractal_server.app.schemas.v2 import TaskGroupActivityActionV2
 from fractal_server.app.schemas.v2 import TaskGroupActivityStatusV2
 from fractal_server.app.schemas.v2 import TaskGroupActivityV2Read
@@ -42,13 +41,11 @@ async def get_task_group_activity_list(
     pkg_name: Optional[str] = None,
     status: Optional[TaskGroupActivityStatusV2] = None,
     action: Optional[TaskGroupActivityActionV2] = None,
-    timestamp_started_min: Optional[datetime] = None,
+    timestamp_started_min: Optional[AwareDatetime] = None,
     superuser: UserOAuth = Depends(current_active_superuser),
     db: AsyncSession = Depends(get_async_db),
 ) -> list[TaskGroupActivityV2Read]:
 
-    _raise_if_naive_datetime(timestamp_started_min)
-
     stm = select(TaskGroupActivityV2)
     if task_group_activity_id is not None:
         stm = stm.where(TaskGroupActivityV2.id == task_group_activity_id)
@@ -96,19 +93,14 @@ async def query_task_group_list(
     active: Optional[bool] = None,
     pkg_name: Optional[str] = None,
     origin: Optional[TaskGroupV2OriginEnum] = None,
-    timestamp_last_used_min: Optional[datetime] = None,
-    timestamp_last_used_max: Optional[datetime] = None,
+    timestamp_last_used_min: Optional[AwareDatetime] = None,
+    timestamp_last_used_max: Optional[AwareDatetime] = None,
     user: UserOAuth = Depends(current_active_superuser),
     db: AsyncSession = Depends(get_async_db),
 ) -> list[TaskGroupReadV2]:
 
     stm = select(TaskGroupV2)
 
-    _raise_if_naive_datetime(
-        timestamp_last_used_max,
-        timestamp_last_used_min,
-    )
-
     if user_group_id is not None and private is True:
         raise HTTPException(
             status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
```
fractal_server/app/routes/api/v2/__init__.py
CHANGED
```diff
@@ -4,10 +4,10 @@
 from fastapi import APIRouter
 
 from .dataset import router as dataset_router_v2
+from .history import router as history_router_v2
 from .images import router as images_routes_v2
 from .job import router as job_router_v2
 from .project import router as project_router_v2
-from .status import router as status_router_v2
 from .submit import router as submit_job_router_v2
 from .task import router as task_router_v2
 from .task_collection import router as task_collection_router_v2
@@ -28,6 +28,7 @@ router_api_v2.include_router(job_router_v2, tags=["V2 Job"])
 router_api_v2.include_router(images_routes_v2, tags=["V2 Images"])
 router_api_v2.include_router(project_router_v2, tags=["V2 Project"])
 router_api_v2.include_router(submit_job_router_v2, tags=["V2 Job"])
+router_api_v2.include_router(history_router_v2, tags=["V2 History"])
 
 
 settings = Inject(get_settings)
@@ -56,4 +57,3 @@ router_api_v2.include_router(
     workflow_import_router_v2, tags=["V2 Workflow Import"]
 )
 router_api_v2.include_router(workflowtask_router_v2, tags=["V2 WorkflowTask"])
-router_api_v2.include_router(status_router_v2, tags=["V2 Status"])
```
fractal_server/app/routes/api/v2/_aux_functions.py
CHANGED
```diff
@@ -417,3 +417,81 @@ async def clean_app_job_list_v2(
         if job.status == JobStatusTypeV2.SUBMITTED
     ]
     return submitted_job_ids
+
+
+async def _get_workflow_check_history_owner(
+    *,
+    workflow_id: int,
+    dataset_id: int,
+    user_id: int,
+    db: AsyncSession,
+) -> list[int]:
+    """
+    Verify user access for the history of this dataset and workflowtask.
+
+    Args:
+        dataset_id:
+        workflow_task_id:
+        user_id:
+        db:
+
+    Returns:
+        List of WorkflowTask IDs
+    """
+    workflow = await db.get(WorkflowV2, workflow_id)
+    if workflow is None:
+        raise HTTPException(
+            status_code=status.HTTP_404_NOT_FOUND,
+            detail="Workflow not found.",
+        )
+    await _get_project_check_owner(
+        project_id=workflow.project_id,
+        user_id=user_id,
+        db=db,
+    )
+    dataset = await db.get(DatasetV2, dataset_id)
+    if dataset is None:
+        raise HTTPException(
+            status_code=status.HTTP_404_NOT_FOUND,
+            detail="Dataset not found.",
+        )
+    if workflow.project_id != dataset.project_id:
+        raise HTTPException(
+            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+            detail="Dataset and workflow belong to different projects.",
+        )
+
+    return [wftask.id for wftask in workflow.task_list]
+
+
+async def _get_workflowtask_check_history_owner(
+    *,
+    workflowtask_id: int,
+    dataset_id: int,
+    user_id: int,
+    db: AsyncSession,
+) -> list[int]:
+    """
+    Verify user access for the history of this dataset and workflowtask.
+
+    Args:
+        dataset_id:
+        workflow_task_id:
+        user_id:
+        db:
+
+    Returns:
+        List of WorkflowTask IDs
+    """
+    workflowtask = await db.get(WorkflowTaskV2, workflowtask_id)
+    if workflowtask is None:
+        raise HTTPException(
+            status_code=status.HTTP_404_NOT_FOUND,
+            detail="WorkflowTask not found.",
+        )
+    await _get_workflow_check_history_owner(
+        workflow_id=workflowtask.workflow_id,
+        dataset_id=dataset_id,
+        user_id=user_id,
+        db=db,
+    )
```
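A history route could then delegate its access control to these helpers. The endpoint below is a hypothetical sketch: only the `_get_workflowtask_check_history_owner` call comes from this diff, while the path, handler, and response are invented for illustration.

```python
from fastapi import APIRouter, Depends

from fractal_server.app.db import AsyncSession, get_async_db
from fractal_server.app.models import UserOAuth
from fractal_server.app.routes.auth import current_active_user
from fractal_server.app.routes.api.v2._aux_functions import (
    _get_workflowtask_check_history_owner,
)

router = APIRouter()


@router.get("/history/")  # hypothetical path
async def get_history(
    workflowtask_id: int,
    dataset_id: int,
    user: UserOAuth = Depends(current_active_user),
    db: AsyncSession = Depends(get_async_db),
) -> dict:
    # Raises 404/422 if objects are missing or belong to different projects.
    await _get_workflowtask_check_history_owner(
        workflowtask_id=workflowtask_id,
        dataset_id=dataset_id,
        user_id=user.id,
        db=db,
    )
    return {"ok": True}
```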