fractal-server 2.14.0a9__py3-none-any.whl → 2.14.0a10__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fractal_server/__init__.py +1 -1
- fractal_server/app/models/v2/dataset.py +0 -10
- fractal_server/app/models/v2/job.py +3 -0
- fractal_server/app/routes/api/v2/__init__.py +2 -0
- fractal_server/app/routes/api/v2/history.py +14 -9
- fractal_server/app/routes/api/v2/images.py +5 -2
- fractal_server/app/routes/api/v2/submit.py +16 -14
- fractal_server/app/routes/api/v2/verify_image_types.py +64 -0
- fractal_server/app/routes/api/v2/workflow.py +11 -7
- fractal_server/app/runner/executors/slurm_ssh/_check_job_status_ssh.py +67 -0
- fractal_server/app/runner/executors/slurm_ssh/runner.py +711 -0
- fractal_server/app/runner/executors/slurm_sudo/runner.py +76 -30
- fractal_server/app/runner/v2/__init__.py +1 -0
- fractal_server/app/runner/v2/_local.py +2 -0
- fractal_server/app/runner/v2/_slurm_ssh.py +2 -0
- fractal_server/app/runner/v2/_slurm_sudo.py +2 -0
- fractal_server/app/runner/v2/runner.py +6 -8
- fractal_server/app/runner/v2/runner_functions.py +9 -4
- fractal_server/app/schemas/v2/dataset.py +4 -71
- fractal_server/app/schemas/v2/dumps.py +6 -5
- fractal_server/app/schemas/v2/job.py +6 -3
- fractal_server/migrations/versions/47351f8c7ebc_drop_dataset_filters.py +50 -0
- fractal_server/migrations/versions/e81103413827_add_job_type_filters.py +36 -0
- {fractal_server-2.14.0a9.dist-info → fractal_server-2.14.0a10.dist-info}/METADATA +1 -1
- {fractal_server-2.14.0a9.dist-info → fractal_server-2.14.0a10.dist-info}/RECORD +29 -24
- /fractal_server/app/runner/executors/{slurm_sudo → slurm_common}/_check_jobs_status.py +0 -0
- {fractal_server-2.14.0a9.dist-info → fractal_server-2.14.0a10.dist-info}/LICENSE +0 -0
- {fractal_server-2.14.0a9.dist-info → fractal_server-2.14.0a10.dist-info}/WHEEL +0 -0
- {fractal_server-2.14.0a9.dist-info → fractal_server-2.14.0a10.dist-info}/entry_points.txt +0 -0
fractal_server/__init__.py
CHANGED
@@ -1 +1 @@
-__VERSION__ = "2.14.0a9"
+__VERSION__ = "2.14.0a10"
fractal_server/app/models/v2/dataset.py
CHANGED
@@ -11,7 +11,6 @@ from sqlmodel import Relationship
 from sqlmodel import SQLModel
 
 from ....utils import get_timestamp
-from fractal_server.images.models import AttributeFiltersType
 
 
 class DatasetV2(SQLModel, table=True):
@@ -34,20 +33,11 @@ class DatasetV2(SQLModel, table=True):
         sa_column=Column(DateTime(timezone=True), nullable=False),
     )
 
-    # New in V2
-
     zarr_dir: str
     images: list[dict[str, Any]] = Field(
        sa_column=Column(JSON, server_default="[]", nullable=False)
     )
 
-    type_filters: dict[str, bool] = Field(
-        sa_column=Column(JSON, nullable=False, server_default="{}")
-    )
-    attribute_filters: AttributeFiltersType = Field(
-        sa_column=Column(JSON, nullable=False, server_default="{}")
-    )
-
     @property
     def image_zarr_urls(self) -> list[str]:
         return [image["zarr_url"] for image in self.images]
fractal_server/app/routes/api/v2/__init__.py
CHANGED
@@ -15,6 +15,7 @@ from .task_collection import router as task_collection_router_v2
 from .task_collection_custom import router as task_collection_router_v2_custom
 from .task_group import router as task_group_router_v2
 from .task_group_lifecycle import router as task_group_lifecycle_router_v2
+from .verify_image_types import router as verify_image_types_router
 from .workflow import router as workflow_router_v2
 from .workflow_import import router as workflow_import_router_v2
 from .workflowtask import router as workflowtask_router_v2
@@ -25,6 +26,7 @@ from fractal_server.syringe import Inject
 router_api_v2 = APIRouter()
 
 router_api_v2.include_router(dataset_router_v2, tags=["V2 Dataset"])
+router_api_v2.include_router(verify_image_types_router, tags=["V2 Job"])
 router_api_v2.include_router(job_router_v2, tags=["V2 Job"])
 router_api_v2.include_router(images_routes_v2, tags=["V2 Images"])
 router_api_v2.include_router(project_router_v2, tags=["V2 Project"])
fractal_server/app/routes/api/v2/history.py
CHANGED
@@ -1,3 +1,5 @@
+from typing import Optional
+
 from fastapi import APIRouter
 from fastapi import Depends
 from fastapi import HTTPException
@@ -165,6 +167,7 @@ async def get_history_run_units(
     dataset_id: int,
     workflowtask_id: int,
     history_run_id: int,
+    unit_status: Optional[HistoryUnitStatus] = None,
     user: UserOAuth = Depends(current_active_user),
     db: AsyncSession = Depends(get_async_db),
     pagination: PaginationRequest = Depends(get_pagination_params),
@@ -183,21 +186,23 @@ async def get_history_run_units(
     await get_history_run_or_404(history_run_id=history_run_id, db=db)
 
     # Count `HistoryUnit`s
-    res = await db.execute(
-        select(func.count(HistoryUnit.id)).where(
-            HistoryUnit.history_run_id == history_run_id
-        )
+    stmt = select(func.count(HistoryUnit.id)).where(
+        HistoryUnit.history_run_id == history_run_id
     )
+    if unit_status:
+        stmt = stmt.where(HistoryUnit.status == unit_status)
+    res = await db.execute(stmt)
     total_count = res.scalar()
     page_size = pagination.page_size or total_count
 
     # Query `HistoryUnit`s
-    res = await db.execute(
-        select(HistoryUnit)
-        .where(HistoryUnit.history_run_id == history_run_id)
-        .offset((pagination.page - 1) * page_size)
-        .limit(page_size)
+    stmt = select(HistoryUnit).where(
+        HistoryUnit.history_run_id == history_run_id
     )
+    if unit_status:
+        stmt = stmt.where(HistoryUnit.status == unit_status)
+    stmt = stmt.offset((pagination.page - 1) * page_size).limit(page_size)
+    res = await db.execute(stmt)
     units = res.scalars().all()
 
     return dict(
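The pattern introduced here, building the statement first and executing it only at the end, is what makes the optional `unit_status` filter easy to bolt on. Below is a minimal, self-contained sketch of the same idea with plain SQLAlchemy Core; the table and column names are illustrative stand-ins, not fractal-server's actual models.

```python
from typing import Optional

from sqlalchemy import Column, Integer, MetaData, String, Table, func, select

metadata = MetaData()
history_unit = Table(
    "history_unit",
    metadata,
    Column("id", Integer, primary_key=True),
    Column("history_run_id", Integer),
    Column("status", String),
)


def count_units_stmt(history_run_id: int, unit_status: Optional[str] = None):
    # Start from the unconditional count query
    stmt = select(func.count(history_unit.c.id)).where(
        history_unit.c.history_run_id == history_run_id
    )
    # Chained .where() calls are ANDed together, so the optional filter
    # is simply appended when provided
    if unit_status:
        stmt = stmt.where(history_unit.c.status == unit_status)
    return stmt


print(count_units_stmt(1))          # count filtered by history_run_id only
print(count_units_stmt(1, "done"))  # count filtered by run and by status
```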
fractal_server/app/routes/api/v2/images.py
CHANGED
@@ -41,7 +41,6 @@ class ImagePage(PaginationResponse[SingleImage]):
 
 
 class ImageQuery(BaseModel):
-    zarr_url: Optional[str] = None
     type_filters: dict[str, bool] = Field(default_factory=dict)
     attribute_filters: AttributeFiltersType = Field(default_factory=dict)
 
@@ -56,6 +55,10 @@
     )
 
 
+class ImageQueryWithZarrUrl(ImageQuery):
+    zarr_url: Optional[str] = None
+
+
 @router.post(
     "/project/{project_id}/dataset/{dataset_id}/images/",
     status_code=status.HTTP_201_CREATED,
@@ -115,7 +118,7 @@ async def post_new_image(
 async def query_dataset_images(
     project_id: int,
     dataset_id: int,
-    query: Optional[ImageQuery] = None,
+    query: Optional[ImageQueryWithZarrUrl] = None,
     pagination: PaginationRequest = Depends(get_pagination_params),
     user: UserOAuth = Depends(current_active_user),
     db: AsyncSession = Depends(get_async_db),
fractal_server/app/routes/api/v2/submit.py
CHANGED
@@ -11,30 +11,32 @@ from fastapi import Request
 from fastapi import status
 from sqlmodel import select
 
-from .....config import get_settings
-from .....logger import set_logger
-from .....syringe import Inject
-from ....db import AsyncSession
-from ....db import get_async_db
-from ....models.v2 import JobV2
-from ....runner.set_start_and_last_task_index import (
-    set_start_and_last_task_index,
-)
-from ....runner.v2 import submit_workflow
-from ....schemas.v2 import JobCreateV2
-from ....schemas.v2 import JobReadV2
-from ....schemas.v2 import JobStatusTypeV2
-from ...aux.validate_user_settings import validate_user_settings
 from ._aux_functions import _get_dataset_check_owner
 from ._aux_functions import _get_workflow_check_owner
 from ._aux_functions import clean_app_job_list_v2
 from ._aux_functions_tasks import _check_type_filters_compatibility
+from fractal_server.app.db import AsyncSession
+from fractal_server.app.db import get_async_db
 from fractal_server.app.models import TaskGroupV2
 from fractal_server.app.models import UserOAuth
+from fractal_server.app.models.v2 import JobV2
 from fractal_server.app.routes.api.v2._aux_functions_tasks import (
     _get_task_read_access,
 )
 from fractal_server.app.routes.auth import current_active_verified_user
+from fractal_server.app.routes.aux.validate_user_settings import (
+    validate_user_settings,
+)
+from fractal_server.app.runner.set_start_and_last_task_index import (
+    set_start_and_last_task_index,
+)
+from fractal_server.app.runner.v2 import submit_workflow
+from fractal_server.app.schemas.v2 import JobCreateV2
+from fractal_server.app.schemas.v2 import JobReadV2
+from fractal_server.app.schemas.v2 import JobStatusTypeV2
+from fractal_server.config import get_settings
+from fractal_server.logger import set_logger
+from fractal_server.syringe import Inject
 
 
 router = APIRouter()
fractal_server/app/routes/api/v2/verify_image_types.py
ADDED
@@ -0,0 +1,64 @@
+from typing import Optional
+
+from fastapi import APIRouter
+from fastapi import Depends
+from fastapi import status
+
+from ._aux_functions import _get_dataset_check_owner
+from .images import ImageQuery
+from fractal_server.app.db import AsyncSession
+from fractal_server.app.db import get_async_db
+from fractal_server.app.models import UserOAuth
+from fractal_server.app.routes.auth import current_active_user
+from fractal_server.images.tools import filter_image_list
+
+router = APIRouter()
+
+
+@router.post(
+    "/project/{project_id}/dataset/{dataset_id}/images/verify-unique-types/",
+    status_code=status.HTTP_200_OK,
+)
+async def verify_unique_types(
+    project_id: int,
+    dataset_id: int,
+    query: Optional[ImageQuery] = None,
+    user: UserOAuth = Depends(current_active_user),
+    db: AsyncSession = Depends(get_async_db),
+) -> list[str]:
+    # Get dataset
+    output = await _get_dataset_check_owner(
+        project_id=project_id, dataset_id=dataset_id, user_id=user.id, db=db
+    )
+    dataset = output["dataset"]
+
+    # Filter images
+    if query is None:
+        filtered_images = dataset.images
+    else:
+        filtered_images = filter_image_list(
+            images=dataset.images,
+            attribute_filters=query.attribute_filters,
+            type_filters=query.type_filters,
+        )
+
+    # Get all available types (#FIXME use aux function)
+    available_types = set(
+        _type for _img in filtered_images for _type in _img["types"].keys()
+    )
+
+    # Get actual values for each available type
+    values_per_type: dict[str, set] = {
+        _type: set() for _type in available_types
+    }
+    for _img in filtered_images:
+        for _type in available_types:
+            values_per_type[_type].add(_img["types"].get(_type, False))
+
+    # Find types with non-unique value
+    non_unique_types = [
+        key for key, value in values_per_type.items() if len(value) > 1
+    ]
+    non_unique_types = sorted(non_unique_types)
+
+    return non_unique_types
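The core of this new endpoint is the "non-unique types" check: a type is reported when the filtered images disagree on its value, with a missing type counted as `False`. A standalone sketch of that logic on hypothetical image dictionaries (not fractal-server code) follows.

```python
def find_non_unique_types(images: list[dict]) -> list[str]:
    # Collect every type key that appears in at least one image
    available_types = {t for img in images for t in img["types"]}
    # For each type, gather the set of observed values; absent types count as False
    values_per_type: dict[str, set] = {t: set() for t in available_types}
    for img in images:
        for t in available_types:
            values_per_type[t].add(img["types"].get(t, False))
    # A type is "non-unique" when more than one distinct value was observed
    return sorted(t for t, values in values_per_type.items() if len(values) > 1)


# Hypothetical example: "3D" differs across images, "illumination_corrected" does not
images = [
    {"zarr_url": "/tmp/a.zarr", "types": {"3D": True, "illumination_corrected": True}},
    {"zarr_url": "/tmp/b.zarr", "types": {"illumination_corrected": True}},
]
print(find_non_unique_types(images))  # ['3D']
```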
fractal_server/app/routes/api/v2/workflow.py
CHANGED
@@ -290,6 +290,7 @@ async def get_user_workflows(
 
 
 class WorkflowTaskTypeFiltersInfo(BaseModel):
+    workflowtask_id: int
     current_type_filters: dict[str, bool]
     input_type_filters: dict[str, bool]
     output_type_filters: dict[str, bool]
@@ -301,7 +302,7 @@ async def get_workflow_type_filters(
     workflow_id: int,
     user: UserOAuth = Depends(current_active_user),
     db: AsyncSession = Depends(get_async_db),
-) ->
+) -> list[WorkflowTaskTypeFiltersInfo]:
     """
     Get info on type/type-filters flow for a workflow.
     """
@@ -322,7 +323,7 @@ async def get_workflow_type_filters(
 
     current_type_filters = {}
 
-
+    response_items = []
     for wftask in workflow.task_list:
 
         # Compute input_type_filters, based on wftask and task manifest
@@ -332,13 +333,16 @@
         )
 
         # Append current item to response list
-
-
-
-
+        response_items.append(
+            dict(
+                workflowtask_id=wftask.id,
+                current_type_filters=copy(current_type_filters),
+                input_type_filters=copy(input_type_filters),
+                output_type_filters=copy(wftask.task.output_types),
+            )
         )
 
         # Update `current_type_filters`
         current_type_filters.update(wftask.task.output_types)
 
-    return
+    return response_items
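Note that the loop snapshots `current_type_filters` with `copy()` before updating it with each task's output types; without the copy, every response item would end up referencing the same, fully-updated dictionary. A small illustration of that accumulation pattern (toy data, not fractal-server code):

```python
from copy import copy

# Toy stand-ins for each task's output_types, applied in workflow order
output_types_per_task = [{"3D": True}, {"3D": False, "registered": True}]

current_type_filters: dict[str, bool] = {}
snapshots = []
for output_types in output_types_per_task:
    # Snapshot the filters *as seen by this task*, before applying its outputs
    snapshots.append(copy(current_type_filters))
    current_type_filters.update(output_types)

print(snapshots)             # [{}, {'3D': True}]
print(current_type_filters)  # {'3D': False, 'registered': True}
```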
fractal_server/app/runner/executors/slurm_ssh/_check_job_status_ssh.py
ADDED
@@ -0,0 +1,67 @@
+from fractal_server.app.runner.executors.slurm_common._job_states import (
+    STATES_FINISHED,
+)
+from fractal_server.logger import set_logger
+from fractal_server.ssh._fabric import FractalSSH
+
+logger = set_logger(__name__)
+
+
+def run_squeue(
+    *,
+    job_ids: list[str],
+    fractal_ssh: FractalSSH,
+) -> str:
+    job_id_single_str = ",".join([str(j) for j in job_ids])
+    cmd = (
+        f"squeue --noheader --format='%i %T' --jobs {job_id_single_str}"
+        " --states=all"
+    )
+    stdout = fractal_ssh.run_command(cmd)
+    return stdout
+
+
+def get_finished_jobs_ssh(
+    *,
+    fractal_ssh: FractalSSH,
+    job_ids: list[str],
+) -> set[str]:
+    """
+    # FIXME: make uniform with non-ssh one
+
+    Check which ones of the given Slurm jobs already finished
+
+    The function is based on the `_jobs_finished` function from
+    clusterfutures (version 0.5).
+    Original Copyright: 2022 Adrian Sampson
+    (released under the MIT licence)
+    """
+
+    # If there is no Slurm job to check, return right away
+    if not job_ids:
+        return set()
+
+    id_to_state = dict()
+
+    try:
+        stdout = run_squeue(job_ids=job_ids, fractal_ssh=fractal_ssh)
+        id_to_state = {
+            line.split()[0]: line.split()[1] for line in stdout.splitlines()
+        }
+    except Exception:  # FIXME
+        id_to_state = dict()
+        for j in job_ids:
+            try:
+                stdout = run_squeue([j])
+                id_to_state.update({stdout.split()[0]: stdout.split()[1]})
+            except Exception:
+                logger.info(f"Job {j} not found. Marked it as completed")
+                id_to_state.update({str(j): "COMPLETED"})
+
+    # Finished jobs only stay in squeue for a few mins (configurable). If
+    # a job ID isn't there, we'll assume it's finished.
+    return {
+        j
+        for j in job_ids
+        if id_to_state.get(j, "COMPLETED") in STATES_FINISHED
+    }