fractal-server 2.14.0a7__py3-none-any.whl → 2.14.0a8__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as published to a supported registry. It is provided for informational purposes only.
- fractal_server/__init__.py +1 -1
- fractal_server/app/history/__init__.py +0 -4
- fractal_server/app/routes/api/v2/_aux_functions.py +35 -45
- fractal_server/app/routes/api/v2/_aux_functions_history.py +109 -0
- fractal_server/app/routes/api/v2/history.py +42 -71
- fractal_server/app/routes/api/v2/job.py +30 -0
- fractal_server/app/routes/api/v2/status_legacy.py +5 -5
- fractal_server/app/runner/executors/slurm_ssh/executor.py +3 -4
- fractal_server/app/runner/v2/runner.py +13 -16
- fractal_server/app/runner/v2/runner_functions.py +23 -21
- fractal_server/app/schemas/v2/__init__.py +7 -1
- fractal_server/app/schemas/v2/dumps.py +20 -4
- fractal_server/app/schemas/v2/history.py +54 -0
- fractal_server/app/schemas/v2/status_legacy.py +35 -0
- fractal_server/app/schemas/v2/task.py +1 -3
- fractal_server/app/schemas/v2/workflowtask.py +0 -20
- {fractal_server-2.14.0a7.dist-info → fractal_server-2.14.0a8.dist-info}/METADATA +2 -2
- {fractal_server-2.14.0a7.dist-info → fractal_server-2.14.0a8.dist-info}/RECORD +21 -22
- {fractal_server-2.14.0a7.dist-info → fractal_server-2.14.0a8.dist-info}/WHEEL +1 -1
- fractal_server/app/history/image_updates.py +0 -124
- fractal_server/app/history/status_enum.py +0 -16
- fractal_server/app/schemas/v2/status.py +0 -16
- {fractal_server-2.14.0a7.dist-info → fractal_server-2.14.0a8.dist-info}/LICENSE +0 -0
- {fractal_server-2.14.0a7.dist-info → fractal_server-2.14.0a8.dist-info}/entry_points.txt +0 -0
fractal_server/__init__.py
CHANGED
@@ -1 +1 @@
-__VERSION__ = "2.14.0a7"
+__VERSION__ = "2.14.0a8"
fractal_server/app/routes/api/v2/_aux_functions.py
CHANGED
@@ -419,77 +419,67 @@ async def clean_app_job_list_v2(
     return submitted_job_ids


-async def
+async def _get_dataset_or_404(
     *,
-    workflow_id: int,
     dataset_id: int,
-    user_id: int,
     db: AsyncSession,
-) ->
+) -> DatasetV2:
     """
-
+    Get a dataset or raise 404.

     Args:
         dataset_id:
-        workflow_task_id:
-        user_id:
         db:
-
-    Returns:
-        List of WorkflowTask IDs
     """
-
-    if
+    ds = await db.get(DatasetV2, dataset_id)
+    if ds is None:
         raise HTTPException(
             status_code=status.HTTP_404_NOT_FOUND,
-            detail="
+            detail=f"Dataset {dataset_id} not found.",
         )
-
-
-
-
-
-
-
+    else:
+        return ds
+
+
+async def _get_workflow_or_404(
+    *,
+    workflow_id: int,
+    db: AsyncSession,
+) -> WorkflowV2:
+    """
+    Get a workflow or raise 404.
+
+    Args:
+        workflow_id:
+        db:
+    """
+    wf = await db.get(WorkflowV2, workflow_id)
+    if wf is None:
         raise HTTPException(
             status_code=status.HTTP_404_NOT_FOUND,
-            detail="
+            detail=f"Workflow {workflow_id} not found.",
         )
-
-
-            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
-            detail="Dataset and workflow belong to different projects.",
-        )
-
-    return [wftask.id for wftask in workflow.task_list]
+    else:
+        return wf


-async def
+async def _get_workflowtask_or_404(
     *,
     workflowtask_id: int,
-    dataset_id: int,
-    user_id: int,
     db: AsyncSession,
 ) -> WorkflowTaskV2:
     """
-
+    Get a workflow task or raise 404.

     Args:
-
-        workflow_task_id:
-        user_id:
+        workflowtask_id:
         db:
     """
-
-    if
+    wftask = await db.get(WorkflowTaskV2, workflowtask_id)
+    if wftask is None:
         raise HTTPException(
             status_code=status.HTTP_404_NOT_FOUND,
-            detail="WorkflowTask not found.",
+            detail=f"WorkflowTask {workflowtask_id} not found.",
         )
-
-
-        dataset_id=dataset_id,
-        user_id=user_id,
-        db=db,
-    )
-    return workflowtask
+    else:
+        return wftask
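
The three new helpers above share a single get-or-404 shape. A minimal standalone sketch of that pattern (the generic `_get_or_404` and its signature are illustrative, not part of the package):

from fastapi import HTTPException, status
from sqlalchemy.ext.asyncio import AsyncSession

async def _get_or_404(db: AsyncSession, model: type, object_id: int):
    # AsyncSession.get returns None when no row matches the primary key,
    # so a missing object maps directly onto an HTTP 404 response.
    obj = await db.get(model, object_id)
    if obj is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"{model.__name__} {object_id} not found.",
        )
    return obj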
fractal_server/app/routes/api/v2/_aux_functions_history.py
CHANGED
@@ -1,11 +1,25 @@
 from pathlib import Path
+from typing import Literal

 from fastapi import HTTPException
 from fastapi import status

 from fractal_server.app.db import AsyncSession
 from fractal_server.app.models import WorkflowTaskV2
+from fractal_server.app.models.v2 import DatasetV2
+from fractal_server.app.models.v2 import HistoryRun
 from fractal_server.app.models.v2 import HistoryUnit
+from fractal_server.app.models.v2 import WorkflowV2
+from fractal_server.app.routes.api.v2._aux_functions import _get_dataset_or_404
+from fractal_server.app.routes.api.v2._aux_functions import (
+    _get_project_check_owner,
+)
+from fractal_server.app.routes.api.v2._aux_functions import (
+    _get_workflow_or_404,
+)
+from fractal_server.app.routes.api.v2._aux_functions import (
+    _get_workflowtask_or_404,
+)


 async def get_history_unit_or_404(
@@ -27,6 +41,25 @@ async def get_history_unit_or_404(
     return history_unit


+async def get_history_run_or_404(
+    *, history_run_id: int, db: AsyncSession
+) -> HistoryRun:
+    """
+    Get an existing HistoryRun or raise a 404.
+
+    Arguments:
+        history_run_id:
+        db:
+    """
+    history_run = await db.get(HistoryRun, history_run_id)
+    if history_run is None:
+        raise HTTPException(
+            status_code=status.HTTP_404_NOT_FOUND,
+            detail=f"HistoryRun {history_run_id} not found",
+        )
+    return history_run
+
+
 def read_log_file(
     *,
     logfile: str | None,
@@ -47,3 +80,79 @@ def read_log_file(
             f"Error while retrieving logs for task '{wftask.task.name}' "
             f"in dataset {dataset_id}. Original error: {str(e)}."
         )
+
+
+async def _verify_workflow_and_dataset_access(
+    *,
+    project_id: int,
+    workflow_id: int,
+    dataset_id: int,
+    user_id: int,
+    db: AsyncSession,
+) -> dict[Literal["dataset", "workflow"], DatasetV2 | WorkflowV2]:
+    """
+    Verify user access to a dataset/workflow pair.
+
+    Args:
+        dataset_id:
+        workflow_task_id:
+        user_id:
+        db:
+    """
+    await _get_project_check_owner(
+        project_id=project_id,
+        user_id=user_id,
+        db=db,
+    )
+    workflow = await _get_workflow_or_404(
+        workflow_id=workflow_id,
+        db=db,
+    )
+    if workflow.project_id != project_id:
+        raise HTTPException(
+            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+            detail="Workflow does not belong to expected project.",
+        )
+    dataset = await _get_dataset_or_404(
+        dataset_id=dataset_id,
+        db=db,
+    )
+    if dataset.project_id != project_id:
+        raise HTTPException(
+            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+            detail="Dataset does not belong to expected project.",
+        )
+
+    return dict(dataset=dataset, workflow=workflow)
+
+
+async def get_wftask_check_owner(
+    *,
+    project_id: int,
+    dataset_id: int,
+    workflowtask_id: int,
+    user_id: int,
+    db: AsyncSession,
+) -> WorkflowTaskV2:
+    """
+    Verify user access for the history of this dataset and workflowtask.
+
+    Args:
+        project_id:
+        dataset_id:
+        workflow_task_id:
+        user_id:
+        db:
+    """
+    wftask = await _get_workflowtask_or_404(
+        workflowtask_id=workflowtask_id,
+        db=db,
+    )
+    await _verify_workflow_and_dataset_access(
+        project_id=project_id,
+        dataset_id=dataset_id,
+        workflow_id=wftask.workflow_id,
+        user_id=user_id,
+        db=db,
+    )
+    return wftask
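
Routes can then collapse their access-control preamble into a single call to the new helper. A hedged sketch of typical usage (the endpoint itself is illustrative, not taken from the diff):

@router.get("/project/{project_id}/status/example/")
async def example_route(
    project_id: int,
    dataset_id: int,
    workflowtask_id: int,
    user: UserOAuth = Depends(current_active_user),
    db: AsyncSession = Depends(get_async_db),
):
    # One call covers project ownership, the 404 checks, and the
    # dataset/workflow cross-project consistency checks.
    wftask = await get_wftask_check_owner(
        project_id=project_id,
        dataset_id=dataset_id,
        workflowtask_id=workflowtask_id,
        user_id=user.id,
        db=db,
    )
    ...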
fractal_server/app/routes/api/v2/history.py
CHANGED
@@ -1,26 +1,20 @@
-from datetime import datetime
-from typing import Any
-from typing import Optional
-
 from fastapi import APIRouter
 from fastapi import Depends
 from fastapi import HTTPException
 from fastapi import status
 from fastapi.responses import JSONResponse
-from pydantic import AwareDatetime
-from pydantic import BaseModel
-from pydantic import field_serializer
 from sqlmodel import func
 from sqlmodel import select

 from ._aux_functions import _get_dataset_check_owner
 from ._aux_functions import _get_workflow_check_owner
-from .
+from ._aux_functions_history import _verify_workflow_and_dataset_access
+from ._aux_functions_history import get_history_run_or_404
 from ._aux_functions_history import get_history_unit_or_404
+from ._aux_functions_history import get_wftask_check_owner
 from ._aux_functions_history import read_log_file
 from fractal_server.app.db import AsyncSession
 from fractal_server.app.db import get_async_db
-from fractal_server.app.history.status_enum import XXXStatus
 from fractal_server.app.models import UserOAuth
 from fractal_server.app.models.v2 import HistoryImageCache
 from fractal_server.app.models.v2 import HistoryRun
@@ -29,6 +23,11 @@ from fractal_server.app.routes.auth import current_active_user
 from fractal_server.app.routes.pagination import get_pagination_params
 from fractal_server.app.routes.pagination import PaginationRequest
 from fractal_server.app.routes.pagination import PaginationResponse
+from fractal_server.app.schemas.v2 import HistoryRunReadAggregated
+from fractal_server.app.schemas.v2 import HistoryUnitRead
+from fractal_server.app.schemas.v2 import HistoryUnitStatus
+from fractal_server.app.schemas.v2 import ImageLogsRequest
+from fractal_server.app.schemas.v2 import ZarrUrlAndStatus
 from fractal_server.images.tools import filter_image_list
 from fractal_server.images.tools import merge_type_filters
 from fractal_server.logger import set_logger
@@ -45,12 +44,21 @@ async def get_workflow_tasks_statuses(
     user: UserOAuth = Depends(current_active_user),
     db: AsyncSession = Depends(get_async_db),
 ) -> JSONResponse:
+
+    # Access control
     workflow = await _get_workflow_check_owner(
         project_id=project_id,
         workflow_id=workflow_id,
         user_id=user.id,
         db=db,
     )
+    await _get_dataset_check_owner(
+        project_id=project_id,
+        dataset_id=dataset_id,
+        user_id=user.id,
+        db=db,
+    )
+
     response = {}
     for wftask in workflow.task_list:
         res = await db.execute(
@@ -69,7 +77,7 @@ async def get_workflow_tasks_statuses(
                 num_available_images=latest_history_run.num_available_images,
             )

-        for target_status in
+        for target_status in HistoryUnitStatus:
             stm = (
                 select(func.count(HistoryImageCache.zarr_url))
                 .join(HistoryUnit)
@@ -89,43 +97,6 @@ async def get_workflow_tasks_statuses(
     return JSONResponse(content=response, status_code=200)


-# FIXME MOVE TO SCHEMAS
-
-
-class HistoryUnitRead(BaseModel):
-    id: int
-    logfile: Optional[str] = None
-    status: XXXStatus
-    zarr_urls: list[str]
-
-
-class HistoryRunReadAggregated(BaseModel):
-    id: int
-    timestamp_started: AwareDatetime
-    workflowtask_dump: dict[str, Any]
-    num_submitted_units: int
-    num_done_units: int
-    num_failed_units: int
-
-    @field_serializer("timestamp_started")
-    def serialize_datetime(v: datetime) -> str:
-        return v.isoformat()
-
-
-class ImageLogsRequest(BaseModel):
-    workflowtask_id: int
-    dataset_id: int
-    zarr_url: str
-
-
-class ImageWithStatus(BaseModel):
-    zarr_url: str
-    status: Optional[XXXStatus] = None
-
-
-# end FIXME
-
-
 @router.get("/project/{project_id}/status/run/")
 async def get_history_run_list(
     project_id: int,
@@ -134,8 +105,10 @@ async def get_history_run_list(
     user: UserOAuth = Depends(current_active_user),
     db: AsyncSession = Depends(get_async_db),
 ) -> list[HistoryRunReadAggregated]:
+
     # Access control
-    await
+    await get_wftask_check_owner(
+        project_id=project_id,
         dataset_id=dataset_id,
         workflowtask_id=workflowtask_id,
         user_id=user.id,
@@ -152,11 +125,11 @@ async def get_history_run_list(
     res = await db.execute(stm)
     runs = res.scalars().all()

-    #
-
+    # Respond early if there are no runs
     if not runs:
         return []

+    # Add units count by status
     run_ids = [run.id for run in runs]
     stm = (
         select(
@@ -196,30 +169,29 @@ async def get_history_run_units(
     db: AsyncSession = Depends(get_async_db),
     pagination: PaginationRequest = Depends(get_pagination_params),
 ) -> PaginationResponse[HistoryUnitRead]:
+
     # Access control
-    await
+    await get_wftask_check_owner(
+        project_id=project_id,
         dataset_id=dataset_id,
         workflowtask_id=workflowtask_id,
         user_id=user.id,
         db=db,
     )

-
-
-        raise HTTPException(
-            status_code=status.HTTP_404_NOT_FOUND,
-            detail=f"HistoryRun {history_run_id} not found",
-        )
+    # Check that `HistoryRun` exists
+    await get_history_run_or_404(history_run_id=history_run_id, db=db)

+    # Count `HistoryUnit`s
     res = await db.execute(
         select(func.count(HistoryUnit.id)).where(
             HistoryUnit.history_run_id == history_run_id
         )
     )
     total_count = res.scalar()
-
     page_size = pagination.page_size or total_count

+    # Query `HistoryUnit`s
     res = await db.execute(
         select(HistoryUnit)
         .where(HistoryUnit.history_run_id == history_run_id)
@@ -244,30 +216,27 @@ async def get_history_images(
     user: UserOAuth = Depends(current_active_user),
     db: AsyncSession = Depends(get_async_db),
     pagination: PaginationRequest = Depends(get_pagination_params),
-) -> PaginationResponse[
+) -> PaginationResponse[ZarrUrlAndStatus]:
+
     # Access control and object retrieval
-
-    res = await _get_dataset_check_owner(
+    wftask = await get_wftask_check_owner(
         project_id=project_id,
-        dataset_id=dataset_id,
-        user_id=user.id,
-        db=db,
-    )
-    dataset = res["dataset"]
-    wftask = await _get_workflowtask_check_history_owner(
         dataset_id=dataset_id,
         workflowtask_id=workflowtask_id,
         user_id=user.id,
         db=db,
     )
-
+    res = await _verify_workflow_and_dataset_access(
         project_id=project_id,
         workflow_id=wftask.workflow_id,
+        dataset_id=dataset_id,
         user_id=user.id,
         db=db,
     )
+    dataset = res["dataset"]
+    workflow = res["workflow"]

-    #
+    # Setup prefix for logging
     prefix = f"[DS{dataset.id}-WFT{wftask.id}-images]"

     # (1) Get the type-filtered list of dataset images
@@ -359,7 +328,8 @@ async def get_image_log(
     db: AsyncSession = Depends(get_async_db),
 ) -> JSONResponse:
     # Access control
-    wftask = await
+    wftask = await get_wftask_check_owner(
+        project_id=project_id,
         dataset_id=request_data.dataset_id,
         workflowtask_id=request_data.workflowtask_id,
         user_id=user.id,
@@ -406,7 +376,8 @@ async def get_history_unit_log(
     db: AsyncSession = Depends(get_async_db),
 ) -> JSONResponse:
     # Access control
-    wftask = await
+    wftask = await get_wftask_check_owner(
+        project_id=project_id,
         dataset_id=dataset_id,
         workflowtask_id=workflowtask_id,
         user_id=user.id,
fractal_server/app/routes/api/v2/job.py
CHANGED
@@ -5,6 +5,7 @@ from typing import Optional

 from fastapi import APIRouter
 from fastapi import Depends
+from fastapi import HTTPException
 from fastapi import Response
 from fastapi import status
 from fastapi.responses import StreamingResponse
@@ -83,6 +84,35 @@ async def get_workflow_jobs(
     return job_list


+@router.get("/project/{project_id}/latest-job/")
+async def get_latest_job(
+    project_id: int,
+    workflow_id: int,
+    dataset_id: int,
+    user: UserOAuth = Depends(current_active_user),
+    db: AsyncSession = Depends(get_async_db),
+) -> JobReadV2:
+    await _get_workflow_check_owner(
+        project_id=project_id, workflow_id=workflow_id, user_id=user.id, db=db
+    )
+    stm = (
+        select(JobV2)
+        .where(JobV2.project_id == project_id)
+        .where(JobV2.workflow_id == workflow_id)
+        .where(JobV2.dataset_id == dataset_id)
+        .order_by(JobV2.start_timestamp.desc())
+        .limit(1)
+    )
+    res = await db.execute(stm)
+    latest_job = res.scalar_one_or_none()
+    if latest_job is None:
+        raise HTTPException(
+            status_code=status.HTTP_404_NOT_FOUND,
+            detail=f"Job with {workflow_id=} and {dataset_id=} not found.",
+        )
+    return latest_job
+
+
 @router.get(
     "/project/{project_id}/job/{job_id}/",
     response_model=JobReadV2,
fractal_server/app/routes/api/v2/status_legacy.py
CHANGED
@@ -9,8 +9,8 @@ from .....logger import set_logger
 from ....db import AsyncSession
 from ....db import get_async_db
 from ....models.v2 import JobV2
-from ....schemas.v2.
-from ....schemas.v2.
+from ....schemas.v2.status_legacy import LegacyStatusReadV2
+from ....schemas.v2.status_legacy import WorkflowTaskStatusTypeV2
 from ._aux_functions import _get_dataset_check_owner
 from ._aux_functions import _get_submitted_jobs_statement
 from ._aux_functions import _get_workflow_check_owner
@@ -24,7 +24,7 @@ logger = set_logger(__name__)

 @router.get(
     "/project/{project_id}/status-legacy/",
-    response_model=
+    response_model=LegacyStatusReadV2,
 )
 async def get_workflowtask_status(
     project_id: int,
@@ -32,7 +32,7 @@ async def get_workflowtask_status(
     workflow_id: int,
     user: UserOAuth = Depends(current_active_user),
     db: AsyncSession = Depends(get_async_db),
-) -> Optional[
+) -> Optional[LegacyStatusReadV2]:
     """
     Extract the status of all `WorkflowTaskV2` of a given `WorkflowV2` that ran
     on a given `DatasetV2`.
@@ -164,5 +164,5 @@ async def get_workflowtask_status(
         # first time that you hit `last_valid_wftask_id``
         break

-    response_body =
+    response_body = LegacyStatusReadV2(status=clean_workflow_tasks_status_dict)
     return response_body
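
The LegacyStatusReadV2 schema itself lives in the new fractal_server/app/schemas/v2/status_legacy.py module, whose diff is not shown here. Judging only from the usage above, its shape is plausibly something like the following (hypothetical reconstruction, not the actual definition):

from pydantic import BaseModel

class LegacyStatusReadV2(BaseModel):
    # Maps each WorkflowTaskV2 id (serialized as a string key)
    # to its WorkflowTaskStatusTypeV2 value.
    status: dict[str, WorkflowTaskStatusTypeV2]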
fractal_server/app/runner/executors/slurm_ssh/executor.py
CHANGED
@@ -9,7 +9,6 @@ from concurrent.futures import InvalidStateError
 from copy import copy
 from pathlib import Path
 from typing import Any
-from typing import Callable
 from typing import Optional
 from typing import Sequence

@@ -216,7 +215,7 @@ class FractalSlurmSSHExecutor(Executor):

     def submit(
         self,
-        fun:
+        fun: callable,
         *fun_args: Sequence[Any],
         slurm_config: SlurmConfig,
         task_files: TaskFiles,
@@ -278,7 +277,7 @@ class FractalSlurmSSHExecutor(Executor):

     def map(
         self,
-        fn:
+        fn: callable,
         iterable: list[Sequence[Any]],
         *,
         slurm_config: SlurmConfig,
@@ -446,7 +445,7 @@ class FractalSlurmSSHExecutor(Executor):

     def _prepare_job(
         self,
-        fun:
+        fun: callable,
         slurm_file_prefix: str,
         task_files: TaskFiles,
         slurm_config: SlurmConfig,
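
These hunks replace typing.Callable annotations with the builtin callable. Annotations are ordinary expressions, so this runs fine, but static type checkers generally reject it; a minimal illustration:

from collections.abc import Callable

def submit_typed(fun: Callable[..., object]) -> None:
    ...  # what a type checker expects

def submit_loose(fun: callable) -> None:
    ...  # valid at runtime, but flagged by e.g. mypy ("not valid as a type")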
fractal_server/app/runner/v2/runner.py
CHANGED
@@ -2,7 +2,6 @@ import logging
 from copy import copy
 from copy import deepcopy
 from pathlib import Path
-from typing import Callable
 from typing import Optional

 from sqlalchemy.orm.attributes import flag_modified
@@ -20,13 +19,15 @@ from .runner_functions import run_v2_task_non_parallel
 from .runner_functions import run_v2_task_parallel
 from .task_interface import TaskOutput
 from fractal_server.app.db import get_sync_db
-from fractal_server.app.history.status_enum import XXXStatus
 from fractal_server.app.models.v2 import AccountingRecord
 from fractal_server.app.models.v2 import DatasetV2
 from fractal_server.app.models.v2 import HistoryRun
 from fractal_server.app.models.v2 import TaskGroupV2
 from fractal_server.app.models.v2 import WorkflowTaskV2
 from fractal_server.app.runner.executors.base_runner import BaseRunner
+from fractal_server.app.schemas.v2 import HistoryUnitStatus
+from fractal_server.app.schemas.v2 import TaskDumpV2
+from fractal_server.app.schemas.v2 import TaskGroupDumpV2
 from fractal_server.images.models import AttributeFiltersType
 from fractal_server.images.tools import merge_type_filters

@@ -40,7 +41,7 @@ def execute_tasks_v2(
     workflow_dir_local: Path,
     workflow_dir_remote: Optional[Path] = None,
     logger_name: Optional[str] = None,
-    submit_setup_call:
+    submit_setup_call: callable = no_op_submit_setup_call,
     job_attribute_filters: AttributeFiltersType,
 ) -> None:
     logger = logging.getLogger(logger_name)
@@ -86,28 +87,24 @@ def execute_tasks_v2(
         else:
             num_available_images = 0

-        # Create history item
         with next(get_sync_db()) as db:
+            # Create dumps for workflowtask and taskgroup
             workflowtask_dump = dict(
                 **wftask.model_dump(exclude={"task"}),
-                task=wftask.task.model_dump(),
+                task=TaskDumpV2(**wftask.task.model_dump()).model_dump(),
             )
-
-            # Exclude timestamps since they'd need to be serialized properly
             task_group = db.get(TaskGroupV2, wftask.task.taskgroupv2_id)
-            task_group_dump =
-
-
-
-                }
-            )
+            task_group_dump = TaskGroupDumpV2(
+                **task_group.model_dump()
+            ).model_dump()
+            # Create HistoryRun
             history_run = HistoryRun(
                 dataset_id=dataset.id,
                 workflowtask_id=wftask.id,
                 workflowtask_dump=workflowtask_dump,
                 task_group_dump=task_group_dump,
                 num_available_images=num_available_images,
-                status=
+                status=HistoryUnitStatus.SUBMITTED,
             )
             db.add(history_run)
             db.commit()
@@ -361,14 +358,14 @@ def execute_tasks_v2(
                 db.execute(
                     update(HistoryRun)
                     .where(HistoryRun.id == history_run_id)
-                    .values(status=
+                    .values(status=HistoryUnitStatus.DONE)
                 )
                 db.commit()
             else:
                 db.execute(
                     update(HistoryRun)
                     .where(HistoryRun.id == history_run_id)
-                    .values(status=
+                    .values(status=HistoryUnitStatus.FAILED)
                 )
                 db.commit()
                 logger.error(
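
The TaskDumpV2 / TaskGroupDumpV2 round trips above replace the old ad-hoc dict building: validating through the dump schema prunes unknown fields and applies the declared serializers (e.g. for timestamps) before the dump is persisted. A minimal sketch of the idea, with an illustrative schema:

from datetime import datetime, timezone
from pydantic import BaseModel, field_serializer

class ExampleDump(BaseModel):
    id: int
    timestamp: datetime

    @field_serializer("timestamp")
    def _serialize_timestamp(self, v: datetime) -> str:
        return v.isoformat()

raw = {"id": 1, "timestamp": datetime.now(timezone.utc), "extra": "ignored"}
# The round trip drops the unknown key and renders the datetime as an
# ISO string, so the resulting dict is JSON-serializable.
dump = ExampleDump(**raw).model_dump()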
|