fractal-server 2.14.0a3__py3-none-any.whl → 2.14.0a4__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package, as they appear in their respective public registries, and is provided for informational purposes only.
- fractal_server/__init__.py +1 -1
- fractal_server/__main__.py +3 -1
- fractal_server/app/history/__init__.py +4 -4
- fractal_server/app/history/image_updates.py +124 -143
- fractal_server/app/history/status_enum.py +2 -2
- fractal_server/app/models/v2/__init__.py +6 -4
- fractal_server/app/models/v2/history.py +44 -20
- fractal_server/app/routes/api/__init__.py +1 -1
- fractal_server/app/routes/api/v2/__init__.py +4 -0
- fractal_server/app/routes/api/v2/_aux_functions_history.py +49 -0
- fractal_server/app/routes/api/v2/dataset.py +0 -12
- fractal_server/app/routes/api/v2/history.py +301 -186
- fractal_server/app/routes/api/v2/project.py +0 -25
- fractal_server/app/routes/api/v2/status_legacy.py +168 -0
- fractal_server/app/routes/api/v2/workflow.py +2 -17
- fractal_server/app/routes/api/v2/workflowtask.py +41 -71
- fractal_server/app/routes/auth/oauth.py +5 -3
- fractal_server/app/runner/executors/local/runner.py +10 -55
- fractal_server/app/runner/executors/slurm_sudo/runner.py +171 -108
- fractal_server/app/runner/v2/__init__.py +0 -20
- fractal_server/app/runner/v2/runner.py +45 -58
- fractal_server/app/runner/v2/runner_functions.py +164 -22
- fractal_server/app/schemas/_validators.py +13 -24
- fractal_server/app/schemas/user.py +10 -7
- fractal_server/app/schemas/user_settings.py +9 -21
- fractal_server/app/schemas/v2/dataset.py +8 -6
- fractal_server/app/schemas/v2/job.py +9 -5
- fractal_server/app/schemas/v2/manifest.py +2 -6
- fractal_server/app/schemas/v2/project.py +9 -7
- fractal_server/app/schemas/v2/task.py +41 -77
- fractal_server/app/schemas/v2/task_collection.py +14 -32
- fractal_server/app/schemas/v2/task_group.py +10 -9
- fractal_server/app/schemas/v2/workflow.py +10 -11
- fractal_server/app/security/signup_email.py +2 -2
- fractal_server/config.py +31 -32
- fractal_server/migrations/versions/fbce16ff4e47_new_history_items.py +120 -0
- fractal_server/tasks/v2/templates/2_pip_install.sh +1 -1
- fractal_server/tasks/v2/utils_templates.py +6 -0
- {fractal_server-2.14.0a3.dist-info → fractal_server-2.14.0a4.dist-info}/METADATA +1 -1
- {fractal_server-2.14.0a3.dist-info → fractal_server-2.14.0a4.dist-info}/RECORD +43 -44
- fractal_server/app/runner/executors/slurm_sudo/_executor_wait_thread.py +0 -130
- fractal_server/app/schemas/v2/history.py +0 -23
- fractal_server/migrations/versions/87cd72a537a2_add_historyitem_table.py +0 -68
- fractal_server/migrations/versions/954ddc64425a_image_status.py +0 -63
- {fractal_server-2.14.0a3.dist-info → fractal_server-2.14.0a4.dist-info}/LICENSE +0 -0
- {fractal_server-2.14.0a3.dist-info → fractal_server-2.14.0a4.dist-info}/WHEEL +0 -0
- {fractal_server-2.14.0a3.dist-info → fractal_server-2.14.0a4.dist-info}/entry_points.txt +0 -0
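The bulk of this release is the reworked history/status API in fractal_server/app/routes/api/v2/history.py (its diff is reproduced below), which replaces the old HistoryItemV2/ImageStatus queries with the new HistoryRun/HistoryUnit/HistoryImageCache models. As a minimal sketch of how a client might call the new per-workflow-task status endpoint, assuming the usual /api/v2 prefix, a local server, a bearer token, and example project/workflow/dataset IDs (all hypothetical, not part of this diff):

# Sketch only: the endpoint path and parameters are taken from the history.py
# diff below; the /api/v2 prefix, host, token, and IDs are assumptions.
import httpx

BASE = "http://localhost:8000/api/v2"  # assumed server URL and API prefix
headers = {"Authorization": "Bearer <token>"}  # assumed auth scheme

# GET /project/{project_id}/status/ with workflow_id/dataset_id as query params
resp = httpx.get(
    f"{BASE}/project/1/status/",
    params={"workflow_id": 2, "dataset_id": 3},
    headers=headers,
)
# The response maps each workflowtask_id to None (never run) or to a dict with
# "status", "num_available_images", and per-status image counts, e.g.
# {"10": {"status": "done", "num_available_images": 4, "num_done_images": 4, ...}}
print(resp.json())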
--- fractal_server/app/routes/api/v2/history.py (2.14.0a3)
+++ fractal_server/app/routes/api/v2/history.py (2.14.0a4)
@@ -1,4 +1,5 @@
-from
+from datetime import datetime
+from typing import Any
 from typing import Optional
 
 from fastapi import APIRouter
@@ -6,126 +7,196 @@ from fastapi import Depends
 from fastapi import HTTPException
 from fastapi import status
 from fastapi.responses import JSONResponse
+from pydantic import AwareDatetime
 from pydantic import BaseModel
+from pydantic import field_serializer
 from sqlmodel import func
 from sqlmodel import select
 
 from ._aux_functions import _get_dataset_check_owner
 from ._aux_functions import _get_workflow_check_owner
 from ._aux_functions import _get_workflowtask_check_history_owner
+from ._aux_functions_history import get_history_unit_or_404
+from ._aux_functions_history import read_log_file
 from fractal_server.app.db import AsyncSession
 from fractal_server.app.db import get_async_db
-from fractal_server.app.history.status_enum import
+from fractal_server.app.history.status_enum import XXXStatus
 from fractal_server.app.models import UserOAuth
-from fractal_server.app.models.v2 import
-from fractal_server.app.models.v2 import
+from fractal_server.app.models.v2 import HistoryImageCache
+from fractal_server.app.models.v2 import HistoryRun
+from fractal_server.app.models.v2 import HistoryUnit
 from fractal_server.app.routes.auth import current_active_user
 from fractal_server.app.routes.pagination import get_pagination_params
 from fractal_server.app.routes.pagination import PaginationRequest
 from fractal_server.app.routes.pagination import PaginationResponse
-from fractal_server.
+from fractal_server.images.tools import filter_image_list
+from fractal_server.images.tools import merge_type_filters
+from fractal_server.logger import set_logger
 
 router = APIRouter()
+logger = set_logger(__name__)
 
 
-@router.get(
-
-    response_model=list[HistoryItemV2Read],
-)
-async def get_dataset_history(
+@router.get("/project/{project_id}/status/")
+async def get_workflow_tasks_statuses(
     project_id: int,
     dataset_id: int,
+    workflow_id: int,
     user: UserOAuth = Depends(current_active_user),
     db: AsyncSession = Depends(get_async_db),
-) ->
-    await
+) -> JSONResponse:
+    workflow = await _get_workflow_check_owner(
         project_id=project_id,
-
+        workflow_id=workflow_id,
         user_id=user.id,
         db=db,
     )
+    response = {}
+    for wftask in workflow.task_list:
+        res = await db.execute(
+            select(HistoryRun)
+            .where(HistoryRun.dataset_id == dataset_id)
+            .where(HistoryRun.workflowtask_id == wftask.id)
+            .order_by(HistoryRun.timestamp_started.desc())
+            .limit(1)
+        )
+        latest_history_run = res.scalar()
+        if not latest_history_run:
+            response[wftask.id] = None
+            continue
+        response[wftask.id] = dict(
+            status=latest_history_run.status,
+            num_available_images=latest_history_run.num_available_images,
+        )
 
-
-
-
-
-
-
-
-
+        for target_status in XXXStatus:
+            stm = (
+                select(func.count(HistoryImageCache.zarr_url))
+                .join(HistoryUnit)
+                .where(HistoryImageCache.dataset_id == dataset_id)
+                .where(HistoryImageCache.workflowtask_id == wftask.id)
+                .where(
+                    HistoryImageCache.latest_history_unit_id == HistoryUnit.id
+                )
+                .where(HistoryUnit.status == target_status.value)
+            )
+            res = await db.execute(stm)
+            num_images = res.scalar()
+            response[wftask.id][
+                f"num_{target_status.value}_images"
+            ] = num_images
 
+    return JSONResponse(content=response, status_code=200)
 
-
-
+
+# FIXME MOVE TO SCHEMAS
+
+
+class HistoryUnitRead(BaseModel):
+    id: int
+    logfile: Optional[str] = None
+    status: XXXStatus
+    zarr_urls: list[str]
+
+
+class HistoryRunReadAggregated(BaseModel):
+    id: int
+    timestamp_started: AwareDatetime
+    workflowtask_dump: dict[str, Any]
+    num_submitted_units: int
+    num_done_units: int
+    num_failed_units: int
+
+    @field_serializer("timestamp_started")
+    def serialize_datetime(v: datetime) -> str:
+        return v.isoformat()
+
+
+class ImageLogsRequest(BaseModel):
+    workflowtask_id: int
+    dataset_id: int
+    zarr_url: str
+
+
+class ImageWithStatus(BaseModel):
+    zarr_url: str
+    status: Optional[XXXStatus] = None
+
+
+# end FIXME
+
+
+@router.get("/project/{project_id}/status/run/")
+async def get_history_run_list(
     project_id: int,
-    workflow_id: int,
     dataset_id: int,
+    workflowtask_id: int,
    user: UserOAuth = Depends(current_active_user),
     db: AsyncSession = Depends(get_async_db),
-) ->
-
-
-
+) -> list[HistoryRunReadAggregated]:
+    # Access control
+    await _get_workflowtask_check_history_owner(
+        dataset_id=dataset_id,
+        workflowtask_id=workflowtask_id,
         user_id=user.id,
         db=db,
     )
 
-
+    # Get all runs
+    stm = (
+        select(HistoryRun)
+        .where(HistoryRun.dataset_id == dataset_id)
+        .where(HistoryRun.workflowtask_id == workflowtask_id)
+        .order_by(HistoryRun.timestamp_started)
+    )
+    res = await db.execute(stm)
+    runs = res.scalars().all()
+
+    # Add units count by status
+
+    if not runs:
+        return []
 
-
+    run_ids = [run.id for run in runs]
     stm = (
         select(
-
+            HistoryUnit.history_run_id,
+            HistoryUnit.status,
+            func.count(HistoryUnit.id),
         )
-        .where(
-        .
-        .order_by(
-            HistoryItemV2.workflowtask_id,
-            HistoryItemV2.timestamp_started.desc(),
-        )
-        # https://www.postgresql.org/docs/current/sql-select.html#SQL-DISTINCT
-        .distinct(HistoryItemV2.workflowtask_id)
+        .where(HistoryUnit.history_run_id.in_(run_ids))
+        .group_by(HistoryUnit.history_run_id, HistoryUnit.status)
     )
     res = await db.execute(stm)
-
-
-
-
-
-
-
-        .where(ImageStatus.workflowtask_id.in_(wft_ids))
-        .where(ImageStatus.status == _status)
-        # https://docs.sqlalchemy.org/en/20/tutorial/data_select.html#tutorial-group-by-w-aggregates
-        .group_by(ImageStatus.workflowtask_id)
-    )
-    res = await db.execute(stm)
-    count[_status] = {k: v for k, v in res.all()}
-
-    result = {
-        str(_id): None
-        if _id not in num_available_images
-        else {
-            "num_available_images": num_available_images[_id],
-            "num_done_images": count["done"].get(_id, 0),
-            "num_submitted_images": count["submitted"].get(_id, 0),
-            "num_failed_images": count["failed"].get(_id, 0),
+    unit_counts = res.all()
+
+    count_map = {
+        run_id: {
+            "num_done_units": 0,
+            "num_submitted_units": 0,
+            "num_failed_units": 0,
         }
-        for
+        for run_id in run_ids
     }
+    for run_id, unit_status, count in unit_counts:
+        count_map[run_id][f"num_{unit_status}_units"] = count
+
+    runs = [dict(**run.model_dump(), **count_map[run.id]) for run in runs]
 
-    return
+    return runs
 
 
-@router.get("/project/{project_id}/status/
-async def
+@router.get("/project/{project_id}/status/run/{history_run_id}/units/")
+async def get_history_run_units(
     project_id: int,
-    workflowtask_id: int,
     dataset_id: int,
+    workflowtask_id: int,
+    history_run_id: int,
     user: UserOAuth = Depends(current_active_user),
     db: AsyncSession = Depends(get_async_db),
-
-
+    pagination: PaginationRequest = Depends(get_pagination_params),
+) -> PaginationResponse[HistoryUnitRead]:
+    # Access control
     await _get_workflowtask_check_history_owner(
         dataset_id=dataset_id,
         workflowtask_id=workflowtask_id,
@@ -133,133 +204,153 @@ async def get_per_workflowtask_subsets_aggregated_info(
         db=db,
     )
 
-
-
-
-
-
-    )
-    res = await db.execute(stm)
-    hash_to_statuses = res.all()
-
-    subsets = []
-    for parameters_hash, statuses in hash_to_statuses:
-        # Get the oldest HistoryItemV2 matching with `parameters_hash`
-        stm = (
-            select(HistoryItemV2)
-            .where(HistoryItemV2.workflowtask_id == workflowtask_id)
-            .where(HistoryItemV2.dataset_id == dataset_id)
-            .where(HistoryItemV2.parameters_hash == parameters_hash)
-            .order_by(HistoryItemV2.timestamp_started)
-            .limit(1)
+    history_run = await db.get(HistoryRun, history_run_id)
+    if history_run is None:
+        raise HTTPException(
+            status_code=status.HTTP_404_NOT_FOUND,
+            detail=f"HistoryRun {history_run_id} not found",
         )
-
-
-
-
-            {
-                "_timestamp": oldest_history_item.timestamp_started,
-                "workflowtask_dump": oldest_history_item.workflowtask_dump,
-                "parameters_hash": parameters_hash,
-                "info": {
-                    "num_done_images": statuses.count(
-                        HistoryItemImageStatus.DONE
-                    ),
-                    "num_failed_images": statuses.count(
-                        HistoryItemImageStatus.FAILED
-                    ),
-                    "num_submitted_images": statuses.count(
-                        HistoryItemImageStatus.SUBMITTED
-                    ),
-                },
-            }
+
+    res = await db.execute(
+        select(func.count(HistoryUnit.id)).where(
+            HistoryUnit.history_run_id == history_run_id
         )
+    )
+    total_count = res.scalar()
 
-
-    sorted_results = sorted(subsets, key=lambda obj: obj["_timestamp"])
-    [item.pop("_timestamp") for item in sorted_results]
+    page_size = pagination.page_size or total_count
 
-
+    res = await db.execute(
+        select(HistoryUnit)
+        .where(HistoryUnit.history_run_id == history_run_id)
+        .offset((pagination.page - 1) * page_size)
+        .limit(page_size)
+    )
+    units = res.scalars().all()
+
+    return dict(
+        current_page=pagination.page,
+        page_size=page_size,
+        total_count=total_count,
+        items=units,
+    )
 
 
 @router.get("/project/{project_id}/status/images/")
-async def
+async def get_history_images(
     project_id: int,
-    workflowtask_id: int,
     dataset_id: int,
-
-    parameters_hash: Optional[str] = None,
-    # Dependencies
-    pagination: PaginationRequest = Depends(get_pagination_params),
+    workflowtask_id: int,
     user: UserOAuth = Depends(current_active_user),
     db: AsyncSession = Depends(get_async_db),
-
-
-
-
-
-
+    pagination: PaginationRequest = Depends(get_pagination_params),
+) -> PaginationResponse[ImageWithStatus]:
+    # Access control and object retrieval
+    # FIXME: Provide a single function that checks/gets what is needed
+    res = await _get_dataset_check_owner(
+        project_id=project_id,
         dataset_id=dataset_id,
-        workflowtask_id=workflowtask_id,
         user_id=user.id,
         db=db,
     )
-
-
-
-
-    .
-
+    dataset = res["dataset"]
+    wftask = await _get_workflowtask_check_history_owner(
+        dataset_id=dataset_id,
+        workflowtask_id=workflowtask_id,
+        user_id=user.id,
+        db=db,
     )
-
-
-    .
-    .
-
+    workflow = await _get_workflow_check_owner(
+        project_id=project_id,
+        workflow_id=wftask.workflow_id,
+        user_id=user.id,
+        db=db,
     )
 
-
-
-
+    # FIXME reduce logging?
+    prefix = f"[DS{dataset.id}-WFT{wftask.id}-images]"
+
+    # Reconstruct type filters by going through the workflow task list
+    latest_type_filters = {}
+    for current_wftask in workflow.task_list[0 : wftask.order + 1]:
+        patch = merge_type_filters(
+            wftask_type_filters=current_wftask.type_filters,
+            task_input_types=current_wftask.task.input_types,
         )
-
+        latest_type_filters.update(patch)
+    logger.debug(f"{prefix} {latest_type_filters=}")
 
-
-
+    # Get all matching images from the dataset
+    filtered_dataset_images = filter_image_list(
+        images=dataset.images,
+        type_filters=latest_type_filters,
+    )
+    logger.debug(f"{prefix} {len(dataset.images)=}")
+    logger.debug(f"{prefix} {len(filtered_dataset_images)=}")
 
-
-
-
-
+    filtered_dataset_images_url = list(
+        img["zarr_url"] for img in filtered_dataset_images
+    )
+    logger.debug(f"{prefix} {len(filtered_dataset_images_url)=}")
+
+    # Get pairs (zarr_url,status) for all processed images
+    res = await db.execute(
+        select(HistoryImageCache.zarr_url, HistoryUnit.status)
+        .join(HistoryUnit)
+        .where(HistoryImageCache.dataset_id == dataset_id)
+        .where(HistoryImageCache.workflowtask_id == workflowtask_id)
+        .where(HistoryImageCache.latest_history_unit_id == HistoryUnit.id)
+        .where(HistoryImageCache.zarr_url.in_(filtered_dataset_images_url))
+        .order_by(HistoryImageCache.zarr_url)
+    )
+    list_processed_url_status = res.all()
+    logger.debug(f"{prefix} {len(list_processed_url_status)=}")
 
-
-    query = query.offset((page - 1) * page_size)
+    # Further processing
 
-
-
+    list_processed_url = list(item[0] for item in list_processed_url_status)
+    logger.debug(f"{prefix} {len(list_processed_url)=}")
 
-
-
-
-
-        items=images,
+    list_non_processed_url_status = list(
+        (url, None)
+        for url in filtered_dataset_images_url
+        if url not in list_processed_url
     )
+    logger.debug(f"{prefix} {len(list_non_processed_url_status)=}")
 
+    sorted_list_url_status = sorted(
+        list_processed_url_status + list_non_processed_url_status,
+        key=lambda url_status: url_status[0],
+    )
+    logger.debug(f"{prefix} {len(sorted_list_url_status)=}")
 
-
-
-
-
+    # Final list of objects
+    sorted_list_objects = list(
+        dict(zarr_url=url_status[0], status=url_status[1])
+        for url_status in sorted_list_url_status
+    )
 
+    total_count = len(sorted_list_objects)
+    page_size = pagination.page_size or total_count
 
-
-
+    return dict(
+        current_page=pagination.page,
+        page_size=page_size,
+        total_count=total_count,
+        items=sorted_list_objects[
+            (pagination.page - 1) * page_size : pagination.page * page_size
+        ],
+    )
+
+
+@router.post("/project/{project_id}/status/image-log/")
+async def get_image_log(
     project_id: int,
     request_data: ImageLogsRequest,
     user: UserOAuth = Depends(current_active_user),
     db: AsyncSession = Depends(get_async_db),
 ) -> JSONResponse:
-
+    # Access control
     wftask = await _get_workflowtask_check_history_owner(
         dataset_id=request_data.dataset_id,
         workflowtask_id=request_data.workflowtask_id,
@@ -267,39 +358,63 @@ async def get_image_logs(
         db=db,
     )
 
-
-
+    # Get HistoryImageCache
+    history_image_cache = await db.get(
+        HistoryImageCache,
         (
             request_data.zarr_url,
-            request_data.workflowtask_id,
             request_data.dataset_id,
+            request_data.workflowtask_id,
         ),
     )
-    if
+    if history_image_cache is None:
         raise HTTPException(
             status_code=status.HTTP_404_NOT_FOUND,
-            detail="
+            detail="HistoryImageCache not found",
         )
+    # Get history unit
+    history_unit = await get_history_unit_or_404(
+        history_unit_id=history_image_cache.latest_history_unit_id,
+        db=db,
+    )
 
-
-
-
-
-
-
-
+    # Get log or placeholder text
+    log = read_log_file(
+        logfile=history_unit.logfile,
+        wftask=wftask,
+        dataset_id=request_data.dataset_id,
+    )
+    return JSONResponse(content=log)
 
-    logfile = Path(image_status.logfile)
-    if not logfile.exists():
-        return JSONResponse(
-            content=(
-                f"Error while retrieving logs for task '{wftask.task.name}' "
-                f"in dataset {request_data.dataset_id}: "
-                f"file '{logfile}' is not available."
-            )
-        )
 
-
-
+@router.get("/project/{project_id}/status/unit-log/")
+async def get_history_unit_log(
+    project_id: int,
+    history_run_id: int,
+    history_unit_id: int,
+    workflowtask_id: int,
+    dataset_id: int,
+    user: UserOAuth = Depends(current_active_user),
+    db: AsyncSession = Depends(get_async_db),
+) -> JSONResponse:
+    # Access control
+    wftask = await _get_workflowtask_check_history_owner(
+        dataset_id=dataset_id,
+        workflowtask_id=workflowtask_id,
+        user_id=user.id,
+        db=db,
+    )
+
+    # Get history unit
+    history_unit = await get_history_unit_or_404(
+        history_unit_id=history_unit_id,
+        db=db,
+    )
 
-
+    # Get log or placeholder text
+    log = read_log_file(
+        logfile=history_unit.logfile,
+        wftask=wftask,
+        dataset_id=dataset_id,
+    )
+    return JSONResponse(content=log)
--- fractal_server/app/routes/api/v2/project.py (2.14.0a3)
+++ fractal_server/app/routes/api/v2/project.py (2.14.0a4)
@@ -5,7 +5,6 @@ from fastapi import Depends
 from fastapi import HTTPException
 from fastapi import Response
 from fastapi import status
-from sqlmodel import delete
 from sqlmodel import select
 
 from .....logger import reset_logger_handlers
@@ -13,8 +12,6 @@ from .....logger import set_logger
 from ....db import AsyncSession
 from ....db import get_async_db
 from ....models.v2 import DatasetV2
-from ....models.v2 import HistoryItemV2
-from ....models.v2 import ImageStatus
 from ....models.v2 import JobV2
 from ....models.v2 import LinkUserProjectV2
 from ....models.v2 import ProjectV2
@@ -164,22 +161,6 @@ async def delete_project(
         for job in jobs:
             logger.info(f"Setting Job[{job.id}].workflow_id to None.")
             job.workflow_id = None
-        # Cascade operations: set foreign-keys to null for history items
-        # which are in relationship with the current workflow
-        wft_ids = [wft.id for wft in wf.task_list]
-        stm = select(HistoryItemV2).where(
-            HistoryItemV2.workflowtask_id.in_(wft_ids)
-        )
-        res = await db.execute(stm)
-        history_items = res.scalars().all()
-        for history_item in history_items:
-            history_item.workflowtask_id = None
-        # Cascade operations: delete all image status which are in relationship
-        # with the current workflow
-        stm = delete(ImageStatus).where(
-            ImageStatus.workflowtask_id.in_(wft_ids)
-        )
-        await db.execute(stm)
         # Delete workflow
         logger.info(f"Adding Workflow[{wf.id}] to deletion.")
         await db.delete(wf)
@@ -199,12 +180,6 @@ async def delete_project(
         for job in jobs:
             logger.info(f"Setting Job[{job.id}].dataset_id to None.")
             job.dataset_id = None
-        # Cascade operations: delete history items and image statuses which are
-        # in relationship with the current dataset
-        stm = delete(HistoryItemV2).where(HistoryItemV2.dataset_id == ds.id)
-        await db.execute(stm)
-        stm = delete(ImageStatus).where(ImageStatus.dataset_id == ds.id)
-        await db.execute(stm)
         # Delete dataset
         logger.info(f"Adding Dataset[{ds.id}] to deletion.")
         await db.delete(ds)