fractal-server 2.14.0a3__py3-none-any.whl → 2.14.0a5__py3-none-any.whl
This diff shows the differences between two publicly released versions of the package, as published to one of the supported registries. It is provided for informational purposes only and reflects the package contents exactly as they appear in their respective public registries.
- fractal_server/__init__.py +1 -1
- fractal_server/__main__.py +3 -1
- fractal_server/app/history/__init__.py +4 -4
- fractal_server/app/history/image_updates.py +124 -143
- fractal_server/app/history/status_enum.py +2 -2
- fractal_server/app/models/v2/__init__.py +6 -4
- fractal_server/app/models/v2/history.py +44 -20
- fractal_server/app/routes/api/__init__.py +1 -1
- fractal_server/app/routes/api/v2/__init__.py +4 -0
- fractal_server/app/routes/api/v2/_aux_functions_history.py +49 -0
- fractal_server/app/routes/api/v2/dataset.py +0 -12
- fractal_server/app/routes/api/v2/history.py +309 -186
- fractal_server/app/routes/api/v2/project.py +0 -25
- fractal_server/app/routes/api/v2/status_legacy.py +168 -0
- fractal_server/app/routes/api/v2/workflow.py +2 -17
- fractal_server/app/routes/api/v2/workflowtask.py +41 -71
- fractal_server/app/routes/auth/oauth.py +5 -3
- fractal_server/app/runner/executors/local/runner.py +10 -55
- fractal_server/app/runner/executors/slurm_sudo/runner.py +171 -108
- fractal_server/app/runner/v2/__init__.py +0 -20
- fractal_server/app/runner/v2/runner.py +45 -58
- fractal_server/app/runner/v2/runner_functions.py +164 -22
- fractal_server/app/schemas/_validators.py +13 -24
- fractal_server/app/schemas/user.py +10 -7
- fractal_server/app/schemas/user_settings.py +9 -21
- fractal_server/app/schemas/v2/dataset.py +8 -6
- fractal_server/app/schemas/v2/job.py +9 -5
- fractal_server/app/schemas/v2/manifest.py +2 -6
- fractal_server/app/schemas/v2/project.py +9 -7
- fractal_server/app/schemas/v2/task.py +41 -77
- fractal_server/app/schemas/v2/task_collection.py +14 -32
- fractal_server/app/schemas/v2/task_group.py +10 -9
- fractal_server/app/schemas/v2/workflow.py +10 -11
- fractal_server/app/security/signup_email.py +2 -2
- fractal_server/config.py +31 -32
- fractal_server/migrations/versions/fbce16ff4e47_new_history_items.py +120 -0
- fractal_server/tasks/v2/templates/2_pip_install.sh +1 -1
- fractal_server/tasks/v2/utils_templates.py +6 -0
- {fractal_server-2.14.0a3.dist-info → fractal_server-2.14.0a5.dist-info}/METADATA +1 -1
- {fractal_server-2.14.0a3.dist-info → fractal_server-2.14.0a5.dist-info}/RECORD +43 -44
- fractal_server/app/runner/executors/slurm_sudo/_executor_wait_thread.py +0 -130
- fractal_server/app/schemas/v2/history.py +0 -23
- fractal_server/migrations/versions/87cd72a537a2_add_historyitem_table.py +0 -68
- fractal_server/migrations/versions/954ddc64425a_image_status.py +0 -63
- {fractal_server-2.14.0a3.dist-info → fractal_server-2.14.0a5.dist-info}/LICENSE +0 -0
- {fractal_server-2.14.0a3.dist-info → fractal_server-2.14.0a5.dist-info}/WHEEL +0 -0
- {fractal_server-2.14.0a3.dist-info → fractal_server-2.14.0a5.dist-info}/entry_points.txt +0 -0
fractal_server/app/routes/api/v2/history.py

@@ -1,4 +1,5 @@
-from
+from datetime import datetime
+from typing import Any
 from typing import Optional
 
 from fastapi import APIRouter
@@ -6,126 +7,196 @@ from fastapi import Depends
 from fastapi import HTTPException
 from fastapi import status
 from fastapi.responses import JSONResponse
+from pydantic import AwareDatetime
 from pydantic import BaseModel
+from pydantic import field_serializer
 from sqlmodel import func
 from sqlmodel import select
 
 from ._aux_functions import _get_dataset_check_owner
 from ._aux_functions import _get_workflow_check_owner
 from ._aux_functions import _get_workflowtask_check_history_owner
+from ._aux_functions_history import get_history_unit_or_404
+from ._aux_functions_history import read_log_file
 from fractal_server.app.db import AsyncSession
 from fractal_server.app.db import get_async_db
-from fractal_server.app.history.status_enum import
+from fractal_server.app.history.status_enum import XXXStatus
 from fractal_server.app.models import UserOAuth
-from fractal_server.app.models.v2 import
-from fractal_server.app.models.v2 import
+from fractal_server.app.models.v2 import HistoryImageCache
+from fractal_server.app.models.v2 import HistoryRun
+from fractal_server.app.models.v2 import HistoryUnit
 from fractal_server.app.routes.auth import current_active_user
 from fractal_server.app.routes.pagination import get_pagination_params
 from fractal_server.app.routes.pagination import PaginationRequest
 from fractal_server.app.routes.pagination import PaginationResponse
-from fractal_server.
+from fractal_server.images.tools import filter_image_list
+from fractal_server.images.tools import merge_type_filters
+from fractal_server.logger import set_logger
 
 router = APIRouter()
+logger = set_logger(__name__)
 
 
-@router.get(
-
-    response_model=list[HistoryItemV2Read],
-)
-async def get_dataset_history(
+@router.get("/project/{project_id}/status/")
+async def get_workflow_tasks_statuses(
     project_id: int,
     dataset_id: int,
+    workflow_id: int,
     user: UserOAuth = Depends(current_active_user),
     db: AsyncSession = Depends(get_async_db),
-) ->
-    await
+) -> JSONResponse:
+    workflow = await _get_workflow_check_owner(
         project_id=project_id,
-
+        workflow_id=workflow_id,
         user_id=user.id,
         db=db,
     )
+    response = {}
+    for wftask in workflow.task_list:
+        res = await db.execute(
+            select(HistoryRun)
+            .where(HistoryRun.dataset_id == dataset_id)
+            .where(HistoryRun.workflowtask_id == wftask.id)
+            .order_by(HistoryRun.timestamp_started.desc())
+            .limit(1)
+        )
+        latest_history_run = res.scalar()
+        if not latest_history_run:
+            response[wftask.id] = None
+            continue
+        response[wftask.id] = dict(
+            status=latest_history_run.status,
+            num_available_images=latest_history_run.num_available_images,
+        )
 
-
-
-
-
-
-
-
-
+        for target_status in XXXStatus:
+            stm = (
+                select(func.count(HistoryImageCache.zarr_url))
+                .join(HistoryUnit)
+                .where(HistoryImageCache.dataset_id == dataset_id)
+                .where(HistoryImageCache.workflowtask_id == wftask.id)
+                .where(
+                    HistoryImageCache.latest_history_unit_id == HistoryUnit.id
+                )
+                .where(HistoryUnit.status == target_status.value)
+            )
+            res = await db.execute(stm)
+            num_images = res.scalar()
+            response[wftask.id][
+                f"num_{target_status.value}_images"
+            ] = num_images
 
+    return JSONResponse(content=response, status_code=200)
 
-
-
+
+# FIXME MOVE TO SCHEMAS
+
+
+class HistoryUnitRead(BaseModel):
+    id: int
+    logfile: Optional[str] = None
+    status: XXXStatus
+    zarr_urls: list[str]
+
+
+class HistoryRunReadAggregated(BaseModel):
+    id: int
+    timestamp_started: AwareDatetime
+    workflowtask_dump: dict[str, Any]
+    num_submitted_units: int
+    num_done_units: int
+    num_failed_units: int
+
+    @field_serializer("timestamp_started")
+    def serialize_datetime(v: datetime) -> str:
+        return v.isoformat()
+
+
+class ImageLogsRequest(BaseModel):
+    workflowtask_id: int
+    dataset_id: int
+    zarr_url: str
+
+
+class ImageWithStatus(BaseModel):
+    zarr_url: str
+    status: Optional[XXXStatus] = None
+
+
+# end FIXME
+
+
+@router.get("/project/{project_id}/status/run/")
+async def get_history_run_list(
     project_id: int,
-    workflow_id: int,
     dataset_id: int,
+    workflowtask_id: int,
     user: UserOAuth = Depends(current_active_user),
     db: AsyncSession = Depends(get_async_db),
-) ->
-
-
-
+) -> list[HistoryRunReadAggregated]:
+    # Access control
+    await _get_workflowtask_check_history_owner(
+        dataset_id=dataset_id,
+        workflowtask_id=workflowtask_id,
         user_id=user.id,
         db=db,
     )
 
-
+    # Get all runs
+    stm = (
+        select(HistoryRun)
+        .where(HistoryRun.dataset_id == dataset_id)
+        .where(HistoryRun.workflowtask_id == workflowtask_id)
+        .order_by(HistoryRun.timestamp_started)
+    )
+    res = await db.execute(stm)
+    runs = res.scalars().all()
+
+    # Add units count by status
+
+    if not runs:
+        return []
 
-
+    run_ids = [run.id for run in runs]
     stm = (
         select(
-
+            HistoryUnit.history_run_id,
+            HistoryUnit.status,
+            func.count(HistoryUnit.id),
         )
-        .where(
-        .
-        .order_by(
-            HistoryItemV2.workflowtask_id,
-            HistoryItemV2.timestamp_started.desc(),
-        )
-        # https://www.postgresql.org/docs/current/sql-select.html#SQL-DISTINCT
-        .distinct(HistoryItemV2.workflowtask_id)
+        .where(HistoryUnit.history_run_id.in_(run_ids))
+        .group_by(HistoryUnit.history_run_id, HistoryUnit.status)
     )
     res = await db.execute(stm)
-
-
-
-
-
-
-
-        .where(ImageStatus.workflowtask_id.in_(wft_ids))
-        .where(ImageStatus.status == _status)
-        # https://docs.sqlalchemy.org/en/20/tutorial/data_select.html#tutorial-group-by-w-aggregates
-        .group_by(ImageStatus.workflowtask_id)
-    )
-    res = await db.execute(stm)
-    count[_status] = {k: v for k, v in res.all()}
-
-    result = {
-        str(_id): None
-        if _id not in num_available_images
-        else {
-            "num_available_images": num_available_images[_id],
-            "num_done_images": count["done"].get(_id, 0),
-            "num_submitted_images": count["submitted"].get(_id, 0),
-            "num_failed_images": count["failed"].get(_id, 0),
+    unit_counts = res.all()
+
+    count_map = {
+        run_id: {
+            "num_done_units": 0,
+            "num_submitted_units": 0,
+            "num_failed_units": 0,
         }
-        for
+        for run_id in run_ids
     }
+    for run_id, unit_status, count in unit_counts:
+        count_map[run_id][f"num_{unit_status}_units"] = count
+
+    runs = [dict(**run.model_dump(), **count_map[run.id]) for run in runs]
 
-    return
+    return runs
 
 
-@router.get("/project/{project_id}/status/
-async def
+@router.get("/project/{project_id}/status/run/{history_run_id}/units/")
+async def get_history_run_units(
     project_id: int,
-    workflowtask_id: int,
     dataset_id: int,
+    workflowtask_id: int,
+    history_run_id: int,
     user: UserOAuth = Depends(current_active_user),
     db: AsyncSession = Depends(get_async_db),
-
-
+    pagination: PaginationRequest = Depends(get_pagination_params),
+) -> PaginationResponse[HistoryUnitRead]:
+    # Access control
     await _get_workflowtask_check_history_owner(
         dataset_id=dataset_id,
         workflowtask_id=workflowtask_id,
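
The grouped query in `get_history_run_list` above returns `(history_run_id, status, count)` rows; the sketch below replays the same `count_map` aggregation on made-up data (hypothetical run IDs and statuses, no database involved) to show how the per-run unit counters are filled in.

# Standalone sketch of the count_map aggregation above, on made-up data.
run_ids = [1, 2]
unit_counts = [  # (history_run_id, status, count), as returned by the GROUP BY
    (1, "done", 3),
    (1, "failed", 1),
    (2, "submitted", 2),
]

count_map = {
    run_id: {
        "num_done_units": 0,
        "num_submitted_units": 0,
        "num_failed_units": 0,
    }
    for run_id in run_ids
}
for run_id, unit_status, count in unit_counts:
    count_map[run_id][f"num_{unit_status}_units"] = count

# count_map[1] == {"num_done_units": 3, "num_submitted_units": 0, "num_failed_units": 1}
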
@@ -133,133 +204,161 @@ async def get_per_workflowtask_subsets_aggregated_info(
         db=db,
     )
 
-
-
-
-
-
-    )
-    res = await db.execute(stm)
-    hash_to_statuses = res.all()
-
-    subsets = []
-    for parameters_hash, statuses in hash_to_statuses:
-        # Get the oldest HistoryItemV2 matching with `parameters_hash`
-        stm = (
-            select(HistoryItemV2)
-            .where(HistoryItemV2.workflowtask_id == workflowtask_id)
-            .where(HistoryItemV2.dataset_id == dataset_id)
-            .where(HistoryItemV2.parameters_hash == parameters_hash)
-            .order_by(HistoryItemV2.timestamp_started)
-            .limit(1)
+    history_run = await db.get(HistoryRun, history_run_id)
+    if history_run is None:
+        raise HTTPException(
+            status_code=status.HTTP_404_NOT_FOUND,
+            detail=f"HistoryRun {history_run_id} not found",
         )
-
-
-
-
-            {
-                "_timestamp": oldest_history_item.timestamp_started,
-                "workflowtask_dump": oldest_history_item.workflowtask_dump,
-                "parameters_hash": parameters_hash,
-                "info": {
-                    "num_done_images": statuses.count(
-                        HistoryItemImageStatus.DONE
-                    ),
-                    "num_failed_images": statuses.count(
-                        HistoryItemImageStatus.FAILED
-                    ),
-                    "num_submitted_images": statuses.count(
-                        HistoryItemImageStatus.SUBMITTED
-                    ),
-                },
-            }
+
+    res = await db.execute(
+        select(func.count(HistoryUnit.id)).where(
+            HistoryUnit.history_run_id == history_run_id
         )
+    )
+    total_count = res.scalar()
+
+    page_size = pagination.page_size or total_count
 
-
-
-
+    res = await db.execute(
+        select(HistoryUnit)
+        .where(HistoryUnit.history_run_id == history_run_id)
+        .offset((pagination.page - 1) * page_size)
+        .limit(page_size)
+    )
+    units = res.scalars().all()
 
-    return
+    return dict(
+        current_page=pagination.page,
+        page_size=page_size,
+        total_count=total_count,
+        items=units,
+    )
 
 
 @router.get("/project/{project_id}/status/images/")
-async def
+async def get_history_images(
     project_id: int,
-    workflowtask_id: int,
     dataset_id: int,
-
-    parameters_hash: Optional[str] = None,
-    # Dependencies
-    pagination: PaginationRequest = Depends(get_pagination_params),
+    workflowtask_id: int,
     user: UserOAuth = Depends(current_active_user),
     db: AsyncSession = Depends(get_async_db),
-
-
-
-
-
-
+    pagination: PaginationRequest = Depends(get_pagination_params),
+) -> PaginationResponse[ImageWithStatus]:
+    # Access control and object retrieval
+    # FIXME: Provide a single function that checks/gets what is needed
+    res = await _get_dataset_check_owner(
+        project_id=project_id,
+        dataset_id=dataset_id,
+        user_id=user.id,
+        db=db,
+    )
+    dataset = res["dataset"]
+    wftask = await _get_workflowtask_check_history_owner(
         dataset_id=dataset_id,
         workflowtask_id=workflowtask_id,
         user_id=user.id,
         db=db,
     )
+    workflow = await _get_workflow_check_owner(
+        project_id=project_id,
+        workflow_id=wftask.workflow_id,
+        user_id=user.id,
+        db=db,
+    )
 
-
-
-
-
-
+    # FIXME reduce logging?
+    prefix = f"[DS{dataset.id}-WFT{wftask.id}-images]"
+
+    # (1) Get the type-filtered list of dataset images
+
+    # (1A) Reconstruct dataset type filters by starting from {} and making
+    # incremental updates with `output_types` of all previous tasks
+    inferred_dataset_type_filters = {}
+    for current_wftask in workflow.task_list[0 : wftask.order]:
+        inferred_dataset_type_filters.update(current_wftask.task.output_types)
+    logger.debug(f"{prefix} {inferred_dataset_type_filters=}")
+    # (1B) Compute type filters for the current wftask
+    type_filters_patch = merge_type_filters(
+        task_input_types=wftask.task.input_types,
+        wftask_type_filters=wftask.type_filters,
     )
-
-
-
-
-
+    logger.debug(f"{prefix} {type_filters_patch=}")
+    # (1C) Combine dataset type filters (lower priority) and current-wftask
+    # filters (higher priority)
+    actual_filters = inferred_dataset_type_filters
+    actual_filters.update(type_filters_patch)
+    logger.debug(f"{prefix} {actual_filters=}")
+    # (1D) Get all matching images from the dataset
+    filtered_dataset_images = filter_image_list(
+        images=dataset.images,
+        type_filters=inferred_dataset_type_filters,
+    )
+    logger.debug(f"{prefix} {len(dataset.images)=}")
+    logger.debug(f"{prefix} {len(filtered_dataset_images)=}")
+    # (1E) Extract the list of URLs for filtered images
+    filtered_dataset_images_url = list(
+        img["zarr_url"] for img in filtered_dataset_images
     )
 
-
-
-
-    )
-
-
-
-
+    # (2) Get `(zarr_url, status)` pairs for all images that have already
+    # been processed
+    res = await db.execute(
+        select(HistoryImageCache.zarr_url, HistoryUnit.status)
+        .join(HistoryUnit)
+        .where(HistoryImageCache.dataset_id == dataset_id)
+        .where(HistoryImageCache.workflowtask_id == workflowtask_id)
+        .where(HistoryImageCache.latest_history_unit_id == HistoryUnit.id)
+        .where(HistoryImageCache.zarr_url.in_(filtered_dataset_images_url))
+        .order_by(HistoryImageCache.zarr_url)
+    )
+    list_processed_url_status = res.all()
+    logger.debug(f"{prefix} {len(list_processed_url_status)=}")
 
-
-
-
-    page_size = total_count
+    # (3) Combine outputs from 1 and 2
+    list_processed_url = list(item[0] for item in list_processed_url_status)
+    logger.debug(f"{prefix} {len(list_processed_url)=}")
 
-
-
+    list_non_processed_url_status = list(
+        (url, None)
+        for url in filtered_dataset_images_url
+        if url not in list_processed_url
+    )
+    logger.debug(f"{prefix} {len(list_non_processed_url_status)=}")
 
-
-
+    sorted_list_url_status = sorted(
+        list_processed_url_status + list_non_processed_url_status,
+        key=lambda url_status: url_status[0],
+    )
+    logger.debug(f"{prefix} {len(sorted_list_url_status)=}")
 
-
-
-
-
-        items=images,
+    # Final list of objects
+    sorted_list_objects = list(
+        dict(zarr_url=url_status[0], status=url_status[1])
+        for url_status in sorted_list_url_status
     )
 
+    total_count = len(sorted_list_objects)
+    page_size = pagination.page_size or total_count
 
-
-
-
-
+    return dict(
+        current_page=pagination.page,
+        page_size=page_size,
+        total_count=total_count,
+        items=sorted_list_objects[
+            (pagination.page - 1) * page_size : pagination.page * page_size
+        ],
+    )
 
 
-@router.post("/project/{project_id}/status/image-
-async def
+@router.post("/project/{project_id}/status/image-log/")
+async def get_image_log(
     project_id: int,
     request_data: ImageLogsRequest,
     user: UserOAuth = Depends(current_active_user),
     db: AsyncSession = Depends(get_async_db),
 ) -> JSONResponse:
-
+    # Access control
     wftask = await _get_workflowtask_check_history_owner(
         dataset_id=request_data.dataset_id,
         workflowtask_id=request_data.workflowtask_id,
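
Steps (1A)-(1C) of `get_history_images` above are plain dictionary layering: type filters inferred from the output types of previous tasks are overridden by the current task's own filters. A tiny standalone sketch, with invented type keys that are not taken from the package:

# Standalone sketch of the type-filter layering in steps (1A)-(1C) above,
# with invented output/input types.
previous_tasks_output_types = [{"3D": True}, {"illumination_corrected": True}]

inferred_dataset_type_filters = {}
for output_types in previous_tasks_output_types:  # one dict per previous task
    inferred_dataset_type_filters.update(output_types)

type_filters_patch = {"3D": False}  # current-wftask filters take priority

actual_filters = dict(inferred_dataset_type_filters)
actual_filters.update(type_filters_patch)

assert actual_filters == {"3D": False, "illumination_corrected": True}
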
@@ -267,39 +366,63 @@ async def get_image_logs(
         db=db,
     )
 
-
-
+    # Get HistoryImageCache
+    history_image_cache = await db.get(
+        HistoryImageCache,
         (
             request_data.zarr_url,
-            request_data.workflowtask_id,
             request_data.dataset_id,
+            request_data.workflowtask_id,
         ),
     )
-    if
+    if history_image_cache is None:
         raise HTTPException(
             status_code=status.HTTP_404_NOT_FOUND,
-            detail="
+            detail="HistoryImageCache not found",
         )
+    # Get history unit
+    history_unit = await get_history_unit_or_404(
+        history_unit_id=history_image_cache.latest_history_unit_id,
+        db=db,
+    )
 
-
-
-
-
-
-
-
+    # Get log or placeholder text
+    log = read_log_file(
+        logfile=history_unit.logfile,
+        wftask=wftask,
+        dataset_id=request_data.dataset_id,
+    )
+    return JSONResponse(content=log)
 
-    logfile = Path(image_status.logfile)
-    if not logfile.exists():
-        return JSONResponse(
-            content=(
-                f"Error while retrieving logs for task '{wftask.task.name}' "
-                f"in dataset {request_data.dataset_id}: "
-                f"file '{logfile}' is not available."
-            )
-        )
 
-
-
+@router.get("/project/{project_id}/status/unit-log/")
+async def get_history_unit_log(
+    project_id: int,
+    history_run_id: int,
+    history_unit_id: int,
+    workflowtask_id: int,
+    dataset_id: int,
+    user: UserOAuth = Depends(current_active_user),
+    db: AsyncSession = Depends(get_async_db),
+) -> JSONResponse:
+    # Access control
+    wftask = await _get_workflowtask_check_history_owner(
+        dataset_id=dataset_id,
+        workflowtask_id=workflowtask_id,
+        user_id=user.id,
+        db=db,
+    )
+
+    # Get history unit
+    history_unit = await get_history_unit_or_404(
+        history_unit_id=history_unit_id,
+        db=db,
+    )
 
-
+    # Get log or placeholder text
+    log = read_log_file(
+        logfile=history_unit.logfile,
+        wftask=wftask,
+        dataset_id=dataset_id,
+    )
+    return JSONResponse(content=log)
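
The images endpoint above paginates an already-materialized list rather than a query; the arithmetic is the usual 1-based page slicing, sketched here with hypothetical `page` and `page_size` values and fake zarr URLs.

# Sketch of the in-memory pagination used by get_history_images above,
# with hypothetical page/page_size values and fake zarr URLs.
sorted_list_objects = [
    {"zarr_url": f"/zarr/plate/{i}", "status": None} for i in range(10)
]

page = 2       # pagination.page (1-based)
page_size = 4  # pagination.page_size, falling back to total_count when unset

total_count = len(sorted_list_objects)
items = sorted_list_objects[(page - 1) * page_size : page * page_size]

assert total_count == 10
assert [img["zarr_url"] for img in items] == [f"/zarr/plate/{i}" for i in range(4, 8)]
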

fractal_server/app/routes/api/v2/project.py

@@ -5,7 +5,6 @@ from fastapi import Depends
 from fastapi import HTTPException
 from fastapi import Response
 from fastapi import status
-from sqlmodel import delete
 from sqlmodel import select
 
 from .....logger import reset_logger_handlers
@@ -13,8 +12,6 @@ from .....logger import set_logger
 from ....db import AsyncSession
 from ....db import get_async_db
 from ....models.v2 import DatasetV2
-from ....models.v2 import HistoryItemV2
-from ....models.v2 import ImageStatus
 from ....models.v2 import JobV2
 from ....models.v2 import LinkUserProjectV2
 from ....models.v2 import ProjectV2
@@ -164,22 +161,6 @@ async def delete_project(
         for job in jobs:
             logger.info(f"Setting Job[{job.id}].workflow_id to None.")
             job.workflow_id = None
-        # Cascade operations: set foreign-keys to null for history items
-        # which are in relationship with the current workflow
-        wft_ids = [wft.id for wft in wf.task_list]
-        stm = select(HistoryItemV2).where(
-            HistoryItemV2.workflowtask_id.in_(wft_ids)
-        )
-        res = await db.execute(stm)
-        history_items = res.scalars().all()
-        for history_item in history_items:
-            history_item.workflowtask_id = None
-        # Cascade operations: delete all image status which are in relationship
-        # with the current workflow
-        stm = delete(ImageStatus).where(
-            ImageStatus.workflowtask_id.in_(wft_ids)
-        )
-        await db.execute(stm)
         # Delete workflow
         logger.info(f"Adding Workflow[{wf.id}] to deletion.")
         await db.delete(wf)
@@ -199,12 +180,6 @@ async def delete_project(
         for job in jobs:
             logger.info(f"Setting Job[{job.id}].dataset_id to None.")
             job.dataset_id = None
-        # Cascade operations: delete history items and image statuses which are
-        # in relationship with the current dataset
-        stm = delete(HistoryItemV2).where(HistoryItemV2.dataset_id == ds.id)
-        await db.execute(stm)
-        stm = delete(ImageStatus).where(ImageStatus.dataset_id == ds.id)
-        await db.execute(stm)
         # Delete dataset
         logger.info(f"Adding Dataset[{ds.id}] to deletion.")
         await db.delete(ds)