fractal-server 2.14.0a2__py3-none-any.whl → 2.14.0a4__py3-none-any.whl
This diff compares two publicly released versions of the package, as published to a supported registry. It is provided for informational purposes only and reflects the package contents exactly as they appear in their respective public registries.
- fractal_server/__init__.py +1 -1
- fractal_server/__main__.py +3 -1
- fractal_server/app/history/__init__.py +4 -4
- fractal_server/app/history/image_updates.py +124 -142
- fractal_server/app/history/status_enum.py +2 -2
- fractal_server/app/models/v2/__init__.py +6 -4
- fractal_server/app/models/v2/history.py +44 -20
- fractal_server/app/routes/admin/v2/task.py +1 -1
- fractal_server/app/routes/api/__init__.py +1 -1
- fractal_server/app/routes/api/v2/__init__.py +4 -0
- fractal_server/app/routes/api/v2/_aux_functions_history.py +49 -0
- fractal_server/app/routes/api/v2/dataset.py +0 -12
- fractal_server/app/routes/api/v2/history.py +302 -176
- fractal_server/app/routes/api/v2/project.py +1 -26
- fractal_server/app/routes/api/v2/status_legacy.py +168 -0
- fractal_server/app/routes/api/v2/workflow.py +2 -17
- fractal_server/app/routes/api/v2/workflowtask.py +41 -71
- fractal_server/app/routes/auth/oauth.py +5 -3
- fractal_server/app/runner/executors/base_runner.py +2 -1
- fractal_server/app/runner/executors/local/_submit_setup.py +5 -13
- fractal_server/app/runner/executors/local/runner.py +10 -55
- fractal_server/app/runner/executors/slurm_common/_slurm_config.py +1 -1
- fractal_server/app/runner/executors/slurm_common/get_slurm_config.py +1 -1
- fractal_server/app/runner/executors/slurm_common/remote.py +1 -1
- fractal_server/app/runner/executors/slurm_sudo/runner.py +171 -108
- fractal_server/app/runner/v2/__init__.py +2 -22
- fractal_server/app/runner/v2/_slurm_ssh.py +1 -1
- fractal_server/app/runner/v2/_slurm_sudo.py +1 -1
- fractal_server/app/runner/v2/runner.py +47 -59
- fractal_server/app/runner/v2/runner_functions.py +185 -69
- fractal_server/app/schemas/_validators.py +13 -24
- fractal_server/app/schemas/user.py +10 -7
- fractal_server/app/schemas/user_settings.py +9 -21
- fractal_server/app/schemas/v2/dataset.py +8 -6
- fractal_server/app/schemas/v2/job.py +9 -5
- fractal_server/app/schemas/v2/manifest.py +3 -7
- fractal_server/app/schemas/v2/project.py +9 -7
- fractal_server/app/schemas/v2/task.py +41 -77
- fractal_server/app/schemas/v2/task_collection.py +14 -32
- fractal_server/app/schemas/v2/task_group.py +10 -9
- fractal_server/app/schemas/v2/workflow.py +10 -11
- fractal_server/app/security/__init__.py +3 -3
- fractal_server/app/security/signup_email.py +2 -2
- fractal_server/config.py +33 -34
- fractal_server/migrations/versions/fbce16ff4e47_new_history_items.py +120 -0
- fractal_server/tasks/v2/templates/2_pip_install.sh +1 -1
- fractal_server/tasks/v2/templates/4_pip_show.sh +1 -1
- fractal_server/tasks/v2/utils_templates.py +6 -0
- {fractal_server-2.14.0a2.dist-info → fractal_server-2.14.0a4.dist-info}/METADATA +1 -1
- {fractal_server-2.14.0a2.dist-info → fractal_server-2.14.0a4.dist-info}/RECORD +53 -54
- fractal_server/app/runner/executors/slurm_sudo/_executor_wait_thread.py +0 -130
- fractal_server/app/schemas/v2/history.py +0 -23
- fractal_server/migrations/versions/87cd72a537a2_add_historyitem_table.py +0 -68
- fractal_server/migrations/versions/954ddc64425a_image_status.py +0 -63
- {fractal_server-2.14.0a2.dist-info → fractal_server-2.14.0a4.dist-info}/LICENSE +0 -0
- {fractal_server-2.14.0a2.dist-info → fractal_server-2.14.0a4.dist-info}/WHEEL +0 -0
- {fractal_server-2.14.0a2.dist-info → fractal_server-2.14.0a4.dist-info}/entry_points.txt +0 -0
fractal_server/app/routes/api/v2/history.py

```diff
@@ -1,4 +1,5 @@
-from
+from datetime import datetime
+from typing import Any
 from typing import Optional
 
 from fastapi import APIRouter
```
```diff
@@ -6,126 +7,196 @@ from fastapi import Depends
 from fastapi import HTTPException
 from fastapi import status
 from fastapi.responses import JSONResponse
+from pydantic import AwareDatetime
 from pydantic import BaseModel
+from pydantic import field_serializer
 from sqlmodel import func
 from sqlmodel import select
 
 from ._aux_functions import _get_dataset_check_owner
 from ._aux_functions import _get_workflow_check_owner
 from ._aux_functions import _get_workflowtask_check_history_owner
+from ._aux_functions_history import get_history_unit_or_404
+from ._aux_functions_history import read_log_file
 from fractal_server.app.db import AsyncSession
 from fractal_server.app.db import get_async_db
-from fractal_server.app.history.status_enum import
+from fractal_server.app.history.status_enum import XXXStatus
 from fractal_server.app.models import UserOAuth
-from fractal_server.app.models.v2 import
-from fractal_server.app.models.v2 import
+from fractal_server.app.models.v2 import HistoryImageCache
+from fractal_server.app.models.v2 import HistoryRun
+from fractal_server.app.models.v2 import HistoryUnit
 from fractal_server.app.routes.auth import current_active_user
 from fractal_server.app.routes.pagination import get_pagination_params
 from fractal_server.app.routes.pagination import PaginationRequest
 from fractal_server.app.routes.pagination import PaginationResponse
-from fractal_server.
+from fractal_server.images.tools import filter_image_list
+from fractal_server.images.tools import merge_type_filters
+from fractal_server.logger import set_logger
 
 router = APIRouter()
+logger = set_logger(__name__)
 
 
-@router.get(
-
-    response_model=list[HistoryItemV2Read],
-)
-async def get_dataset_history(
+@router.get("/project/{project_id}/status/")
+async def get_workflow_tasks_statuses(
     project_id: int,
     dataset_id: int,
+    workflow_id: int,
     user: UserOAuth = Depends(current_active_user),
     db: AsyncSession = Depends(get_async_db),
-) ->
-    await
+) -> JSONResponse:
+    workflow = await _get_workflow_check_owner(
         project_id=project_id,
-
+        workflow_id=workflow_id,
         user_id=user.id,
         db=db,
     )
+    response = {}
+    for wftask in workflow.task_list:
+        res = await db.execute(
+            select(HistoryRun)
+            .where(HistoryRun.dataset_id == dataset_id)
+            .where(HistoryRun.workflowtask_id == wftask.id)
+            .order_by(HistoryRun.timestamp_started.desc())
+            .limit(1)
+        )
+        latest_history_run = res.scalar()
+        if not latest_history_run:
+            response[wftask.id] = None
+            continue
+        response[wftask.id] = dict(
+            status=latest_history_run.status,
+            num_available_images=latest_history_run.num_available_images,
+        )
 
-
-
-
-
-
-
-
-
+        for target_status in XXXStatus:
+            stm = (
+                select(func.count(HistoryImageCache.zarr_url))
+                .join(HistoryUnit)
+                .where(HistoryImageCache.dataset_id == dataset_id)
+                .where(HistoryImageCache.workflowtask_id == wftask.id)
+                .where(
+                    HistoryImageCache.latest_history_unit_id == HistoryUnit.id
+                )
+                .where(HistoryUnit.status == target_status.value)
+            )
+            res = await db.execute(stm)
+            num_images = res.scalar()
+            response[wftask.id][
+                f"num_{target_status.value}_images"
+            ] = num_images
 
+    return JSONResponse(content=response, status_code=200)
 
-
-
+
+# FIXME MOVE TO SCHEMAS
+
+
+class HistoryUnitRead(BaseModel):
+    id: int
+    logfile: Optional[str] = None
+    status: XXXStatus
+    zarr_urls: list[str]
+
+
+class HistoryRunReadAggregated(BaseModel):
+    id: int
+    timestamp_started: AwareDatetime
+    workflowtask_dump: dict[str, Any]
+    num_submitted_units: int
+    num_done_units: int
+    num_failed_units: int
+
+    @field_serializer("timestamp_started")
+    def serialize_datetime(v: datetime) -> str:
+        return v.isoformat()
+
+
+class ImageLogsRequest(BaseModel):
+    workflowtask_id: int
+    dataset_id: int
+    zarr_url: str
+
+
+class ImageWithStatus(BaseModel):
+    zarr_url: str
+    status: Optional[XXXStatus] = None
+
+
+# end FIXME
+
+
+@router.get("/project/{project_id}/status/run/")
+async def get_history_run_list(
     project_id: int,
-    workflow_id: int,
     dataset_id: int,
+    workflowtask_id: int,
     user: UserOAuth = Depends(current_active_user),
     db: AsyncSession = Depends(get_async_db),
-) ->
-
-
-
+) -> list[HistoryRunReadAggregated]:
+    # Access control
+    await _get_workflowtask_check_history_owner(
+        dataset_id=dataset_id,
+        workflowtask_id=workflowtask_id,
         user_id=user.id,
         db=db,
     )
 
-
+    # Get all runs
+    stm = (
+        select(HistoryRun)
+        .where(HistoryRun.dataset_id == dataset_id)
+        .where(HistoryRun.workflowtask_id == workflowtask_id)
+        .order_by(HistoryRun.timestamp_started)
+    )
+    res = await db.execute(stm)
+    runs = res.scalars().all()
+
+    # Add units count by status
+
+    if not runs:
+        return []
 
-
+    run_ids = [run.id for run in runs]
     stm = (
         select(
-
+            HistoryUnit.history_run_id,
+            HistoryUnit.status,
+            func.count(HistoryUnit.id),
         )
-        .where(
-        .
-        .order_by(
-            HistoryItemV2.workflowtask_id,
-            HistoryItemV2.timestamp_started.desc(),
-        )
-        # https://www.postgresql.org/docs/current/sql-select.html#SQL-DISTINCT
-        .distinct(HistoryItemV2.workflowtask_id)
+        .where(HistoryUnit.history_run_id.in_(run_ids))
+        .group_by(HistoryUnit.history_run_id, HistoryUnit.status)
     )
     res = await db.execute(stm)
-
-
-
-
-
-
-
-        .where(ImageStatus.workflowtask_id.in_(wft_ids))
-        .where(ImageStatus.status == _status)
-        # https://docs.sqlalchemy.org/en/20/tutorial/data_select.html#tutorial-group-by-w-aggregates
-        .group_by(ImageStatus.workflowtask_id)
-    )
-    res = await db.execute(stm)
-    count[_status] = {k: v for k, v in res.all()}
-
-    result = {
-        str(_id): None
-        if _id not in num_available_images
-        else {
-            "num_available_images": num_available_images[_id],
-            "num_done_images": count["done"].get(_id, 0),
-            "num_submitted_images": count["submitted"].get(_id, 0),
-            "num_failed_images": count["failed"].get(_id, 0),
+    unit_counts = res.all()
+
+    count_map = {
+        run_id: {
+            "num_done_units": 0,
+            "num_submitted_units": 0,
+            "num_failed_units": 0,
         }
-        for
+        for run_id in run_ids
     }
+    for run_id, unit_status, count in unit_counts:
+        count_map[run_id][f"num_{unit_status}_units"] = count
+
+    runs = [dict(**run.model_dump(), **count_map[run.id]) for run in runs]
 
-    return
+    return runs
 
 
-@router.get("/project/{project_id}/status/
-async def
+@router.get("/project/{project_id}/status/run/{history_run_id}/units/")
+async def get_history_run_units(
     project_id: int,
-    workflowtask_id: int,
     dataset_id: int,
+    workflowtask_id: int,
+    history_run_id: int,
     user: UserOAuth = Depends(current_active_user),
     db: AsyncSession = Depends(get_async_db),
-
-
+    pagination: PaginationRequest = Depends(get_pagination_params),
+) -> PaginationResponse[HistoryUnitRead]:
+    # Access control
     await _get_workflowtask_check_history_owner(
         dataset_id=dataset_id,
         workflowtask_id=workflowtask_id,
```
```diff
@@ -133,122 +204,153 @@ async def get_per_workflowtask_subsets_aggregated_info(
         db=db,
     )
 
-
-
-
-
-
-    )
-    res = await db.execute(stm)
-    hash_statuses = res.all()
-
-    result = []
-    for _hash, statuses in hash_statuses:
-        dump = await db.execute(
-            select(HistoryItemV2.workflowtask_dump)
-            .where(HistoryItemV2.workflowtask_id == workflowtask_id)
-            .where(HistoryItemV2.dataset_id == dataset_id)
-            .where(HistoryItemV2.parameters_hash == _hash)
+    history_run = await db.get(HistoryRun, history_run_id)
+    if history_run is None:
+        raise HTTPException(
+            status_code=status.HTTP_404_NOT_FOUND,
+            detail=f"HistoryRun {history_run_id} not found",
         )
-
-
-
-
-            "info": {
-                "num_done_images": statuses.count(
-                    HistoryItemImageStatus.DONE
-                ),
-                "num_failed_images": statuses.count(
-                    HistoryItemImageStatus.FAILED
-                ),
-                "num_submitted_images": statuses.count(
-                    HistoryItemImageStatus.SUBMITTED
-                ),
-            },
-        }
+
+    res = await db.execute(
+        select(func.count(HistoryUnit.id)).where(
+            HistoryUnit.history_run_id == history_run_id
         )
+    )
+    total_count = res.scalar()
 
-
+    page_size = pagination.page_size or total_count
+
+    res = await db.execute(
+        select(HistoryUnit)
+        .where(HistoryUnit.history_run_id == history_run_id)
+        .offset((pagination.page - 1) * page_size)
+        .limit(page_size)
+    )
+    units = res.scalars().all()
+
+    return dict(
+        current_page=pagination.page,
+        page_size=page_size,
+        total_count=total_count,
+        items=units,
+    )
 
 
 @router.get("/project/{project_id}/status/images/")
-async def
+async def get_history_images(
     project_id: int,
-    workflowtask_id: int,
     dataset_id: int,
-
-    parameters_hash: Optional[str] = None,
-    # Dependencies
-    pagination: PaginationRequest = Depends(get_pagination_params),
+    workflowtask_id: int,
     user: UserOAuth = Depends(current_active_user),
     db: AsyncSession = Depends(get_async_db),
-
-
-
-
-
-
+    pagination: PaginationRequest = Depends(get_pagination_params),
+) -> PaginationResponse[ImageWithStatus]:
+    # Access control and object retrieval
+    # FIXME: Provide a single function that checks/gets what is needed
+    res = await _get_dataset_check_owner(
+        project_id=project_id,
         dataset_id=dataset_id,
-        workflowtask_id=workflowtask_id,
         user_id=user.id,
         db=db,
     )
-
-
-
-
-    .
-
+    dataset = res["dataset"]
+    wftask = await _get_workflowtask_check_history_owner(
+        dataset_id=dataset_id,
+        workflowtask_id=workflowtask_id,
+        user_id=user.id,
+        db=db,
    )
-
-
-    .
-    .
-
+    workflow = await _get_workflow_check_owner(
+        project_id=project_id,
+        workflow_id=wftask.workflow_id,
+        user_id=user.id,
+        db=db,
     )
 
-
-
-
+    # FIXME reduce logging?
+    prefix = f"[DS{dataset.id}-WFT{wftask.id}-images]"
+
+    # Reconstruct type filters by going through the workflow task list
+    latest_type_filters = {}
+    for current_wftask in workflow.task_list[0 : wftask.order + 1]:
+        patch = merge_type_filters(
+            wftask_type_filters=current_wftask.type_filters,
+            task_input_types=current_wftask.task.input_types,
         )
-
+        latest_type_filters.update(patch)
+    logger.debug(f"{prefix} {latest_type_filters=}")
 
-
-
+    # Get all matching images from the dataset
+    filtered_dataset_images = filter_image_list(
+        images=dataset.images,
+        type_filters=latest_type_filters,
+    )
+    logger.debug(f"{prefix} {len(dataset.images)=}")
+    logger.debug(f"{prefix} {len(filtered_dataset_images)=}")
 
-
-
-
-
+    filtered_dataset_images_url = list(
+        img["zarr_url"] for img in filtered_dataset_images
+    )
+    logger.debug(f"{prefix} {len(filtered_dataset_images_url)=}")
+
+    # Get pairs (zarr_url,status) for all processed images
+    res = await db.execute(
+        select(HistoryImageCache.zarr_url, HistoryUnit.status)
+        .join(HistoryUnit)
+        .where(HistoryImageCache.dataset_id == dataset_id)
+        .where(HistoryImageCache.workflowtask_id == workflowtask_id)
+        .where(HistoryImageCache.latest_history_unit_id == HistoryUnit.id)
+        .where(HistoryImageCache.zarr_url.in_(filtered_dataset_images_url))
+        .order_by(HistoryImageCache.zarr_url)
+    )
+    list_processed_url_status = res.all()
+    logger.debug(f"{prefix} {len(list_processed_url_status)=}")
 
-
-    query = query.offset((page - 1) * page_size)
+    # Further processing
 
-
-
+    list_processed_url = list(item[0] for item in list_processed_url_status)
+    logger.debug(f"{prefix} {len(list_processed_url)=}")
 
-
-
-
-
-        items=images,
+    list_non_processed_url_status = list(
+        (url, None)
+        for url in filtered_dataset_images_url
+        if url not in list_processed_url
     )
+    logger.debug(f"{prefix} {len(list_non_processed_url_status)=}")
 
+    sorted_list_url_status = sorted(
+        list_processed_url_status + list_non_processed_url_status,
+        key=lambda url_status: url_status[0],
+    )
+    logger.debug(f"{prefix} {len(sorted_list_url_status)=}")
 
-
-
-
-
+    # Final list of objects
+    sorted_list_objects = list(
+        dict(zarr_url=url_status[0], status=url_status[1])
+        for url_status in sorted_list_url_status
+    )
 
+    total_count = len(sorted_list_objects)
+    page_size = pagination.page_size or total_count
 
-
-
+    return dict(
+        current_page=pagination.page,
+        page_size=page_size,
+        total_count=total_count,
+        items=sorted_list_objects[
+            (pagination.page - 1) * page_size : pagination.page * page_size
+        ],
+    )
+
+
+@router.post("/project/{project_id}/status/image-log/")
+async def get_image_log(
     project_id: int,
     request_data: ImageLogsRequest,
     user: UserOAuth = Depends(current_active_user),
     db: AsyncSession = Depends(get_async_db),
 ) -> JSONResponse:
-
+    # Access control
     wftask = await _get_workflowtask_check_history_owner(
         dataset_id=request_data.dataset_id,
         workflowtask_id=request_data.workflowtask_id,
```
```diff
@@ -256,39 +358,63 @@ async def get_image_logs(
         db=db,
     )
 
-
-
+    # Get HistoryImageCache
+    history_image_cache = await db.get(
+        HistoryImageCache,
         (
             request_data.zarr_url,
-            request_data.workflowtask_id,
             request_data.dataset_id,
+            request_data.workflowtask_id,
         ),
     )
-    if
+    if history_image_cache is None:
         raise HTTPException(
             status_code=status.HTTP_404_NOT_FOUND,
-            detail="
+            detail="HistoryImageCache not found",
         )
+    # Get history unit
+    history_unit = await get_history_unit_or_404(
+        history_unit_id=history_image_cache.latest_history_unit_id,
+        db=db,
+    )
 
-
-
-
-
-
-
-
+    # Get log or placeholder text
+    log = read_log_file(
+        logfile=history_unit.logfile,
+        wftask=wftask,
+        dataset_id=request_data.dataset_id,
+    )
+    return JSONResponse(content=log)
 
-    logfile = Path(image_status.logfile)
-    if not logfile.exists():
-        return JSONResponse(
-            content=(
-                f"Error while retrieving logs for task '{wftask.task.name}' "
-                f"in dataset {request_data.dataset_id}: "
-                f"file '{logfile}' is not available."
-            )
-        )
 
-
-
+@router.get("/project/{project_id}/status/unit-log/")
+async def get_history_unit_log(
+    project_id: int,
+    history_run_id: int,
+    history_unit_id: int,
+    workflowtask_id: int,
+    dataset_id: int,
+    user: UserOAuth = Depends(current_active_user),
+    db: AsyncSession = Depends(get_async_db),
+) -> JSONResponse:
+    # Access control
+    wftask = await _get_workflowtask_check_history_owner(
+        dataset_id=dataset_id,
+        workflowtask_id=workflowtask_id,
+        user_id=user.id,
+        db=db,
+    )
+
+    # Get history unit
+    history_unit = await get_history_unit_or_404(
+        history_unit_id=history_unit_id,
+        db=db,
+    )
 
-
+    # Get log or placeholder text
+    log = read_log_file(
+        logfile=history_unit.logfile,
+        wftask=wftask,
+        dataset_id=dataset_id,
+    )
+    return JSONResponse(content=log)
```
fractal_server/app/routes/api/v2/project.py

```diff
@@ -5,7 +5,6 @@ from fastapi import Depends
 from fastapi import HTTPException
 from fastapi import Response
 from fastapi import status
-from sqlmodel import delete
 from sqlmodel import select
 
 from .....logger import reset_logger_handlers
@@ -13,8 +12,6 @@ from .....logger import set_logger
 from ....db import AsyncSession
 from ....db import get_async_db
 from ....models.v2 import DatasetV2
-from ....models.v2 import HistoryItemV2
-from ....models.v2 import ImageStatus
 from ....models.v2 import JobV2
 from ....models.v2 import LinkUserProjectV2
 from ....models.v2 import ProjectV2
@@ -57,7 +54,7 @@ async def create_project(
     db: AsyncSession = Depends(get_async_db),
 ) -> Optional[ProjectReadV2]:
     """
-    Create new
+    Create new project
     """
 
     # Check that there is no project with the same user and name
@@ -164,22 +161,6 @@ async def delete_project(
         for job in jobs:
             logger.info(f"Setting Job[{job.id}].workflow_id to None.")
             job.workflow_id = None
-        # Cascade operations: set foreign-keys to null for history items
-        # which are in relationship with the current workflow
-        wft_ids = [wft.id for wft in wf.task_list]
-        stm = select(HistoryItemV2).where(
-            HistoryItemV2.workflowtask_id.in_(wft_ids)
-        )
-        res = await db.execute(stm)
-        history_items = res.scalars().all()
-        for history_item in history_items:
-            history_item.workflowtask_id = None
-        # Cascade operations: delete all image status which are in relationship
-        # with the current workflow
-        stm = delete(ImageStatus).where(
-            ImageStatus.workflowtask_id.in_(wft_ids)
-        )
-        await db.execute(stm)
         # Delete workflow
         logger.info(f"Adding Workflow[{wf.id}] to deletion.")
         await db.delete(wf)
@@ -199,12 +180,6 @@ async def delete_project(
         for job in jobs:
             logger.info(f"Setting Job[{job.id}].dataset_id to None.")
             job.dataset_id = None
-        # Cascade operations: delete history items and image statuses which are
-        # in relationship with the current dataset
-        stm = delete(HistoryItemV2).where(HistoryItemV2.dataset_id == ds.id)
-        await db.execute(stm)
-        stm = delete(ImageStatus).where(ImageStatus.dataset_id == ds.id)
-        await db.execute(stm)
         # Delete dataset
         logger.info(f"Adding Dataset[{ds.id}] to deletion.")
         await db.delete(ds)
```