fractal-server 2.13.0__py3-none-any.whl → 2.14.0a0__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package, as published to a supported registry. It is provided for informational purposes only and reflects the package versions exactly as they appear in their public registries.
- fractal_server/__init__.py +1 -1
- fractal_server/app/history/__init__.py +4 -0
- fractal_server/app/history/image_updates.py +142 -0
- fractal_server/app/history/status_enum.py +16 -0
- fractal_server/app/models/v2/__init__.py +9 -1
- fractal_server/app/models/v2/accounting.py +35 -0
- fractal_server/app/models/v2/history.py +53 -0
- fractal_server/app/routes/admin/v2/__init__.py +4 -0
- fractal_server/app/routes/admin/v2/accounting.py +108 -0
- fractal_server/app/routes/admin/v2/impersonate.py +35 -0
- fractal_server/app/routes/admin/v2/job.py +5 -13
- fractal_server/app/routes/admin/v2/task_group.py +4 -12
- fractal_server/app/routes/api/v2/__init__.py +2 -2
- fractal_server/app/routes/api/v2/_aux_functions.py +78 -0
- fractal_server/app/routes/api/v2/_aux_functions_task_lifecycle.py +3 -3
- fractal_server/app/routes/api/v2/dataset.py +12 -9
- fractal_server/app/routes/api/v2/history.py +247 -0
- fractal_server/app/routes/api/v2/submit.py +1 -0
- fractal_server/app/routes/api/v2/task_group.py +2 -5
- fractal_server/app/routes/api/v2/workflow.py +18 -3
- fractal_server/app/routes/api/v2/workflowtask.py +22 -0
- fractal_server/app/routes/aux/__init__.py +0 -20
- fractal_server/app/runner/executors/base_runner.py +114 -0
- fractal_server/app/runner/{v2/_local → executors/local}/_local_config.py +3 -3
- fractal_server/app/runner/executors/local/_submit_setup.py +54 -0
- fractal_server/app/runner/executors/local/runner.py +200 -0
- fractal_server/app/runner/executors/{slurm → slurm_common}/_batching.py +1 -1
- fractal_server/app/runner/executors/{slurm → slurm_common}/_slurm_config.py +3 -3
- fractal_server/app/runner/{v2/_slurm_ssh → executors/slurm_common}/_submit_setup.py +13 -12
- fractal_server/app/runner/{v2/_slurm_common → executors/slurm_common}/get_slurm_config.py +9 -15
- fractal_server/app/runner/executors/{slurm/ssh → slurm_ssh}/_executor_wait_thread.py +1 -1
- fractal_server/app/runner/executors/{slurm/ssh → slurm_ssh}/_slurm_job.py +1 -1
- fractal_server/app/runner/executors/{slurm/ssh → slurm_ssh}/executor.py +13 -14
- fractal_server/app/runner/executors/{slurm/sudo → slurm_sudo}/_check_jobs_status.py +11 -9
- fractal_server/app/runner/executors/{slurm/sudo → slurm_sudo}/_executor_wait_thread.py +3 -3
- fractal_server/app/runner/executors/{slurm/sudo → slurm_sudo}/_subprocess_run_as_user.py +2 -68
- fractal_server/app/runner/executors/slurm_sudo/runner.py +632 -0
- fractal_server/app/runner/task_files.py +70 -96
- fractal_server/app/runner/v2/__init__.py +9 -19
- fractal_server/app/runner/v2/_local.py +84 -0
- fractal_server/app/runner/v2/{_slurm_ssh/__init__.py → _slurm_ssh.py} +12 -13
- fractal_server/app/runner/v2/{_slurm_sudo/__init__.py → _slurm_sudo.py} +12 -12
- fractal_server/app/runner/v2/runner.py +106 -31
- fractal_server/app/runner/v2/runner_functions.py +88 -64
- fractal_server/app/runner/v2/runner_functions_low_level.py +20 -20
- fractal_server/app/schemas/v2/__init__.py +1 -0
- fractal_server/app/schemas/v2/accounting.py +18 -0
- fractal_server/app/schemas/v2/dataset.py +0 -17
- fractal_server/app/schemas/v2/history.py +23 -0
- fractal_server/config.py +58 -52
- fractal_server/migrations/versions/8223fcef886c_image_status.py +63 -0
- fractal_server/migrations/versions/87cd72a537a2_add_historyitem_table.py +68 -0
- fractal_server/migrations/versions/af1ef1c83c9b_add_accounting_tables.py +57 -0
- fractal_server/tasks/v2/utils_background.py +1 -1
- {fractal_server-2.13.0.dist-info → fractal_server-2.14.0a0.dist-info}/METADATA +1 -1
- {fractal_server-2.13.0.dist-info → fractal_server-2.14.0a0.dist-info}/RECORD +66 -55
- fractal_server/app/routes/api/v2/status.py +0 -168
- fractal_server/app/runner/executors/slurm/sudo/executor.py +0 -1281
- fractal_server/app/runner/v2/_local/__init__.py +0 -129
- fractal_server/app/runner/v2/_local/_submit_setup.py +0 -52
- fractal_server/app/runner/v2/_local/executor.py +0 -100
- fractal_server/app/runner/v2/_slurm_sudo/_submit_setup.py +0 -83
- fractal_server/app/runner/v2/handle_failed_job.py +0 -59
- /fractal_server/app/runner/executors/{slurm → local}/__init__.py +0 -0
- /fractal_server/app/runner/executors/{slurm/ssh → slurm_common}/__init__.py +0 -0
- /fractal_server/app/runner/executors/{_job_states.py → slurm_common/_job_states.py} +0 -0
- /fractal_server/app/runner/executors/{slurm → slurm_common}/remote.py +0 -0
- /fractal_server/app/runner/executors/{slurm → slurm_common}/utils_executors.py +0 -0
- /fractal_server/app/runner/executors/{slurm/sudo → slurm_ssh}/__init__.py +0 -0
- /fractal_server/app/runner/{v2/_slurm_common → executors/slurm_sudo}/__init__.py +0 -0
- {fractal_server-2.13.0.dist-info → fractal_server-2.14.0a0.dist-info}/LICENSE +0 -0
- {fractal_server-2.13.0.dist-info → fractal_server-2.14.0a0.dist-info}/WHEEL +0 -0
- {fractal_server-2.13.0.dist-info → fractal_server-2.14.0a0.dist-info}/entry_points.txt +0 -0
@@ -55,7 +55,7 @@ async def get_package_version_from_pypi(
             f"A TimeoutException occurred while getting {url}.\n"
             f"Original error: {str(e)}."
         )
-        logger.
+        logger.warning(error_msg)
         raise HTTPException(
            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
            detail=error_msg,
@@ -65,7 +65,7 @@ async def get_package_version_from_pypi(
             f"An unknown error occurred while getting {url}. "
             f"Original error: {str(e)}."
         )
-        logger.
+        logger.warning(error_msg)
         raise HTTPException(
            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
            detail=error_msg,
@@ -85,7 +85,7 @@ async def get_package_version_from_pypi(
         latest_version = response_data["info"]["version"]
         available_releases = response_data["releases"].keys()
     except KeyError as e:
-        logger.
+        logger.warning(
             f"A KeyError occurred while getting {url}. "
             f"Original error: {str(e)}."
         )
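All three hunks make the same change inside `get_package_version_from_pypi`: the log call that precedes the 422 response is switched to `logger.warning`. A minimal sketch of the resulting pattern, assuming an httpx-based fetch (the helper name and client setup here are illustrative, not from the package):

    import logging

    import httpx
    from fastapi import HTTPException, status

    logger = logging.getLogger(__name__)


    async def _fetch_json(url: str) -> dict:
        # Illustrative helper: log at warning level (not error), then
        # surface the problem to the client as a 422 response.
        try:
            async with httpx.AsyncClient() as client:
                response = await client.get(url)
        except httpx.TimeoutException as e:
            error_msg = (
                f"A TimeoutException occurred while getting {url}.\n"
                f"Original error: {str(e)}."
            )
            logger.warning(error_msg)
            raise HTTPException(
                status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
                detail=error_msg,
            )
        return response.json()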
@@ -5,11 +5,14 @@ from fastapi import Depends
 from fastapi import HTTPException
 from fastapi import Response
 from fastapi import status
+from sqlmodel import delete
 from sqlmodel import select

 from ....db import AsyncSession
 from ....db import get_async_db
 from ....models.v2 import DatasetV2
+from ....models.v2 import HistoryItemV2
+from ....models.v2 import ImageStatus
 from ....models.v2 import JobV2
 from ....models.v2 import ProjectV2
 from ....schemas.v2 import DatasetCreateV2
@@ -47,7 +50,6 @@ async def create_dataset(
     )

     if dataset.zarr_dir is None:
-
         if user.settings.project_dir is None:
             raise HTTPException(
                 status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
@@ -91,7 +93,6 @@ async def create_dataset(
 )
 async def read_dataset_list(
     project_id: int,
-    history: bool = True,
     user: UserOAuth = Depends(current_active_user),
     db: AsyncSession = Depends(get_async_db),
 ) -> Optional[list[DatasetReadV2]]:
@@ -110,9 +111,6 @@ async def read_dataset_list(
     res = await db.execute(stm)
     dataset_list = res.scalars().all()
     await db.close()
-    if not history:
-        for ds in dataset_list:
-            setattr(ds, "history", [])
     return dataset_list


@@ -225,6 +223,15 @@ async def delete_dataset(
     for job in jobs:
         job.dataset_id = None

+    # Cascade operations: delete history items and image status which are in
+    # relationship with the current dataset
+
+    stm = delete(HistoryItemV2).where(HistoryItemV2.dataset_id == dataset_id)
+    await db.execute(stm)
+
+    stm = delete(ImageStatus).where(ImageStatus.dataset_id == dataset_id)
+    await db.execute(stm)
+
     # Delete dataset
     await db.delete(dataset)
     await db.commit()
@@ -234,7 +241,6 @@ async def delete_dataset(

 @router.get("/dataset/", response_model=list[DatasetReadV2])
 async def get_user_datasets(
-    history: bool = True,
     user: UserOAuth = Depends(current_active_user),
     db: AsyncSession = Depends(get_async_db),
 ) -> list[DatasetReadV2]:
@@ -249,9 +255,6 @@ async def get_user_datasets(
     res = await db.execute(stm)
     dataset_list = res.scalars().all()
     await db.close()
-    if not history:
-        for ds in dataset_list:
-            setattr(ds, "history", [])
     return dataset_list


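Two things happen in the dataset hunks above: the `history: bool = True` query parameter is dropped from both list endpoints (history now lives in the new `HistoryItemV2` and `ImageStatus` tables rather than on the dataset row), and `delete_dataset` gains an explicit cascade. A minimal sketch of that cascade pattern, assuming only the models imported in the hunk (the helper name is illustrative):

    from sqlmodel import delete

    from fractal_server.app.models.v2 import HistoryItemV2
    from fractal_server.app.models.v2 import ImageStatus


    async def _cascade_delete_dataset_rows(db, dataset_id: int) -> None:
        # Bulk DELETE statements remove dependent rows in SQL, without
        # loading ORM objects into the session; the caller commits afterwards.
        await db.execute(
            delete(HistoryItemV2).where(HistoryItemV2.dataset_id == dataset_id)
        )
        await db.execute(
            delete(ImageStatus).where(ImageStatus.dataset_id == dataset_id)
        )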
@@ -0,0 +1,247 @@
+from typing import Optional
+
+from fastapi import APIRouter
+from fastapi import Depends
+from fastapi import HTTPException
+from fastapi import Query
+from fastapi import status
+from fastapi.responses import JSONResponse
+from sqlmodel import func
+from sqlmodel import select
+
+from ._aux_functions import _get_dataset_check_owner
+from ._aux_functions import _get_workflow_check_owner
+from ._aux_functions import _get_workflow_task_check_owner
+from fractal_server.app.db import AsyncSession
+from fractal_server.app.db import get_async_db
+from fractal_server.app.history.status_enum import HistoryItemImageStatus
+from fractal_server.app.models import UserOAuth
+from fractal_server.app.models.v2 import HistoryItemV2
+from fractal_server.app.models.v2 import ImageStatus
+from fractal_server.app.models.v2 import WorkflowTaskV2
+from fractal_server.app.routes.auth import current_active_user
+from fractal_server.app.schemas.v2.history import HistoryItemV2Read
+
+router = APIRouter()
+
+
+@router.get(
+    "/project/{project_id}/dataset/{dataset_id}/history/",
+    response_model=list[HistoryItemV2Read],
+)
+async def get_dataset_history(
+    project_id: int,
+    dataset_id: int,
+    user: UserOAuth = Depends(current_active_user),
+    db: AsyncSession = Depends(get_async_db),
+) -> list[HistoryItemV2Read]:
+    await _get_dataset_check_owner(
+        project_id=project_id,
+        dataset_id=dataset_id,
+        user_id=user.id,
+        db=db,
+    )
+
+    stm = (
+        select(HistoryItemV2)
+        .where(HistoryItemV2.dataset_id == dataset_id)
+        .order_by(HistoryItemV2.timestamp_started)
+    )
+    res = await db.execute(stm)
+    items = res.scalars().all()
+    return items
+
+
+@router.get("/project/{project_id}/status/")
+async def get_per_workflow_aggregated_info(
+    project_id: int,
+    workflow_id: int,
+    dataset_id: int,
+    user: UserOAuth = Depends(current_active_user),
+    db: AsyncSession = Depends(get_async_db),
+) -> JSONResponse:
+    workflow = await _get_workflow_check_owner(
+        project_id=project_id,
+        workflow_id=workflow_id,
+        user_id=user.id,
+        db=db,
+    )
+
+    wft_ids = [wftask.id for wftask in workflow.task_list]
+
+    # num_available_images
+    stm = (
+        select(
+            HistoryItemV2.workflowtask_id, HistoryItemV2.num_available_images
+        )
+        .where(HistoryItemV2.dataset_id == dataset_id)
+        .where(HistoryItemV2.workflowtask_id.in_(wft_ids))
+        .order_by(
+            HistoryItemV2.workflowtask_id,
+            HistoryItemV2.timestamp_started.desc(),
+        )
+        # https://www.postgresql.org/docs/current/sql-select.html#SQL-DISTINCT
+        .distinct(HistoryItemV2.workflowtask_id)
+    )
+    res = await db.execute(stm)
+    num_available_images = {k: v for k, v in res.all()}
+
+    count = {}
+    for _status in HistoryItemImageStatus:
+        stm = (
+            select(ImageStatus.workflowtask_id, func.count())
+            .where(ImageStatus.dataset_id == dataset_id)
+            .where(ImageStatus.workflowtask_id.in_(wft_ids))
+            .where(ImageStatus.status == _status)
+            # https://docs.sqlalchemy.org/en/20/tutorial/data_select.html#tutorial-group-by-w-aggregates
+            .group_by(ImageStatus.workflowtask_id)
+        )
+        res = await db.execute(stm)
+        count[_status] = {k: v for k, v in res.all()}
+
+    result = {
+        str(_id): None
+        if _id not in num_available_images
+        else {
+            "num_available_images": num_available_images[_id],
+            "num_done_images": count["done"].get(_id, 0),
+            "num_submitted_images": count["submitted"].get(_id, 0),
+            "num_failed_images": count["failed"].get(_id, 0),
+        }
+        for _id in wft_ids
+    }
+
+    return JSONResponse(content=result, status_code=200)
+
+
+@router.get("/project/{project_id}/status/subsets/")
+async def get_per_workflowtask_subsets_aggregated_info(
+    project_id: int,
+    workflowtask_id: int,
+    dataset_id: int,
+    user: UserOAuth = Depends(current_active_user),
+    db: AsyncSession = Depends(get_async_db),
+) -> JSONResponse:
+    wftask = await db.get(WorkflowTaskV2, workflowtask_id)
+    if wftask is None:
+        raise HTTPException(
+            status_code=status.HTTP_404_NOT_FOUND,
+            detail="WorkflowTask not found",
+        )
+    await _get_workflow_task_check_owner(
+        project_id=project_id,
+        workflow_id=wftask.workflow_id,
+        workflow_task_id=workflowtask_id,
+        user_id=user.id,
+        db=db,
+    )
+
+    stm = (
+        select(ImageStatus.parameters_hash, func.array_agg(ImageStatus.status))
+        .where(ImageStatus.dataset_id == dataset_id)
+        .where(ImageStatus.workflowtask_id == workflowtask_id)
+        .group_by(ImageStatus.parameters_hash)
+    )
+    res = await db.execute(stm)
+    hash_statuses = res.all()
+
+    result = []
+    for _hash, statuses in hash_statuses:
+        dump = await db.execute(
+            select(HistoryItemV2.workflowtask_dump)
+            .where(HistoryItemV2.workflowtask_id == workflowtask_id)
+            .where(HistoryItemV2.dataset_id == dataset_id)
+            .where(HistoryItemV2.parameters_hash == _hash)
+        )
+        result.append(
+            {
+                "workflowtask_dump": dump.scalar_one(),
+                "parameters_hash": _hash,
+                "info": {
+                    "num_done_images": statuses.count(
+                        HistoryItemImageStatus.DONE
+                    ),
+                    "num_failed_images": statuses.count(
+                        HistoryItemImageStatus.FAILED
+                    ),
+                    "num_submitted_images": statuses.count(
+                        HistoryItemImageStatus.SUBMITTED
+                    ),
+                },
+            }
+        )
+
+    return JSONResponse(content=result, status_code=200)
+
+
+@router.get("/project/{project_id}/status/images/")
+async def get_per_workflowtask_images(
+    project_id: int,
+    workflowtask_id: int,
+    dataset_id: int,
+    status: HistoryItemImageStatus,
+    parameters_hash: Optional[str] = None,
+    # Pagination
+    page: int = Query(default=1, ge=1),
+    page_size: Optional[int] = Query(default=None, ge=1),
+    # Dependencies
+    user: UserOAuth = Depends(current_active_user),
+    db: AsyncSession = Depends(get_async_db),
+) -> JSONResponse:
+
+    if page_size is None and page > 1:
+        raise HTTPException(
+            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+            detail=(f"Invalid pagination parameters: {page=}, {page_size=}."),
+        )
+
+    wftask = await db.get(WorkflowTaskV2, workflowtask_id)
+    if wftask is None:
+        raise HTTPException(
+            status_code=status.HTTP_404_NOT_FOUND,
+            detail="WorkflowTask not found",
+        )
+    await _get_workflow_task_check_owner(
+        project_id=project_id,
+        workflow_id=wftask.workflow_id,
+        workflow_task_id=workflowtask_id,
+        user_id=user.id,
+        db=db,
+    )
+
+    total_count_stm = (
+        select(func.count(ImageStatus.zarr_url))
+        .where(ImageStatus.dataset_id == dataset_id)
+        .where(ImageStatus.workflowtask_id == workflowtask_id)
+        .where(ImageStatus.status == status)
+    )
+    query = (
+        select(ImageStatus.zarr_url)
+        .where(ImageStatus.dataset_id == dataset_id)
+        .where(ImageStatus.workflowtask_id == workflowtask_id)
+        .where(ImageStatus.status == status)
+    )
+
+    if parameters_hash is not None:
+        total_count_stm = total_count_stm.where(
+            ImageStatus.parameters_hash == parameters_hash
+        )
+        query = query.where(ImageStatus.parameters_hash == parameters_hash)
+
+    if page_size is not None:
+        query = query.limit(page_size)
+        if page > 1:
+            query = query.offset((page - 1) * page_size)
+
+    res_total_count = await db.execute(total_count_stm)
+    total_count = res_total_count.scalar()
+
+    res = await db.execute(query)
+    images = res.scalars().all()
+
+    return {
+        "total_count": total_count,
+        "page_size": page_size,
+        "current_page": page,
+        "images": images,
+    }
@@ -1,4 +1,3 @@
-from datetime import datetime
 from typing import Optional

 from fastapi import APIRouter
@@ -6,6 +5,7 @@ from fastapi import Depends
 from fastapi import HTTPException
 from fastapi import Response
 from fastapi import status
+from pydantic.types import AwareDatetime
 from sqlmodel import or_
 from sqlmodel import select

@@ -23,7 +23,6 @@ from fractal_server.app.routes.auth import current_active_user
 from fractal_server.app.routes.auth._aux_auth import (
     _verify_user_belongs_to_group,
 )
-from fractal_server.app.routes.aux import _raise_if_naive_datetime
 from fractal_server.app.schemas.v2 import TaskGroupActivityActionV2
 from fractal_server.app.schemas.v2 import TaskGroupActivityStatusV2
 from fractal_server.app.schemas.v2 import TaskGroupActivityV2Read
@@ -43,13 +42,11 @@ async def get_task_group_activity_list(
     pkg_name: Optional[str] = None,
     status: Optional[TaskGroupActivityStatusV2] = None,
     action: Optional[TaskGroupActivityActionV2] = None,
-    timestamp_started_min: Optional[
+    timestamp_started_min: Optional[AwareDatetime] = None,
     user: UserOAuth = Depends(current_active_user),
     db: AsyncSession = Depends(get_async_db),
 ) -> list[TaskGroupActivityV2Read]:

-    _raise_if_naive_datetime(timestamp_started_min)
-
     stm = select(TaskGroupActivityV2).where(
         TaskGroupActivityV2.user_id == user.id
     )
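With `timestamp_started_min` annotated as pydantic's `AwareDatetime`, naive datetimes are now rejected during request validation (FastAPI turns the validation error into a 422), which is why the hand-rolled `_raise_if_naive_datetime` helper, deleted further down in this diff, is no longer called. A minimal sketch of the behavior, using only public pydantic v2 API:

    from datetime import datetime, timezone

    from pydantic import AwareDatetime, TypeAdapter, ValidationError

    adapter = TypeAdapter(AwareDatetime)

    # Timezone-aware input passes validation unchanged.
    adapter.validate_python(datetime(2025, 1, 1, tzinfo=timezone.utc))

    # Naive input is rejected at the validation layer.
    try:
        adapter.validate_python(datetime(2025, 1, 1))
    except ValidationError as e:
        print(e)  # complains that the input lacks timezone info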
@@ -7,10 +7,13 @@ from fastapi import HTTPException
 from fastapi import Response
 from fastapi import status
 from pydantic import BaseModel
+from sqlmodel import delete
 from sqlmodel import select

 from ....db import AsyncSession
 from ....db import get_async_db
+from ....models.v2 import HistoryItemV2
+from ....models.v2 import ImageStatus
 from ....models.v2 import JobV2
 from ....models.v2 import ProjectV2
 from ....models.v2 import WorkflowV2
@@ -225,14 +228,26 @@ async def delete_workflow(
         ),
     )

-    # Cascade
-    # relationship with the current workflow
+    # Cascade operation: set foreign-keys to null for jobs and history items
+    # which are in relationship with the current workflow.
     stm = select(JobV2).where(JobV2.workflow_id == workflow_id)
     res = await db.execute(stm)
     jobs = res.scalars().all()
     for job in jobs:
         job.workflow_id = None

+    wft_ids = [wft.id for wft in workflow.task_list]
+    stm = select(HistoryItemV2).where(
+        HistoryItemV2.workflowtask_id.in_(wft_ids)
+    )
+    res = await db.execute(stm)
+    history_items = res.scalars().all()
+    for history_item in history_items:
+        history_item.workflowtask_id = None
+
+    stm = delete(ImageStatus).where(ImageStatus.workflowtask_id.in_(wft_ids))
+    await db.execute(stm)
+
     # Delete workflow
     await db.delete(workflow)
     await db.commit()
@@ -244,7 +259,7 @@ async def delete_workflow(
     "/project/{project_id}/workflow/{workflow_id}/export/",
     response_model=WorkflowExportV2,
 )
-async def
+async def export_workflow(
     project_id: int,
     workflow_id: int,
     user: UserOAuth = Depends(current_active_user),
@@ -6,6 +6,8 @@ from fastapi import Depends
 from fastapi import HTTPException
 from fastapi import Response
 from fastapi import status
+from sqlmodel import delete
+from sqlmodel import select

 from ....db import AsyncSession
 from ....db import get_async_db
@@ -15,6 +17,8 @@ from ._aux_functions import _workflow_insert_task
 from ._aux_functions_tasks import _check_type_filters_compatibility
 from ._aux_functions_tasks import _get_task_read_access
 from fractal_server.app.models import UserOAuth
+from fractal_server.app.models.v2 import HistoryItemV2
+from fractal_server.app.models.v2 import ImageStatus
 from fractal_server.app.models.v2 import WorkflowTaskV2
 from fractal_server.app.routes.auth import current_active_user
 from fractal_server.app.schemas.v2 import WorkflowTaskCreateV2
@@ -333,6 +337,24 @@ async def delete_workflowtask(
         db=db,
     )

+    # Cascade operations:
+    # * set foreign-keys to null for history items which are in relationship
+    #   with the current workflowtask;
+    # * delete ImageStatus in relationship with the current workflowtask.
+    stm = select(HistoryItemV2).where(
+        HistoryItemV2.workflowtask_id == db_workflow_task.id
+    )
+    res = await db.execute(stm)
+    history_items = res.scalars().all()
+    for history_item in history_items:
+        history_item.workflowtask_id = None
+
+    stm = delete(ImageStatus).where(
+        ImageStatus.workflowtask_id == db_workflow_task.id
+    )
+    await db.execute(stm)
+
+    # Delete WorkflowTask
     await db.delete(db_workflow_task)
     await db.commit()

@@ -1,20 +0,0 @@
-from datetime import datetime
-from typing import Optional
-
-from fastapi import HTTPException
-from fastapi import status
-
-
-def _raise_if_naive_datetime(*timestamps: tuple[Optional[datetime]]) -> None:
-    """
-    Raise 422 if any not-null argument is a naive `datetime` object:
-    https://docs.python.org/3/library/datetime.html#determining-if-an-object-is-aware-or-naive
-    """
-    for timestamp in filter(None, timestamps):
-        if (timestamp.tzinfo is None) or (
-            timestamp.tzinfo.utcoffset(timestamp) is None
-        ):
-            raise HTTPException(
-                status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
-                detail=f"{timestamp=} is naive. You must provide a timezone.",
-            )
@@ -0,0 +1,114 @@
+from typing import Any
+
+from fractal_server.app.runner.components import _COMPONENT_KEY_
+
+
+class BaseRunner(object):
+    """
+    Base class for Fractal runners.
+    """
+
+    def shutdown(self, *args, **kwargs):
+        raise NotImplementedError()
+
+    def submit(
+        self,
+        func: callable,
+        parameters: dict[str, Any],
+        history_item_id: int,
+        in_compound_task: bool,
+        **kwargs,
+    ) -> tuple[Any, BaseException]:
+        """
+        Run a single fractal task.
+
+        # FIXME: Describe more in detail
+
+        Args:
+            func:
+                Function to be executed.
+            parameters:
+                Dictionary of parameters. Must include `zarr_urls` key.
+            history_item_id:
+                Database ID of the corresponding `HistoryItemV2` entry.
+            in_compound_task:
+                Whether this is the init part of a compound task.
+            kwargs:
+                Runner-specific parameters.
+        """
+        raise NotImplementedError()
+
+    def multisubmit(
+        self,
+        func: callable,
+        list_parameters: list[dict[str, Any]],
+        history_item_id: int,
+        in_compound_task: bool,
+        **kwargs,
+    ) -> tuple[dict[int, Any], dict[int, BaseException]]:
+        """
+        Run a parallel fractal task.
+
+        # FIXME: Describe more in detail
+
+        Args:
+            func:
+                Function to be executed.
+            list_parameters:
+                List of dictionaries of parameters. Each one must include a
+                `zarr_url` key.
+            history_item_id:
+                Database ID of the corresponding `HistoryItemV2` entry.
+            in_compound_task:
+                Whether this is the compute part of a compound task.
+            kwargs:
+                Runner-specific parameters.
+        """
+        raise NotImplementedError()
+
+    def validate_submit_parameters(self, parameters: dict[str, Any]) -> None:
+        """
+        Validate parameters for `submit` method
+
+        Args:
+            parameters: Parameters dictionary.
+        """
+        if not isinstance(parameters, dict):
+            raise ValueError("`parameters` must be a dictionary.")
+        if "zarr_urls" not in parameters.keys():
+            raise ValueError(
+                f"No 'zarr_urls' key in {list(parameters.keys())}"
+            )
+        if _COMPONENT_KEY_ not in parameters.keys():
+            raise ValueError(
+                f"No '{_COMPONENT_KEY_}' key in {list(parameters.keys())}"
+            )
+
+    def validate_multisubmit_parameters(
+        self,
+        list_parameters: list[dict[str, Any]],
+        in_compound_task: bool,
+    ) -> None:
+        """
+        Validate parameters for `multisubmit` method
+
+        Args:
+            list_parameters: List of parameters dictionaries.
+            in_compound_task:
+                Whether this is the compute part of a compound task.
+        """
+        for single_kwargs in list_parameters:
+            if not isinstance(single_kwargs, dict):
+                raise RuntimeError("kwargs item must be a dictionary.")
+            if "zarr_url" not in single_kwargs.keys():
+                raise RuntimeError(
+                    f"No 'zarr_url' key in {list(single_kwargs.keys())}"
+                )
+            if _COMPONENT_KEY_ not in single_kwargs.keys():
+                raise ValueError(
+                    f"No '{_COMPONENT_KEY_}' key in {list(single_kwargs.keys())}"
+                )
+        if not in_compound_task:
+            zarr_urls = [kwargs["zarr_url"] for kwargs in list_parameters]
+            if len(zarr_urls) != len(set(zarr_urls)):
+                raise RuntimeError("Non-unique zarr_urls")
@@ -20,9 +20,9 @@ from pydantic import BaseModel
 from pydantic import ConfigDict
 from pydantic import ValidationError

-from
-from
-from
+from fractal_server.app.models.v2 import WorkflowTaskV2
+from fractal_server.config import get_settings
+from fractal_server.syringe import Inject


 class LocalBackendConfigError(ValueError):
@@ -0,0 +1,54 @@
+from pathlib import Path
+from typing import Any
+from typing import Literal
+
+from ...task_files import TaskFiles
+from ._local_config import get_local_backend_config
+from fractal_server.app.models.v2 import WorkflowTaskV2
+
+
+def _local_submit_setup(
+    *,
+    wftask: WorkflowTaskV2,
+    root_dir_local: Path,
+    root_dir_remote: Path,
+    which_type: Literal["non_parallel", "parallel"],
+) -> dict[str, Any]:
+    """
+    Collect WorkflowTask-specific configuration parameters from different
+    sources, and inject them for execution.
+
+    FIXME
+
+    Arguments:
+        wftask:
+            WorkflowTask for which the configuration is to be assembled
+        root_dir_local:
+            Passed on to `TaskFiles`.
+        root_dir_remote:
+            Passed on to `TaskFiles`.
+
+    Returns:
+        submit_setup_dict:
+            A dictionary that will be passed on to
+            `FractalThreadPoolExecutor.submit` and
+            `FractalThreadPoolExecutor.map`, so as to set extra options.
+    """
+
+    local_backend_config = get_local_backend_config(
+        wftask=wftask,
+        which_type=which_type,
+    )
+
+    # Get TaskFiles object
+    task_files = TaskFiles(
+        root_dir_local=root_dir_local,
+        root_dir_remote=root_dir_remote,
+        task_order=wftask.order,
+        task_name=wftask.task.name,
+    )
+
+    return dict(
+        local_backend_config=local_backend_config,
+        task_files=task_files,
+    )
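For orientation, a hypothetical call site: the dict returned by `_local_submit_setup` is meant to be splatted into the runner call as extra keyword arguments. All surrounding names (`runner`, `task_function`, `parameters`, `history_item_id`) are placeholders, not from the package:

    def run_one_local_task(
        runner, wftask, task_function, parameters, history_item_id, root_dir
    ):
        # Assemble per-task options from the WorkflowTask, then forward
        # them to the runner as keyword arguments.
        submit_setup = _local_submit_setup(
            wftask=wftask,
            root_dir_local=root_dir,
            root_dir_remote=root_dir,
            which_type="non_parallel",
        )
        return runner.submit(
            func=task_function,
            parameters=parameters,
            history_item_id=history_item_id,
            in_compound_task=False,
            **submit_setup,  # local_backend_config=..., task_files=...
        )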