fractal-server 2.14.0a1__py3-none-any.whl → 2.14.0a3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fractal_server/__init__.py +1 -1
- fractal_server/app/history/image_updates.py +2 -1
- fractal_server/app/models/v2/history.py +1 -1
- fractal_server/app/routes/admin/v2/accounting.py +18 -28
- fractal_server/app/routes/admin/v2/task.py +1 -1
- fractal_server/app/routes/api/v2/_aux_functions.py +2 -4
- fractal_server/app/routes/api/v2/history.py +109 -51
- fractal_server/app/routes/api/v2/images.py +11 -28
- fractal_server/app/routes/api/v2/project.py +1 -1
- fractal_server/app/routes/pagination.py +47 -0
- fractal_server/app/runner/executors/base_runner.py +2 -1
- fractal_server/app/runner/executors/local/_submit_setup.py +5 -13
- fractal_server/app/runner/executors/slurm_common/_slurm_config.py +1 -1
- fractal_server/app/runner/executors/slurm_common/get_slurm_config.py +1 -1
- fractal_server/app/runner/executors/slurm_common/remote.py +1 -1
- fractal_server/app/runner/v2/__init__.py +2 -2
- fractal_server/app/runner/v2/_slurm_ssh.py +1 -1
- fractal_server/app/runner/v2/_slurm_sudo.py +1 -1
- fractal_server/app/runner/v2/runner.py +3 -2
- fractal_server/app/runner/v2/runner_functions.py +21 -47
- fractal_server/app/schemas/v2/manifest.py +1 -1
- fractal_server/app/security/__init__.py +3 -3
- fractal_server/config.py +2 -10
- fractal_server/migrations/versions/{8223fcef886c_image_status.py → 954ddc64425a_image_status.py} +4 -4
- fractal_server/tasks/v2/templates/4_pip_show.sh +1 -1
- {fractal_server-2.14.0a1.dist-info → fractal_server-2.14.0a3.dist-info}/METADATA +1 -1
- {fractal_server-2.14.0a1.dist-info → fractal_server-2.14.0a3.dist-info}/RECORD +30 -29
- {fractal_server-2.14.0a1.dist-info → fractal_server-2.14.0a3.dist-info}/LICENSE +0 -0
- {fractal_server-2.14.0a1.dist-info → fractal_server-2.14.0a3.dist-info}/WHEEL +0 -0
- {fractal_server-2.14.0a1.dist-info → fractal_server-2.14.0a3.dist-info}/entry_points.txt +0 -0
fractal_server/__init__.py
CHANGED
@@ -1 +1 @@
-__VERSION__ = "2.14.0a1"
+__VERSION__ = "2.14.0a3"
fractal_server/app/history/image_updates.py
CHANGED
@@ -83,7 +83,8 @@ def update_single_image_logfile(
 ) -> None:
 
     logger.debug(
-
+        "[update_single_image_logfile] "
+        f"{history_item_id=}, {logfile=}, {zarr_url=}"
     )
 
     with next(get_sync_db()) as db:
fractal_server/app/routes/admin/v2/accounting.py
CHANGED
@@ -3,9 +3,6 @@ from typing import Optional
 
 from fastapi import APIRouter
 from fastapi import Depends
-from fastapi import HTTPException
-from fastapi import Query
-from fastapi import status
 from fastapi.responses import JSONResponse
 from pydantic import BaseModel
 from pydantic.types import AwareDatetime
@@ -18,6 +15,9 @@ from fractal_server.app.models import UserOAuth
 from fractal_server.app.models.v2 import AccountingRecord
 from fractal_server.app.models.v2 import AccountingRecordSlurm
 from fractal_server.app.routes.auth import current_active_superuser
+from fractal_server.app.routes.pagination import get_pagination_params
+from fractal_server.app.routes.pagination import PaginationRequest
+from fractal_server.app.routes.pagination import PaginationResponse
 from fractal_server.app.schemas.v2 import AccountingRecordRead
 
 
@@ -27,32 +27,19 @@ class AccountingQuery(BaseModel):
     timestamp_max: Optional[AwareDatetime] = None
 
 
-class AccountingPage(BaseModel):
-    total_count: int
-    page_size: int
-    current_page: int
-    records: list[AccountingRecordRead]
-
-
 router = APIRouter()
 
 
-@router.post("/", response_model=AccountingPage)
+@router.post("/", response_model=PaginationResponse[AccountingRecordRead])
 async def query_accounting(
     query: AccountingQuery,
-    #
-
-    page_size: Optional[int] = Query(default=None, ge=1),
-    # dependencies
+    # Dependencies
+    pagination: PaginationRequest = Depends(get_pagination_params),
    superuser: UserOAuth = Depends(current_active_superuser),
    db: AsyncSession = Depends(get_async_db),
-) -> AccountingPage:
-
-
-        raise HTTPException(
-            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
-            detail=(f"Invalid pagination parameters: {page=}, {page_size=}."),
-        )
+) -> PaginationResponse[AccountingRecordRead]:
+    page = pagination.page
+    page_size = pagination.page_size
 
     stm = select(AccountingRecord).order_by(AccountingRecord.id)
     stm_count = select(func.count(AccountingRecord.id))
@@ -69,20 +56,23 @@ async def query_accounting(
         stm_count = stm_count.where(
             AccountingRecord.timestamp <= query.timestamp_max
         )
+
+    res_total_count = await db.execute(stm_count)
+    total_count = res_total_count.scalar()
+
     if page_size is not None:
         stm = stm.offset((page - 1) * page_size).limit(page_size)
+    else:
+        page_size = total_count
 
     res = await db.execute(stm)
     records = res.scalars().all()
-    res_total_count = await db.execute(stm_count)
-    total_count = res_total_count.scalar()
 
-
-    return AccountingPage(
+    return PaginationResponse[AccountingRecordRead](
         total_count=total_count,
-        page_size=
+        page_size=page_size,
         current_page=page,
-
+        items=[record.model_dump() for record in records],
     )
 
 
fractal_server/app/routes/admin/v2/task.py
CHANGED
@@ -68,7 +68,7 @@ async def query_tasks(
     db: AsyncSession = Depends(get_async_db),
 ) -> list[TaskV2Info]:
     """
-    Query `TaskV2` table and get
+    Query `TaskV2` table and get information about related items
     (WorkflowV2s and ProjectV2s)
 
     Args:
fractal_server/app/routes/api/v2/_aux_functions.py
CHANGED
@@ -470,7 +470,7 @@ async def _get_workflowtask_check_history_owner(
     dataset_id: int,
     user_id: int,
     db: AsyncSession,
-) ->
+) -> WorkflowTaskV2:
     """
     Verify user access for the history of this dataset and workflowtask.
 
@@ -479,9 +479,6 @@ async def _get_workflowtask_check_history_owner(
         workflow_task_id:
         user_id:
         db:
-
-    Returns:
-        List of WorkflowTask IDs
     """
     workflowtask = await db.get(WorkflowTaskV2, workflowtask_id)
     if workflowtask is None:
@@ -495,3 +492,4 @@ async def _get_workflowtask_check_history_owner(
         user_id=user_id,
         db=db,
     )
+    return workflowtask
fractal_server/app/routes/api/v2/history.py
CHANGED
@@ -1,25 +1,28 @@
+from pathlib import Path
 from typing import Optional
 
 from fastapi import APIRouter
 from fastapi import Depends
 from fastapi import HTTPException
-from fastapi import Query
 from fastapi import status
 from fastapi.responses import JSONResponse
+from pydantic import BaseModel
 from sqlmodel import func
 from sqlmodel import select
 
 from ._aux_functions import _get_dataset_check_owner
 from ._aux_functions import _get_workflow_check_owner
-from ._aux_functions import _get_workflow_task_check_owner
+from ._aux_functions import _get_workflowtask_check_history_owner
 from fractal_server.app.db import AsyncSession
 from fractal_server.app.db import get_async_db
 from fractal_server.app.history.status_enum import HistoryItemImageStatus
 from fractal_server.app.models import UserOAuth
 from fractal_server.app.models.v2 import HistoryItemV2
 from fractal_server.app.models.v2 import ImageStatus
-from fractal_server.app.models.v2 import WorkflowTaskV2
 from fractal_server.app.routes.auth import current_active_user
+from fractal_server.app.routes.pagination import get_pagination_params
+from fractal_server.app.routes.pagination import PaginationRequest
+from fractal_server.app.routes.pagination import PaginationResponse
 from fractal_server.app.schemas.v2.history import HistoryItemV2Read
 
 router = APIRouter()
@@ -122,16 +125,10 @@ async def get_per_workflowtask_subsets_aggregated_info(
     user: UserOAuth = Depends(current_active_user),
     db: AsyncSession = Depends(get_async_db),
 ) -> JSONResponse:
-
-
-
-
-            detail="WorkflowTask not found",
-        )
-    await _get_workflow_task_check_owner(
-        project_id=project_id,
-        workflow_id=wftask.workflow_id,
-        workflow_task_id=workflowtask_id,
+
+    await _get_workflowtask_check_history_owner(
+        dataset_id=dataset_id,
+        workflowtask_id=workflowtask_id,
         user_id=user.id,
         db=db,
     )
@@ -143,20 +140,27 @@ async def get_per_workflowtask_subsets_aggregated_info(
         .group_by(ImageStatus.parameters_hash)
     )
     res = await db.execute(stm)
-
+    hash_to_statuses = res.all()
 
-
-    for
-
-
+    subsets = []
+    for parameters_hash, statuses in hash_to_statuses:
+        # Get the oldest HistoryItemV2 matching with `parameters_hash`
+        stm = (
+            select(HistoryItemV2)
            .where(HistoryItemV2.workflowtask_id == workflowtask_id)
            .where(HistoryItemV2.dataset_id == dataset_id)
-            .where(HistoryItemV2.parameters_hash ==
+            .where(HistoryItemV2.parameters_hash == parameters_hash)
+            .order_by(HistoryItemV2.timestamp_started)
+            .limit(1)
         )
-
+        res = await db.execute(stm)
+        oldest_history_item = res.scalar_one()
+
+        subsets.append(
             {
-                "
-                "
+                "_timestamp": oldest_history_item.timestamp_started,
+                "workflowtask_dump": oldest_history_item.workflowtask_dump,
+                "parameters_hash": parameters_hash,
                 "info": {
                     "num_done_images": statuses.count(
                         HistoryItemImageStatus.DONE
@@ -171,7 +175,11 @@ async def get_per_workflowtask_subsets_aggregated_info(
             }
         )
 
-
+    # Use `_timestamp` values for sorting, and then drop them from the response
+    sorted_results = sorted(subsets, key=lambda obj: obj["_timestamp"])
+    [item.pop("_timestamp") for item in sorted_results]
+
+    return JSONResponse(content=sorted_results, status_code=200)
 
 
 @router.get("/project/{project_id}/status/images/")
@@ -181,30 +189,18 @@ async def get_per_workflowtask_images(
     dataset_id: int,
     status: HistoryItemImageStatus,
     parameters_hash: Optional[str] = None,
-    # Pagination
-    page: int = Query(default=1, ge=1),
-    page_size: Optional[int] = Query(default=None, ge=1),
     # Dependencies
+    pagination: PaginationRequest = Depends(get_pagination_params),
     user: UserOAuth = Depends(current_active_user),
     db: AsyncSession = Depends(get_async_db),
-) ->
+) -> PaginationResponse[str]:
 
-
-
-            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
-            detail=(f"Invalid pagination parameters: {page=}, {page_size=}."),
-        )
+    page = pagination.page
+    page_size = pagination.page_size
 
-
-
-
-            status_code=status.HTTP_404_NOT_FOUND,
-            detail="WorkflowTask not found",
-        )
-    await _get_workflow_task_check_owner(
-        project_id=project_id,
-        workflow_id=wftask.workflow_id,
-        workflow_task_id=workflowtask_id,
+    await _get_workflowtask_check_history_owner(
+        dataset_id=dataset_id,
+        workflowtask_id=workflowtask_id,
         user_id=user.id,
         db=db,
     )
@@ -228,20 +224,82 @@ async def get_per_workflowtask_images(
         )
         query = query.where(ImageStatus.parameters_hash == parameters_hash)
 
+    res_total_count = await db.execute(total_count_stm)
+    total_count = res_total_count.scalar()
+
     if page_size is not None:
         query = query.limit(page_size)
+    else:
+        page_size = total_count
+
     if page > 1:
         query = query.offset((page - 1) * page_size)
 
-    res_total_count = await db.execute(total_count_stm)
-    total_count = res_total_count.scalar()
-
     res = await db.execute(query)
     images = res.scalars().all()
 
-    return
-
-
-
-
-
+    return PaginationResponse[str](
+        total_count=total_count,
+        page_size=page_size,
+        current_page=page,
+        items=images,
+    )
+
+
+class ImageLogsRequest(BaseModel):
+    workflowtask_id: int
+    dataset_id: int
+    zarr_url: str
+
+
+@router.post("/project/{project_id}/status/image-logs/")
+async def get_image_logs(
+    project_id: int,
+    request_data: ImageLogsRequest,
+    user: UserOAuth = Depends(current_active_user),
+    db: AsyncSession = Depends(get_async_db),
+) -> JSONResponse:
+
+    wftask = await _get_workflowtask_check_history_owner(
+        dataset_id=request_data.dataset_id,
+        workflowtask_id=request_data.workflowtask_id,
+        user_id=user.id,
+        db=db,
+    )
+
+    image_status = await db.get(
+        ImageStatus,
+        (
+            request_data.zarr_url,
+            request_data.workflowtask_id,
+            request_data.dataset_id,
+        ),
+    )
+    if image_status is None:
+        raise HTTPException(
+            status_code=status.HTTP_404_NOT_FOUND,
+            detail="ImageStatus not found",
+        )
+
+    if image_status.logfile is None:
+        return JSONResponse(
+            content=(
+                f"Logs for task '{wftask.task.name}' in dataset "
+                f"{request_data.dataset_id} are not yet available."
+            )
+        )
+
+    logfile = Path(image_status.logfile)
+    if not logfile.exists():
+        return JSONResponse(
+            content=(
+                f"Error while retrieving logs for task '{wftask.task.name}' "
+                f"in dataset {request_data.dataset_id}: "
+                f"file '{logfile}' is not available."
+            )
+        )
+
+    with logfile.open("r") as f:
+        file_contents = f.read()
+
+    return JSONResponse(content=file_contents)
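For reference, the new image-logs route above is a plain POST endpoint whose JSON body mirrors `ImageLogsRequest`. A minimal client sketch follows; the base URL, `/api/v2` mount prefix, token, IDs, and `zarr_url` below are placeholders and assumptions, not values taken from this diff:

import requests  # any HTTP client works; requests is used here for brevity

BASE_URL = "http://localhost:8000"  # placeholder server address
TOKEN = "..."  # placeholder bearer token

response = requests.post(
    f"{BASE_URL}/api/v2/project/1/status/image-logs/",  # prefix assumed
    headers={"Authorization": f"Bearer {TOKEN}"},
    json={
        "workflowtask_id": 1,
        "dataset_id": 1,
        "zarr_url": "/some/plate.zarr/A/01/0",
    },
)
# On success, the body is the log-file contents serialized as a JSON string
print(response.json())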
fractal_server/app/routes/api/v2/images.py
CHANGED
@@ -17,6 +17,9 @@ from fractal_server.app.db import AsyncSession
 from fractal_server.app.db import get_async_db
 from fractal_server.app.models import UserOAuth
 from fractal_server.app.routes.auth import current_active_user
+from fractal_server.app.routes.pagination import get_pagination_params
+from fractal_server.app.routes.pagination import PaginationRequest
+from fractal_server.app.routes.pagination import PaginationResponse
 from fractal_server.app.schemas._filter_validators import (
     validate_attribute_filters,
 )
@@ -31,17 +34,11 @@ from fractal_server.images.tools import match_filter
 router = APIRouter()
 
 
-class ImagePage(BaseModel):
-
-    total_count: int
-    page_size: int
-    current_page: int
+class ImagePage(PaginationResponse[SingleImage]):
 
     attributes: dict[str, list[Any]]
     types: list[str]
 
-    images: list[SingleImage]
-
 
 class ImageQuery(BaseModel):
     zarr_url: Optional[str] = None
@@ -118,18 +115,14 @@ async def post_new_image(
 async def query_dataset_images(
     project_id: int,
     dataset_id: int,
-
-
-    query: Optional[ImageQuery] = None,  # body
+    query: Optional[ImageQuery] = None,
+    pagination: PaginationRequest = Depends(get_pagination_params),
     user: UserOAuth = Depends(current_active_user),
     db: AsyncSession = Depends(get_async_db),
 ) -> ImagePage:
 
-
-
-            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
-            detail=f"Invalid pagination parameter: page={page} < 1",
-        )
+    page = pagination.page
+    page_size = pagination.page_size
 
     output = await _get_dataset_check_owner(
         project_id=project_id, dataset_id=dataset_id, user_id=user.id, db=db
@@ -177,20 +170,10 @@ async def query_dataset_images(
 
     total_count = len(images)
 
-    if page_size is not None:
-        if page_size <= 0:
-            raise HTTPException(
-                status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
-                detail=(
-                    f"Invalid pagination parameter: page_size={page_size} <= 0"
-                ),
-            )
-    else:
+    if page_size is None:
         page_size = total_count
 
-    if total_count == 0:
-        page = 1
-    else:
+    if total_count > 0:
         last_page = (total_count // page_size) + (total_count % page_size > 0)
         if page > last_page:
             page = last_page
@@ -201,9 +184,9 @@ async def query_dataset_images(
         total_count=total_count,
         current_page=page,
         page_size=page_size,
+        items=images,
         attributes=attributes,
         types=types,
-        images=images,
     )
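A quick standalone check of the `last_page` arithmetic used above, which expresses a ceiling division as floor division plus a remainder flag (plain Python, independent of fractal-server):

def last_page(total_count: int, page_size: int) -> int:
    # Matches the expression in query_dataset_images
    return (total_count // page_size) + (total_count % page_size > 0)

assert last_page(10, 3) == 4  # pages of 3 + 3 + 3 + 1
assert last_page(9, 3) == 3   # exact multiple, no partial page
assert last_page(1, 5) == 1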
fractal_server/app/routes/pagination.py
ADDED
@@ -0,0 +1,47 @@
+from typing import Generic
+from typing import Optional
+from typing import TypeVar
+
+from fastapi import HTTPException
+from pydantic import BaseModel
+from pydantic import Field
+from pydantic import model_validator
+from pydantic import ValidationError
+
+T = TypeVar("T")
+
+
+class PaginationRequest(BaseModel):
+
+    page: int = Field(ge=1)
+    page_size: Optional[int] = Field(ge=1)
+
+    @model_validator(mode="after")
+    def valid_pagination_parameters(self):
+        if self.page_size is None and self.page > 1:
+            raise ValueError(
+                f"page_size is None but page={self.page} is greater than 1."
+            )
+        return self
+
+
+def get_pagination_params(
+    page: int = 1, page_size: Optional[int] = None
+) -> PaginationRequest:
+    try:
+        pagination = PaginationRequest(page=page, page_size=page_size)
+    except ValidationError as e:
+        raise HTTPException(
+            status_code=422,
+            detail=f"Invalid pagination parameters. Original error: '{e}'.",
+        )
+    return pagination
+
+
+class PaginationResponse(BaseModel, Generic[T]):
+
+    current_page: int = Field(ge=1)
+    page_size: int = Field(ge=0)
+    total_count: int = Field(ge=0)
+
+    items: list[T]
|
|
106
106
|
)
|
107
107
|
if _COMPONENT_KEY_ not in single_kwargs.keys():
|
108
108
|
raise ValueError(
|
109
|
-
f"No '{_COMPONENT_KEY_}' key
|
109
|
+
f"No '{_COMPONENT_KEY_}' key "
|
110
|
+
f"in {list(single_kwargs.keys())}"
|
110
111
|
)
|
111
112
|
if not in_compound_task:
|
112
113
|
zarr_urls = [kwargs["zarr_url"] for kwargs in list_parameters]
|
fractal_server/app/runner/executors/local/_submit_setup.py
CHANGED
@@ -21,18 +21,10 @@ def _local_submit_setup(
     FIXME
 
     Arguments:
-        wftask:
-
-
-
-        workflow_dir_remote:
-            Not used in this function.
-
-    Returns:
-        submit_setup_dict:
-            A dictionary that will be passed on to
-            `FractalThreadPoolExecutor.submit` and
-            `FractalThreadPoolExecutor.map`, so as to set extra options.
+        wftask: WorkflowTask for which the configuration is to be assembled
+        root_dir_local:
+        root_dir_rempote: Not used in this function.
+        which_type: Whether it is a parallel or non-parallel task.
     """
 
     local_backend_config = get_local_backend_config(
@@ -43,7 +35,7 @@ def _local_submit_setup(
     # Get TaskFiles object
     task_files = TaskFiles(
         root_dir_local=root_dir_local,
-        root_dir_remote=
+        root_dir_remote=root_dir_local,
         task_order=wftask.order,
         task_name=wftask.task.name,
     )
fractal_server/app/runner/executors/slurm_common/_slurm_config.py
CHANGED
@@ -213,7 +213,7 @@ class SlurmConfig(BaseModel):
     expected file content are defined in
     [`SlurmConfigFile`](./#fractal_server.app.runner._slurm._slurm_config.SlurmConfigFile)).
 
-    Part of the attributes map directly to some of the SLURM
+    Part of the attributes map directly to some of the SLURM attributes (see
     https://slurm.schedmd.com/sbatch.html), e.g. `partition`. Other attributes
     are metaparameters which are needed in fractal-server to combine multiple
     tasks in the same SLURM job (e.g. `parallel_tasks_per_job` or
fractal_server/app/runner/executors/slurm_common/get_slurm_config.py
CHANGED
@@ -19,7 +19,7 @@ def get_slurm_config(
     Prepare a `SlurmConfig` configuration object
 
     The argument `which_type` determines whether we use `wftask.meta_parallel`
-    or `wftask.meta_non_parallel`. In the following
+    or `wftask.meta_non_parallel`. In the following description, let us assume
     that `which_type="parallel"`.
 
     The sources for `SlurmConfig` attributes, in increasing priority order, are
fractal_server/app/runner/executors/slurm_common/remote.py
CHANGED
@@ -134,7 +134,7 @@ def worker(
         _extra_import_paths = extra_import_paths.split(":")
         sys.path[:0] = _extra_import_paths
 
-    # Execute the job and
+    # Execute the job and capture exceptions
     try:
         with open(in_fname, "rb") as f:
             indata = f.read()
fractal_server/app/runner/v2/__init__.py
CHANGED
@@ -2,7 +2,7 @@
 Runner backend subsystem root V2
 
 This module is the single entry point to the runner backend subsystem V2.
-Other
+Other subsystems should only import this module and not its submodules or
 the individual backends.
 """
 import os
@@ -118,7 +118,7 @@ def submit_workflow(
         )
     except Exception as e:
         logger.error(
-            f"Error
+            f"Error connecting to the database. Original error: {str(e)}"
         )
         reset_logger_handlers(logger)
         return
fractal_server/app/runner/v2/runner.py
CHANGED
@@ -127,7 +127,7 @@ def execute_tasks_v2(
                 dataset_id=dataset.id,
                 parameters_hash=parameters_hash,
                 status=HistoryItemImageStatus.SUBMITTED,
-                logfile=
+                logfile=None,
             )
         )
         db.commit()
@@ -198,7 +198,8 @@ def execute_tasks_v2(
         # Update image list
         num_new_images = 0
         current_task_output.check_zarr_urls_are_unique()
-        # FIXME: Introduce for loop over task outputs, and processe them
+        # FIXME: Introduce for loop over task outputs, and processe them
+        # sequentially
         # each failure should lead to an update of the specific image status
         for image_obj in current_task_output.image_list_updates:
             image = image_obj.model_dump()
fractal_server/app/runner/v2/runner_functions.py
CHANGED
@@ -1,9 +1,7 @@
 import functools
 import logging
-import traceback
 from pathlib import Path
 from typing import Any
-from typing import Callable
 from typing import Literal
 from typing import Optional
 
@@ -59,38 +57,18 @@ def _cast_and_validate_InitTaskOutput(
     )
 
 
-def no_op_submit_setup_call(
+def no_op_submit_setup_call(
+    *,
+    wftask: WorkflowTaskV2,
+    root_dir_local: Path,
+    which_type: Literal["non_parallel", "parallel"],
+) -> dict[str, Any]:
     """
     Default (no-operation) interface of submit_setup_call in V2.
     """
     return {}
 
 
-# Backend-specific configuration
-def _get_executor_options(
-    *,
-    wftask: WorkflowTaskV2,
-    workflow_dir_local: Path,
-    workflow_dir_remote: Path,
-    submit_setup_call: Callable,
-    which_type: Literal["non_parallel", "parallel"],
-) -> dict:
-    try:
-        options = submit_setup_call(
-            wftask=wftask,
-            root_dir_local=workflow_dir_local,
-            root_dir_remote=workflow_dir_remote,
-            which_type=which_type,
-        )
-    except Exception as e:
-        tb = "".join(traceback.format_tb(e.__traceback__))
-        raise RuntimeError(
-            f"{type(e)} error in {submit_setup_call=}\n"
-            f"Original traceback:\n{tb}"
-        )
-    return options
-
-
 def _check_parallelization_list_size(my_list):
     if len(my_list) > MAX_PARALLELIZATION_LIST_SIZE:
         raise JobExecutionError(
@@ -109,7 +87,7 @@ def run_v2_task_non_parallel(
     workflow_dir_local: Path,
     workflow_dir_remote: Optional[Path] = None,
     executor: BaseRunner,
-    submit_setup_call:
+    submit_setup_call: callable = no_op_submit_setup_call,
     history_item_id: int,
 ) -> tuple[TaskOutput, int, dict[int, BaseException]]:
     """
@@ -123,11 +101,10 @@ def run_v2_task_non_parallel(
         )
         workflow_dir_remote = workflow_dir_local
 
-    executor_options = _get_executor_options(
+    executor_options = submit_setup_call(
         wftask=wftask,
-
-
-        submit_setup_call=submit_setup_call,
+        root_dir_local=workflow_dir_local,
+        root_dir_remote=workflow_dir_remote,
         which_type="non_parallel",
     )
 
@@ -169,7 +146,7 @@ def run_v2_task_parallel(
     executor: BaseRunner,
     workflow_dir_local: Path,
     workflow_dir_remote: Optional[Path] = None,
-    submit_setup_call:
+    submit_setup_call: callable = no_op_submit_setup_call,
     history_item_id: int,
 ) -> tuple[TaskOutput, int, dict[int, BaseException]]:
 
@@ -178,11 +155,10 @@ def run_v2_task_parallel(
 
     _check_parallelization_list_size(images)
 
-    executor_options = _get_executor_options(
+    executor_options = submit_setup_call(
         wftask=wftask,
-
-
-        submit_setup_call=submit_setup_call,
+        root_dir_local=workflow_dir_local,
+        root_dir_remote=workflow_dir_remote,
         which_type="parallel",
     )
 
@@ -237,22 +213,20 @@ def run_v2_task_compound(
     executor: BaseRunner,
     workflow_dir_local: Path,
     workflow_dir_remote: Optional[Path] = None,
-    submit_setup_call:
+    submit_setup_call: callable = no_op_submit_setup_call,
     history_item_id: int,
 ) -> tuple[TaskOutput, int, dict[int, BaseException]]:
 
-    executor_options_init = _get_executor_options(
+    executor_options_init = submit_setup_call(
         wftask=wftask,
-
-
-        submit_setup_call=submit_setup_call,
+        root_dir_local=workflow_dir_local,
+        root_dir_remote=workflow_dir_remote,
         which_type="non_parallel",
     )
-    executor_options_compute = _get_executor_options(
+    executor_options_compute = submit_setup_call(
         wftask=wftask,
-
-
-        submit_setup_call=submit_setup_call,
+        root_dir_local=workflow_dir_local,
+        root_dir_remote=workflow_dir_remote,
         which_type="parallel",
     )
 
|
@@ -128,7 +128,7 @@ class ManifestV2(BaseModel):
|
|
128
128
|
The list of tasks, represented as specified by subclasses of the
|
129
129
|
_TaskManifestBase (a.k.a. TaskManifestType)
|
130
130
|
has_args_schemas:
|
131
|
-
`True` if the manifest
|
131
|
+
`True` if the manifest includes JSON Schemas for the arguments of
|
132
132
|
each task.
|
133
133
|
args_schema_version:
|
134
134
|
Label of how `args_schema`s were generated (e.g. `pydantic_v1`).
|
fractal_server/app/security/__init__.py
CHANGED
@@ -24,7 +24,7 @@ FastAPIUsers with Barer Token and cookie transports and register local routes.
 Then, for each OAuth client defined in the Fractal Settings configuration, it
 registers the client and the relative routes.
 
-All routes are
+All routes are registered under the `auth/` prefix.
 """
 import contextlib
 from typing import Any
@@ -296,7 +296,7 @@ async def _create_first_user(
     Private method to create the first fractal-server user
 
     Create a user with the given default arguments and return a message with
-    the relevant
+    the relevant information. If the user already exists, for example after a
     restart, it returns a message to inform that user already exists.
 
     **WARNING**: This function is only meant to create the first user, and then
@@ -312,7 +312,7 @@ async def _create_first_user(
         email: New user's email
         password: New user's password
         is_superuser: `True` if the new user is a superuser
-        is_verified: `True` if the new user is
+        is_verified: `True` if the new user is verified
         username:
     """
     function_logger = set_logger("fractal_server.create_first_user")
fractal_server/config.py
CHANGED
@@ -502,14 +502,6 @@ class Settings(BaseSettings):
     `JobExecutionError`.
     """
 
-    FRACTAL_RUNNER_TASKS_INCLUDE_IMAGE: str = (
-        "Copy OME-Zarr structure;Convert Metadata Components from 2D to 3D"
-    )
-    """
-    `;`-separated list of names for task that require the `metadata["image"]`
-    attribute in their input-arguments JSON file.
-    """
-
     FRACTAL_PIP_CACHE_DIR: Optional[str] = None
     """
     Absolute path to the cache directory for `pip`; if unset,
@@ -597,11 +589,11 @@ class Settings(BaseSettings):
     """
     FRACTAL_EMAIL_SMTP_SERVER: Optional[str] = None
     """
-
+    SMTP server for the OAuth-signup emails.
     """
     FRACTAL_EMAIL_SMTP_PORT: Optional[int] = None
     """
-
+    SMTP server port for the OAuth-signup emails.
     """
     FRACTAL_EMAIL_INSTANCE_NAME: Optional[str] = None
     """
fractal_server/migrations/versions/{8223fcef886c_image_status.py → 954ddc64425a_image_status.py}
RENAMED
@@ -1,8 +1,8 @@
 """image status
 
-Revision ID: 8223fcef886c
+Revision ID: 954ddc64425a
 Revises: 87cd72a537a2
-Create Date: 2025-02-
+Create Date: 2025-02-28 16:37:38.765883
 
 """
 import sqlalchemy as sa
@@ -11,7 +11,7 @@ from alembic import op
 
 
 # revision identifiers, used by Alembic.
-revision = "8223fcef886c"
+revision = "954ddc64425a"
 down_revision = "87cd72a537a2"
 branch_labels = None
 depends_on = None
@@ -35,7 +35,7 @@ def upgrade() -> None:
             "status", sqlmodel.sql.sqltypes.AutoString(), nullable=False
         ),
         sa.Column(
-            "logfile", sqlmodel.sql.sqltypes.AutoString(), nullable=False
+            "logfile", sqlmodel.sql.sqltypes.AutoString(), nullable=True
        ),
        sa.ForeignKeyConstraint(
            ["dataset_id"],
fractal_server/tasks/v2/templates/4_pip_show.sh
CHANGED
@@ -20,7 +20,7 @@ echo
 # FIXME: only run pip-show once!
 
 # Extract information about paths
-# WARNING: this block will fail for paths which
+# WARNING: this block will fail for paths which include whitespace characters
 write_log "START pip show"
 $VENVPYTHON -m pip show ${PACKAGE_NAME}
 write_log "END pip show"
{fractal_server-2.14.0a1.dist-info → fractal_server-2.14.0a3.dist-info}/RECORD
RENAMED
@@ -1,10 +1,10 @@
-fractal_server/__init__.py,sha256=
+fractal_server/__init__.py,sha256=hpk2FCu0xXoEuH8cDYFItQE2071uD_KsQqreoRWpt4A,25
 fractal_server/__main__.py,sha256=igfS2XL3e8JycuhASl2vsYuIPma0MG0cfPPFRuQfh14,6906
 fractal_server/alembic.ini,sha256=MWwi7GzjzawI9cCAK1LW7NxIBQDUqD12-ptJoq5JpP0,3153
 fractal_server/app/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 fractal_server/app/db/__init__.py,sha256=wup2wcOkyOh8Vd0Xm76PZn_naxeMqaL4eF8DHHXTGlI,2889
 fractal_server/app/history/__init__.py,sha256=bisQpsMCFmtQGhIsf9ES0HdEuH4DYkPxVO7SlvxaWTY,239
-fractal_server/app/history/image_updates.py,sha256=
+fractal_server/app/history/image_updates.py,sha256=YqoOwPW97LQk9-o04zB1lSU0yQ90V0AcuhFIhTVUxos,3959
 fractal_server/app/history/status_enum.py,sha256=xBBLHQY2Z105b2_HVU9DVRgdEVbbjLm6l4kkcV0Q1Sk,275
 fractal_server/app/models/__init__.py,sha256=xJWiGAwpXmCpnFMC4c_HTqoUCzMOXrakoGLUH_uMvdA,415
 fractal_server/app/models/linkusergroup.py,sha256=LWTUfhH2uAnn_4moK7QdRUIHWtpw-hPZuW-5jClv_OE,610
@@ -14,7 +14,7 @@ fractal_server/app/models/user_settings.py,sha256=Y-ZV-uZAFLZqXxy8c5_Qeh_F7zQuZD
 fractal_server/app/models/v2/__init__.py,sha256=VNoK2OUB8_IPvZoItLOxup84ZMNslO7j30jojNS2lI0,774
 fractal_server/app/models/v2/accounting.py,sha256=f2ALxfKKBNxFLJTtC2-YqRepVK253x68y7zkD2V_Nls,1115
 fractal_server/app/models/v2/dataset.py,sha256=O5_6YfNeX6JM7PUcEZhbeV4JCvuAhFCQbOOuefpVnqc,1544
-fractal_server/app/models/v2/history.py,sha256=
+fractal_server/app/models/v2/history.py,sha256=SqD6Va7h7LUzSzf_yz_iTcDQpivxif6hy--Rls_yekw,1538
 fractal_server/app/models/v2/job.py,sha256=L0P1mrztMqqb-6qdPEbuHXhCsf2mxVUct_ehcXrREGg,1844
 fractal_server/app/models/v2/project.py,sha256=rAHoh5KfYwIaW7rTX0_O0jvWmxEvfo1BafvmcXuSSRk,786
 fractal_server/app/models/v2/task.py,sha256=8KEROaadgccXRZIP7EriBp2j1FgzYkgiirOi5_fG79M,1494
@@ -24,23 +24,23 @@ fractal_server/app/models/v2/workflowtask.py,sha256=919L2jCm9y57MXiezGBb28uiXpxy
 fractal_server/app/routes/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 fractal_server/app/routes/admin/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 fractal_server/app/routes/admin/v2/__init__.py,sha256=_5lqb6-M8-fZqE1HRMep6pAFYRUKMxrvbZOKs-RXWkw,933
-fractal_server/app/routes/admin/v2/accounting.py,sha256=
+fractal_server/app/routes/admin/v2/accounting.py,sha256=UDMPD9DMhMBcu4UsEOEtKMCGnkVMtmwBuRklek-_ShQ,3631
 fractal_server/app/routes/admin/v2/impersonate.py,sha256=gc4lshfEPFR6W2asH7aKu6hqE6chzusdhAUVV9p51eU,1131
 fractal_server/app/routes/admin/v2/job.py,sha256=4soc-5d99QEsir7U9AqpofgaGggSBwgMm7mXW5LBvSI,7439
 fractal_server/app/routes/admin/v2/project.py,sha256=luy-yiGX1JYTdPm1hpIdDUUqPm8xHuipLy9k2X6zu74,1223
-fractal_server/app/routes/admin/v2/task.py,sha256=
+fractal_server/app/routes/admin/v2/task.py,sha256=QOwgyDU9m7T_wLMwkdgfFaoMjNxcDg6zMVpngxhUvqk,4374
 fractal_server/app/routes/admin/v2/task_group.py,sha256=XTjdqgABXZcx9EenaoqSmHh12BXSentUus3SV0oxBMs,7929
 fractal_server/app/routes/admin/v2/task_group_lifecycle.py,sha256=0e0ZJ_k75TVHaT2o8Xk33DPDSgh-eBhZf-y4y7t-Adg,9429
 fractal_server/app/routes/api/__init__.py,sha256=2IDheFi0OFdsUg7nbUiyahqybvpgXqeHUXIL2QtWrQQ,641
 fractal_server/app/routes/api/v2/__init__.py,sha256=S7zOeoLkD6Sss1JLRQxeQWPSXKMX2yaIVhLQUw0PDh0,2176
-fractal_server/app/routes/api/v2/_aux_functions.py,sha256=
+fractal_server/app/routes/api/v2/_aux_functions.py,sha256=pmYbsHjJexb5-zMCJQLNStmU_95ZfeEIBpoCJx4GFIY,13480
 fractal_server/app/routes/api/v2/_aux_functions_task_lifecycle.py,sha256=qdXCb6IP8-qPEAxGZKljtjIqNzIAyRaAsQSRi5VqFHM,6773
 fractal_server/app/routes/api/v2/_aux_functions_tasks.py,sha256=uhNSs-jcS7ndIUFKiOC1yrDiViw3uvKEXi9UL04BMks,11642
 fractal_server/app/routes/api/v2/dataset.py,sha256=gS5169eJRGHBQNUnkDB75Bv3Kg8Ql-tMVw5_FAxUEKc,9664
-fractal_server/app/routes/api/v2/history.py,sha256=
-fractal_server/app/routes/api/v2/images.py,sha256=
+fractal_server/app/routes/api/v2/history.py,sha256=C_V_u2ab4i8v4bM-uQ0SV3Olyor_5olQRDYFsLco2Ac,9801
+fractal_server/app/routes/api/v2/images.py,sha256=wUhYomNLGtJTtu_pD2oQorcH2LISxo64Wxo6ogc4IXc,8185
 fractal_server/app/routes/api/v2/job.py,sha256=m89FTh9Px25oXCeWj2k2NdGWQaO2oxMh-6lZppcsJOY,5551
-fractal_server/app/routes/api/v2/project.py,sha256=
+fractal_server/app/routes/api/v2/project.py,sha256=apWQNOdj2FIZmBl6Cjtr2tK-jUclEsw-ikKg6PMT8sU,7828
 fractal_server/app/routes/api/v2/submit.py,sha256=K4OjcSg476JXIeeMUaYdTDk8Qpj5IO5UULvfErI7Y5Y,8624
 fractal_server/app/routes/api/v2/task.py,sha256=z3_SxsXoKsbM9GGNJUdIiZisQwAJSBqvCc7thaJIOTU,7191
 fractal_server/app/routes/api/v2/task_collection.py,sha256=IDNF6sjDuU37HIQ0TuQA-TZIuf7nfHAQXUUNmkrlhLM,12706
@@ -63,23 +63,24 @@ fractal_server/app/routes/aux/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5N
 fractal_server/app/routes/aux/_job.py,sha256=XWyWpOObcV55YyK7uzGRlaslmPDCBZy4hiSZBpoa_bg,616
 fractal_server/app/routes/aux/_runner.py,sha256=spNudutueHTBJPhm55RlOuYzb31DhyheSjl2rk6dloM,873
 fractal_server/app/routes/aux/validate_user_settings.py,sha256=FLVi__8YFcm_6c_K5uMQo7raWWXQLBcZtx8yaPO4jaE,2301
+fractal_server/app/routes/pagination.py,sha256=L8F5JqekF39qz-LpeScdlhb57MQnSRXjK4ZEtsZqYLk,1210
 fractal_server/app/runner/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 fractal_server/app/runner/components.py,sha256=ZF8ct_Ky5k8IAcrmpYOZ-bc6OBgdELEighYVqFDEbZg,119
 fractal_server/app/runner/compress_folder.py,sha256=HSc1tv7x2DBjBoXwugZlC79rm9GNBIWtQKK9yWn5ZBI,3991
 fractal_server/app/runner/exceptions.py,sha256=_qZ_t8O4umAdJ1ikockiF5rDJuxnEskrGrLjZcnQl7A,4159
 fractal_server/app/runner/executors/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-fractal_server/app/runner/executors/base_runner.py,sha256=
+fractal_server/app/runner/executors/base_runner.py,sha256=0E3gbSndXdEAxZwFCiZXrUd8tjEmvLa_ztPBGMJXtUw,3742
 fractal_server/app/runner/executors/local/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 fractal_server/app/runner/executors/local/_local_config.py,sha256=8dyg2Gh8L2FlG_jJRYLMkcMgVHGEY2w7DME9aaKXFFo,3688
-fractal_server/app/runner/executors/local/_submit_setup.py,sha256=
+fractal_server/app/runner/executors/local/_submit_setup.py,sha256=pDc9Q6axXL8_5JAV0byXzGOLOB0bZF88_L9LZykOgwM,1220
 fractal_server/app/runner/executors/local/runner.py,sha256=tEI3qe9UQKgqNoY6gkP1b2O1yRw3VGTiPTDKztrCt2I,7577
 fractal_server/app/runner/executors/slurm_common/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 fractal_server/app/runner/executors/slurm_common/_batching.py,sha256=ZY020JZlDS5mfpgpWTChQkyHU7iLE5kx2HVd57_C6XA,8850
 fractal_server/app/runner/executors/slurm_common/_job_states.py,sha256=nuV-Zba38kDrRESOVB3gaGbrSPZc4q7YGichQaeqTW0,238
-fractal_server/app/runner/executors/slurm_common/_slurm_config.py,sha256=
+fractal_server/app/runner/executors/slurm_common/_slurm_config.py,sha256=fZaFUUXqDH0p3DndCFUpFqTqyD2tMVCuSYgYLAycpVw,15897
 fractal_server/app/runner/executors/slurm_common/_submit_setup.py,sha256=crbfAAvXbxe_9PaokXkkVdPV65lSCFbInZ0RlT6uyHI,2746
-fractal_server/app/runner/executors/slurm_common/get_slurm_config.py,sha256
-fractal_server/app/runner/executors/slurm_common/remote.py,sha256=
+fractal_server/app/runner/executors/slurm_common/get_slurm_config.py,sha256=-fAX1DZMB5RZnyYanIJD72mWOJAPkh21jd4loDXKJw4,5994
+fractal_server/app/runner/executors/slurm_common/remote.py,sha256=iXLu4d-bWzn7qmDaOjKFkcuaSHLjPESAMSLcg6c99fc,5852
 fractal_server/app/runner/executors/slurm_common/utils_executors.py,sha256=naPyJI0I3lD-sYHbSXbMFGUBK4h_SggA5V91Z1Ch1Xg,1416
 fractal_server/app/runner/executors/slurm_ssh/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 fractal_server/app/runner/executors/slurm_ssh/_executor_wait_thread.py,sha256=lnW8dNNPqqbpQvojVBQaNJm4wN3Qkw02RWBZ1w68Hyw,3755
@@ -96,14 +97,14 @@ fractal_server/app/runner/run_subprocess.py,sha256=c3JbYXq3hX2aaflQU19qJ5Xs6J6oX
 fractal_server/app/runner/set_start_and_last_task_index.py,sha256=-q4zVybAj8ek2XlbENKlfOAJ39hT_zoJoZkqzDqiAMY,1254
 fractal_server/app/runner/shutdown.py,sha256=9pfSKHDNdIcm0eY-opgRTi7y0HmvfPmYiu9JR6Idark,2082
 fractal_server/app/runner/task_files.py,sha256=5enzBqiQct1AUGwrGX-rxFCxnhW3SPYIUylMYwyVfrE,2482
-fractal_server/app/runner/v2/__init__.py,sha256=
+fractal_server/app/runner/v2/__init__.py,sha256=llVnhgNGsSuP_eZ_ilQixQTmwst79LWrgjILpC2Xn9o,14247
 fractal_server/app/runner/v2/_local.py,sha256=Zas2RS_f9mfdkXszBpzISHylLX1bX8pFuoLA1fHLFqQ,2945
-fractal_server/app/runner/v2/_slurm_ssh.py,sha256=
-fractal_server/app/runner/v2/_slurm_sudo.py,sha256=
+fractal_server/app/runner/v2/_slurm_ssh.py,sha256=5w_lwQzySx-R3kVg2Bf-21n5JpWjJAgMtYP2BROvWJo,3227
+fractal_server/app/runner/v2/_slurm_sudo.py,sha256=CzWUeC6at_Sj-wU1myjA68ZRKMiLZYBTLv9I9odUxBU,2914
 fractal_server/app/runner/v2/deduplicate_list.py,sha256=IVTE4abBU1bUprFTkxrTfYKnvkNTanWQ-KWh_etiT08,645
 fractal_server/app/runner/v2/merge_outputs.py,sha256=D1L4Taieq9i71SPQyNc1kMokgHh-sV_MqF3bv7QMDBc,907
-fractal_server/app/runner/v2/runner.py,sha256=
-fractal_server/app/runner/v2/runner_functions.py,sha256=
+fractal_server/app/runner/v2/runner.py,sha256=qtxmnrgMdlB3CA5Ayg7BXUv1yETR6H7kMLp70R1faSM,14456
+fractal_server/app/runner/v2/runner_functions.py,sha256=Zvi6sC5krltygLO-fC0K21VJEhmX1XQCm9IzVqf_cB0,9583
 fractal_server/app/runner/v2/runner_functions_low_level.py,sha256=dvvRK7od8iQ8vdPf80uGUxs3i5i0buGjCodBxSjZ7PQ,3671
 fractal_server/app/runner/v2/task_interface.py,sha256=e1GGQSYd0MyBj1EZvEVzqv-HpVE4YffXOq82WLrCaOc,1866
 fractal_server/app/runner/versions.py,sha256=dSaPRWqmFPHjg20kTCHmi_dmGNcCETflDtDLronNanU,852
@@ -119,7 +120,7 @@ fractal_server/app/schemas/v2/dataset.py,sha256=xo7Y3fq5ThMVBp6xDVypdG-EmGfBX_vW
 fractal_server/app/schemas/v2/dumps.py,sha256=2GUjoqeblUvrSoojBz5odoUUf53IABtbY_5GvFZoMVc,1782
 fractal_server/app/schemas/v2/history.py,sha256=OHwRIbOIjBiiTYUNZYsHTdEXJHff17JRizQ8pf1e0vk,601
 fractal_server/app/schemas/v2/job.py,sha256=Dp_RRiC5uvJqq1fAJlBXztAFA-tS5FWuRnUbTnLtL6M,4226
-fractal_server/app/schemas/v2/manifest.py,sha256=
+fractal_server/app/schemas/v2/manifest.py,sha256=tcCvT4PbdtpdC5eU54MKUne6puXpnPlIExZYwLGHEAo,7133
 fractal_server/app/schemas/v2/project.py,sha256=uqBreoS0UAkbVEJJS2HkSdjCCWfFIkv6N70TWk9HgxA,868
 fractal_server/app/schemas/v2/status.py,sha256=SQaUpQkjFq5c5k5J4rOjNhuQaDOEg8lksPhkKmPU5VU,332
 fractal_server/app/schemas/v2/task.py,sha256=OUCNQQUULmWSOdPm8Dz8E0ivG1XOcvO4dxz-osSa9R0,7248
@@ -127,10 +128,10 @@ fractal_server/app/schemas/v2/task_collection.py,sha256=NFIcfTAhFN5LMxmyJCat7CKx
 fractal_server/app/schemas/v2/task_group.py,sha256=vFF850kJRmmcxt2sn7nrhm-OWJHRhYu_XOQP5LNiXyU,3850
 fractal_server/app/schemas/v2/workflow.py,sha256=qmKJZ9xZ6-sN41XdocZ7K6hum_pUfaMuKOJs_TlFCRQ,2211
 fractal_server/app/schemas/v2/workflowtask.py,sha256=qMvwlnFCsnyD8uv8HJ4cFy2-QMm2ETUFlTIbxIFUWxk,8056
-fractal_server/app/security/__init__.py,sha256=
+fractal_server/app/security/__init__.py,sha256=e2cveg5hQpieGD3bSPd5GTOMthvJ-HXH3buSb9WVfEU,14096
 fractal_server/app/security/signup_email.py,sha256=CR1VbsGFNshxsWquLDZPbUAYnGzkCHeJChtncq63RBc,1434
 fractal_server/app/user_settings.py,sha256=OP1yiYKtPadxwM51_Q0hdPk3z90TCN4z1BLpQsXyWiU,1316
-fractal_server/config.py,sha256=
+fractal_server/config.py,sha256=eYo-c3Zt4rkC45mewLYOeFZSA_7FF4Wmm6zDvX1dpt4,28549
 fractal_server/data_migrations/README.md,sha256=_3AEFvDg9YkybDqCLlFPdDmGJvr6Tw7HRI14aZ3LOIw,398
 fractal_server/data_migrations/tools.py,sha256=LeMeASwYGtEqd-3wOLle6WARdTGAimoyMmRbbJl-hAM,572
 fractal_server/gunicorn_fractal.py,sha256=u6U01TLGlXgq1v8QmEpLih3QnsInZD7CqphgJ_GrGzc,1230
@@ -153,12 +154,12 @@ fractal_server/migrations/versions/50a13d6138fd_initial_schema.py,sha256=zwXegXs
 fractal_server/migrations/versions/5bf02391cfef_v2.py,sha256=axhNkr_H6R4rRbY7oGYazNbFvPXeSyBDWFVbKNmiqs8,8433
 fractal_server/migrations/versions/70e77f1c38b0_add_applyworkflow_first_task_index_and_.py,sha256=Q-DsMzG3IcUV2Ol1dhJWosDvKERamBE6QvA2zzS5zpQ,1632
 fractal_server/migrations/versions/71eefd1dd202_add_slurm_accounts.py,sha256=mbWuCkTpRAdGbRhW7lhXs_e5S6O37UAcCN6JfoY5H8A,1353
-fractal_server/migrations/versions/8223fcef886c_image_status.py,sha256=Nhlw22F9w1Obh-Tte6787Fyglb4NuQwdJQeO-HlkqSs,1743
 fractal_server/migrations/versions/84bf0fffde30_add_dumps_to_applyworkflow.py,sha256=NSCuhANChsg76vBkShBl-9tQ4VEHubOjtAv1etHhlvY,2684
 fractal_server/migrations/versions/87cd72a537a2_add_historyitem_table.py,sha256=xxAftQYyQ2C_7qiuPcG5FeVmhFQGznxfCglsfk2CjiU,2092
 fractal_server/migrations/versions/8e8f227a3e36_update_taskv2_post_2_7_0.py,sha256=68y9-fpSuKx6KPtM_9n8Ho0I1qwa8IoG-yJqXUYQrGg,1111
 fractal_server/migrations/versions/8f79bd162e35_add_docs_info_and_docs_link_to_task_.py,sha256=6pgODDtyAxevZvAJBj9IJ41inhV1RpwbpZr_qfPPu1A,1115
 fractal_server/migrations/versions/94a47ea2d3ff_remove_cache_dir_slurm_user_and_slurm_.py,sha256=yL3-Hvzw5jBLKj4LFP1z5ofZE9L9W3tLwYtPNW7z4ko,1508
+fractal_server/migrations/versions/954ddc64425a_image_status.py,sha256=cPjuGTztDkjvhVQDO8i41qAmG5O2CgKNUfV_PRK9Pck,1742
 fractal_server/migrations/versions/97f444d47249_add_applyworkflow_project_dump.py,sha256=eKTZm3EgUgapXBxO0RuHkEfTKic-TZG3ADaMpGLuc0k,1057
 fractal_server/migrations/versions/99ea79d9e5d2_add_dataset_history.py,sha256=0im6TxDr53sKKcjiPgeH4ftVRGnRXZSh2lPbRQ1Ir9w,883
 fractal_server/migrations/versions/9c5ae74c9b98_add_user_settings_table.py,sha256=syONdZNf4-OnAcWIsbzXpYwpXPsXZ4SsmjwVvmVG0PU,2256
@@ -194,7 +195,7 @@ fractal_server/tasks/v2/ssh/reactivate.py,sha256=8Rnbbny7TjMEAHhboqfgxBVZZK5UNNm
 fractal_server/tasks/v2/templates/1_create_venv.sh,sha256=PK0jdHKtQpda1zULebBaVPORt4t6V17wa4N1ohcj5ac,548
 fractal_server/tasks/v2/templates/2_pip_install.sh,sha256=Gpk2io8u9YaflFUlQu2NgkDQw5AA4m4AOVG1sB4yrHQ,1822
 fractal_server/tasks/v2/templates/3_pip_freeze.sh,sha256=JldREScEBI4cD_qjfX4UK7V4aI-FnX9ZvVNxgpSOBFc,168
-fractal_server/tasks/v2/templates/4_pip_show.sh,sha256=
+fractal_server/tasks/v2/templates/4_pip_show.sh,sha256=qm1vPy6AkKhWDjCJGXS8LqCLYO3KsAyRK325ZsFcF6U,1747
 fractal_server/tasks/v2/templates/5_get_venv_size_and_file_number.sh,sha256=q-6ZUvA6w6FDVEoSd9O63LaJ9tKZc7qAFH72SGPrd_k,284
 fractal_server/tasks/v2/templates/6_pip_install_from_freeze.sh,sha256=A2y8RngEjAcRhG-_owA6P7tAdrS_AszFuGXnaeMV8u0,1122
 fractal_server/tasks/v2/utils_background.py,sha256=W_RvihI1aiYPJNsPo8z4wKuA_bPs0UT2huzLihRpjU4,4248
@@ -205,8 +206,8 @@ fractal_server/tasks/v2/utils_templates.py,sha256=07TZpJ0Mh_A4lXVXrrH2o1VLFFGwxe
 fractal_server/urls.py,sha256=QjIKAC1a46bCdiPMu3AlpgFbcv6a4l3ABcd5xz190Og,471
 fractal_server/utils.py,sha256=PMwrxWFxRTQRl1b9h-NRIbFGPKqpH_hXnkAT3NfZdpY,3571
 fractal_server/zip_tools.py,sha256=GjDgo_sf6V_DDg6wWeBlZu5zypIxycn_l257p_YVKGc,4876
-fractal_server-2.14.
-fractal_server-2.14.
-fractal_server-2.14.
-fractal_server-2.14.
-fractal_server-2.14.
+fractal_server-2.14.0a3.dist-info/LICENSE,sha256=QKAharUuhxL58kSoLizKJeZE3mTCBnX6ucmz8W0lxlk,1576
+fractal_server-2.14.0a3.dist-info/METADATA,sha256=F2fjhacHy-6hFP43F8JNom4iV9-WNRiO6CiCye5KzLI,4550
+fractal_server-2.14.0a3.dist-info/WHEEL,sha256=RaoafKOydTQ7I_I3JTrPCg6kUmTgtm4BornzOqyEfJ8,88
+fractal_server-2.14.0a3.dist-info/entry_points.txt,sha256=8tV2kynvFkjnhbtDnxAqImL6HMVKsopgGfew0DOp5UY,58
+fractal_server-2.14.0a3.dist-info/RECORD,,
{fractal_server-2.14.0a1.dist-info → fractal_server-2.14.0a3.dist-info}/LICENSE
File without changes
{fractal_server-2.14.0a1.dist-info → fractal_server-2.14.0a3.dist-info}/WHEEL
File without changes
{fractal_server-2.14.0a1.dist-info → fractal_server-2.14.0a3.dist-info}/entry_points.txt
File without changes