fractal-server 2.14.7__py3-none-any.whl → 2.14.9__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fractal_server/__init__.py +1 -1
- fractal_server/app/models/v2/history.py +1 -0
- fractal_server/app/routes/api/v2/_aux_functions_task_version_update.py +42 -0
- fractal_server/app/routes/api/v2/history.py +63 -73
- fractal_server/app/routes/api/v2/task_version_update.py +23 -47
- fractal_server/app/runner/v2/runner.py +1 -0
- fractal_server/app/schemas/v2/__init__.py +1 -1
- fractal_server/app/schemas/v2/history.py +13 -2
- fractal_server/data_migrations/2_14_9.py +48 -0
- fractal_server/images/image_status.py +85 -0
- fractal_server/migrations/versions/969d84257cac_add_historyrun_task_id.py +42 -0
- fractal_server/tasks/v2/ssh/collect.py +15 -14
- fractal_server/tasks/v2/ssh/deactivate.py +14 -14
- fractal_server/tasks/v2/ssh/reactivate.py +15 -14
- {fractal_server-2.14.7.dist-info → fractal_server-2.14.9.dist-info}/METADATA +1 -1
- {fractal_server-2.14.7.dist-info → fractal_server-2.14.9.dist-info}/RECORD +19 -15
- {fractal_server-2.14.7.dist-info → fractal_server-2.14.9.dist-info}/LICENSE +0 -0
- {fractal_server-2.14.7.dist-info → fractal_server-2.14.9.dist-info}/WHEEL +0 -0
- {fractal_server-2.14.7.dist-info → fractal_server-2.14.9.dist-info}/entry_points.txt +0 -0
fractal_server/__init__.py
CHANGED
@@ -1 +1 @@
-__VERSION__ = "2.14.7"
+__VERSION__ = "2.14.9"
fractal_server/app/models/v2/history.py
CHANGED
@@ -27,6 +27,7 @@ class HistoryRun(SQLModel, table=True):
         ondelete="SET NULL",
     )
     job_id: int = Field(foreign_key="jobv2.id")
+    task_id: int | None = Field(foreign_key="taskv2.id", ondelete="SET NULL")

     workflowtask_dump: dict[str, Any] = Field(
         sa_column=Column(JSONB, nullable=False),
fractal_server/app/routes/api/v2/_aux_functions_task_version_update.py
ADDED
@@ -0,0 +1,42 @@
+from typing import Any
+
+
+def get_new_workflow_task_meta(
+    *,
+    old_workflow_task_meta: dict | None,
+    old_task_meta: dict | None,
+    new_task_meta: dict | None,
+) -> dict[str, Any]:
+    """
+    Prepare new meta field based on old/new tasks and old workflow task.
+    """
+
+    # When the whole `old_workflow_task_meta` is user-provided, use it
+    # as the outcome
+    if old_task_meta is None:
+        return old_workflow_task_meta
+
+    # When `old_workflow_task_meta` is unset, use the new-task meta as default.
+    if old_workflow_task_meta is None:
+        return new_task_meta
+
+    if new_task_meta is None:
+        new_task_meta = {}
+
+    # Find properties that were added to the old defaults
+    additions = {
+        k: v
+        for k, v in old_workflow_task_meta.items()
+        if v != old_task_meta.get(k)
+    }
+    # Find properties that were removed from the old defaults
+    removals = old_task_meta.keys() - old_workflow_task_meta.keys()
+
+    # Add `additions` and remove `removals`.
+    new_workflowtask_meta = {
+        k: v
+        for k, v in (new_task_meta | additions).items()
+        if k not in removals
+    }
+
+    return new_workflowtask_meta
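For context, a minimal sketch of the merge semantics implemented by the function added above (the sample meta dicts are hypothetical):

    from fractal_server.app.routes.api.v2._aux_functions_task_version_update import (
        get_new_workflow_task_meta,
    )

    result = get_new_workflow_task_meta(
        old_workflow_task_meta={"mem": "16G"},     # user edited "mem", dropped "cpus"
        old_task_meta={"mem": "4G", "cpus": 1},    # old-task defaults
        new_task_meta={"mem": "8G", "gpus": 1},    # new-task defaults
    )
    # The user-edited "mem" survives, the removed "cpus" stays removed,
    # and the new default "gpus" is picked up.
    assert result == {"mem": "16G", "gpus": 1}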
fractal_server/app/routes/api/v2/history.py
CHANGED
@@ -23,6 +23,7 @@ from fractal_server.app.models import UserOAuth
 from fractal_server.app.models.v2 import HistoryImageCache
 from fractal_server.app.models.v2 import HistoryRun
 from fractal_server.app.models.v2 import HistoryUnit
+from fractal_server.app.models.v2 import TaskV2
 from fractal_server.app.routes.auth import current_active_user
 from fractal_server.app.routes.pagination import get_pagination_params
 from fractal_server.app.routes.pagination import PaginationRequest
@@ -31,9 +32,11 @@ from fractal_server.app.schemas.v2 import HistoryRunRead
 from fractal_server.app.schemas.v2 import HistoryRunReadAggregated
 from fractal_server.app.schemas.v2 import HistoryUnitRead
 from fractal_server.app.schemas.v2 import HistoryUnitStatus
-from fractal_server.app.schemas.v2 import HistoryUnitStatusQuery
+from fractal_server.app.schemas.v2 import HistoryUnitStatusWithUnset
 from fractal_server.app.schemas.v2 import ImageLogsRequest
 from fractal_server.app.schemas.v2 import SingleImageWithStatus
+from fractal_server.images.image_status import enrich_image_list
+from fractal_server.images.image_status import IMAGE_STATUS_KEY
 from fractal_server.images.tools import aggregate_attributes
 from fractal_server.images.tools import aggregate_types
 from fractal_server.images.tools import filter_image_list
@@ -60,7 +63,6 @@ def check_historyrun_related_to_dataset_and_wftask(


 class ImageWithStatusPage(PaginationResponse[SingleImageWithStatus]):
-
     attributes: dict[str, list[Any]]
     types: list[str]

@@ -77,7 +79,6 @@ async def get_workflow_tasks_statuses(
     user: UserOAuth = Depends(current_active_user),
     db: AsyncSession = Depends(get_async_db),
 ) -> JSONResponse:
-
     # Access control
     workflow = await _get_workflow_check_owner(
         project_id=project_id,
@@ -150,7 +151,6 @@ async def get_history_run_list(
     user: UserOAuth = Depends(current_active_user),
     db: AsyncSession = Depends(get_async_db),
 ) -> list[HistoryRunReadAggregated]:
-
     # Access control
     await get_wftask_check_owner(
         project_id=project_id,
@@ -199,7 +199,36 @@ async def get_history_run_list(
     for run_id, unit_status, count in unit_counts:
         count_map[run_id][f"num_{unit_status}_units"] = count

-    runs = [dict(**run.model_dump(), **count_map[run.id]) for run in runs]
+    res = await db.execute(
+        select(
+            TaskV2.id,
+            TaskV2.version,
+            TaskV2.args_schema_parallel,
+            TaskV2.args_schema_non_parallel,
+        ).where(
+            TaskV2.id.in_(
+                [run.task_id for run in runs if run.task_id is not None]
+            )
+        )
+    )
+
+    task_args = {
+        _id: {
+            "version": version,
+            "args_schema_parallel": parallel,
+            "args_schema_non_parallel": non_parallel,
+        }
+        for _id, version, parallel, non_parallel in res.all()
+    }
+
+    runs = [
+        dict(
+            **run.model_dump(),
+            **count_map[run.id],
+            **task_args.get(run.task_id, {}),
+        )
+        for run in runs
+    ]

     return runs

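One consequence of the new task_args lookup above: when a run's task has been deleted (task_id is None, or no longer present in TaskV2), task_args.get(run.task_id, {}) contributes nothing and the aggregated response falls back to the schema defaults. A minimal illustration (the sample data is hypothetical):

    task_args = {1: {"version": "1.2.0"}}

    run_without_task = {"id": 11, "task_id": None}
    merged = dict(
        **run_without_task,
        **task_args.get(run_without_task["task_id"], {}),
    )
    # HistoryRunReadAggregated then serializes version/args_schema_* as None
    assert "version" not in merged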
@@ -215,7 +244,6 @@ async def get_history_run_units(
     db: AsyncSession = Depends(get_async_db),
     pagination: PaginationRequest = Depends(get_pagination_params),
 ) -> PaginationResponse[HistoryUnitRead]:
-
     # Access control
     await get_wftask_check_owner(
         project_id=project_id,
@@ -271,12 +299,11 @@ async def get_history_images(
     dataset_id: int,
     workflowtask_id: int,
     request_body: ImageQuery,
-    unit_status: HistoryUnitStatusQuery | None = None,
+    unit_status: HistoryUnitStatusWithUnset | None = None,
     user: UserOAuth = Depends(current_active_user),
     db: AsyncSession = Depends(get_async_db),
     pagination: PaginationRequest = Depends(get_pagination_params),
 ) -> ImageWithStatusPage:
-
     # Access control and object retrieval
     wftask = await get_wftask_check_owner(
         project_id=project_id,
@@ -318,99 +345,62 @@ async def get_history_images(
     actual_filters.update(type_filters_patch)
     logger.debug(f"{prefix} {actual_filters=}")
     # (1D) Get all matching images from the dataset
+
     pre_filtered_dataset_images = filter_image_list(
         images=dataset.images,
         type_filters=inferred_dataset_type_filters,
     )
+
+    full_images_list = await enrich_image_list(
+        dataset_id=dataset_id,
+        workflowtask_id=workflowtask_id,
+        images=pre_filtered_dataset_images,
+        db=db,
+    )
+
+    if unit_status is not None:
+        request_body.attribute_filters[IMAGE_STATUS_KEY] = unit_status
+
     filtered_dataset_images = filter_image_list(
-        pre_filtered_dataset_images,
+        full_images_list,
         type_filters=request_body.type_filters,
         attribute_filters=request_body.attribute_filters,
     )
     logger.debug(f"{prefix} {len(dataset.images)=}")
     logger.debug(f"{prefix} {len(filtered_dataset_images)=}")
-    # (1E) Extract the list of URLs for filtered images
-    filtered_dataset_images_url = list(
-        img["zarr_url"] for img in filtered_dataset_images
-    )
-
-    # (2) Get `(zarr_url, status)` pairs for all images that have already
-    # been processed, and
-    # (3) When relevant, find images that have not been processed
-    base_stmt = (
-        select(HistoryImageCache.zarr_url, HistoryUnit.status)
-        .join(HistoryUnit)
-        .where(HistoryImageCache.dataset_id == dataset_id)
-        .where(HistoryImageCache.workflowtask_id == workflowtask_id)
-        .where(HistoryImageCache.latest_history_unit_id == HistoryUnit.id)
-        .where(HistoryImageCache.zarr_url.in_(filtered_dataset_images_url))
-    )
-
-    if unit_status in [HistoryUnitStatusQuery.UNSET, None]:
-        stmt = base_stmt.order_by(HistoryImageCache.zarr_url)
-        res = await db.execute(stmt)
-        list_processed_url_status = res.all()
-        list_processed_url = list(
-            item[0] for item in list_processed_url_status
-        )
-        list_non_processed_url_status = list(
-            (url, None)
-            for url in filtered_dataset_images_url
-            if url not in list_processed_url
-        )
-        if unit_status == HistoryUnitStatusQuery.UNSET:
-            list_processed_url_status = []
-    else:
-        stmt = base_stmt.where(HistoryUnit.status == unit_status).order_by(
-            HistoryImageCache.zarr_url
-        )
-        res = await db.execute(stmt)
-        list_processed_url_status = res.all()
-        list_non_processed_url_status = []
-
-    logger.debug(f"{prefix} {len(list_processed_url_status)=}")
-    logger.debug(f"{prefix} {len(list_non_processed_url_status)=}")
-
-    # (3) Combine outputs from 1 and 2
-    full_list_url_status = (
-        list_processed_url_status + list_non_processed_url_status
-    )
-    logger.debug(f"{prefix} {len(full_list_url_status)=}")

     attributes = aggregate_attributes(pre_filtered_dataset_images)
     types = aggregate_types(pre_filtered_dataset_images)

-    sorted_list_url_status = sorted(
-        full_list_url_status,
-        key=lambda url_status: url_status[0],
-    )
-    logger.debug(f"{prefix} {len(sorted_list_url_status)=}")
-
     # Final list of objects

-    total_count = len(
+    total_count = len(filtered_dataset_images)
     page_size = pagination.page_size or total_count
-
-    paginated_images_list = sorted_list_url_status[
+
+    sorted_images_list = sorted(
+        filtered_dataset_images,
+        key=lambda image: image["zarr_url"],
+    )
+    paginated_images_list = sorted_images_list[
         (pagination.page - 1) * page_size : pagination.page * page_size
     ]

-    #
-
+    # FIXME: This is only for backwards-compatibility. To remove when we
+    # update the webclient
+    paginated_images_list = [
         {
-            **
-
-
-            "
+            **img,
+            "status": (
+                lambda x: None if x == HistoryUnitStatusWithUnset.UNSET else x
+            )(img["attributes"].pop(IMAGE_STATUS_KEY)),
         }
-        for
+        for img in paginated_images_list
     ]

     return dict(
         current_page=pagination.page,
         page_size=page_size,
         total_count=total_count,
-        items=
+        items=paginated_images_list,
         attributes=attributes,
         types=types,
     )
fractal_server/app/routes/api/v2/task_version_update.py
CHANGED
@@ -17,12 +17,12 @@ from ....models import LinkUserGroup
 from ....models.v2 import TaskV2
 from ._aux_functions import _get_workflow_check_owner
 from ._aux_functions import _get_workflow_task_check_owner
+from ._aux_functions_task_version_update import get_new_workflow_task_meta
 from ._aux_functions_tasks import _check_type_filters_compatibility
 from ._aux_functions_tasks import _get_task_group_or_404
 from ._aux_functions_tasks import _get_task_read_access
 from fractal_server.app.models import UserOAuth
 from fractal_server.app.models.v2 import TaskGroupV2
-from fractal_server.app.models.v2 import WorkflowTaskV2
 from fractal_server.app.routes.auth import current_active_user
 from fractal_server.app.schemas.v2 import WorkflowTaskReadV2
 from fractal_server.app.schemas.v2 import WorkflowTaskReplaceV2
@@ -181,7 +181,7 @@ async def replace_workflowtask(
 ) -> WorkflowTaskReadV2:

     # Get objects from database
-    old_wftask, workflow = await _get_workflow_task_check_owner(
+    workflow_task, workflow = await _get_workflow_task_check_owner(
         project_id=project_id,
         workflow_id=workflow_id,
         workflow_task_id=workflow_task_id,
@@ -197,14 +197,14 @@

     # Preliminary checks
     if not _is_type_update_valid(
-        old_type=old_wftask.task_type,
+        old_type=workflow_task.task_type,
         new_type=new_task.type,
     ):
         raise HTTPException(
             status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
             detail=(
                 "Cannot change task type from "
-                f"{old_wftask.task_type} to {new_task.type}."
+                f"{workflow_task.task_type} to {new_task.type}."
             ),
         )

@@ -220,51 +220,27 @@
     )
     _check_type_filters_compatibility(
         task_input_types=new_task.input_types,
-        wftask_type_filters=old_wftask.type_filters,
+        wftask_type_filters=workflow_task.type_filters,
     )

-
-
-
-
-
-
-
-
-
-
-
-    # keep them; else, get `meta_non_parallel` from new task
-    if (
-        old_wftask.meta_non_parallel != old_wftask.task.meta_non_parallel
-    ) and (old_wftask.task.meta_non_parallel == new_task.meta_non_parallel):
-        _meta_non_parallel = old_wftask.meta_non_parallel
-    else:
-        _meta_non_parallel = new_task.meta_non_parallel
-    # Same for `meta_parallel`
-    if (old_wftask.meta_parallel != old_wftask.task.meta_parallel) and (
-        old_wftask.task.meta_parallel == new_task.meta_parallel
-    ):
-        _meta_parallel = old_wftask.meta_parallel
-    else:
-        _meta_parallel = new_task.meta_parallel
-
-    new_workflow_task = WorkflowTaskV2(
-        task_id=new_task.id,
-        task_type=new_task.type,
-        task=new_task,
-        # old-task values
-        type_filters=old_wftask.type_filters,
-        # possibly new values
-        args_non_parallel=_args_non_parallel,
-        args_parallel=_args_parallel,
-        meta_non_parallel=_meta_non_parallel,
-        meta_parallel=_meta_parallel,
+    workflow_task.task_id = new_task.id
+    workflow_task.task_type = new_task.type
+    workflow_task.meta_non_parallel = get_new_workflow_task_meta(
+        old_task_meta=workflow_task.task.meta_non_parallel,
+        old_workflow_task_meta=workflow_task.meta_non_parallel,
+        new_task_meta=new_task.meta_non_parallel,
+    )
+    workflow_task.meta_parallel = get_new_workflow_task_meta(
+        old_task_meta=workflow_task.task.meta_parallel,
+        old_workflow_task_meta=workflow_task.meta_parallel,
+        new_task_meta=new_task.meta_parallel,
     )
+    if replace.args_non_parallel is not None:
+        workflow_task.args_non_parallel = replace.args_non_parallel
+    if replace.args_parallel is not None:
+        workflow_task.args_parallel = replace.args_parallel

-
-    workflow.task_list.remove(old_wftask)
-    workflow.task_list.insert(workflow_task_order, new_workflow_task)
+    db.add(workflow_task)
     await db.commit()
-    await db.refresh(new_workflow_task)
-    return new_workflow_task
+    await db.refresh(workflow_task)
+    return workflow_task
fractal_server/app/schemas/v2/__init__.py
CHANGED
@@ -14,7 +14,7 @@ from .history import HistoryRunRead # noqa F401
 from .history import HistoryRunReadAggregated  # noqa F401
 from .history import HistoryUnitRead  # noqa F401
 from .history import HistoryUnitStatus  # noqa F401
-from .history import HistoryUnitStatusQuery  # noqa F401
+from .history import HistoryUnitStatusWithUnset  # noqa F401
 from .history import ImageLogsRequest  # noqa F401
 from .history import SingleImageWithStatus  # noqa F401
 from .job import JobCreateV2  # noqa F401
fractal_server/app/schemas/v2/history.py
CHANGED
@@ -24,12 +24,19 @@ class HistoryUnitStatus(StrEnum):
     FAILED = "failed"


-class HistoryUnitStatusQuery(StrEnum):
+class HistoryUnitStatusWithUnset(StrEnum):
+    """
+    Available status for history queries
+
+    Attributes:
+        SUBMITTED:
+        DONE:
+        FAILED:
+    """

     SUBMITTED = "submitted"
     DONE = "done"
     FAILED = "failed"
-
     UNSET = "unset"

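Since HistoryUnitStatusWithUnset is a StrEnum, its members compare equal to their raw string values, which is what lets the history endpoint drop them directly into attribute filters; a minimal illustration:

    from fractal_server.app.schemas.v2 import HistoryUnitStatusWithUnset

    assert HistoryUnitStatusWithUnset.UNSET == "unset"
    assert HistoryUnitStatusWithUnset.DONE.value == "done"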
@@ -63,6 +70,9 @@ class HistoryRunReadAggregated(BaseModel):
     num_submitted_units: int
     num_done_units: int
     num_failed_units: int
+    args_schema_parallel: dict[str, Any] | None = None
+    args_schema_non_parallel: dict[str, Any] | None = None
+    version: str | None = None

     @field_serializer("timestamp_started")
     def serialize_datetime(v: datetime) -> str:
@@ -75,5 +85,6 @@ class ImageLogsRequest(BaseModel):
     zarr_url: str


+# FIXME: remove this when we update the webclient
 class SingleImageWithStatus(SingleImage):
     status: HistoryUnitStatus | None = None
fractal_server/data_migrations/2_14_9.py
ADDED
@@ -0,0 +1,48 @@
+import logging
+
+from sqlmodel import select
+
+from fractal_server.app.db import get_sync_db
+from fractal_server.app.models import HistoryRun
+from fractal_server.app.models import TaskV2
+from fractal_server.app.models import WorkflowTaskV2
+
+logger = logging.getLogger("fix_db")
+logger.setLevel(logging.INFO)
+
+
+def fix_db():
+    logger.info("START execution of fix_db function")
+
+    with next(get_sync_db()) as db:
+
+        stm = select(HistoryRun).order_by(HistoryRun.id)
+        history_runs = db.execute(stm).scalars().all()
+
+        for hr in history_runs:
+            logger.info(f"HistoryRun[{hr.id}] START")
+
+            wft = db.get(WorkflowTaskV2, hr.workflowtask_id)
+            if wft is None:
+                logger.warning(
+                    f"WorkflowTaskV2[{hr.workflowtask_id}] not found. "
+                    "Trying to use HistoryRun.workflowtask_dump"
+                )
+                task_id = hr.workflowtask_dump.get("task_id")
+                if task_id is not None and db.get(TaskV2, task_id) is not None:
+                    hr.task_id = task_id
+                else:
+                    logger.warning(f"TaskV2[{task_id}] not found")
+            else:
+                hr.task_id = wft.task_id
+                logger.info(
+                    f"HistoryRun[{hr.id}].task_id set to {wft.task_id}"
+                )
+
+            db.add(hr)
+            logger.info(f"HistoryRun[{hr.id}] END")
+
+        db.commit()
+        logger.info("Changes committed.")
+
+    logger.info("END execution of fix_db function")
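This script backfills HistoryRun.task_id for pre-existing rows and is presumably dispatched by fractal-server's update-db-data tooling (see data_migrations/README.md). Because the module name starts with a digit, a plain import statement would not parse, so a manual invocation (a sketch, assuming a configured database) goes through importlib:

    import importlib

    # Hypothetical manual run of the 2.14.9 data migration
    mod = importlib.import_module("fractal_server.data_migrations.2_14_9")
    mod.fix_db()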
fractal_server/images/image_status.py
ADDED
@@ -0,0 +1,85 @@
+import time
+from copy import deepcopy
+from typing import Any
+
+from fastapi import Depends
+from sqlmodel import select
+
+from fractal_server.app.db import AsyncSession
+from fractal_server.app.db import get_async_db
+from fractal_server.app.models.v2 import HistoryImageCache
+from fractal_server.app.models.v2 import HistoryUnit
+from fractal_server.app.schemas.v2 import HistoryUnitStatusWithUnset
+from fractal_server.logger import set_logger
+
+IMAGE_STATUS_KEY = "__wftask_dataset_image_status__"
+
+logger = set_logger(__name__)
+
+
+def _enriched_image(*, img: dict[str, Any], status: str) -> dict[str, Any]:
+    img["attributes"][IMAGE_STATUS_KEY] = status
+    return img
+
+
+async def enrich_image_list(
+    *,
+    images: list[dict[str, Any]],
+    dataset_id: int,
+    workflowtask_id: int,
+    db: AsyncSession = Depends(get_async_db),
+) -> list[dict[str, Any]]:
+    start_time = time.perf_counter()
+    logger.info(
+        f"START {enrich_image_list.__name__} for {dataset_id=}, "
+        f"{workflowtask_id=}"
+    )
+
+    zarr_url_to_image = {img["zarr_url"]: deepcopy(img) for img in images}
+
+    stm = (
+        select(HistoryImageCache.zarr_url, HistoryUnit.status)
+        .join(HistoryUnit)
+        .where(HistoryImageCache.dataset_id == dataset_id)
+        .where(HistoryImageCache.workflowtask_id == workflowtask_id)
+        .where(HistoryImageCache.latest_history_unit_id == HistoryUnit.id)
+        .where(HistoryImageCache.zarr_url.in_(zarr_url_to_image.keys()))
+        .order_by(HistoryImageCache.zarr_url)
+    )
+    res = await db.execute(stm)
+    list_processed_url_status = res.all()
+    logger.debug(
+        f"POST db query, "
+        f"elapsed={time.perf_counter() - start_time:.3f} "
+        "seconds"
+    )
+
+    set_processed_urls = set(item[0] for item in list_processed_url_status)
+    processed_images_with_status = [
+        _enriched_image(
+            img=zarr_url_to_image[item[0]],
+            status=item[1],
+        )
+        for item in list_processed_url_status
+    ]
+    logger.debug(
+        f"POST processed_images_with_status, "
+        f"elapsed={time.perf_counter() - start_time:.3f} "
+        "seconds"
+    )
+
+    non_processed_urls = zarr_url_to_image.keys() - set_processed_urls
+    non_processed_images_with_status = [
+        _enriched_image(
+            img=zarr_url_to_image[zarr_url],
+            status=HistoryUnitStatusWithUnset.UNSET,
+        )
+        for zarr_url in non_processed_urls
+    ]
+    logger.debug(
+        f"POST non_processed_images_with_status, "
+        f"elapsed={time.perf_counter() - start_time:.3f} "
+        "seconds"
+    )
+
+    return processed_images_with_status + non_processed_images_with_status
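The enriched attribute key is what turns per-image status into an ordinary, filterable image attribute; a minimal sketch of the round trip performed by the history endpoint (the image dict is hypothetical):

    from fractal_server.images.image_status import IMAGE_STATUS_KEY

    # Hypothetical image, as returned by enrich_image_list
    img = {"zarr_url": "/some/image", "attributes": {IMAGE_STATUS_KEY: "done"}}

    # The GET-images endpoint later pops the key back out of the attributes
    # and exposes it as a top-level "status" field ("unset" becomes None).
    status = img["attributes"].pop(IMAGE_STATUS_KEY)
    assert status == "done"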
fractal_server/migrations/versions/969d84257cac_add_historyrun_task_id.py
ADDED
@@ -0,0 +1,42 @@
+"""add historyrun.task_id
+
+Revision ID: 969d84257cac
+Revises: c90a7c76e996
+Create Date: 2025-05-13 11:17:22.089308
+
+"""
+import sqlalchemy as sa
+from alembic import op
+
+
+# revision identifiers, used by Alembic.
+revision = "969d84257cac"
+down_revision = "c90a7c76e996"
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("historyrun", schema=None) as batch_op:
+        batch_op.add_column(sa.Column("task_id", sa.Integer(), nullable=True))
+        batch_op.create_foreign_key(
+            batch_op.f("fk_historyrun_task_id_taskv2"),
+            "taskv2",
+            ["task_id"],
+            ["id"],
+            ondelete="SET NULL",
+        )
+
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("historyrun", schema=None) as batch_op:
+        batch_op.drop_constraint(
+            batch_op.f("fk_historyrun_task_id_taskv2"), type_="foreignkey"
+        )
+        batch_op.drop_column("task_id")
+
+    # ### end Alembic commands ###
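A sketch of applying this revision on its own through Alembic's Python command API (fractal-server normally runs migrations via its own startup tooling; the alembic.ini path here is an assumption):

    from alembic import command
    from alembic.config import Config

    cfg = Config("alembic.ini")
    command.upgrade(cfg, "969d84257cac")  # or "head"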
fractal_server/tasks/v2/ssh/collect.py
CHANGED
@@ -108,21 +108,22 @@ def collect_ssh(
             )
             return

-        # Check that the (remote) task_group path does not exist
-        if fractal_ssh.remote_exists(task_group.path):
-            error_msg = f"{task_group.path} already exists."
-            logger.error(error_msg)
-            fail_and_cleanup(
-                task_group=task_group,
-                task_group_activity=activity,
-                logger_name=LOGGER_NAME,
-                log_file_path=log_file_path,
-                exception=FileExistsError(error_msg),
-                db=db,
-            )
-            return
-
         try:
+
+            # Check that the (remote) task_group path does not exist
+            if fractal_ssh.remote_exists(task_group.path):
+                error_msg = f"{task_group.path} already exists."
+                logger.error(error_msg)
+                fail_and_cleanup(
+                    task_group=task_group,
+                    task_group_activity=activity,
+                    logger_name=LOGGER_NAME,
+                    log_file_path=log_file_path,
+                    exception=FileExistsError(error_msg),
+                    db=db,
+                )
+                return
+
             # Create remote `task_group.path` and `script_dir_remote`
             # folders (note that because of `parents=True` we are in
             # the `no error if existing, make parent directories as
fractal_server/tasks/v2/ssh/deactivate.py
CHANGED
@@ -94,22 +94,22 @@ def deactivate_ssh(
             )
             return

-        # Check that the (local) task_group venv_path does exist
-        if not fractal_ssh.remote_exists(task_group.venv_path):
-            error_msg = f"{task_group.venv_path} does not exist."
-            logger.error(error_msg)
-            fail_and_cleanup(
-                task_group=task_group,
-                task_group_activity=activity,
-                logger_name=LOGGER_NAME,
-                log_file_path=log_file_path,
-                exception=FileNotFoundError(error_msg),
-                db=db,
-            )
-            return
-
         try:

+            # Check that the (local) task_group venv_path does exist
+            if not fractal_ssh.remote_exists(task_group.venv_path):
+                error_msg = f"{task_group.venv_path} does not exist."
+                logger.error(error_msg)
+                fail_and_cleanup(
+                    task_group=task_group,
+                    task_group_activity=activity,
+                    logger_name=LOGGER_NAME,
+                    log_file_path=log_file_path,
+                    exception=FileNotFoundError(error_msg),
+                    db=db,
+                )
+                return
+
             activity.status = TaskGroupActivityStatusV2.ONGOING
             activity = add_commit_refresh(obj=activity, db=db)

fractal_server/tasks/v2/ssh/reactivate.py
CHANGED
@@ -95,21 +95,22 @@ def reactivate_ssh(
             )
             return

-        # Check that the (remote) task_group venv_path does not exist
-        if fractal_ssh.remote_exists(task_group.venv_path):
-            error_msg = f"{task_group.venv_path} already exists."
-            logger.error(error_msg)
-            fail_and_cleanup(
-                task_group=task_group,
-                task_group_activity=activity,
-                logger_name=LOGGER_NAME,
-                log_file_path=log_file_path,
-                exception=FileExistsError(error_msg),
-                db=db,
-            )
-            return
-
         try:
+            # Check that the (remote) task_group venv_path does not
+            # exist
+            if fractal_ssh.remote_exists(task_group.venv_path):
+                error_msg = f"{task_group.venv_path} already exists."
+                logger.error(error_msg)
+                fail_and_cleanup(
+                    task_group=task_group,
+                    task_group_activity=activity,
+                    logger_name=LOGGER_NAME,
+                    log_file_path=log_file_path,
+                    exception=FileExistsError(error_msg),
+                    db=db,
+                )
+                return
+
             activity.status = TaskGroupActivityStatusV2.ONGOING
             activity = add_commit_refresh(obj=activity, db=db)

{fractal_server-2.14.7.dist-info → fractal_server-2.14.9.dist-info}/RECORD
CHANGED
@@ -1,4 +1,4 @@
-fractal_server/__init__.py,sha256=
+fractal_server/__init__.py,sha256=DrlYBYsIKgHWXaavttetyr00t93DQZr6HVVppq1kegI,23
 fractal_server/__main__.py,sha256=rkM8xjY1KeS3l63irB8yCrlVobR-73uDapC4wvrIlxI,6957
 fractal_server/alembic.ini,sha256=MWwi7GzjzawI9cCAK1LW7NxIBQDUqD12-ptJoq5JpP0,3153
 fractal_server/app/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -11,7 +11,7 @@ fractal_server/app/models/user_settings.py,sha256=RxzRBGLHF_wc5csrTeHGUSV77Md_X0
 fractal_server/app/models/v2/__init__.py,sha256=vjHwek7-IXmaZZL9VF0nD30YL9ca4wNc8P4RXJK_kDc,832
 fractal_server/app/models/v2/accounting.py,sha256=i-2TsjqyuclxFQ21C-TeDoss7ZBTRuXdzIJfVr2UxwE,1081
 fractal_server/app/models/v2/dataset.py,sha256=B_bPnYCSLRFN-vBIOc5nJ31JTruQPxLda9mqpPIJmGk,1209
-fractal_server/app/models/v2/history.py,sha256=
+fractal_server/app/models/v2/history.py,sha256=XsaUb2HNIfVekaxfdpFOG2Y6q5QyTm1SO2shl8dLYQ0,2123
 fractal_server/app/models/v2/job.py,sha256=LfpwAedMVcA_6Ne0Rr4g3tt0asAQkWz3LSPm7IwZhYc,1978
 fractal_server/app/models/v2/project.py,sha256=RmU5BQR4HD6xifRndUhvPBy30wntml-giBRoEysdWXw,755
 fractal_server/app/models/v2/task.py,sha256=P7nsS5mCmVyzr4WtcjoiedesqkWvkHA2cQPsMbQt-7o,1427
@@ -33,9 +33,10 @@ fractal_server/app/routes/api/v2/__init__.py,sha256=3i4Aa-sgXq5Bb21hjONRilAloz0o
 fractal_server/app/routes/api/v2/_aux_functions.py,sha256=P5exwdiNm0ZxtoGw4wxvm_-u8e83gXz8iYEVFuUq_cU,12792
 fractal_server/app/routes/api/v2/_aux_functions_history.py,sha256=Z23xwvBaVEEQ5B-JsWZJpjj4_QqoXqHYONztnbAH6gw,4425
 fractal_server/app/routes/api/v2/_aux_functions_task_lifecycle.py,sha256=GpKfw9yj01LmOAuNMTOreU1PFkCKpjK5oCt7_wp35-A,6741
+fractal_server/app/routes/api/v2/_aux_functions_task_version_update.py,sha256=WLDOYCnb6fnS5avKflyx6yN24Vo1n5kJk5ZyiKbzb8Y,1175
 fractal_server/app/routes/api/v2/_aux_functions_tasks.py,sha256=MFYnyNPBACSHXTDLXe6cSennnpmlpajN84iivOOMW7Y,11599
 fractal_server/app/routes/api/v2/dataset.py,sha256=6u4MFqJ3YZ0Zq6Xx8CRMrTPKW55ZaR63Uno21DqFr4Q,8889
-fractal_server/app/routes/api/v2/history.py,sha256=
+fractal_server/app/routes/api/v2/history.py,sha256=OHy3Y4lreGyGXk9v5iud73tzoGV-YAT027gMWJH-5p4,16958
 fractal_server/app/routes/api/v2/images.py,sha256=tJn0ANv4Tz2KHyb41sPbBRmSCpt632m8HEcgC3u-rHk,7709
 fractal_server/app/routes/api/v2/job.py,sha256=8xRTwh_OCHmK9IfI_zUASa2ozewR0qu0zVBl_a4IvHw,6467
 fractal_server/app/routes/api/v2/pre_submission_checks.py,sha256=MmjvSQ0pNAWEw5BavR16zIZ4h13py302AmToaz8Vvco,4768
@@ -47,7 +48,7 @@ fractal_server/app/routes/api/v2/task_collection.py,sha256=FGMhTnU88Umd8nMdriUYP
 fractal_server/app/routes/api/v2/task_collection_custom.py,sha256=EfGpv6W7xDyuYYp6E7XAcXLJiLNAImUHFqMDLgfh-4s,6730
 fractal_server/app/routes/api/v2/task_group.py,sha256=iShTvM9nJQhQLwR8ZpQRucVwYhJ7t00Lbesqh3M6mY4,7361
 fractal_server/app/routes/api/v2/task_group_lifecycle.py,sha256=C2U2V76YbbqDWmErJ98MH9C2C26Lve2p_35FZ1dNmXg,9095
-fractal_server/app/routes/api/v2/task_version_update.py,sha256=
+fractal_server/app/routes/api/v2/task_version_update.py,sha256=h2c6aTLXj0_ZyBuHVsD5-ZTNMGEUpS96qZ4Ot1jlb74,7974
 fractal_server/app/routes/api/v2/workflow.py,sha256=gwMtpfUY_JiTv5_R_q1I9WNkp6nTqEVtYx8jWNJRxcU,10227
 fractal_server/app/routes/api/v2/workflow_import.py,sha256=Q4CnkSV47F11j6DkNT_U3AhwBK-LSsWWegItfdoOJ6c,11167
 fractal_server/app/routes/api/v2/workflowtask.py,sha256=vVqEoJa3lrMl2CU94WoxFaqO3U0QImPgvrkkUNdqDOU,7462
@@ -100,7 +101,7 @@ fractal_server/app/runner/v2/_slurm_sudo.py,sha256=Gvsh4tUlc1_3KdF3B7zEqs-YIntC_
 fractal_server/app/runner/v2/db_tools.py,sha256=du5dKhMMFMErQXbGIgu9JvO_vtMensodyPsyDeqz1yQ,3324
 fractal_server/app/runner/v2/deduplicate_list.py,sha256=IVTE4abBU1bUprFTkxrTfYKnvkNTanWQ-KWh_etiT08,645
 fractal_server/app/runner/v2/merge_outputs.py,sha256=D1L4Taieq9i71SPQyNc1kMokgHh-sV_MqF3bv7QMDBc,907
-fractal_server/app/runner/v2/runner.py,sha256=
+fractal_server/app/runner/v2/runner.py,sha256=YkxHhWA25Mv7HzviCaYMTxkyuP-JFHj73H9aPO6Fxm0,17713
 fractal_server/app/runner/v2/runner_functions.py,sha256=Q9AVIR2NEBfRpfqW1wtQTTQfks_R1TnwRFBRro2fvjQ,18837
 fractal_server/app/runner/v2/submit_workflow.py,sha256=AMnXdozwIGlXD55ch0_SNAG-ntKBO-QRhkbInrvsShU,13140
 fractal_server/app/runner/v2/task_interface.py,sha256=V2TWBK6tbhycyMrJvFaoJ9IpuKlrLrvmjJbfNMsBBXo,2527
@@ -109,11 +110,11 @@ fractal_server/app/schemas/__init__.py,sha256=stURAU_t3AOBaH0HSUbV-GKhlPKngnnIMo
 fractal_server/app/schemas/user.py,sha256=t9nbyYjGCSOsxm9K97PDG3-9o27CsaFfhWb_L5nrjqA,1910
 fractal_server/app/schemas/user_group.py,sha256=x3-kqbo0q2wTP7QI0iZ7PU_9Dr957UYrFMKqS7BXLhE,1425
 fractal_server/app/schemas/user_settings.py,sha256=NpdC0Me0fgwwdfJuTSlFLCnLUjiWWzrJlPn_UPLjXnw,1862
-fractal_server/app/schemas/v2/__init__.py,sha256=
+fractal_server/app/schemas/v2/__init__.py,sha256=M49RJ8SKcVoSfSTuiTCcbexSo8JMtLQTVFltCW4CuGQ,3103
 fractal_server/app/schemas/v2/accounting.py,sha256=Wylt7uWTiDIFlHJOh4XEtYitk2FjFlmnodDrJDxcr0E,397
 fractal_server/app/schemas/v2/dataset.py,sha256=NKCjBwGBC7mPiSlXktZAcleJsvlLY6KfNKw7Wx4Zfqk,1728
 fractal_server/app/schemas/v2/dumps.py,sha256=o4RiWoSmQ8UPoWxgKoeORykGNIdczeNmm-ng-dBRD7k,2216
-fractal_server/app/schemas/v2/history.py,sha256=
+fractal_server/app/schemas/v2/history.py,sha256=BCOii7GW7OvcDr1mLZl5-kYxtczzefQciuAxp95zrFk,1958
 fractal_server/app/schemas/v2/job.py,sha256=fPay7dLSr-skKRdVRoZig8rf_sZwUdVdHZaJ4XM8vMI,3288
 fractal_server/app/schemas/v2/manifest.py,sha256=sZhj99iDgjE2MWXeTxnXSb6pFdKwRnFpCVQzcnpoTrI,6821
 fractal_server/app/schemas/v2/project.py,sha256=l96-3bCfB3knhITaLj1WSyBgbzP_k8CdtvgX_5jO_fU,657
@@ -127,10 +128,12 @@ fractal_server/app/security/__init__.py,sha256=oJ8RVglpOvWPQY4RokiE2YA72Nqo42dZE
 fractal_server/app/security/signup_email.py,sha256=Xd6QYxcdmg0PHpDwmUE8XQmPcOj3Xjy5oROcIMhmltM,1472
 fractal_server/app/user_settings.py,sha256=OP1yiYKtPadxwM51_Q0hdPk3z90TCN4z1BLpQsXyWiU,1316
 fractal_server/config.py,sha256=ldI9VzEWmwU75Z7zVku6I-rXGKS3bJDdCifZnwad9-4,25924
+fractal_server/data_migrations/2_14_9.py,sha256=cXmiMWPjtghr1f0uapZrs4mtbcj3tiliOebNz9LPR8Q,1532
 fractal_server/data_migrations/README.md,sha256=_3AEFvDg9YkybDqCLlFPdDmGJvr6Tw7HRI14aZ3LOIw,398
 fractal_server/data_migrations/tools.py,sha256=LeMeASwYGtEqd-3wOLle6WARdTGAimoyMmRbbJl-hAM,572
 fractal_server/gunicorn_fractal.py,sha256=u6U01TLGlXgq1v8QmEpLih3QnsInZD7CqphgJ_GrGzc,1230
 fractal_server/images/__init__.py,sha256=-_wjoKtSX02P1KjDxDP_EXKvmbONTRmbf7iGVTsyBpM,154
+fractal_server/images/image_status.py,sha256=2EUxvuYEFKSKoICEiQntT5P3Un7FS8gv2Ef1HZFUfF4,2667
 fractal_server/images/models.py,sha256=6WchcIzLLLwdkLNRfg71Dl4Y-9UFLPyrrzh1lWgjuP0,1245
 fractal_server/images/tools.py,sha256=XKhbdjfWZpTSe1akK1bSQl4gzEQlj9ETDbELkuwayVg,4066
 fractal_server/logger.py,sha256=QIeVn3QpZsiIL2jDdrKotr-MLyDcZYgiPiTluFU46lE,5317
@@ -154,6 +157,7 @@ fractal_server/migrations/versions/84bf0fffde30_add_dumps_to_applyworkflow.py,sh
 fractal_server/migrations/versions/8e8f227a3e36_update_taskv2_post_2_7_0.py,sha256=68y9-fpSuKx6KPtM_9n8Ho0I1qwa8IoG-yJqXUYQrGg,1111
 fractal_server/migrations/versions/8f79bd162e35_add_docs_info_and_docs_link_to_task_.py,sha256=6pgODDtyAxevZvAJBj9IJ41inhV1RpwbpZr_qfPPu1A,1115
 fractal_server/migrations/versions/94a47ea2d3ff_remove_cache_dir_slurm_user_and_slurm_.py,sha256=yL3-Hvzw5jBLKj4LFP1z5ofZE9L9W3tLwYtPNW7z4ko,1508
+fractal_server/migrations/versions/969d84257cac_add_historyrun_task_id.py,sha256=4nLSYEMp_Tm7VRfo8p9YKLHVnoizTXaPV6lfcfvWhj0,1143
 fractal_server/migrations/versions/97f444d47249_add_applyworkflow_project_dump.py,sha256=eKTZm3EgUgapXBxO0RuHkEfTKic-TZG3ADaMpGLuc0k,1057
 fractal_server/migrations/versions/99ea79d9e5d2_add_dataset_history.py,sha256=0im6TxDr53sKKcjiPgeH4ftVRGnRXZSh2lPbRQ1Ir9w,883
 fractal_server/migrations/versions/9c5ae74c9b98_add_user_settings_table.py,sha256=syONdZNf4-OnAcWIsbzXpYwpXPsXZ4SsmjwVvmVG0PU,2256
@@ -188,9 +192,9 @@ fractal_server/tasks/v2/local/deactivate.py,sha256=94s_RDND8aR5Y8RxFrRx61rZBMPGq
 fractal_server/tasks/v2/local/reactivate.py,sha256=eBgFgq5xVKNr4DIDX7QU8xXerhwMrPaHDJ1wTth7aQc,6191
 fractal_server/tasks/v2/ssh/__init__.py,sha256=aSQbVi6Ummt9QzcSLWNmSqYjfdxrn9ROmqgH6bDpI7k,135
 fractal_server/tasks/v2/ssh/_utils.py,sha256=LjaEYVUJDChilu3YuhxuGWYRNnVJ_zqNE9SDHdRTIHY,2824
-fractal_server/tasks/v2/ssh/collect.py,sha256=
-fractal_server/tasks/v2/ssh/deactivate.py,sha256=
-fractal_server/tasks/v2/ssh/reactivate.py,sha256=
+fractal_server/tasks/v2/ssh/collect.py,sha256=bClq8hB04igrUXk1Mgc7pRWQws77mpYX8KYL-w6Lwtg,14825
+fractal_server/tasks/v2/ssh/deactivate.py,sha256=YO2PJ0VV-LhVW-6O-t-d6BQciO2fYAkYbz5Y9UBiXaA,12928
+fractal_server/tasks/v2/ssh/reactivate.py,sha256=1DIQduhqZLbrIeoVyyp54vemBWZu94tFDvjpmDsZZI0,8818
 fractal_server/tasks/v2/templates/1_create_venv.sh,sha256=PK0jdHKtQpda1zULebBaVPORt4t6V17wa4N1ohcj5ac,548
 fractal_server/tasks/v2/templates/2_pip_install.sh,sha256=Md2LPt3BJ7IfN0USF2uivl4rP8OwbzJOUepGAr_Cicg,1836
 fractal_server/tasks/v2/templates/3_pip_freeze.sh,sha256=JldREScEBI4cD_qjfX4UK7V4aI-FnX9ZvVNxgpSOBFc,168
@@ -210,8 +214,8 @@ fractal_server/types/validators/_workflow_task_arguments_validators.py,sha256=HL
 fractal_server/urls.py,sha256=QjIKAC1a46bCdiPMu3AlpgFbcv6a4l3ABcd5xz190Og,471
 fractal_server/utils.py,sha256=FCY6HUsRnnbsWkT2kwQ2izijiHuCrCD3Kh50G0QudxE,3531
 fractal_server/zip_tools.py,sha256=tqz_8f-vQ9OBRW-4OQfO6xxY-YInHTyHmZxU7U4PqZo,4885
-fractal_server-2.14.7.dist-info/LICENSE,sha256=
-fractal_server-2.14.7.dist-info/METADATA,sha256=
-fractal_server-2.14.7.dist-info/WHEEL,sha256=
-fractal_server-2.14.7.dist-info/entry_points.txt,sha256=
-fractal_server-2.14.7.dist-info/RECORD,,
+fractal_server-2.14.9.dist-info/LICENSE,sha256=QKAharUuhxL58kSoLizKJeZE3mTCBnX6ucmz8W0lxlk,1576
+fractal_server-2.14.9.dist-info/METADATA,sha256=0clmXo9yCW0yKDYn04AtdyVdVVqA0K6r77lqL9bmpMI,4243
+fractal_server-2.14.9.dist-info/WHEEL,sha256=7dDg4QLnNKTvwIDR9Ac8jJaAmBC_owJrckbC0jjThyA,88
+fractal_server-2.14.9.dist-info/entry_points.txt,sha256=8tV2kynvFkjnhbtDnxAqImL6HMVKsopgGfew0DOp5UY,58
+fractal_server-2.14.9.dist-info/RECORD,,
{fractal_server-2.14.7.dist-info → fractal_server-2.14.9.dist-info}/LICENSE
File without changes
{fractal_server-2.14.7.dist-info → fractal_server-2.14.9.dist-info}/WHEEL
File without changes
{fractal_server-2.14.7.dist-info → fractal_server-2.14.9.dist-info}/entry_points.txt
File without changes