fractal-server 2.14.0a7__py3-none-any.whl → 2.14.0a8__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package, as published to a supported public registry. It is provided for informational purposes only.
- fractal_server/__init__.py +1 -1
- fractal_server/app/history/__init__.py +0 -4
- fractal_server/app/routes/api/v2/_aux_functions.py +35 -45
- fractal_server/app/routes/api/v2/_aux_functions_history.py +109 -0
- fractal_server/app/routes/api/v2/history.py +42 -71
- fractal_server/app/routes/api/v2/job.py +30 -0
- fractal_server/app/routes/api/v2/status_legacy.py +5 -5
- fractal_server/app/runner/executors/slurm_ssh/executor.py +3 -4
- fractal_server/app/runner/v2/runner.py +13 -16
- fractal_server/app/runner/v2/runner_functions.py +23 -21
- fractal_server/app/schemas/v2/__init__.py +7 -1
- fractal_server/app/schemas/v2/dumps.py +20 -4
- fractal_server/app/schemas/v2/history.py +54 -0
- fractal_server/app/schemas/v2/status_legacy.py +35 -0
- fractal_server/app/schemas/v2/task.py +1 -3
- fractal_server/app/schemas/v2/workflowtask.py +0 -20
- {fractal_server-2.14.0a7.dist-info → fractal_server-2.14.0a8.dist-info}/METADATA +2 -2
- {fractal_server-2.14.0a7.dist-info → fractal_server-2.14.0a8.dist-info}/RECORD +21 -22
- {fractal_server-2.14.0a7.dist-info → fractal_server-2.14.0a8.dist-info}/WHEEL +1 -1
- fractal_server/app/history/image_updates.py +0 -124
- fractal_server/app/history/status_enum.py +0 -16
- fractal_server/app/schemas/v2/status.py +0 -16
- {fractal_server-2.14.0a7.dist-info → fractal_server-2.14.0a8.dist-info}/LICENSE +0 -0
- {fractal_server-2.14.0a7.dist-info → fractal_server-2.14.0a8.dist-info}/entry_points.txt +0 -0
fractal_server/app/runner/v2/runner_functions.py
@@ -15,7 +15,6 @@ from .runner_functions_low_level import run_single_task
 from .task_interface import InitTaskOutput
 from .task_interface import TaskOutput
 from fractal_server.app.db import get_sync_db
-from fractal_server.app.history.status_enum import XXXStatus
 from fractal_server.app.models.v2 import HistoryUnit
 from fractal_server.app.models.v2 import TaskV2
 from fractal_server.app.models.v2 import WorkflowTaskV2
@@ -23,12 +22,15 @@ from fractal_server.app.runner.components import _COMPONENT_KEY_
 from fractal_server.app.runner.components import _index_to_component
 from fractal_server.app.runner.executors.base_runner import BaseRunner
 from fractal_server.app.runner.v2._db_tools import bulk_upsert_image_cache_fast
+from fractal_server.app.schemas.v2 import HistoryUnitStatus


 __all__ = [
-    "run_v2_task_non_parallel",
     "run_v2_task_parallel",
+    "run_v2_task_non_parallel",
     "run_v2_task_compound",
+    "run_v2_task_converter_non_parallel",
+    "run_v2_task_converter_compound",
 ]

 MAX_PARALLELIZATION_LIST_SIZE = 20_000
@@ -125,7 +127,7 @@ def run_v2_task_non_parallel(
     with next(get_sync_db()) as db:
         history_unit = HistoryUnit(
             history_run_id=history_run_id,
-            status=XXXStatus.SUBMITTED,
+            status=HistoryUnitStatus.SUBMITTED,
             logfile=None,  # FIXME
             zarr_urls=function_kwargs["zarr_urls"],
         )
@@ -165,7 +167,7 @@ def run_v2_task_non_parallel(
         db.execute(
             update(HistoryUnit)
             .where(HistoryUnit.id == history_unit_id)
-            .values(status=XXXStatus.DONE)
+            .values(status=HistoryUnitStatus.DONE)
         )
         db.commit()
         if result is None:
@@ -176,7 +178,7 @@ def run_v2_task_non_parallel(
         db.execute(
             update(HistoryUnit)
             .where(HistoryUnit.id == history_unit_id)
-            .values(status=XXXStatus.FAILED)
+            .values(status=HistoryUnitStatus.FAILED)
         )
         db.commit()
         return (TaskOutput(), num_tasks, {0: exception})
@@ -222,7 +224,7 @@ def run_v2_task_converter_non_parallel(
     with next(get_sync_db()) as db:
         history_unit = HistoryUnit(
             history_run_id=history_run_id,
-            status=XXXStatus.SUBMITTED,
+            status=HistoryUnitStatus.SUBMITTED,
             logfile=None,  # FIXME
             zarr_urls=[],
         )
@@ -250,7 +252,7 @@ def run_v2_task_converter_non_parallel(
         db.execute(
             update(HistoryUnit)
             .where(HistoryUnit.id == history_unit_id)
-            .values(status=XXXStatus.DONE)
+            .values(status=HistoryUnitStatus.DONE)
         )
         db.commit()
         if result is None:
@@ -261,7 +263,7 @@ def run_v2_task_converter_non_parallel(
         db.execute(
             update(HistoryUnit)
             .where(HistoryUnit.id == history_unit_id)
-            .values(status=XXXStatus.FAILED)
+            .values(status=HistoryUnitStatus.FAILED)
         )
         db.commit()
         return (TaskOutput(), num_tasks, {0: exception})
@@ -303,7 +305,7 @@ def run_v2_task_parallel(
     history_units = [
         HistoryUnit(
             history_run_id=history_run_id,
-            status=XXXStatus.SUBMITTED,
+            status=HistoryUnitStatus.SUBMITTED,
             logfile=None,  # FIXME
             zarr_urls=[image["zarr_url"]],
         )
@@ -367,12 +369,12 @@ def run_v2_task_parallel(
         db.execute(
             update(HistoryUnit)
             .where(HistoryUnit.id.in_(history_unit_ids_done))
-            .values(status=XXXStatus.DONE)
+            .values(status=HistoryUnitStatus.DONE)
         )
         db.execute(
             update(HistoryUnit)
             .where(HistoryUnit.id.in_(history_unit_ids_failed))
-            .values(status=XXXStatus.FAILED)
+            .values(status=HistoryUnitStatus.FAILED)
         )
         db.commit()

@@ -421,7 +423,7 @@ def run_v2_task_compound(
     # Create a single `HistoryUnit` for the whole compound task
     history_unit = HistoryUnit(
         history_run_id=history_run_id,
-        status=XXXStatus.SUBMITTED,
+        status=HistoryUnitStatus.SUBMITTED,
         logfile=None,  # FIXME
         zarr_urls=input_image_zarr_urls,
     )
@@ -467,7 +469,7 @@ def run_v2_task_compound(
         db.execute(
             update(HistoryUnit)
             .where(HistoryUnit.id == history_unit_id)
-            .values(status=XXXStatus.FAILED)
+            .values(status=HistoryUnitStatus.FAILED)
         )
         db.commit()
         return (TaskOutput(), num_tasks, {0: exception})
@@ -485,7 +487,7 @@ def run_v2_task_compound(
         db.execute(
             update(HistoryUnit)
             .where(HistoryUnit.id == history_unit_id)
-            .values(status=XXXStatus.DONE)
+            .values(status=HistoryUnitStatus.DONE)
         )
         db.commit()
         return (TaskOutput(), 0, {})
@@ -535,13 +537,13 @@ def run_v2_task_compound(
             db.execute(
                 update(HistoryUnit)
                 .where(HistoryUnit.id == history_unit_id)
-                .values(status=XXXStatus.FAILED)
+                .values(status=HistoryUnitStatus.FAILED)
             )
         else:
             db.execute(
                 update(HistoryUnit)
                 .where(HistoryUnit.id == history_unit_id)
-                .values(status=XXXStatus.DONE)
+                .values(status=HistoryUnitStatus.DONE)
             )
         db.commit()

@@ -586,7 +588,7 @@ def run_v2_task_converter_compound(
     # Create a single `HistoryUnit` for the whole compound task
     history_unit = HistoryUnit(
         history_run_id=history_run_id,
-        status=XXXStatus.SUBMITTED,
+        status=HistoryUnitStatus.SUBMITTED,
         logfile=None,  # FIXME
         zarr_urls=[],
     )
@@ -619,7 +621,7 @@ def run_v2_task_converter_compound(
         db.execute(
             update(HistoryUnit)
             .where(HistoryUnit.id == history_unit_id)
-            .values(status=XXXStatus.FAILED)
+            .values(status=HistoryUnitStatus.FAILED)
         )
         db.commit()
         return (TaskOutput(), num_tasks, {0: exception})
@@ -637,7 +639,7 @@ def run_v2_task_converter_compound(
         db.execute(
             update(HistoryUnit)
             .where(HistoryUnit.id == history_unit_id)
-            .values(status=XXXStatus.DONE)
+            .values(status=HistoryUnitStatus.DONE)
         )
         db.commit()
         return (TaskOutput(), 0, {})
@@ -687,13 +689,13 @@ def run_v2_task_converter_compound(
             db.execute(
                 update(HistoryUnit)
                 .where(HistoryUnit.id == history_unit_id)
-                .values(status=XXXStatus.FAILED)
+                .values(status=HistoryUnitStatus.FAILED)
             )
         else:
             db.execute(
                 update(HistoryUnit)
                 .where(HistoryUnit.id == history_unit_id)
-                .values(status=XXXStatus.DONE)
+                .values(status=HistoryUnitStatus.DONE)
             )
         db.commit()

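Every status transition in the hunks above follows one pattern: insert a `HistoryUnit` row as `SUBMITTED`, then flip it to `DONE` or `FAILED` with a bulk `UPDATE` once the task finishes (the parallel variant uses `HistoryUnit.id.in_(...)` to update many units in one statement). Below is a minimal runnable sketch of that SQLAlchemy pattern, using a stand-in model and an in-memory database rather than the real `fractal_server.app.models.v2.HistoryUnit`:

```python
# Sketch of the status-update pattern used throughout runner_functions.py.
# HistoryUnit here is a stand-in model, not the real fractal-server one.
from enum import Enum

from sqlalchemy import create_engine, update
from sqlalchemy.orm import DeclarativeBase, Mapped, Session, mapped_column


class HistoryUnitStatus(str, Enum):
    SUBMITTED = "submitted"
    DONE = "done"
    FAILED = "failed"


class Base(DeclarativeBase):
    pass


class HistoryUnit(Base):
    __tablename__ = "history_unit"
    id: Mapped[int] = mapped_column(primary_key=True)
    status: Mapped[str] = mapped_column(default=HistoryUnitStatus.SUBMITTED.value)


engine = create_engine("sqlite://")  # throwaway in-memory DB
Base.metadata.create_all(engine)

with Session(engine) as db:
    # Unit starts out SUBMITTED, like the HistoryUnit(...) constructors above.
    db.add(HistoryUnit(id=1))
    db.commit()
    # Same shape as the hunks above: bulk UPDATE by id, then commit.
    db.execute(
        update(HistoryUnit)
        .where(HistoryUnit.id == 1)
        .values(status=HistoryUnitStatus.DONE.value)
    )
    db.commit()
```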
fractal_server/app/schemas/v2/__init__.py
@@ -7,8 +7,14 @@ from .dataset import DatasetUpdateV2  # noqa F401
 from .dumps import DatasetDumpV2  # noqa F401
 from .dumps import ProjectDumpV2  # noqa F401
 from .dumps import TaskDumpV2  # noqa F401
+from .dumps import TaskGroupDumpV2  # noqa F401
 from .dumps import WorkflowDumpV2  # noqa F401
 from .dumps import WorkflowTaskDumpV2  # noqa F401
+from .history import HistoryRunReadAggregated  # noqa F401
+from .history import HistoryUnitRead  # noqa F401
+from .history import HistoryUnitStatus  # noqa F401
+from .history import ImageLogsRequest  # noqa F401
+from .history import ZarrUrlAndStatus  # noqa F401
 from .job import JobCreateV2  # noqa F401
 from .job import JobReadV2  # noqa F401
 from .job import JobStatusTypeV2  # noqa F401
@@ -18,6 +24,7 @@ from .manifest import TaskManifestV2  # noqa F401
 from .project import ProjectCreateV2  # noqa F401
 from .project import ProjectReadV2  # noqa F401
 from .project import ProjectUpdateV2  # noqa F401
+from .status_legacy import WorkflowTaskStatusTypeV2  # noqa F401
 from .task import TaskCreateV2  # noqa F401
 from .task import TaskExportV2  # noqa F401
 from .task import TaskImportV2  # noqa F401
@@ -47,5 +54,4 @@ from .workflowtask import WorkflowTaskImportV2  # noqa F401
 from .workflowtask import WorkflowTaskReadV2  # noqa F401
 from .workflowtask import WorkflowTaskReadV2WithWarning  # noqa F401
 from .workflowtask import WorkflowTaskReplaceV2  # noqa F401
-from .workflowtask import WorkflowTaskStatusTypeV2  # noqa F401
 from .workflowtask import WorkflowTaskUpdateV2  # noqa F401
fractal_server/app/schemas/v2/dumps.py
@@ -1,18 +1,20 @@
 """
-
 Dump models differ from their Read counterpart in that:
 * They are directly JSON-able, without any additional encoder.
-* They may only
+* They may include only a subset of the available fields.

 These models are used in at least two situations:
 1. In the "*_dump" attributes of Job models;
-2. In the
+2. In the history items, to trim their size.
 """
 from typing import Optional

 from pydantic import BaseModel
 from pydantic import ConfigDict
+from pydantic import Field

+from .task import TaskTypeType
+from .task_group import TaskGroupV2OriginEnum
 from fractal_server.images.models import AttributeFiltersType

@@ -26,7 +28,7 @@ class ProjectDumpV2(BaseModel):
 class TaskDumpV2(BaseModel):
     id: int
     name: str
-    type:
+    type: TaskTypeType

     command_non_parallel: Optional[str] = None
     command_parallel: Optional[str] = None
@@ -72,3 +74,17 @@ class DatasetDumpV2(BaseModel):
     zarr_dir: str
     type_filters: dict[str, bool]
     attribute_filters: AttributeFiltersType
+
+
+class TaskGroupDumpV2(BaseModel):
+    id: int
+    origin: TaskGroupV2OriginEnum
+    pkg_name: str
+    version: Optional[str] = None
+    python_version: Optional[str] = None
+    pip_extras: Optional[str] = None
+    pinned_package_versions: dict[str, str] = Field(default_factory=dict)
+
+    path: Optional[str] = None
+    venv_path: Optional[str] = None
+    wheel_path: Optional[str] = None
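Per the module docstring, dump models must be JSON-able with no custom encoder. A small sketch of what that buys for the new `TaskGroupDumpV2`; the enum members below are hypothetical, since `TaskGroupV2OriginEnum` is defined in `task_group.py` and not shown in this diff:

```python
import json
from enum import Enum
from typing import Optional

from pydantic import BaseModel, Field


class TaskGroupV2OriginEnum(str, Enum):
    # Hypothetical members; the real enum lives in schemas/v2/task_group.py.
    PYPI = "pypi"
    WHEEL_FILE = "wheel-file"


class TaskGroupDumpV2(BaseModel):
    id: int
    origin: TaskGroupV2OriginEnum
    pkg_name: str
    version: Optional[str] = None
    python_version: Optional[str] = None
    pip_extras: Optional[str] = None
    pinned_package_versions: dict[str, str] = Field(default_factory=dict)

    path: Optional[str] = None
    venv_path: Optional[str] = None
    wheel_path: Optional[str] = None


dump = TaskGroupDumpV2(id=1, origin="pypi", pkg_name="fractal-tasks-core")
# mode="json" reduces every field to str/int/dict/None, so json.dumps
# needs no custom encoder -- the property the module docstring describes.
print(json.dumps(dump.model_dump(mode="json")))
```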
fractal_server/app/schemas/v2/history.py (new file)
@@ -0,0 +1,54 @@
+from datetime import datetime
+from enum import Enum
+from typing import Any
+from typing import Optional
+
+from pydantic import AwareDatetime
+from pydantic import BaseModel
+from pydantic import field_serializer
+
+
+class HistoryUnitStatus(str, Enum):
+    """
+    Available status for images
+
+    Attributes:
+        SUBMITTED:
+        DONE:
+        FAILED:
+    """
+
+    SUBMITTED = "submitted"
+    DONE = "done"
+    FAILED = "failed"
+
+
+class HistoryUnitRead(BaseModel):
+    id: int
+    logfile: Optional[str] = None
+    status: HistoryUnitStatus
+    zarr_urls: list[str]
+
+
+class HistoryRunReadAggregated(BaseModel):
+    id: int
+    timestamp_started: AwareDatetime
+    workflowtask_dump: dict[str, Any]
+    num_submitted_units: int
+    num_done_units: int
+    num_failed_units: int
+
+    @field_serializer("timestamp_started")
+    def serialize_datetime(v: datetime) -> str:
+        return v.isoformat()
+
+
+class ImageLogsRequest(BaseModel):
+    workflowtask_id: int
+    dataset_id: int
+    zarr_url: str
+
+
+class ZarrUrlAndStatus(BaseModel):
+    zarr_url: str
+    status: Optional[HistoryUnitStatus] = None
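Two details of the new module worth noting: `AwareDatetime` rejects naive datetimes at validation time, and the `field_serializer` emits ISO-8601 strings, so `HistoryRunReadAggregated` dumps to plain JSON types. A standalone sketch (it adds the conventional `self` parameter, whereas the file above declares the serializer with a single argument):

```python
from datetime import datetime, timezone

from pydantic import AwareDatetime, BaseModel, field_serializer


class RunTimestamp(BaseModel):
    timestamp_started: AwareDatetime

    @field_serializer("timestamp_started")
    def serialize_datetime(self, v: datetime) -> str:
        # Emit an ISO-8601 string instead of a datetime object.
        return v.isoformat()


m = RunTimestamp(timestamp_started=datetime(2025, 3, 1, tzinfo=timezone.utc))
print(m.model_dump())  # {'timestamp_started': '2025-03-01T00:00:00+00:00'}

# A naive datetime fails validation:
# RunTimestamp(timestamp_started=datetime(2025, 3, 1))  -> ValidationError
```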
fractal_server/app/schemas/v2/status_legacy.py (new file)
@@ -0,0 +1,35 @@
+from enum import Enum
+
+from pydantic import BaseModel
+from pydantic import Field
+
+
+class WorkflowTaskStatusTypeV2(str, Enum):
+    """
+    Define the available values for the status of a `WorkflowTask`.
+
+    This model is used within the `Dataset.history` attribute, which is
+    constructed in the runner and then used in the API (e.g. in the
+    `api/v2/project/{project_id}/dataset/{dataset_id}/status` endpoint).
+
+    Attributes:
+        SUBMITTED: The `WorkflowTask` is part of a running job.
+        DONE: The most-recent execution of this `WorkflowTask` was successful.
+        FAILED: The most-recent execution of this `WorkflowTask` failed.
+    """
+
+    SUBMITTED = "submitted"
+    DONE = "done"
+    FAILED = "failed"
+
+
+class LegacyStatusReadV2(BaseModel):
+    """
+    Response type for the
+    `/project/{project_id}/status/` endpoint
+    """
+
+    status: dict[
+        str,
+        WorkflowTaskStatusTypeV2,
+    ] = Field(default_factory=dict)
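`LegacyStatusReadV2` wraps a plain string-to-enum mapping; the keys are presumably `WorkflowTask` ids rendered as strings, since JSON object keys are always strings. A usage sketch built from the definitions in the hunk above:

```python
from enum import Enum

from pydantic import BaseModel, Field


class WorkflowTaskStatusTypeV2(str, Enum):
    SUBMITTED = "submitted"
    DONE = "done"
    FAILED = "failed"


class LegacyStatusReadV2(BaseModel):
    status: dict[str, WorkflowTaskStatusTypeV2] = Field(default_factory=dict)


# Pydantic coerces the plain strings into enum members on validation.
resp = LegacyStatusReadV2(status={"1": "done", "2": "submitted"})
print(resp.model_dump(mode="json"))  # {'status': {'1': 'done', '2': 'submitted'}}
```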
fractal_server/app/schemas/v2/task.py
@@ -135,7 +135,7 @@ class TaskCreateV2(BaseModel):
 class TaskReadV2(BaseModel):
     id: int
     name: str
-    type:
+    type: TaskTypeType
     source: Optional[str] = None
     version: Optional[str] = None

@@ -158,8 +158,6 @@ class TaskReadV2(BaseModel):
     authors: Optional[str] = None
     tags: list[str]

-    type: Optional[TaskTypeType] = None
-

 class TaskUpdateV2(BaseModel):
     model_config = ConfigDict(extra="forbid")
fractal_server/app/schemas/v2/workflowtask.py
@@ -1,4 +1,3 @@
-from enum import Enum
 from typing import Any
 from typing import Optional
 from typing import Union
@@ -21,25 +20,6 @@ from .task import TaskTypeType
 RESERVED_ARGUMENTS = {"zarr_dir", "zarr_url", "zarr_urls", "init_args"}


-class WorkflowTaskStatusTypeV2(str, Enum):
-    """
-    Define the available values for the status of a `WorkflowTask`.
-
-    This model is used within the `Dataset.history` attribute, which is
-    constructed in the runner and then used in the API (e.g. in the
-    `api/v2/project/{project_id}/dataset/{dataset_id}/status` endpoint).
-
-    Attributes:
-        SUBMITTED: The `WorkflowTask` is part of a running job.
-        DONE: The most-recent execution of this `WorkflowTask` was successful.
-        FAILED: The most-recent execution of this `WorkflowTask` failed.
-    """
-
-    SUBMITTED = "submitted"
-    DONE = "done"
-    FAILED = "failed"
-
-
 class WorkflowTaskCreateV2(BaseModel):
     model_config = ConfigDict(extra="forbid")

{fractal_server-2.14.0a7.dist-info → fractal_server-2.14.0a8.dist-info}/METADATA
@@ -1,8 +1,7 @@
 Metadata-Version: 2.3
 Name: fractal-server
-Version: 2.14.0a7
+Version: 2.14.0a8
 Summary: Backend component of the Fractal analytics platform
-Home-page: https://github.com/fractal-analytics-platform/fractal-server
 License: BSD-3-Clause
 Author: Tommaso Comparin
 Author-email: tommaso.comparin@exact-lab.it
@@ -29,6 +28,7 @@ Requires-Dist: sqlmodel (==0.0.22)
 Requires-Dist: uvicorn (>=0.29.0,<0.35.0)
 Requires-Dist: uvicorn-worker (==0.3.0)
 Project-URL: Documentation, https://fractal-analytics-platform.github.io/fractal-server
+Project-URL: Homepage, https://github.com/fractal-analytics-platform/fractal-server
 Project-URL: Repository, https://github.com/fractal-analytics-platform/fractal-server
 Project-URL: changelog, https://github.com/fractal-analytics-platform/fractal-server/blob/main/CHANGELOG.md
 Description-Content-Type: text/markdown
{fractal_server-2.14.0a7.dist-info → fractal_server-2.14.0a8.dist-info}/RECORD
@@ -1,11 +1,9 @@
-fractal_server/__init__.py,sha256=
+fractal_server/__init__.py,sha256=kGAQu3H-ucHD5NhKhR9WUJUdDh_6vyFTxQN6G9BnBpY,25
 fractal_server/__main__.py,sha256=rkM8xjY1KeS3l63irB8yCrlVobR-73uDapC4wvrIlxI,6957
 fractal_server/alembic.ini,sha256=MWwi7GzjzawI9cCAK1LW7NxIBQDUqD12-ptJoq5JpP0,3153
 fractal_server/app/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 fractal_server/app/db/__init__.py,sha256=wup2wcOkyOh8Vd0Xm76PZn_naxeMqaL4eF8DHHXTGlI,2889
-fractal_server/app/history/__init__.py,sha256=
-fractal_server/app/history/image_updates.py,sha256=ToY3yENMM6OdWUuFPG5F7qsLefrsaPqyRDFHaimsaHI,4188
-fractal_server/app/history/status_enum.py,sha256=umznsG4-ZunCLz2ks3Gb-K_s1irwd4ibOJ8vJ2Z0C-M,235
+fractal_server/app/history/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 fractal_server/app/models/__init__.py,sha256=xJWiGAwpXmCpnFMC4c_HTqoUCzMOXrakoGLUH_uMvdA,415
 fractal_server/app/models/linkusergroup.py,sha256=LWTUfhH2uAnn_4moK7QdRUIHWtpw-hPZuW-5jClv_OE,610
 fractal_server/app/models/linkuserproject.py,sha256=hvaxh3Lkiy2uUCwB8gvn8RorCpvxSSdzWdCS_U1GL7g,315
@@ -33,16 +31,16 @@ fractal_server/app/routes/admin/v2/task_group.py,sha256=XTjdqgABXZcx9EenaoqSmHh1
 fractal_server/app/routes/admin/v2/task_group_lifecycle.py,sha256=0e0ZJ_k75TVHaT2o8Xk33DPDSgh-eBhZf-y4y7t-Adg,9429
 fractal_server/app/routes/api/__init__.py,sha256=B8l6PSAhR10iZqHEiyTat-_0tkeKdrCigIE6DJGP5b8,638
 fractal_server/app/routes/api/v2/__init__.py,sha256=Q65mxJOfuexzj53m6IB5fNl9PDec-Asu97W0bBxrRMM,2324
-fractal_server/app/routes/api/v2/_aux_functions.py,sha256=
-fractal_server/app/routes/api/v2/_aux_functions_history.py,sha256=
+fractal_server/app/routes/api/v2/_aux_functions.py,sha256=eE-TdEMI_UX3LBDUGwjG5NyUcihDVaHYlG15NlTJ9DI,12872
+fractal_server/app/routes/api/v2/_aux_functions_history.py,sha256=nWO4jBoL3MWuMMwS-6TwxlTHzgRr8Xed30RSeetLvP8,4199
 fractal_server/app/routes/api/v2/_aux_functions_task_lifecycle.py,sha256=qdXCb6IP8-qPEAxGZKljtjIqNzIAyRaAsQSRi5VqFHM,6773
 fractal_server/app/routes/api/v2/_aux_functions_tasks.py,sha256=uhNSs-jcS7ndIUFKiOC1yrDiViw3uvKEXi9UL04BMks,11642
 fractal_server/app/routes/api/v2/dataset.py,sha256=osoWJIA5SZH4aAr-0TG6Uc3877wzsgCLB_Oek59hRjk,9230
-fractal_server/app/routes/api/v2/history.py,sha256=
+fractal_server/app/routes/api/v2/history.py,sha256=9oux9P_BloLLFs0t-bSiKfjaWEH1Al0kzQSr4wFUxac,13042
 fractal_server/app/routes/api/v2/images.py,sha256=wUhYomNLGtJTtu_pD2oQorcH2LISxo64Wxo6ogc4IXc,8185
-fractal_server/app/routes/api/v2/job.py,sha256=
+fractal_server/app/routes/api/v2/job.py,sha256=MU1sHIKk_89WrD0TD44d4ufzqnywot7On_W71KjyUbQ,6500
 fractal_server/app/routes/api/v2/project.py,sha256=hMvL9QLPUcAAiPGy6ta2LBLTVRozJsfvBPl5D06_MHg,6666
-fractal_server/app/routes/api/v2/status_legacy.py,sha256=
+fractal_server/app/routes/api/v2/status_legacy.py,sha256=Q5ZWQNfeZKL8Xgtou2Xr80iaF1uO-r4oSKgq5H42V_8,6349
 fractal_server/app/routes/api/v2/submit.py,sha256=K4OjcSg476JXIeeMUaYdTDk8Qpj5IO5UULvfErI7Y5Y,8624
 fractal_server/app/routes/api/v2/task.py,sha256=O7pquZhXIS4lRs5XqHvstiwe8BiCuS-B3ZKJI1g6EJU,6985
 fractal_server/app/routes/api/v2/task_collection.py,sha256=IDNF6sjDuU37HIQ0TuQA-TZIuf7nfHAQXUUNmkrlhLM,12706
@@ -87,7 +85,7 @@ fractal_server/app/runner/executors/slurm_common/utils_executors.py,sha256=naPyJ
 fractal_server/app/runner/executors/slurm_ssh/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 fractal_server/app/runner/executors/slurm_ssh/_executor_wait_thread.py,sha256=lnW8dNNPqqbpQvojVBQaNJm4wN3Qkw02RWBZ1w68Hyw,3755
 fractal_server/app/runner/executors/slurm_ssh/_slurm_job.py,sha256=IL1C52dezEiincVX2yKryNiPHi4YOMURNLdQO_QPdGw,4406
-fractal_server/app/runner/executors/slurm_ssh/executor.py,sha256=
+fractal_server/app/runner/executors/slurm_ssh/executor.py,sha256=VXMDaQRijYaeKyxly4RkBkRg2inLBZ75rfuODg0Mgr8,53602
 fractal_server/app/runner/executors/slurm_sudo/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 fractal_server/app/runner/executors/slurm_sudo/_check_jobs_status.py,sha256=eZd9lxbObsqc1M3B96IGMJ-1oC0jo8lBOX4Nto97VvE,2036
 fractal_server/app/runner/executors/slurm_sudo/_subprocess_run_as_user.py,sha256=O1bNg1DiSDJmQE0RmOk2Ii47DagiXp5ryd0R6KxO2OM,3177
@@ -105,8 +103,8 @@ fractal_server/app/runner/v2/_slurm_ssh.py,sha256=5w_lwQzySx-R3kVg2Bf-21n5JpWjJA
 fractal_server/app/runner/v2/_slurm_sudo.py,sha256=CzWUeC6at_Sj-wU1myjA68ZRKMiLZYBTLv9I9odUxBU,2914
 fractal_server/app/runner/v2/deduplicate_list.py,sha256=IVTE4abBU1bUprFTkxrTfYKnvkNTanWQ-KWh_etiT08,645
 fractal_server/app/runner/v2/merge_outputs.py,sha256=D1L4Taieq9i71SPQyNc1kMokgHh-sV_MqF3bv7QMDBc,907
-fractal_server/app/runner/v2/runner.py,sha256=
-fractal_server/app/runner/v2/runner_functions.py,sha256=
+fractal_server/app/runner/v2/runner.py,sha256=hsXBFQmoo8fqZC13DxlEwLLSVwsr7AvgTrTfiQXcXyk,15570
+fractal_server/app/runner/v2/runner_functions.py,sha256=9i7fHMr3BFrgguptJ-Pz3Gu68Okp5e3JvIIEqhdrG3Q,22120
 fractal_server/app/runner/v2/runner_functions_low_level.py,sha256=dvvRK7od8iQ8vdPf80uGUxs3i5i0buGjCodBxSjZ7PQ,3671
 fractal_server/app/runner/v2/task_interface.py,sha256=e1GGQSYd0MyBj1EZvEVzqv-HpVE4YffXOq82WLrCaOc,1866
 fractal_server/app/runner/versions.py,sha256=dSaPRWqmFPHjg20kTCHmi_dmGNcCETflDtDLronNanU,852
@@ -116,19 +114,20 @@ fractal_server/app/schemas/_validators.py,sha256=ZzTlTTzRATzf9Snx4Xp67aDmG77GaM2
 fractal_server/app/schemas/user.py,sha256=oCftAKeHdFFowpLyh1G-RiylR8cIO7fTn0PkT5xjs0E,2494
 fractal_server/app/schemas/user_group.py,sha256=Uao1igRYflBu7Dg6Zs0kaFU3zBFJzIwDLrkFfaJk6z8,2176
 fractal_server/app/schemas/user_settings.py,sha256=z7hx54yTrWfjo98oX_1lkeRh1UGrC1dSRH6yIOpnCsY,2772
-fractal_server/app/schemas/v2/__init__.py,sha256=
+fractal_server/app/schemas/v2/__init__.py,sha256=Q1Sozuh6escPbU3SIsmiJbQMFl9kfIcfK-3Ctb-1wXQ,2988
 fractal_server/app/schemas/v2/accounting.py,sha256=Wylt7uWTiDIFlHJOh4XEtYitk2FjFlmnodDrJDxcr0E,397
 fractal_server/app/schemas/v2/dataset.py,sha256=9yc-tte70yPPk4CSfy2imykYVbCW8-23K499pi9z2e0,5206
-fractal_server/app/schemas/v2/dumps.py,sha256=
+fractal_server/app/schemas/v2/dumps.py,sha256=AhCSo4jaGyCdcNkmRvtIZ_QnIl4elu-KUAFlmP5I2rg,2261
+fractal_server/app/schemas/v2/history.py,sha256=aCzr68OdNtHzNHrW43F5uxSCCcT6dMaq-_Dq9GpuJ6k,1100
 fractal_server/app/schemas/v2/job.py,sha256=KhxQOfncpE_SAu7Wed8CXS2G6onh0v875GkotBvKBTY,4304
 fractal_server/app/schemas/v2/manifest.py,sha256=8mmB0QwxEgAeGgwKD_fT-o-wFy7lb6HxNXbp17IJqNY,7281
 fractal_server/app/schemas/v2/project.py,sha256=ulgCmUnX0w-0jrSjVYIT7sxeK95CSNGh2msXydhsgYI,885
-fractal_server/app/schemas/v2/
-fractal_server/app/schemas/v2/task.py,sha256
+fractal_server/app/schemas/v2/status_legacy.py,sha256=vc6C3Xri8222bb9OmsghMz05CNuEalO-t2s_nKo341s,954
+fractal_server/app/schemas/v2/task.py,sha256=8vc8c3ZL6dJo9kyCrfKozgO_pF37BBLvD5XYIXynlEc,6551
 fractal_server/app/schemas/v2/task_collection.py,sha256=dLu4sy-su5k5vDJqCZdJMW8mLT5TX2SV60l_RAvKhwY,5930
 fractal_server/app/schemas/v2/task_group.py,sha256=A3SFHNHLKPJyrnDz-wbnQvycetafKltp6UsH1u-euwA,3850
 fractal_server/app/schemas/v2/workflow.py,sha256=ZpM43zTMyLRnEUtkbr_J5DYP00NwjItaC8gweB7GGAA,2172
-fractal_server/app/schemas/v2/workflowtask.py,sha256=
+fractal_server/app/schemas/v2/workflowtask.py,sha256=rVbmNihDAJL_Sckbt1hBK2JEcb-8Xpxn3McvaomZLmQ,7429
 fractal_server/app/security/__init__.py,sha256=e2cveg5hQpieGD3bSPd5GTOMthvJ-HXH3buSb9WVfEU,14096
 fractal_server/app/security/signup_email.py,sha256=Xd6QYxcdmg0PHpDwmUE8XQmPcOj3Xjy5oROcIMhmltM,1472
 fractal_server/app/user_settings.py,sha256=OP1yiYKtPadxwM51_Q0hdPk3z90TCN4z1BLpQsXyWiU,1316
@@ -206,8 +205,8 @@ fractal_server/tasks/v2/utils_templates.py,sha256=Kc_nSzdlV6KIsO0CQSPs1w70zLyENP
 fractal_server/urls.py,sha256=QjIKAC1a46bCdiPMu3AlpgFbcv6a4l3ABcd5xz190Og,471
 fractal_server/utils.py,sha256=PMwrxWFxRTQRl1b9h-NRIbFGPKqpH_hXnkAT3NfZdpY,3571
 fractal_server/zip_tools.py,sha256=GjDgo_sf6V_DDg6wWeBlZu5zypIxycn_l257p_YVKGc,4876
-fractal_server-2.14.
-fractal_server-2.14.
-fractal_server-2.14.
-fractal_server-2.14.
-fractal_server-2.14.
+fractal_server-2.14.0a8.dist-info/LICENSE,sha256=QKAharUuhxL58kSoLizKJeZE3mTCBnX6ucmz8W0lxlk,1576
+fractal_server-2.14.0a8.dist-info/METADATA,sha256=9Y62LfjS5cZ88MZggxLwL2usMbt3Ny9GPiX8CBhFgpQ,4562
+fractal_server-2.14.0a8.dist-info/WHEEL,sha256=7dDg4QLnNKTvwIDR9Ac8jJaAmBC_owJrckbC0jjThyA,88
+fractal_server-2.14.0a8.dist-info/entry_points.txt,sha256=8tV2kynvFkjnhbtDnxAqImL6HMVKsopgGfew0DOp5UY,58
+fractal_server-2.14.0a8.dist-info/RECORD,,
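Each RECORD entry is a CSV triple `path,hash,size`; the RECORD file itself ends with empty hash and size fields (`RECORD,,`) because it cannot contain its own digest. A short sketch of parsing lines in this format:

```python
import csv
import io

# Two lines in wheel-RECORD format: path, sha256 digest, size in bytes.
record = (
    "fractal_server/__init__.py,sha256=kGAQu3H-ucHD5NhKhR9WUJUdDh_6vyFTxQN6G9BnBpY,25\n"
    "fractal_server-2.14.0a8.dist-info/RECORD,,\n"
)

for path, digest, size in csv.reader(io.StringIO(record)):
    print(path, digest or "<none>", size or "<none>")
```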
fractal_server/app/history/image_updates.py (deleted)
@@ -1,124 +0,0 @@
-# from typing import Optional
-# from sqlalchemy.orm import Session
-# from sqlalchemy.orm.attributes import flag_modified
-# from sqlmodel import select
-# from fractal_server.app.db import get_sync_db
-# from fractal_server.app.history.status_enum import HistoryItemImageStatus
-# from fractal_server.app.models.v2 import HistoryItemV2
-# from fractal_server.app.models.v2 import ImageStatus
-# from fractal_server.logger import set_logger
-# logger = set_logger(__name__)
-# def _update_single_image_status(
-#     *,
-#     zarr_url: str,
-#     workflowtask_id: int,
-#     dataset_id: int,
-#     status: HistoryItemImageStatus,
-#     db: Session,
-#     commit: bool = True,
-#     logfile: Optional[str] = None,
-# ) -> None:
-#     image_status = db.get(
-#         ImageStatus,
-#         (
-#             zarr_url,
-#             workflowtask_id,
-#             dataset_id,
-#         ),
-#     )
-#     if image_status is None:
-#         raise RuntimeError("This should have not happened")
-#     image_status.status = status
-#     if logfile is not None:
-#         image_status.logfile = logfile
-#     db.add(image_status)
-#     if commit:
-#         db.commit()
-# def update_single_image(
-#     *,
-#     history_item_id: int,
-#     zarr_url: str,
-#     status: HistoryItemImageStatus,
-# ) -> None:
-#     logger.debug(
-#         f"[update_single_image] {history_item_id=}, {status=}, {zarr_url=}"
-#     )
-#     # Note: thanks to `with_for_update`, a lock is acquired and kept
-#     # until `db.commit()`
-#     with next(get_sync_db()) as db:
-#         stm = (
-#             select(HistoryItemV2)
-#             .where(HistoryItemV2.id == history_item_id)
-#             .with_for_update(nowait=False)
-#         )
-#         history_item = db.execute(stm).scalar_one()
-#         history_item.images[zarr_url] = status
-#         flag_modified(history_item, "images")
-#         db.commit()
-#         _update_single_image_status(
-#             zarr_url=zarr_url,
-#             dataset_id=history_item.dataset_id,
-#             workflowtask_id=history_item.workflowtask_id,
-#             commit=True,
-#             status=status,
-#             db=db,
-#         )
-# def update_single_image_logfile(
-#     *,
-#     history_item_id: int,
-#     zarr_url: str,
-#     logfile: str,
-# ) -> None:
-#     logger.debug(
-#         "[update_single_image_logfile] "
-#         f"{history_item_id=}, {logfile=}, {zarr_url=}"
-#     )
-#     with next(get_sync_db()) as db:
-#         history_item = db.get(HistoryItemV2, history_item_id)
-#         image_status = db.get(
-#             ImageStatus,
-#             (
-#                 zarr_url,
-#                 history_item.workflowtask_id,
-#                 history_item.dataset_id,
-#             ),
-#         )
-#         if image_status is None:
-#             raise RuntimeError("This should have not happened")
-#         image_status.logfile = logfile
-#         db.merge(image_status)
-#         db.commit()
-# def update_all_images(
-#     *,
-#     history_item_id: int,
-#     status: HistoryItemImageStatus,
-#     logfile: Optional[str] = None,
-# ) -> None:
-#     logger.debug(f"[update_all_images] {history_item_id=}, {status=}")
-#     # Note: thanks to `with_for_update`, a lock is acquired and kept
-#     # until `db.commit()`
-#     stm = (
-#         select(HistoryItemV2)
-#         .where(HistoryItemV2.id == history_item_id)
-#         .with_for_update(nowait=False)
-#     )
-#     with next(get_sync_db()) as db:
-#         history_item = db.execute(stm).scalar_one()
-#         new_images = {
-#             zarr_url: status for zarr_url in history_item.images.keys()
-#         }
-#         history_item.images = new_images
-#         flag_modified(history_item, "images")
-#         db.commit()
-#         # FIXME: Make this a bulk edit, if possible
-#         for ind, zarr_url in enumerate(history_item.images.keys()):
-#             _update_single_image_status(
-#                 zarr_url=zarr_url,
-#                 dataset_id=history_item.dataset_id,
-#                 workflowtask_id=history_item.workflowtask_id,
-#                 commit=False,
-#                 status=status,
-#                 logfile=logfile,
-#                 db=db,
-#             )
-#         db.commit()