fractal-server 2.14.0a36__py3-none-any.whl → 2.14.1__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
- fractal_server/__init__.py +1 -1
- fractal_server/app/routes/api/v2/__init__.py +2 -2
- fractal_server/app/routes/api/v2/pre_submission_checks.py +136 -0
- fractal_server/app/runner/compress_folder.py +15 -3
- fractal_server/app/runner/executors/slurm_common/base_slurm_runner.py +0 -10
- fractal_server/app/runner/executors/slurm_common/remote.py +1 -3
- fractal_server/app/runner/executors/slurm_sudo/runner.py +1 -1
- fractal_server/app/runner/extract_archive.py +21 -5
- fractal_server/app/runner/v2/runner.py +88 -73
- fractal_server/config.py +0 -5
- fractal_server/logger.py +8 -3
- {fractal_server-2.14.0a36.dist-info → fractal_server-2.14.1.dist-info}/METADATA +1 -1
- {fractal_server-2.14.0a36.dist-info → fractal_server-2.14.1.dist-info}/RECORD +16 -16
- fractal_server/app/routes/api/v2/verify_image_types.py +0 -61
- {fractal_server-2.14.0a36.dist-info → fractal_server-2.14.1.dist-info}/LICENSE +0 -0
- {fractal_server-2.14.0a36.dist-info → fractal_server-2.14.1.dist-info}/WHEEL +0 -0
- {fractal_server-2.14.0a36.dist-info → fractal_server-2.14.1.dist-info}/entry_points.txt +0 -0
fractal_server/__init__.py
CHANGED
@@ -1 +1 @@
-__VERSION__ = "2.14.0a36"
+__VERSION__ = "2.14.1"
fractal_server/app/routes/api/v2/__init__.py
CHANGED
@@ -7,6 +7,7 @@ from .dataset import router as dataset_router_v2
 from .history import router as history_router_v2
 from .images import router as images_routes_v2
 from .job import router as job_router_v2
+from .pre_submission_checks import router as pre_submission_checks_router
 from .project import router as project_router_v2
 from .status_legacy import router as status_legacy_router_v2
 from .submit import router as submit_job_router_v2
@@ -15,7 +16,6 @@ from .task_collection import router as task_collection_router_v2
 from .task_collection_custom import router as task_collection_router_v2_custom
 from .task_group import router as task_group_router_v2
 from .task_group_lifecycle import router as task_group_lifecycle_router_v2
-from .verify_image_types import router as verify_image_types_router
 from .workflow import router as workflow_router_v2
 from .workflow_import import router as workflow_import_router_v2
 from .workflowtask import router as workflowtask_router_v2
@@ -26,7 +26,7 @@ from fractal_server.syringe import Inject
 router_api_v2 = APIRouter()
 
 router_api_v2.include_router(dataset_router_v2, tags=["V2 Dataset"])
-router_api_v2.include_router(verify_image_types_router, tags=["V2 Job"])
+router_api_v2.include_router(pre_submission_checks_router, tags=["V2 Job"])
 router_api_v2.include_router(job_router_v2, tags=["V2 Job"])
 router_api_v2.include_router(images_routes_v2, tags=["V2 Images"])
 router_api_v2.include_router(project_router_v2, tags=["V2 Project"])
fractal_server/app/routes/api/v2/pre_submission_checks.py
ADDED
@@ -0,0 +1,136 @@
+from typing import Optional
+
+from fastapi import APIRouter
+from fastapi import Depends
+from fastapi import status
+from fastapi.responses import JSONResponse
+from pydantic import BaseModel
+from pydantic import Field
+from sqlmodel import select
+
+from ._aux_functions import _get_dataset_check_owner
+from ._aux_functions import _get_workflow_task_check_owner
+from .images import ImageQuery
+from fractal_server.app.db import AsyncSession
+from fractal_server.app.db import get_async_db
+from fractal_server.app.models import UserOAuth
+from fractal_server.app.models.v2 import HistoryImageCache
+from fractal_server.app.models.v2 import HistoryUnit
+from fractal_server.app.routes.auth import current_active_user
+from fractal_server.app.schemas.v2 import HistoryUnitStatus
+from fractal_server.images.models import AttributeFiltersType
+from fractal_server.images.tools import aggregate_types
+from fractal_server.images.tools import filter_image_list
+
+router = APIRouter()
+
+
+@router.post(
+    "/project/{project_id}/dataset/{dataset_id}/images/verify-unique-types/",
+    status_code=status.HTTP_200_OK,
+)
+async def verify_unique_types(
+    project_id: int,
+    dataset_id: int,
+    query: Optional[ImageQuery] = None,
+    user: UserOAuth = Depends(current_active_user),
+    db: AsyncSession = Depends(get_async_db),
+) -> list[str]:
+    # Get dataset
+    output = await _get_dataset_check_owner(
+        project_id=project_id, dataset_id=dataset_id, user_id=user.id, db=db
+    )
+    dataset = output["dataset"]
+
+    # Filter images
+    if query is None:
+        filtered_images = dataset.images
+    else:
+        filtered_images = filter_image_list(
+            images=dataset.images,
+            attribute_filters=query.attribute_filters,
+            type_filters=query.type_filters,
+        )
+
+    # Get actual values for each available type
+    available_types = aggregate_types(filtered_images)
+    values_per_type: dict[str, set] = {
+        _type: set() for _type in available_types
+    }
+    for _img in filtered_images:
+        for _type in available_types:
+            values_per_type[_type].add(_img["types"].get(_type, False))
+
+    # Find types with non-unique value
+    non_unique_types = [
+        key for key, value in values_per_type.items() if len(value) > 1
+    ]
+    non_unique_types = sorted(non_unique_types)
+
+    return non_unique_types
+
+
+class NonProcessedImagesPayload(BaseModel):
+    attribute_filters: AttributeFiltersType = Field(default_factory=dict)
+    type_filters: dict[str, bool] = Field(default_factory=dict)
+
+
+@router.post(
+    "/project/{project_id}/dataset/{dataset_id}/images/non-processed/",
+    status_code=status.HTTP_200_OK,
+)
+async def check_workflowtask(
+    project_id: int,
+    dataset_id: int,
+    workflow_id: int,
+    workflowtask_id: int,
+    filters: NonProcessedImagesPayload,
+    user: UserOAuth = Depends(current_active_user),
+    db: AsyncSession = Depends(get_async_db),
+) -> JSONResponse:
+
+    db_workflow_task, db_workflow = await _get_workflow_task_check_owner(
+        project_id=project_id,
+        workflow_task_id=workflowtask_id,
+        workflow_id=workflow_id,
+        user_id=user.id,
+        db=db,
+    )
+
+    if db_workflow_task.order == 0:
+        return JSONResponse(status_code=200, content=[])
+
+    previous_wft = db_workflow.task_list[db_workflow_task.order - 1]
+
+    if previous_wft.task.output_types != {}:
+        return JSONResponse(status_code=200, content=[])
+
+    res = await _get_dataset_check_owner(
+        project_id=project_id,
+        dataset_id=dataset_id,
+        user_id=user.id,
+        db=db,
+    )
+    dataset = res["dataset"]
+    filtered_images = filter_image_list(
+        images=dataset.images,
+        type_filters=filters.type_filters,
+        attribute_filters=filters.attribute_filters,
+    )
+
+    filtered_zarr_urls = [image["zarr_url"] for image in filtered_images]
+
+    res = await db.execute(
+        select(HistoryImageCache.zarr_url)
+        .join(HistoryUnit)
+        .where(HistoryImageCache.zarr_url.in_(filtered_zarr_urls))
+        .where(HistoryImageCache.dataset_id == dataset_id)
+        .where(HistoryImageCache.workflowtask_id == previous_wft.id)
+        .where(HistoryImageCache.latest_history_unit_id == HistoryUnit.id)
+        .where(HistoryUnit.status == HistoryUnitStatus.DONE)
+    )
+    done_zarr_urls = res.scalars().all()
+
+    missing_zarr_urls = list(set(filtered_zarr_urls) - set(done_zarr_urls))
+
+    return JSONResponse(status_code=200, content=missing_zarr_urls)
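For orientation, a minimal client sketch against the two endpoints added above, assuming a local deployment; the base URL, numeric IDs, and bearer token are hypothetical placeholders, and `workflow_id`/`workflowtask_id` travel as query parameters since they are not part of the `non-processed` path:

```python
# Hypothetical client for the two new pre-submission checks.
import httpx

BASE_URL = "http://localhost:8000/api/v2"  # placeholder deployment URL
HEADERS = {"Authorization": "Bearer <token>"}  # placeholder token

with httpx.Client(base_url=BASE_URL, headers=HEADERS) as client:
    # 1) Types that take more than one value across the filtered images
    res = client.post(
        "/project/1/dataset/1/images/verify-unique-types/",
        json={"attribute_filters": {}, "type_filters": {}},
    )
    non_unique_types = res.json()  # e.g. ["illumination_corrected"]

    # 2) zarr_urls of filtered images not yet processed by the previous task
    res = client.post(
        "/project/1/dataset/1/images/non-processed/",
        params={"workflow_id": 1, "workflowtask_id": 2},
        json={"attribute_filters": {}, "type_filters": {}},
    )
    missing_zarr_urls = res.json()
```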
fractal_server/app/runner/compress_folder.py
CHANGED
@@ -80,6 +80,7 @@ def _remove_temp_subfolder(subfolder_path_tmp_copy: Path, logger_name: str):
 def compress_folder(
     subfolder_path: Path,
     filelist_path: str | None,
+    default_logging_level: int | None = None,
 ) -> str:
     """
     Compress e.g. `/path/archive` into `/path/archive.tar.gz`
@@ -90,13 +91,17 @@ def compress_folder(
     Args:
         subfolder_path: Absolute path to the folder to compress.
         remote_to_local: If `True`, exclude some files from the tar.gz archive.
+        default_logging_level:
 
     Returns:
         Absolute path to the tar.gz archive.
     """
 
     logger_name = "compress_folder"
-    logger = set_logger(logger_name)
+    logger = set_logger(
+        logger_name,
+        default_logging_level=default_logging_level,
+    )
 
     logger.debug("START")
     logger.debug(f"{subfolder_path=}")
@@ -132,7 +137,10 @@ def compress_folder(
     )
 
 
-def main(sys_argv: list[str]):
+def main(
+    sys_argv: list[str],
+    default_logging_level: int | None = None,
+):
 
     help_msg = (
         "Expected use:\n"
@@ -146,15 +154,19 @@ def main(sys_argv: list[str]):
         compress_folder(
             subfolder_path=Path(sys_argv[1]),
             filelist_path=None,
+            default_logging_level=default_logging_level,
         )
     elif num_args == 3 and sys_argv[2] == "--filelist":
         compress_folder(
             subfolder_path=Path(sys_argv[1]),
             filelist_path=sys_argv[3],
+            default_logging_level=default_logging_level,
         )
     else:
         sys.exit(f"Invalid argument.\n{help_msg}\nProvided: {sys_argv[1:]=}")
 
 
 if __name__ == "__main__":
-    main(sys.argv)
+    import logging
+
+    main(sys.argv, default_logging_level=logging.DEBUG)
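As a usage sketch of the new keyword (the subfolder path is a hypothetical placeholder), callers can now pin the stream-handler level instead of relying on the `FRACTAL_LOGGING_LEVEL` setting:

```python
# Sketch: compress a job subfolder with DEBUG-level stream logging.
import logging
from pathlib import Path

from fractal_server.app.runner.compress_folder import compress_folder

tarfile_path = compress_folder(
    subfolder_path=Path("/somewhere/job_0000000"),  # placeholder path
    filelist_path=None,
    default_logging_level=logging.DEBUG,
)
print(tarfile_path)  # e.g. "/somewhere/job_0000000.tar.gz"
```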
fractal_server/app/runner/executors/slurm_common/base_slurm_runner.py
CHANGED
@@ -524,12 +524,6 @@ class BaseSlurmRunner(BaseRunner):
             slurm_job_ids=self.job_ids,
         )
 
-        # NOTE: see issue 2444
-        settings = Inject(get_settings)
-        sleep_time = settings.FRACTAL_SLURM_INTERVAL_BEFORE_RETRIEVAL
-        logger.warning(f"[submit] Now sleep {sleep_time} seconds.")
-        time.sleep(sleep_time)
-
         # Retrieval phase
         logger.debug("[submit] START retrieval phase")
         scancelled_job_ids = []
@@ -705,10 +699,6 @@ class BaseSlurmRunner(BaseRunner):
                 slurm_job_ids=self.job_ids,
             )
 
-            settings = Inject(get_settings)
-            sleep_time = settings.FRACTAL_SLURM_INTERVAL_BEFORE_RETRIEVAL
-            logger.warning(f"[multisubmit] Now sleep {sleep_time} seconds.")
-            time.sleep(sleep_time)
         except Exception as e:
             logger.error(
                 "[multisubmit] Unexpected exception during submission."
fractal_server/app/runner/executors/slurm_common/remote.py
CHANGED
@@ -148,10 +148,8 @@ def worker(
         out = cloudpickle.dumps(result)
 
     # Write the output pickle file
-
-    with open(tempfile, "wb") as f:
+    with open(out_fname, "wb") as f:
         f.write(out)
-    os.rename(tempfile, out_fname)
 
 
 if __name__ == "__main__":
fractal_server/app/runner/extract_archive.py
CHANGED
@@ -25,7 +25,10 @@ def _remove_suffix(*, string: str, suffix: str) -> str:
     raise ValueError(f"Cannot remove {suffix=} from {string=}.")
 
 
-def extract_archive(archive_path: Path):
+def extract_archive(
+    archive_path: Path,
+    default_logging_level: int | None = None,
+):
     """
     Extract e.g. `/path/archive.tar.gz` archive into `/path/archive` folder
 
@@ -34,10 +37,14 @@ def extract_archive(archive_path: Path):
 
     Arguments:
         archive_path: Absolute path to the archive file.
+        default_logging_level
     """
 
     logger_name = "extract_archive"
-    logger = set_logger(logger_name)
+    logger = set_logger(
+        logger_name,
+        default_logging_level=default_logging_level,
+    )
 
     logger.debug("START")
     logger.debug(f"{archive_path.as_posix()=}")
@@ -65,7 +72,10 @@ def extract_archive(archive_path: Path):
     logger.debug("END")
 
 
-def main(sys_argv: list[str]):
+def main(
+    sys_argv: list[str],
+    default_logging_level: int | None = None,
+):
     help_msg = (
         "Expected use:\n"
         "python -m fractal_server.app.runner.extract_archive "
@@ -76,8 +86,14 @@ def main(sys_argv: list[str]):
         sys.exit(f"Invalid argument.\n{help_msg}\nProvided: {sys_argv[1:]=}")
     else:
         tarfile_path = Path(sys_argv[1])
-        extract_archive(tarfile_path)
+        extract_archive(
+            tarfile_path,
+            default_logging_level=default_logging_level,
+        )
 
 
 if __name__ == "__main__":
-    main(sys.argv)
+
+    import logging
+
+    main(sys.argv, default_logging_level=logging.DEBUG)
fractal_server/app/runner/v2/runner.py
CHANGED
@@ -36,6 +36,36 @@ from fractal_server.images.models import AttributeFiltersType
 from fractal_server.images.tools import merge_type_filters
 
 
+def drop_none_attributes(attributes: dict[str, Any]) -> dict[str, Any]:
+    # Unset attributes with `None` value
+    non_none_attributes = {
+        key: value for key, value in attributes.items() if value is not None
+    }
+    return non_none_attributes
+
+
+def get_origin_attribute_and_types(
+    *,
+    origin_url: str,
+    images: list[dict[str, Any]],
+) -> tuple[dict[str, Any], dict[str, bool]]:
+    """
+    Search for origin image and extract its attributes/types.
+    """
+    origin_img_search = find_image_by_zarr_url(
+        images=images,
+        zarr_url=origin_url,
+    )
+    if origin_img_search is None:
+        updated_attributes = {}
+        updated_types = {}
+    else:
+        origin_image = origin_img_search["image"]
+        updated_attributes = copy(origin_image["attributes"])
+        updated_types = copy(origin_image["types"])
+    return updated_attributes, updated_types
+
+
 def execute_tasks_v2(
     *,
     wf_task_list: list[WorkflowTaskV2],
@@ -221,19 +251,7 @@
         # status.
         for image_obj in current_task_output.image_list_updates:
             image = image_obj.model_dump()
-
-            tmp_image_paths = [img["zarr_url"] for img in tmp_images]
-            if image["zarr_url"] in tmp_image_paths:
-                if (
-                    image["origin"] is not None
-                    and image["origin"] != image["zarr_url"]
-                ):
-                    raise JobExecutionError(
-                        "Cannot edit an image with zarr_url different from "
-                        "origin.\n"
-                        f"zarr_url={image['zarr_url']}\n"
-                        f"origin={image['origin']}"
-                    )
+            if image["zarr_url"] in [img["zarr_url"] for img in tmp_images]:
                 img_search = find_image_by_zarr_url(
                     images=tmp_images,
                     zarr_url=image["zarr_url"],
@@ -244,77 +262,74 @@
                         f"Image with zarr_url {image['zarr_url']} not found, "
                         "while updating image list."
                     )
-
-                original_index = img_search["index"]
-                updated_attributes = copy(original_img["attributes"])
-                updated_types = copy(original_img["types"])
-
-                # Update image attributes/types with task output and manifest
-                updated_attributes.update(image["attributes"])
-                updated_types.update(image["types"])
-                updated_types.update(task.output_types)
-
-                # Unset attributes with None value
-                updated_attributes = {
-                    key: value
-                    for key, value in updated_attributes.items()
-                    if value is not None
-                }
+                existing_image_index = img_search["index"]
 
+                if (
+                    image["origin"] is None
+                    or image["origin"] == image["zarr_url"]
+                ):
+                    # CASE 1: Edit existing image
+                    existing_image = img_search["image"]
+                    new_attributes = copy(existing_image["attributes"])
+                    new_types = copy(existing_image["types"])
+                    new_image = dict(
+                        zarr_url=image["zarr_url"],
+                    )
+                    if "origin" in existing_image.keys():
+                        new_image["origin"] = existing_image["origin"]
+                else:
+                    # CASE 2: Re-create existing image based on `origin`
+                    # Propagate attributes and types from `origin` (if any)
+                    new_attributes, new_types = get_origin_attribute_and_types(
+                        origin_url=image["origin"],
+                        images=tmp_images,
+                    )
+                    new_image = dict(
+                        zarr_url=image["zarr_url"],
+                        origin=image["origin"],
+                    )
+                # Update attributes
+                new_attributes.update(image["attributes"])
+                new_attributes = drop_none_attributes(new_attributes)
+                new_image["attributes"] = new_attributes
+                # Update types
+                new_types.update(image["types"])
+                new_types.update(task.output_types)
+                new_image["types"] = new_types
                 # Validate new image
-                SingleImage(
-                    zarr_url=image["zarr_url"],
-                    types=updated_types,
-                    attributes=updated_attributes,
-                )
-
+                SingleImage(**new_image)
                 # Update image in the dataset image list
-                tmp_images[…
-
-            # Add new image
+                tmp_images[existing_image_index] = new_image
+
             else:
-                # …
-                …
-                        f"zarr_url: {image['zarr_url']}"
-                    )
-                # Check that image['zarr_url'] is not equal to zarr_dir
-                if image["zarr_url"] == zarr_dir:
+                # CASE 3: Add new image
+                # Check that image['zarr_url'] is a subfolder of zarr_dir
+                if (
+                    not image["zarr_url"].startswith(zarr_dir)
+                    or image["zarr_url"] == zarr_dir
+                ):
                     raise JobExecutionError(
-                        "Cannot create image if zarr_url is equal to "
-                        "zarr_dir.\n"
+                        "Cannot create image if zarr_url is not a subfolder "
+                        "of zarr_dir.\n"
                        f"zarr_dir: {zarr_dir}\n"
                         f"zarr_url: {image['zarr_url']}"
                     )
+
                 # Propagate attributes and types from `origin` (if any)
-                …
-                updated_attributes = copy(original_img["attributes"])
-                updated_types = copy(original_img["types"])
-                # Update image attributes/types with task output and manifest
-                updated_attributes.update(image["attributes"])
-                updated_attributes = {
-                    key: value
-                    for key, value in updated_attributes.items()
-                    if value is not None
-                }
-                updated_types.update(image["types"])
-                updated_types.update(task.output_types)
+                new_attributes, new_types = get_origin_attribute_and_types(
+                    origin_url=image["origin"],
+                    images=tmp_images,
+                )
+                # Prepare new image
+                new_attributes.update(image["attributes"])
+                new_attributes = drop_none_attributes(new_attributes)
+                new_types.update(image["types"])
+                new_types.update(task.output_types)
                 new_image = dict(
                     zarr_url=image["zarr_url"],
                     origin=image["origin"],
-                    attributes=updated_attributes,
-                    types=updated_types,
+                    attributes=new_attributes,
+                    types=new_types,
                 )
                 # Validate new image
                 SingleImage(**new_image)
@@ -381,7 +396,7 @@ def execute_tasks_v2(
                 status=HistoryUnitStatus.FAILED,
                 db_sync=db,
             )
-            logger.…
+            logger.warning(
                 f'END {wftask.order}-th task (name="{task_name}") - '
                 "ERROR."
             )
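For intuition, a standalone sketch of the attribute-merge rule these hunks implement (simplified names; the dictionaries are illustrative, not from the package): task-provided attributes are layered on top of the origin image's attributes, and a `None` value unsets a key.

```python
# Standalone sketch of the merge rule used for updated/new images.
from typing import Any


def drop_none_attributes(attributes: dict[str, Any]) -> dict[str, Any]:
    # Keep only attributes whose value is not None
    return {key: value for key, value in attributes.items() if value is not None}


origin_attributes = {"well": "A01", "plate": "plate_1"}  # illustrative
task_update = {"well": "A02", "plate": None}  # None unsets "plate"

new_attributes = {**origin_attributes, **task_update}
new_attributes = drop_none_attributes(new_attributes)
assert new_attributes == {"well": "A02"}
```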
fractal_server/config.py
CHANGED
@@ -492,11 +492,6 @@ class Settings(BaseSettings):
     still running on SLURM.
     """
 
-    FRACTAL_SLURM_INTERVAL_BEFORE_RETRIEVAL: int = 2
-    """
-    NOTE: see issue 2444
-    """
-
     FRACTAL_SLURM_SBATCH_SLEEP: float = 0
     """
     Interval to wait (in seconds) between two subsequent `sbatch` calls, when
fractal_server/logger.py
CHANGED
@@ -58,6 +58,7 @@ def set_logger(
     logger_name: str,
     *,
     log_file_path: Optional[Union[str, Path]] = None,
+    default_logging_level: Optional[int] = None,
 ) -> logging.Logger:
     """
     Set up a `fractal-server` logger
@@ -66,7 +67,8 @@ def set_logger(
 
     * The attribute `Logger.propagate` set to `False`;
     * One and only one `logging.StreamHandler` handler, with severity level set
-      to `FRACTAL_LOGGING_LEVEL`, and formatter set as in the `logger.LOG_FORMAT`
+      to `FRACTAL_LOGGING_LEVEL` (or `default_logging_level`, if set), and
+      formatter set as in the `logger.LOG_FORMAT`
       variable from the current module;
     * One or many `logging.FileHandler` handlers, including one pointint to
       `log_file_path` (if set); all these handlers have severity level set to
@@ -75,6 +77,7 @@ def set_logger(
     Args:
         logger_name: The identifier of the logger.
         log_file_path: Path to the log file.
+        default_logging_level: Override for `settings.FRACTAL_LOGGING_LEVEL`
 
     Returns:
         logger: The logger, as configured by the arguments.
@@ -92,8 +95,10 @@ def set_logger(
 
     if not current_stream_handlers:
         stream_handler = logging.StreamHandler()
-        settings = Inject(get_settings)
-        stream_handler.setLevel(settings.FRACTAL_LOGGING_LEVEL)
+        if default_logging_level is None:
+            settings = Inject(get_settings)
+            default_logging_level = settings.FRACTAL_LOGGING_LEVEL
+        stream_handler.setLevel(default_logging_level)
         stream_handler.setFormatter(LOG_FORMATTER)
         logger.addHandler(stream_handler)
 
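As a usage sketch (the logger name is arbitrary), the new keyword pins the stream-handler level directly, bypassing the `FRACTAL_LOGGING_LEVEL` setting:

```python
# Sketch: a logger whose stream handler is pinned to DEBUG.
import logging

from fractal_server.logger import set_logger

logger = set_logger("example_logger", default_logging_level=logging.DEBUG)
logger.debug("Stream handler level is DEBUG, regardless of FRACTAL_LOGGING_LEVEL.")
```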
{fractal_server-2.14.0a36.dist-info → fractal_server-2.14.1.dist-info}/RECORD
CHANGED
@@ -1,4 +1,4 @@
-fractal_server/__init__.py,sha256=…
+fractal_server/__init__.py,sha256=zrQ59wiPvBcqQCbaRka9aGjpdRmNIUKLOvb61FFZ9bI,23
 fractal_server/__main__.py,sha256=rkM8xjY1KeS3l63irB8yCrlVobR-73uDapC4wvrIlxI,6957
 fractal_server/alembic.ini,sha256=MWwi7GzjzawI9cCAK1LW7NxIBQDUqD12-ptJoq5JpP0,3153
 fractal_server/app/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -30,7 +30,7 @@ fractal_server/app/routes/admin/v2/task.py,sha256=QOwgyDU9m7T_wLMwkdgfFaoMjNxcDg
 fractal_server/app/routes/admin/v2/task_group.py,sha256=LG41hAsllBL6kc-JLxRNG_IrI6frIKrIF3xD0GeeTiI,7173
 fractal_server/app/routes/admin/v2/task_group_lifecycle.py,sha256=0e0ZJ_k75TVHaT2o8Xk33DPDSgh-eBhZf-y4y7t-Adg,9429
 fractal_server/app/routes/api/__init__.py,sha256=B8l6PSAhR10iZqHEiyTat-_0tkeKdrCigIE6DJGP5b8,638
-fractal_server/app/routes/api/v2/__init__.py,sha256=…
+fractal_server/app/routes/api/v2/__init__.py,sha256=WE6Znay9R_4_78eSKBMi-s5_lk3oLxWqyAGya4JcYY8,2474
 fractal_server/app/routes/api/v2/_aux_functions.py,sha256=eE-TdEMI_UX3LBDUGwjG5NyUcihDVaHYlG15NlTJ9DI,12872
 fractal_server/app/routes/api/v2/_aux_functions_history.py,sha256=ZlI6nwzB5r9AiY0C8TzJS_EQOTPKgkRYl3GpxFAu2bg,4430
 fractal_server/app/routes/api/v2/_aux_functions_task_lifecycle.py,sha256=qdXCb6IP8-qPEAxGZKljtjIqNzIAyRaAsQSRi5VqFHM,6773
@@ -39,6 +39,7 @@ fractal_server/app/routes/api/v2/dataset.py,sha256=h5AhE0sdhQ20ZlIbEJsFnHIOUW0S1
 fractal_server/app/routes/api/v2/history.py,sha256=pDztvwQFOh3JChtSk9GIG3H17yg4G5pk1mq14qXF4Ck,17793
 fractal_server/app/routes/api/v2/images.py,sha256=BGpO94gVd8BTpCN6Mun2RXmjrPmfkIp73m8RN7uiGW4,8361
 fractal_server/app/routes/api/v2/job.py,sha256=MU1sHIKk_89WrD0TD44d4ufzqnywot7On_W71KjyUbQ,6500
+fractal_server/app/routes/api/v2/pre_submission_checks.py,sha256=2E15RXBtGEg1Mt05Rs12RPPglrYqXR4lVIKJQQtufkQ,4479
 fractal_server/app/routes/api/v2/project.py,sha256=uAZgATiHcOvbnRX-vv1D3HoaEUvLUd7vzVmGcqOP8ZY,4602
 fractal_server/app/routes/api/v2/status_legacy.py,sha256=Q5ZWQNfeZKL8Xgtou2Xr80iaF1uO-r4oSKgq5H42V_8,6349
 fractal_server/app/routes/api/v2/submit.py,sha256=hCwwC6bXP7EyhgGyVLv1ClybRH1YytDVoPunOzpsf0s,8822
@@ -47,7 +48,6 @@ fractal_server/app/routes/api/v2/task_collection.py,sha256=IDNF6sjDuU37HIQ0TuQA-
 fractal_server/app/routes/api/v2/task_collection_custom.py,sha256=totsl0DOC2DFLw8vgqOFivvftpEk3KbFDeOHT0UVQUs,5997
 fractal_server/app/routes/api/v2/task_group.py,sha256=62zcVTdheXM5V3WmFuqisIqgETjXmZaRpNMcDX5bXS0,7408
 fractal_server/app/routes/api/v2/task_group_lifecycle.py,sha256=3o9bCC8ubMwffQPPaxQZy-CjH9IB2RkIReIecI6L2_w,9300
-fractal_server/app/routes/api/v2/verify_image_types.py,sha256=RBi6-3Sp1wYm_obDPRcEBtLvRfsRknufbZyhGGHVo6I,1924
 fractal_server/app/routes/api/v2/workflow.py,sha256=sW6Nm7dfzUY354hawyEkpQHy7rUvV2FCV8DPorH-TDU,10270
 fractal_server/app/routes/api/v2/workflow_import.py,sha256=INmnhlMEBJp-vHPR0f940DANPmIidts3OfcooeM_aNA,11205
 fractal_server/app/routes/api/v2/workflowtask.py,sha256=7_syX2EO7ibF6Xkm7HBPhsUYq6aYnKNeC5iSaafQhG4,11342
@@ -67,7 +67,7 @@ fractal_server/app/routes/aux/validate_user_settings.py,sha256=FLVi__8YFcm_6c_K5
 fractal_server/app/routes/pagination.py,sha256=L8F5JqekF39qz-LpeScdlhb57MQnSRXjK4ZEtsZqYLk,1210
 fractal_server/app/runner/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 fractal_server/app/runner/components.py,sha256=-Ii5l8d_V6f5DFOd-Zsr8VYmOsyqw0Hox9fEFQiuqxY,66
-fractal_server/app/runner/compress_folder.py,sha256=…
+fractal_server/app/runner/compress_folder.py,sha256=PpamheggkacuAFhSiHkuW76x9zwawouqNySPBDyAuRI,5245
 fractal_server/app/runner/exceptions.py,sha256=JC5ufHyeA1hYD_rkZUscI30DD8D903ncag7Z3AArmUY,4215
 fractal_server/app/runner/executors/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 fractal_server/app/runner/executors/base_runner.py,sha256=4xxMpYycIeAOz5niaJj2xtVW_Cq-shCxP1qk4g-KwOM,5137
@@ -78,17 +78,17 @@ fractal_server/app/runner/executors/slurm_common/__init__.py,sha256=47DEQpj8HBSa
 fractal_server/app/runner/executors/slurm_common/_batching.py,sha256=ZY020JZlDS5mfpgpWTChQkyHU7iLE5kx2HVd57_C6XA,8850
 fractal_server/app/runner/executors/slurm_common/_job_states.py,sha256=nuV-Zba38kDrRESOVB3gaGbrSPZc4q7YGichQaeqTW0,238
 fractal_server/app/runner/executors/slurm_common/_slurm_config.py,sha256=_feRRnVVnvQa3AsOQqfULfOgaoj2o6Ze0-fwXwic8p4,15795
-fractal_server/app/runner/executors/slurm_common/base_slurm_runner.py,sha256=…
+fractal_server/app/runner/executors/slurm_common/base_slurm_runner.py,sha256=bqFD9uZfwXqmVXsScrgANp09zvbuc_LSGCDAmBV0wqo,33379
 fractal_server/app/runner/executors/slurm_common/get_slurm_config.py,sha256=BW6fDpPyB0VH5leVxvwzkVH3r3hC7DuSyoWmRzHITWg,7305
-fractal_server/app/runner/executors/slurm_common/remote.py,sha256=…
+fractal_server/app/runner/executors/slurm_common/remote.py,sha256=WdzVSLYKOxLb9NmrvwZu0voXRPTO1gyKTUUuRJwu9Lc,5861
 fractal_server/app/runner/executors/slurm_common/slurm_job_task_models.py,sha256=RoxHLKOn0_wGjnY0Sv0a9nDSiqxYZHKRoMkT3p9_G1E,3607
 fractal_server/app/runner/executors/slurm_common/utils_executors.py,sha256=naPyJI0I3lD-sYHbSXbMFGUBK4h_SggA5V91Z1Ch1Xg,1416
 fractal_server/app/runner/executors/slurm_ssh/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 fractal_server/app/runner/executors/slurm_ssh/runner.py,sha256=yKK_cjskHDiasn_QQ-k14GhplP3tNaK7Kp4yiVn44Y0,9437
 fractal_server/app/runner/executors/slurm_sudo/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 fractal_server/app/runner/executors/slurm_sudo/_subprocess_run_as_user.py,sha256=BlOz4NElv3v7rUYefyeki33uaJxcSDk6rPuVZx9ocdw,2776
-fractal_server/app/runner/executors/slurm_sudo/runner.py,sha256=…
-fractal_server/app/runner/extract_archive.py,sha256=…
+fractal_server/app/runner/executors/slurm_sudo/runner.py,sha256=zT-DH61oPEq4dvo08EMnXv3QMYpwAZ3teY7R3MJ5G_8,6291
+fractal_server/app/runner/extract_archive.py,sha256=8h6ZX7Gy0Vqv5KmrEGbWGPuA0MvW207cQZ-8CPYjwXc,2800
 fractal_server/app/runner/filenames.py,sha256=lPnxKHtdRizr6FqG3zOdjDPyWA7GoaJGTtiuJV0gA8E,70
 fractal_server/app/runner/run_subprocess.py,sha256=c3JbYXq3hX2aaflQU19qJ5Xs6J6oXGNvnTEoAfv2bxc,959
 fractal_server/app/runner/set_start_and_last_task_index.py,sha256=-q4zVybAj8ek2XlbENKlfOAJ39hT_zoJoZkqzDqiAMY,1254
@@ -101,7 +101,7 @@ fractal_server/app/runner/v2/_slurm_sudo.py,sha256=TVihkQKMX6YWEWxXJjQo0WEQOjVy7
 fractal_server/app/runner/v2/db_tools.py,sha256=du5dKhMMFMErQXbGIgu9JvO_vtMensodyPsyDeqz1yQ,3324
 fractal_server/app/runner/v2/deduplicate_list.py,sha256=IVTE4abBU1bUprFTkxrTfYKnvkNTanWQ-KWh_etiT08,645
 fractal_server/app/runner/v2/merge_outputs.py,sha256=D1L4Taieq9i71SPQyNc1kMokgHh-sV_MqF3bv7QMDBc,907
-fractal_server/app/runner/v2/runner.py,sha256=…
+fractal_server/app/runner/v2/runner.py,sha256=2NtD-86EYmpIynWcu5GBrEZ2_ydgIrVqKgKMRhaj4OE,16447
 fractal_server/app/runner/v2/runner_functions.py,sha256=AzsE7VF6NMz_5qc0htQkfow5_2rr-wkx50vFJTndj8I,19250
 fractal_server/app/runner/v2/runner_functions_low_level.py,sha256=_h_OOffq3d7V0uHa8Uvs0mj31y1GSZBUXjDDF3WjVjY,3620
 fractal_server/app/runner/v2/submit_workflow.py,sha256=QywUGIoHAHnrWgfnyX8W9kVqKY-RvVyNLpzrbsXZOZ4,13075
@@ -130,14 +130,14 @@ fractal_server/app/schemas/v2/workflowtask.py,sha256=rVbmNihDAJL_Sckbt1hBK2JEcb-
 fractal_server/app/security/__init__.py,sha256=e2cveg5hQpieGD3bSPd5GTOMthvJ-HXH3buSb9WVfEU,14096
 fractal_server/app/security/signup_email.py,sha256=Xd6QYxcdmg0PHpDwmUE8XQmPcOj3Xjy5oROcIMhmltM,1472
 fractal_server/app/user_settings.py,sha256=OP1yiYKtPadxwM51_Q0hdPk3z90TCN4z1BLpQsXyWiU,1316
-fractal_server/config.py,sha256=…
+fractal_server/config.py,sha256=A3j1bxa2inTNqIp5Ry0sY6DS72k7X_AKA3OUaDwoYbA,28439
 fractal_server/data_migrations/README.md,sha256=_3AEFvDg9YkybDqCLlFPdDmGJvr6Tw7HRI14aZ3LOIw,398
 fractal_server/data_migrations/tools.py,sha256=LeMeASwYGtEqd-3wOLle6WARdTGAimoyMmRbbJl-hAM,572
 fractal_server/gunicorn_fractal.py,sha256=u6U01TLGlXgq1v8QmEpLih3QnsInZD7CqphgJ_GrGzc,1230
 fractal_server/images/__init__.py,sha256=-_wjoKtSX02P1KjDxDP_EXKvmbONTRmbf7iGVTsyBpM,154
 fractal_server/images/models.py,sha256=jdGKMPi8WlO9Kvns4grIOU5LjujnvwIGjMFMC0wNy08,3501
 fractal_server/images/tools.py,sha256=-zFDzRv6cbbRo21OrD0eZY5qWcoMX8dxgEnfyI3tOcg,4140
-fractal_server/logger.py,sha256=…
+fractal_server/logger.py,sha256=2QxBu5mB6xN3qWqj60nuxdrxcbxwzlx0xL47jKHB5PU,5385
 fractal_server/main.py,sha256=FD9KzTTsXTQnTW0z3Hu7y0Nj_oAkBeZEInKDXFd4hjE,4561
 fractal_server/migrations/env.py,sha256=nfyBpMIOT3kny6t-b-tUjyRjZ4k906bb1_wCQ7me1BI,1353
 fractal_server/migrations/naming_convention.py,sha256=htbKrVdetx3pklowb_9Cdo5RqeF0fJ740DNecY5de_M,265
@@ -209,8 +209,8 @@ fractal_server/tasks/v2/utils_templates.py,sha256=Kc_nSzdlV6KIsO0CQSPs1w70zLyENP
 fractal_server/urls.py,sha256=QjIKAC1a46bCdiPMu3AlpgFbcv6a4l3ABcd5xz190Og,471
 fractal_server/utils.py,sha256=PMwrxWFxRTQRl1b9h-NRIbFGPKqpH_hXnkAT3NfZdpY,3571
 fractal_server/zip_tools.py,sha256=GjDgo_sf6V_DDg6wWeBlZu5zypIxycn_l257p_YVKGc,4876
-fractal_server-2.14.0a36.dist-info/LICENSE,sha256=QKAharUuhxL58kSoLizKJeZE3mTCBnX6ucmz8W0lxlk,1576
-fractal_server-2.14.0a36.dist-info/METADATA,sha256=…
-fractal_server-2.14.0a36.dist-info/WHEEL,sha256=7dDg4QLnNKTvwIDR9Ac8jJaAmBC_owJrckbC0jjThyA,88
-fractal_server-2.14.0a36.dist-info/entry_points.txt,sha256=8tV2kynvFkjnhbtDnxAqImL6HMVKsopgGfew0DOp5UY,58
-fractal_server-2.14.0a36.dist-info/RECORD,,
+fractal_server-2.14.1.dist-info/LICENSE,sha256=QKAharUuhxL58kSoLizKJeZE3mTCBnX6ucmz8W0lxlk,1576
+fractal_server-2.14.1.dist-info/METADATA,sha256=ZZZ64mNnygya_tvqA3lEnWvtir9W3L6-DfFdtDKL3UA,4560
+fractal_server-2.14.1.dist-info/WHEEL,sha256=7dDg4QLnNKTvwIDR9Ac8jJaAmBC_owJrckbC0jjThyA,88
+fractal_server-2.14.1.dist-info/entry_points.txt,sha256=8tV2kynvFkjnhbtDnxAqImL6HMVKsopgGfew0DOp5UY,58
+fractal_server-2.14.1.dist-info/RECORD,,
fractal_server/app/routes/api/v2/verify_image_types.py
REMOVED
@@ -1,61 +0,0 @@
-from typing import Optional
-
-from fastapi import APIRouter
-from fastapi import Depends
-from fastapi import status
-
-from ._aux_functions import _get_dataset_check_owner
-from .images import ImageQuery
-from fractal_server.app.db import AsyncSession
-from fractal_server.app.db import get_async_db
-from fractal_server.app.models import UserOAuth
-from fractal_server.app.routes.auth import current_active_user
-from fractal_server.images.tools import aggregate_types
-from fractal_server.images.tools import filter_image_list
-
-router = APIRouter()
-
-
-@router.post(
-    "/project/{project_id}/dataset/{dataset_id}/images/verify-unique-types/",
-    status_code=status.HTTP_200_OK,
-)
-async def verify_unique_types(
-    project_id: int,
-    dataset_id: int,
-    query: Optional[ImageQuery] = None,
-    user: UserOAuth = Depends(current_active_user),
-    db: AsyncSession = Depends(get_async_db),
-) -> list[str]:
-    # Get dataset
-    output = await _get_dataset_check_owner(
-        project_id=project_id, dataset_id=dataset_id, user_id=user.id, db=db
-    )
-    dataset = output["dataset"]
-
-    # Filter images
-    if query is None:
-        filtered_images = dataset.images
-    else:
-        filtered_images = filter_image_list(
-            images=dataset.images,
-            attribute_filters=query.attribute_filters,
-            type_filters=query.type_filters,
-        )
-
-    # Get actual values for each available type
-    available_types = aggregate_types(filtered_images)
-    values_per_type: dict[str, set] = {
-        _type: set() for _type in available_types
-    }
-    for _img in filtered_images:
-        for _type in available_types:
-            values_per_type[_type].add(_img["types"].get(_type, False))
-
-    # Find types with non-unique value
-    non_unique_types = [
-        key for key, value in values_per_type.items() if len(value) > 1
-    ]
-    non_unique_types = sorted(non_unique_types)
-
-    return non_unique_types
{fractal_server-2.14.0a36.dist-info → fractal_server-2.14.1.dist-info}/LICENSE
File without changes
{fractal_server-2.14.0a36.dist-info → fractal_server-2.14.1.dist-info}/WHEEL
File without changes
{fractal_server-2.14.0a36.dist-info → fractal_server-2.14.1.dist-info}/entry_points.txt
File without changes