fractal-server 2.14.0a36__py3-none-any.whl → 2.14.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (26)
  1. fractal_server/__init__.py +1 -1
  2. fractal_server/app/routes/admin/v2/job.py +1 -1
  3. fractal_server/app/routes/api/v2/__init__.py +2 -2
  4. fractal_server/app/routes/api/v2/pre_submission_checks.py +144 -0
  5. fractal_server/app/routes/api/v2/status_legacy.py +2 -2
  6. fractal_server/app/routes/api/v2/task_collection_custom.py +22 -10
  7. fractal_server/app/runner/compress_folder.py +15 -3
  8. fractal_server/app/runner/executors/slurm_common/base_slurm_runner.py +0 -10
  9. fractal_server/app/runner/executors/slurm_common/remote.py +1 -3
  10. fractal_server/app/runner/executors/slurm_sudo/runner.py +1 -1
  11. fractal_server/app/runner/extract_archive.py +21 -5
  12. fractal_server/app/runner/v2/runner.py +174 -136
  13. fractal_server/config.py +0 -5
  14. fractal_server/logger.py +8 -3
  15. fractal_server/tasks/v2/local/collect.py +1 -1
  16. fractal_server/tasks/v2/local/deactivate.py +1 -1
  17. fractal_server/tasks/v2/local/reactivate.py +1 -1
  18. fractal_server/tasks/v2/ssh/collect.py +1 -1
  19. fractal_server/tasks/v2/ssh/deactivate.py +1 -1
  20. fractal_server/tasks/v2/ssh/reactivate.py +1 -1
  21. {fractal_server-2.14.0a36.dist-info → fractal_server-2.14.2.dist-info}/METADATA +1 -1
  22. {fractal_server-2.14.0a36.dist-info → fractal_server-2.14.2.dist-info}/RECORD +25 -25
  23. fractal_server/app/routes/api/v2/verify_image_types.py +0 -61
  24. {fractal_server-2.14.0a36.dist-info → fractal_server-2.14.2.dist-info}/LICENSE +0 -0
  25. {fractal_server-2.14.0a36.dist-info → fractal_server-2.14.2.dist-info}/WHEEL +0 -0
  26. {fractal_server-2.14.0a36.dist-info → fractal_server-2.14.2.dist-info}/entry_points.txt +0 -0
fractal_server/__init__.py CHANGED
@@ -1 +1 @@
-__VERSION__ = "2.14.0a36"
+__VERSION__ = "2.14.2"
fractal_server/app/routes/admin/v2/job.py CHANGED
@@ -154,7 +154,7 @@ async def update_job(
     if job_update.status != JobStatusTypeV2.FAILED:
         raise HTTPException(
             status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
-            detail=f"Cannot set job status to {job_update.status}",
+            detail=f"Cannot set job status to {job_update.status.value}",
         )

     setattr(job, "status", job_update.status)
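Several hunks in this release (here, in status_legacy.py, and in the six task-lifecycle modules below) switch from interpolating an enum member to interpolating its .value. A minimal sketch of why, assuming these are str-based enums (the exact base classes are an assumption here): since Python 3.11, formatting a mixed-in enum member renders the qualified member name rather than the plain value.

import enum

class JobStatusTypeV2(str, enum.Enum):
    # Hypothetical minimal stand-in for the real fractal-server enum
    FAILED = "failed"

print(f"Cannot set job status to {JobStatusTypeV2.FAILED}")
# On Python >= 3.11: "Cannot set job status to JobStatusTypeV2.FAILED"
print(f"Cannot set job status to {JobStatusTypeV2.FAILED.value}")
# Always: "Cannot set job status to failed"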
fractal_server/app/routes/api/v2/__init__.py CHANGED
@@ -7,6 +7,7 @@ from .dataset import router as dataset_router_v2
 from .history import router as history_router_v2
 from .images import router as images_routes_v2
 from .job import router as job_router_v2
+from .pre_submission_checks import router as pre_submission_checks_router
 from .project import router as project_router_v2
 from .status_legacy import router as status_legacy_router_v2
 from .submit import router as submit_job_router_v2
@@ -15,7 +16,6 @@ from .task_collection import router as task_collection_router_v2
 from .task_collection_custom import router as task_collection_router_v2_custom
 from .task_group import router as task_group_router_v2
 from .task_group_lifecycle import router as task_group_lifecycle_router_v2
-from .verify_image_types import router as verify_image_types_router
 from .workflow import router as workflow_router_v2
 from .workflow_import import router as workflow_import_router_v2
 from .workflowtask import router as workflowtask_router_v2
@@ -26,7 +26,7 @@ from fractal_server.syringe import Inject
 router_api_v2 = APIRouter()

 router_api_v2.include_router(dataset_router_v2, tags=["V2 Dataset"])
-router_api_v2.include_router(verify_image_types_router, tags=["V2 Job"])
+router_api_v2.include_router(pre_submission_checks_router, tags=["V2 Job"])
 router_api_v2.include_router(job_router_v2, tags=["V2 Job"])
 router_api_v2.include_router(images_routes_v2, tags=["V2 Images"])
 router_api_v2.include_router(project_router_v2, tags=["V2 Project"])
fractal_server/app/routes/api/v2/pre_submission_checks.py ADDED
@@ -0,0 +1,144 @@
+from typing import Optional
+
+from fastapi import APIRouter
+from fastapi import Depends
+from fastapi import status
+from fastapi.responses import JSONResponse
+from pydantic import BaseModel
+from pydantic import Field
+from sqlmodel import select
+
+from ._aux_functions import _get_dataset_check_owner
+from ._aux_functions import _get_workflow_task_check_owner
+from .images import ImageQuery
+from fractal_server.app.db import AsyncSession
+from fractal_server.app.db import get_async_db
+from fractal_server.app.models import UserOAuth
+from fractal_server.app.models.v2 import HistoryImageCache
+from fractal_server.app.models.v2 import HistoryUnit
+from fractal_server.app.routes.auth import current_active_user
+from fractal_server.app.schemas.v2 import HistoryUnitStatus
+from fractal_server.images.models import AttributeFiltersType
+from fractal_server.images.tools import aggregate_types
+from fractal_server.images.tools import filter_image_list
+
+router = APIRouter()
+
+
+@router.post(
+    "/project/{project_id}/dataset/{dataset_id}/images/verify-unique-types/",
+    status_code=status.HTTP_200_OK,
+)
+async def verify_unique_types(
+    project_id: int,
+    dataset_id: int,
+    query: Optional[ImageQuery] = None,
+    user: UserOAuth = Depends(current_active_user),
+    db: AsyncSession = Depends(get_async_db),
+) -> list[str]:
+    # Get dataset
+    output = await _get_dataset_check_owner(
+        project_id=project_id, dataset_id=dataset_id, user_id=user.id, db=db
+    )
+    dataset = output["dataset"]
+
+    # Filter images
+    if query is None:
+        filtered_images = dataset.images
+    else:
+        filtered_images = filter_image_list(
+            images=dataset.images,
+            attribute_filters=query.attribute_filters,
+            type_filters=query.type_filters,
+        )
+
+    # Get actual values for each available type
+    available_types = aggregate_types(filtered_images)
+    values_per_type: dict[str, set] = {
+        _type: set() for _type in available_types
+    }
+    for _img in filtered_images:
+        for _type in available_types:
+            values_per_type[_type].add(_img["types"].get(_type, False))
+
+    # Find types with non-unique value
+    non_unique_types = [
+        key for key, value in values_per_type.items() if len(value) > 1
+    ]
+    non_unique_types = sorted(non_unique_types)
+
+    return non_unique_types
+
+
+class NonProcessedImagesPayload(BaseModel):
+    attribute_filters: AttributeFiltersType = Field(default_factory=dict)
+    type_filters: dict[str, bool] = Field(default_factory=dict)
+
+
+@router.post(
+    "/project/{project_id}/dataset/{dataset_id}/images/non-processed/",
+    status_code=status.HTTP_200_OK,
+)
+async def check_workflowtask(
+    project_id: int,
+    dataset_id: int,
+    workflow_id: int,
+    workflowtask_id: int,
+    filters: NonProcessedImagesPayload,
+    user: UserOAuth = Depends(current_active_user),
+    db: AsyncSession = Depends(get_async_db),
+) -> JSONResponse:
+
+    db_workflow_task, db_workflow = await _get_workflow_task_check_owner(
+        project_id=project_id,
+        workflow_task_id=workflowtask_id,
+        workflow_id=workflow_id,
+        user_id=user.id,
+        db=db,
+    )
+
+    if db_workflow_task.order == 0:
+        # Skip check for first task in the workflow
+        return JSONResponse(status_code=200, content=[])
+
+    previous_wft = db_workflow.task_list[db_workflow_task.order - 1]
+
+    if previous_wft.task.output_types != {}:
+        # Skip check if previous task has non-trivial `output_types`
+        return JSONResponse(status_code=200, content=[])
+    elif previous_wft.task.type in [
+        "converter_compound",
+        "converter_non_parallel",
+    ]:
+        # Skip check if previous task is converter
+        return JSONResponse(status_code=200, content=[])
+
+    res = await _get_dataset_check_owner(
+        project_id=project_id,
+        dataset_id=dataset_id,
+        user_id=user.id,
+        db=db,
+    )
+    dataset = res["dataset"]
+    filtered_images = filter_image_list(
+        images=dataset.images,
+        type_filters=filters.type_filters,
+        attribute_filters=filters.attribute_filters,
+    )
+
+    filtered_zarr_urls = [image["zarr_url"] for image in filtered_images]
+
+    res = await db.execute(
+        select(HistoryImageCache.zarr_url)
+        .join(HistoryUnit)
+        .where(HistoryImageCache.zarr_url.in_(filtered_zarr_urls))
+        .where(HistoryImageCache.dataset_id == dataset_id)
+        .where(HistoryImageCache.workflowtask_id == previous_wft.id)
+        .where(HistoryImageCache.latest_history_unit_id == HistoryUnit.id)
+        .where(HistoryUnit.status == HistoryUnitStatus.DONE)
+    )
+    done_zarr_urls = res.scalars().all()
+
+    missing_zarr_urls = list(set(filtered_zarr_urls) - set(done_zarr_urls))
+
+    return JSONResponse(status_code=200, content=missing_zarr_urls)
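The new module merges the previous verify-unique-types endpoint (moved here from verify_image_types.py, which is deleted below) with a new non-processed check that lists images the previous workflow task has not successfully processed. A client-side sketch, assuming a server at http://localhost:8000 and a valid bearer token (both hypothetical):

import httpx

BASE = "http://localhost:8000/api/v2"  # hypothetical deployment URL
HEADERS = {"Authorization": "Bearer <token>"}  # token handling not shown

# Types taking more than one value across the (filtered) image list
r = httpx.post(
    f"{BASE}/project/1/dataset/1/images/verify-unique-types/",
    json={"attribute_filters": {}, "type_filters": {"is_3D": True}},
    headers=HEADERS,
)
print(r.json())  # e.g. ["illumination_corrected"]

# zarr_urls still lacking a DONE history unit for the previous task
r = httpx.post(
    f"{BASE}/project/1/dataset/1/images/non-processed/",
    params={"workflow_id": 1, "workflowtask_id": 2},
    json={"attribute_filters": {}, "type_filters": {}},
    headers=HEADERS,
)
print(r.json())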
fractal_server/app/routes/api/v2/status_legacy.py CHANGED
@@ -118,8 +118,8 @@ async def get_workflowtask_status(
         )
     except ValueError:
         logger.warning(
-            f"Job {running_job.id} is submitted but its task list does "
-            f"not contain a {WorkflowTaskStatusTypeV2.SUBMITTED} task."
+            f"Job {running_job.id} is submitted but its task list does not"
+            f" contain a {WorkflowTaskStatusTypeV2.SUBMITTED.value} task."
         )
         first_submitted_index = 0

fractal_server/app/routes/api/v2/task_collection_custom.py CHANGED
@@ -1,3 +1,4 @@
+import os
 import shlex
 import subprocess # nosec
 from pathlib import Path
@@ -65,25 +66,36 @@ async def collect_task_custom(
             detail="Cannot infer 'package_root' with 'slurm_ssh' backend.",
         )
     else:
-        if not Path(task_collect.python_interpreter).is_file():
+        if not os.access(
+            task_collect.python_interpreter, os.X_OK
+        ) or not os.access(task_collect.python_interpreter, os.R_OK):
             raise HTTPException(
                 status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
                 detail=(
                     f"{task_collect.python_interpreter=} "
-                    "doesn't exist or is not a file."
+                    "is not accessible to the Fractal user "
+                    "or it is not executable."
                 ),
             )
-        if (
-            task_collect.package_root is not None
-            and not Path(task_collect.package_root).is_dir()
-        ):
+        if not Path(task_collect.python_interpreter).is_file():
             raise HTTPException(
                 status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
-                detail=(
-                    f"{task_collect.package_root=} "
-                    "doesn't exist or is not a directory."
-                ),
+                detail=f"{task_collect.python_interpreter=} is not a file.",
             )
+        if task_collect.package_root is not None:
+            if not os.access(task_collect.package_root, os.R_OK):
+                raise HTTPException(
+                    status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+                    detail=(
+                        f"{task_collect.package_root=} "
+                        "is not accessible to the Fractal user."
+                    ),
+                )
+            if not Path(task_collect.package_root).is_dir():
+                raise HTTPException(
+                    status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+                    detail=f"{task_collect.package_root=} is not a directory.",
+                )

     if task_collect.package_root is None:
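The split checks produce more actionable 422 messages: missing permissions are now distinguished from a missing or non-regular file. A small sketch of the underlying os.access semantics (the interpreter path is hypothetical):

import os

interpreter = "/opt/venvs/tasks/bin/python"  # hypothetical path

# os.access answers "may the current process read/execute this path?".
# It also returns False when the path does not exist at all, which is
# why the is_file() check can safely run second.
print(os.access(interpreter, os.R_OK), os.access(interpreter, os.X_OK))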
fractal_server/app/runner/compress_folder.py CHANGED
@@ -80,6 +80,7 @@ def _remove_temp_subfolder(subfolder_path_tmp_copy: Path, logger_name: str):
 def compress_folder(
     subfolder_path: Path,
     filelist_path: str | None,
+    default_logging_level: int | None = None,
 ) -> str:
     """
     Compress e.g. `/path/archive` into `/path/archive.tar.gz`
@@ -90,13 +91,17 @@ def compress_folder(
     Args:
         subfolder_path: Absolute path to the folder to compress.
         remote_to_local: If `True`, exclude some files from the tar.gz archive.
+        default_logging_level:

     Returns:
         Absolute path to the tar.gz archive.
     """

     logger_name = "compress_folder"
-    logger = set_logger(logger_name)
+    logger = set_logger(
+        logger_name,
+        default_logging_level=default_logging_level,
+    )

     logger.debug("START")
     logger.debug(f"{subfolder_path=}")
@@ -132,7 +137,10 @@ def compress_folder(
     )


-def main(sys_argv: list[str]):
+def main(
+    sys_argv: list[str],
+    default_logging_level: int | None = None,
+):

     help_msg = (
         "Expected use:\n"
@@ -146,15 +154,19 @@ def main(sys_argv: list[str]):
         compress_folder(
             subfolder_path=Path(sys_argv[1]),
             filelist_path=None,
+            default_logging_level=default_logging_level,
         )
     elif num_args == 3 and sys_argv[2] == "--filelist":
         compress_folder(
             subfolder_path=Path(sys_argv[1]),
             filelist_path=sys_argv[3],
+            default_logging_level=default_logging_level,
         )
     else:
         sys.exit(f"Invalid argument.\n{help_msg}\nProvided: {sys_argv[1:]=}")


 if __name__ == "__main__":
-    main(sys.argv)
+    import logging
+
+    main(sys.argv, default_logging_level=logging.DEBUG)
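Running the module as a script now forces DEBUG logging, while importing callers keep the settings-driven default. A library-call sketch (the subfolder path is hypothetical; the import path follows the file location above):

import logging
from pathlib import Path

from fractal_server.app.runner.compress_folder import compress_folder

# Returns the absolute path of the resulting tar.gz archive
archive_path = compress_folder(
    subfolder_path=Path("/path/to/job/subfolder"),
    filelist_path=None,
    default_logging_level=logging.INFO,
)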
fractal_server/app/runner/executors/slurm_common/base_slurm_runner.py CHANGED
@@ -524,12 +524,6 @@ class BaseSlurmRunner(BaseRunner):
             slurm_job_ids=self.job_ids,
         )

-        # NOTE: see issue 2444
-        settings = Inject(get_settings)
-        sleep_time = settings.FRACTAL_SLURM_INTERVAL_BEFORE_RETRIEVAL
-        logger.warning(f"[submit] Now sleep {sleep_time} seconds.")
-        time.sleep(sleep_time)
-
         # Retrieval phase
         logger.debug("[submit] START retrieval phase")
         scancelled_job_ids = []
@@ -705,10 +699,6 @@ class BaseSlurmRunner(BaseRunner):
                 slurm_job_ids=self.job_ids,
             )

-            settings = Inject(get_settings)
-            sleep_time = settings.FRACTAL_SLURM_INTERVAL_BEFORE_RETRIEVAL
-            logger.warning(f"[multisubmit] Now sleep {sleep_time} seconds.")
-            time.sleep(sleep_time)
         except Exception as e:
             logger.error(
                 "[multisubmit] Unexpected exception during submission."
fractal_server/app/runner/executors/slurm_common/remote.py CHANGED
@@ -148,10 +148,8 @@ def worker(
         out = cloudpickle.dumps(result)

         # Write the output pickle file
-        tempfile = out_fname + ".tmp"
-        with open(tempfile, "wb") as f:
+        with open(out_fname, "wb") as f:
             f.write(out)
-        os.rename(tempfile, out_fname)


 if __name__ == "__main__":
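The removed lines implemented the classic write-to-temporary-then-rename pattern, which publishes the output pickle atomically (os.rename is atomic within a single POSIX filesystem) so that a concurrent reader can never observe a half-written file; 2.14.2 writes out_fname directly instead. For reference, a minimal sketch of the dropped pattern:

import os

def atomic_write_bytes(path: str, data: bytes) -> None:
    # Write to a sibling temp file, then atomically move it into place
    tmp_path = path + ".tmp"
    with open(tmp_path, "wb") as f:
        f.write(data)
    os.rename(tmp_path, path)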
fractal_server/app/runner/executors/slurm_sudo/runner.py CHANGED
@@ -151,7 +151,7 @@ class SudoSlurmRunner(BaseSlurmRunner):
         """
         Fetch artifacts for a list of SLURM jobs.
         """
-        MAX_NUM_THREADS = 4
+        MAX_NUM_THREADS = 12
         THREAD_NAME_PREFIX = "fetch_artifacts"
         logger.debug(
             "[_fetch_artifacts] START "
fractal_server/app/runner/extract_archive.py CHANGED
@@ -25,7 +25,10 @@ def _remove_suffix(*, string: str, suffix: str) -> str:
         raise ValueError(f"Cannot remove {suffix=} from {string=}.")


-def extract_archive(archive_path: Path):
+def extract_archive(
+    archive_path: Path,
+    default_logging_level: int | None = None,
+):
     """
     Extract e.g. `/path/archive.tar.gz` archive into `/path/archive` folder
@@ -34,10 +37,14 @@ def extract_archive(archive_path: Path):

     Arguments:
         archive_path: Absolute path to the archive file.
+        default_logging_level
     """

     logger_name = "extract_archive"
-    logger = set_logger(logger_name)
+    logger = set_logger(
+        logger_name,
+        default_logging_level=default_logging_level,
+    )

     logger.debug("START")
     logger.debug(f"{archive_path.as_posix()=}")
@@ -65,7 +72,10 @@ def extract_archive(archive_path: Path):
     logger.debug("END")


-def main(sys_argv: list[str]):
+def main(
+    sys_argv: list[str],
+    default_logging_level: int | None = None,
+):
     help_msg = (
         "Expected use:\n"
         "python -m fractal_server.app.runner.extract_archive "
@@ -76,8 +86,14 @@ def main(sys_argv: list[str]):
         sys.exit(f"Invalid argument.\n{help_msg}\nProvided: {sys_argv[1:]=}")
     else:
         tarfile_path = Path(sys_argv[1])
-        extract_archive(tarfile_path)
+        extract_archive(
+            tarfile_path,
+            default_logging_level=default_logging_level,
+        )


 if __name__ == "__main__":
-    main(sys.argv)
+
+    import logging
+
+    main(sys.argv, default_logging_level=logging.DEBUG)
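Same pattern as compress_folder: DEBUG when run via `python -m fractal_server.app.runner.extract_archive /path/archive.tar.gz` (the invocation shown in help_msg above), and the settings-driven default otherwise. A library-call sketch with a hypothetical archive path:

import logging
from pathlib import Path

from fractal_server.app.runner.extract_archive import extract_archive

extract_archive(
    Path("/path/archive.tar.gz"),
    default_logging_level=logging.WARNING,
)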
fractal_server/app/runner/v2/runner.py CHANGED
@@ -9,6 +9,7 @@ from typing import Optional

 from sqlalchemy.orm.attributes import flag_modified
 from sqlmodel import delete
+from sqlmodel import update

 from ....images import SingleImage
 from ....images.tools import filter_image_list
@@ -25,6 +26,7 @@ from fractal_server.app.models.v2 import AccountingRecord
 from fractal_server.app.models.v2 import DatasetV2
 from fractal_server.app.models.v2 import HistoryImageCache
 from fractal_server.app.models.v2 import HistoryRun
+from fractal_server.app.models.v2 import HistoryUnit
 from fractal_server.app.models.v2 import TaskGroupV2
 from fractal_server.app.models.v2 import WorkflowTaskV2
 from fractal_server.app.runner.executors.base_runner import BaseRunner
@@ -36,6 +38,36 @@ from fractal_server.images.models import AttributeFiltersType
 from fractal_server.images.tools import merge_type_filters


+def drop_none_attributes(attributes: dict[str, Any]) -> dict[str, Any]:
+    # Unset attributes with `None` value
+    non_none_attributes = {
+        key: value for key, value in attributes.items() if value is not None
+    }
+    return non_none_attributes
+
+
+def get_origin_attribute_and_types(
+    *,
+    origin_url: str,
+    images: list[dict[str, Any]],
+) -> tuple[dict[str, Any], dict[str, bool]]:
+    """
+    Search for origin image and extract its attributes/types.
+    """
+    origin_img_search = find_image_by_zarr_url(
+        images=images,
+        zarr_url=origin_url,
+    )
+    if origin_img_search is None:
+        updated_attributes = {}
+        updated_types = {}
+    else:
+        origin_image = origin_img_search["image"]
+        updated_attributes = copy(origin_image["attributes"])
+        updated_types = copy(origin_image["types"])
+    return updated_attributes, updated_types
+
+
 def execute_tasks_v2(
     *,
     wf_task_list: list[WorkflowTaskV2],
@@ -189,154 +221,160 @@ def execute_tasks_v2(
         num_tasks = 0

         # POST TASK EXECUTION
-
-        non_failed_task_outputs = [
-            value.task_output
-            for value in outcomes_dict.values()
-            if value.task_output is not None
-        ]
-        if len(non_failed_task_outputs) > 0:
-            current_task_output = merge_outputs(non_failed_task_outputs)
-            # If `current_task_output` includes no images (to be created or
-            # removed), then flag all the input images as modified.
-            # See fractal-server issues #1374 and #2409.
-            if (
-                current_task_output.image_list_updates == []
-                and current_task_output.image_list_removals == []
-            ):
-                current_task_output = TaskOutput(
-                    image_list_updates=[
-                        dict(zarr_url=img["zarr_url"])
-                        for img in filtered_images
-                    ],
-                )
-        else:
-            current_task_output = TaskOutput()
-
-        # Update image list
-        num_new_images = 0
-        current_task_output.check_zarr_urls_are_unique()
-        # NOTE: In principle we could make the task-output processing more
-        # granular, and also associate output-processing failures to history
-        # status.
-        for image_obj in current_task_output.image_list_updates:
-            image = image_obj.model_dump()
-            # Edit existing image
-            tmp_image_paths = [img["zarr_url"] for img in tmp_images]
-            if image["zarr_url"] in tmp_image_paths:
-                if (
-                    image["origin"] is not None
-                    and image["origin"] != image["zarr_url"]
-                ):
-                    raise JobExecutionError(
-                        "Cannot edit an image with zarr_url different from "
-                        "origin.\n"
-                        f"zarr_url={image['zarr_url']}\n"
-                        f"origin={image['origin']}"
-                    )
-                img_search = find_image_by_zarr_url(
-                    images=tmp_images,
-                    zarr_url=image["zarr_url"],
-                )
-                if img_search is None:
-                    raise ValueError(
-                        "Unexpected error: "
-                        f"Image with zarr_url {image['zarr_url']} not found, "
-                        "while updating image list."
-                    )
-                original_img = img_search["image"]
-                original_index = img_search["index"]
-                updated_attributes = copy(original_img["attributes"])
-                updated_types = copy(original_img["types"])
-
-                # Update image attributes/types with task output and manifest
-                updated_attributes.update(image["attributes"])
-                updated_types.update(image["types"])
-                updated_types.update(task.output_types)
-
-                # Unset attributes with None value
-                updated_attributes = {
-                    key: value
-                    for key, value in updated_attributes.items()
-                    if value is not None
-                }
-
-                # Validate new image
-                SingleImage(
-                    zarr_url=image["zarr_url"],
-                    types=updated_types,
-                    attributes=updated_attributes,
-                )
-
-                # Update image in the dataset image list
-                tmp_images[original_index]["attributes"] = updated_attributes
-                tmp_images[original_index]["types"] = updated_types
-            # Add new image
-            else:
-                # Check that image['zarr_url'] is relative to zarr_dir
-                if not image["zarr_url"].startswith(zarr_dir):
-                    raise JobExecutionError(
-                        "Cannot create image if zarr_dir is not a parent "
-                        "directory of zarr_url.\n"
-                        f"zarr_dir: {zarr_dir}\n"
-                        f"zarr_url: {image['zarr_url']}"
-                    )
-                # Check that image['zarr_url'] is not equal to zarr_dir
-                if image["zarr_url"] == zarr_dir:
-                    raise JobExecutionError(
-                        "Cannot create image if zarr_url is equal to "
-                        "zarr_dir.\n"
-                        f"zarr_dir: {zarr_dir}\n"
-                        f"zarr_url: {image['zarr_url']}"
-                    )
-                # Propagate attributes and types from `origin` (if any)
-                updated_attributes = {}
-                updated_types = {}
-                if image["origin"] is not None:
-                    img_search = find_image_by_zarr_url(
-                        images=tmp_images,
-                        zarr_url=image["origin"],
-                    )
-                    if img_search is not None:
-                        original_img = img_search["image"]
-                        updated_attributes = copy(original_img["attributes"])
-                        updated_types = copy(original_img["types"])
-                # Update image attributes/types with task output and manifest
-                updated_attributes.update(image["attributes"])
-                updated_attributes = {
-                    key: value
-                    for key, value in updated_attributes.items()
-                    if value is not None
-                }
-                updated_types.update(image["types"])
-                updated_types.update(task.output_types)
-                new_image = dict(
-                    zarr_url=image["zarr_url"],
-                    origin=image["origin"],
-                    attributes=updated_attributes,
-                    types=updated_types,
-                )
-                # Validate new image
-                SingleImage(**new_image)
-                # Add image into the dataset image list
-                tmp_images.append(new_image)
-                num_new_images += 1
-
-        # Remove images from tmp_images
-        for img_zarr_url in current_task_output.image_list_removals:
-            img_search = find_image_by_zarr_url(
-                images=tmp_images, zarr_url=img_zarr_url
-            )
-            if img_search is None:
-                raise JobExecutionError(
-                    f"Cannot remove missing image (zarr_url={img_zarr_url})."
-                )
-            else:
-                tmp_images.pop(img_search["index"])
-
-        # Update type_filters based on task-manifest output_types
-        type_filters_from_task_manifest = task.output_types
-        current_dataset_type_filters.update(type_filters_from_task_manifest)
+        try:
+            non_failed_task_outputs = [
+                value.task_output
+                for value in outcomes_dict.values()
+                if value.task_output is not None
+            ]
+            if len(non_failed_task_outputs) > 0:
+                current_task_output = merge_outputs(non_failed_task_outputs)
+                # If `current_task_output` includes no images (to be created or
+                # removed), then flag all the input images as modified.
+                # See fractal-server issues #1374 and #2409.
+                if (
+                    current_task_output.image_list_updates == []
+                    and current_task_output.image_list_removals == []
+                ):
+                    current_task_output = TaskOutput(
+                        image_list_updates=[
+                            dict(zarr_url=img["zarr_url"])
+                            for img in filtered_images
+                        ],
+                    )
+            else:
+                current_task_output = TaskOutput()
+
+            # Update image list
+            num_new_images = 0
+            current_task_output.check_zarr_urls_are_unique()
+            # NOTE: In principle we could make the task-output processing more
+            # granular, and also associate output-processing failures to
+            # history status.
+            for image_obj in current_task_output.image_list_updates:
+                image = image_obj.model_dump()
+                if image["zarr_url"] in [
+                    img["zarr_url"] for img in tmp_images
+                ]:
+                    img_search = find_image_by_zarr_url(
+                        images=tmp_images,
+                        zarr_url=image["zarr_url"],
+                    )
+                    if img_search is None:
+                        raise ValueError(
+                            "Unexpected error: "
+                            f"Image with zarr_url {image['zarr_url']} not "
+                            "found, while updating image list."
+                        )
+                    existing_image_index = img_search["index"]
+
+                    if (
+                        image["origin"] is None
+                        or image["origin"] == image["zarr_url"]
+                    ):
+                        # CASE 1: Edit existing image
+                        existing_image = img_search["image"]
+                        new_attributes = copy(existing_image["attributes"])
+                        new_types = copy(existing_image["types"])
+                        new_image = dict(
+                            zarr_url=image["zarr_url"],
+                        )
+                        if "origin" in existing_image.keys():
+                            new_image["origin"] = existing_image["origin"]
+                    else:
+                        # CASE 2: Re-create existing image based on `origin`
+                        # Propagate attributes and types from `origin` (if any)
+                        (
+                            new_attributes,
+                            new_types,
+                        ) = get_origin_attribute_and_types(
+                            origin_url=image["origin"],
+                            images=tmp_images,
+                        )
+                        new_image = dict(
+                            zarr_url=image["zarr_url"],
+                            origin=image["origin"],
+                        )
+                    # Update attributes
+                    new_attributes.update(image["attributes"])
+                    new_attributes = drop_none_attributes(new_attributes)
+                    new_image["attributes"] = new_attributes
+                    # Update types
+                    new_types.update(image["types"])
+                    new_types.update(task.output_types)
+                    new_image["types"] = new_types
+                    # Validate new image
+                    SingleImage(**new_image)
+                    # Update image in the dataset image list
+                    tmp_images[existing_image_index] = new_image
+
+                else:
+                    # CASE 3: Add new image
+                    # Check that image['zarr_url'] is a subfolder of zarr_dir
+                    if (
+                        not image["zarr_url"].startswith(zarr_dir)
+                        or image["zarr_url"] == zarr_dir
+                    ):
+                        raise JobExecutionError(
+                            "Cannot create image if zarr_url is not a "
+                            "subfolder of zarr_dir.\n"
+                            f"zarr_dir: {zarr_dir}\n"
+                            f"zarr_url: {image['zarr_url']}"
+                        )
+
+                    # Propagate attributes and types from `origin` (if any)
+                    new_attributes, new_types = get_origin_attribute_and_types(
+                        origin_url=image["origin"],
+                        images=tmp_images,
+                    )
+                    # Prepare new image
+                    new_attributes.update(image["attributes"])
+                    new_attributes = drop_none_attributes(new_attributes)
+                    new_types.update(image["types"])
+                    new_types.update(task.output_types)
+                    new_image = dict(
+                        zarr_url=image["zarr_url"],
+                        origin=image["origin"],
+                        attributes=new_attributes,
+                        types=new_types,
+                    )
+                    # Validate new image
+                    SingleImage(**new_image)
+                    # Add image into the dataset image list
+                    tmp_images.append(new_image)
+                    num_new_images += 1
+
+            # Remove images from tmp_images
+            for img_zarr_url in current_task_output.image_list_removals:
+                img_search = find_image_by_zarr_url(
+                    images=tmp_images, zarr_url=img_zarr_url
+                )
+                if img_search is None:
+                    raise JobExecutionError(
+                        "Cannot remove missing image "
+                        f"(zarr_url={img_zarr_url})."
+                    )
+                else:
+                    tmp_images.pop(img_search["index"])
+
+            # Update type_filters based on task-manifest output_types
+            type_filters_from_task_manifest = task.output_types
+            current_dataset_type_filters.update(
+                type_filters_from_task_manifest
+            )
+        except Exception as e:
+            logger.error(
+                "Unexpected error in post-task-execution block. "
+                f"Original error: {str(e)}"
+            )
+            with next(get_sync_db()) as db:
+                db.execute(
+                    update(HistoryUnit)
+                    .where(HistoryUnit.history_run_id == history_run_id)
+                    .values(status=HistoryUnitStatus.FAILED)
+                )
+                db.commit()
+            raise e

         with next(get_sync_db()) as db:
             # Write current dataset images into the database.
@@ -381,7 +419,7 @@ def execute_tasks_v2(
                     status=HistoryUnitStatus.FAILED,
                     db_sync=db,
                 )
-                logger.error(
+                logger.warning(
                     f'END {wftask.order}-th task (name="{task_name}") - '
                     "ERROR."
                 )
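This refactor wraps the whole post-task bookkeeping in one try/except: any unexpected failure there is now logged, and the HistoryUnit rows of the current run are marked FAILED before the exception is re-raised, instead of leaving history rows dangling. The new drop_none_attributes helper is pure and easy to illustrate (the sample attributes are hypothetical):

attributes = {"well": "B03", "stain": None, "channel": 1}
drop_none_attributes(attributes)
# -> {"well": "B03", "channel": 1}; None-valued keys are unset, not stored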
fractal_server/config.py CHANGED
@@ -492,11 +492,6 @@ class Settings(BaseSettings):
     still running on SLURM.
     """

-    FRACTAL_SLURM_INTERVAL_BEFORE_RETRIEVAL: int = 2
-    """
-    NOTE: see issue 2444
-    """
-
     FRACTAL_SLURM_SBATCH_SLEEP: float = 0
     """
     Interval to wait (in seconds) between two subsequent `sbatch` calls, when
fractal_server/logger.py CHANGED
@@ -58,6 +58,7 @@ def set_logger(
     logger_name: str,
     *,
     log_file_path: Optional[Union[str, Path]] = None,
+    default_logging_level: Optional[int] = None,
 ) -> logging.Logger:
     """
     Set up a `fractal-server` logger
@@ -66,7 +67,8 @@ def set_logger(

     * The attribute `Logger.propagate` set to `False`;
     * One and only one `logging.StreamHandler` handler, with severity level set
-    to `FRACTAL_LOGGING_LEVEL` and formatter set as in the `logger.LOG_FORMAT`
+    to `FRACTAL_LOGGING_LEVEL` (or `default_logging_level`, if set), and
+    formatter set as in the `logger.LOG_FORMAT`
     variable from the current module;
     * One or many `logging.FileHandler` handlers, including one pointint to
     `log_file_path` (if set); all these handlers have severity level set to
@@ -75,6 +77,7 @@ def set_logger(
     Args:
         logger_name: The identifier of the logger.
         log_file_path: Path to the log file.
+        default_logging_level: Override for `settings.FRACTAL_LOGGING_LEVEL`

     Returns:
         logger: The logger, as configured by the arguments.
@@ -92,8 +95,10 @@ def set_logger(

     if not current_stream_handlers:
         stream_handler = logging.StreamHandler()
-        settings = Inject(get_settings)
-        stream_handler.setLevel(settings.FRACTAL_LOGGING_LEVEL)
+        if default_logging_level is None:
+            settings = Inject(get_settings)
+            default_logging_level = settings.FRACTAL_LOGGING_LEVEL
+        stream_handler.setLevel(default_logging_level)
         stream_handler.setFormatter(LOG_FORMATTER)
         logger.addHandler(stream_handler)

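The new keyword gives callers (such as the compress_folder and extract_archive entry points above) a per-call override of the stream-handler level, falling back to settings.FRACTAL_LOGGING_LEVEL when it is None. A minimal usage sketch:

import logging

from fractal_server.logger import set_logger

logger = set_logger("my_script", default_logging_level=logging.DEBUG)
logger.debug("visible even if FRACTAL_LOGGING_LEVEL is INFO")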
fractal_server/tasks/v2/local/collect.py CHANGED
@@ -132,7 +132,7 @@ def collect_local(
         ).as_posix(),
         prefix=(
             f"{int(time.time())}_"
-            f"{TaskGroupActivityActionV2.COLLECT}_"
+            f"{TaskGroupActivityActionV2.COLLECT.value}_"
         ),
         logger_name=LOGGER_NAME,
     )
fractal_server/tasks/v2/local/deactivate.py CHANGED
@@ -107,7 +107,7 @@ def deactivate_local(
         ).as_posix(),
         prefix=(
             f"{int(time.time())}_"
-            f"{TaskGroupActivityActionV2.DEACTIVATE}_"
+            f"{TaskGroupActivityActionV2.DEACTIVATE.value}_"
         ),
         logger_name=LOGGER_NAME,
     )
fractal_server/tasks/v2/local/reactivate.py CHANGED
@@ -107,7 +107,7 @@ def reactivate_local(
         ).as_posix(),
         prefix=(
             f"{int(time.time())}_"
-            f"{TaskGroupActivityActionV2.REACTIVATE}_"
+            f"{TaskGroupActivityActionV2.REACTIVATE.value}_"
         ),
         logger_name=LOGGER_NAME,
     )
fractal_server/tasks/v2/ssh/collect.py CHANGED
@@ -166,7 +166,7 @@ def collect_ssh(
         script_dir_remote=script_dir_remote,
         prefix=(
             f"{int(time.time())}_"
-            f"{TaskGroupActivityActionV2.COLLECT}"
+            f"{TaskGroupActivityActionV2.COLLECT.value}"
         ),
         fractal_ssh=fractal_ssh,
         logger_name=LOGGER_NAME,
fractal_server/tasks/v2/ssh/deactivate.py CHANGED
@@ -136,7 +136,7 @@ def deactivate_ssh(
         script_dir_remote=script_dir_remote,
         prefix=(
             f"{int(time.time())}_"
-            f"{TaskGroupActivityActionV2.DEACTIVATE}"
+            f"{TaskGroupActivityActionV2.DEACTIVATE.value}"
         ),
         fractal_ssh=fractal_ssh,
         logger_name=LOGGER_NAME,
fractal_server/tasks/v2/ssh/reactivate.py CHANGED
@@ -144,7 +144,7 @@ def reactivate_ssh(
         script_dir_remote=script_dir_remote,
         prefix=(
             f"{int(time.time())}_"
-            f"{TaskGroupActivityActionV2.REACTIVATE}"
+            f"{TaskGroupActivityActionV2.REACTIVATE.value}"
         ),
         fractal_ssh=fractal_ssh,
         logger_name=LOGGER_NAME,
{fractal_server-2.14.0a36.dist-info → fractal_server-2.14.2.dist-info}/METADATA RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: fractal-server
-Version: 2.14.0a36
+Version: 2.14.2
 Summary: Backend component of the Fractal analytics platform
 License: BSD-3-Clause
 Author: Tommaso Comparin
{fractal_server-2.14.0a36.dist-info → fractal_server-2.14.2.dist-info}/RECORD RENAMED
@@ -1,4 +1,4 @@
-fractal_server/__init__.py,sha256=8fGTYA0v4yFRsbsrwIUxQDcJEeH1hYIY2CuXgnou2hM,26
+fractal_server/__init__.py,sha256=6Uu9b8c6XWTG60V3XHQPdFjCGQfgYX4y4FTD3DmRFXk,23
 fractal_server/__main__.py,sha256=rkM8xjY1KeS3l63irB8yCrlVobR-73uDapC4wvrIlxI,6957
 fractal_server/alembic.ini,sha256=MWwi7GzjzawI9cCAK1LW7NxIBQDUqD12-ptJoq5JpP0,3153
 fractal_server/app/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -24,13 +24,13 @@ fractal_server/app/routes/admin/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm
 fractal_server/app/routes/admin/v2/__init__.py,sha256=_5lqb6-M8-fZqE1HRMep6pAFYRUKMxrvbZOKs-RXWkw,933
 fractal_server/app/routes/admin/v2/accounting.py,sha256=UDMPD9DMhMBcu4UsEOEtKMCGnkVMtmwBuRklek-_ShQ,3631
 fractal_server/app/routes/admin/v2/impersonate.py,sha256=gc4lshfEPFR6W2asH7aKu6hqE6chzusdhAUVV9p51eU,1131
-fractal_server/app/routes/admin/v2/job.py,sha256=4soc-5d99QEsir7U9AqpofgaGggSBwgMm7mXW5LBvSI,7439
+fractal_server/app/routes/admin/v2/job.py,sha256=Elb3aP9Az6V8u1C0FvhSxt9NDPjBrQeJB0fH1hpXWQs,7445
 fractal_server/app/routes/admin/v2/project.py,sha256=luy-yiGX1JYTdPm1hpIdDUUqPm8xHuipLy9k2X6zu74,1223
 fractal_server/app/routes/admin/v2/task.py,sha256=QOwgyDU9m7T_wLMwkdgfFaoMjNxcDg6zMVpngxhUvqk,4374
 fractal_server/app/routes/admin/v2/task_group.py,sha256=LG41hAsllBL6kc-JLxRNG_IrI6frIKrIF3xD0GeeTiI,7173
 fractal_server/app/routes/admin/v2/task_group_lifecycle.py,sha256=0e0ZJ_k75TVHaT2o8Xk33DPDSgh-eBhZf-y4y7t-Adg,9429
 fractal_server/app/routes/api/__init__.py,sha256=B8l6PSAhR10iZqHEiyTat-_0tkeKdrCigIE6DJGP5b8,638
-fractal_server/app/routes/api/v2/__init__.py,sha256=9o9zxTU2IJrC_JQ8GUMft3niiBZ39YLvODUeraiRRdQ,2465
+fractal_server/app/routes/api/v2/__init__.py,sha256=WE6Znay9R_4_78eSKBMi-s5_lk3oLxWqyAGya4JcYY8,2474
 fractal_server/app/routes/api/v2/_aux_functions.py,sha256=eE-TdEMI_UX3LBDUGwjG5NyUcihDVaHYlG15NlTJ9DI,12872
 fractal_server/app/routes/api/v2/_aux_functions_history.py,sha256=ZlI6nwzB5r9AiY0C8TzJS_EQOTPKgkRYl3GpxFAu2bg,4430
 fractal_server/app/routes/api/v2/_aux_functions_task_lifecycle.py,sha256=qdXCb6IP8-qPEAxGZKljtjIqNzIAyRaAsQSRi5VqFHM,6773
@@ -39,15 +39,15 @@ fractal_server/app/routes/api/v2/dataset.py,sha256=h5AhE0sdhQ20ZlIbEJsFnHIOUW0S1
 fractal_server/app/routes/api/v2/history.py,sha256=pDztvwQFOh3JChtSk9GIG3H17yg4G5pk1mq14qXF4Ck,17793
 fractal_server/app/routes/api/v2/images.py,sha256=BGpO94gVd8BTpCN6Mun2RXmjrPmfkIp73m8RN7uiGW4,8361
 fractal_server/app/routes/api/v2/job.py,sha256=MU1sHIKk_89WrD0TD44d4ufzqnywot7On_W71KjyUbQ,6500
+fractal_server/app/routes/api/v2/pre_submission_checks.py,sha256=WyJAco9-96c15ImjgvsNfhd2169gG29CXkwtCTVLs38,4816
 fractal_server/app/routes/api/v2/project.py,sha256=uAZgATiHcOvbnRX-vv1D3HoaEUvLUd7vzVmGcqOP8ZY,4602
-fractal_server/app/routes/api/v2/status_legacy.py,sha256=Q5ZWQNfeZKL8Xgtou2Xr80iaF1uO-r4oSKgq5H42V_8,6349
+fractal_server/app/routes/api/v2/status_legacy.py,sha256=0QlbBErOT2Idf-LT0EvOiPtTjrm6WmIY0k69NOgjmWk,6355
 fractal_server/app/routes/api/v2/submit.py,sha256=hCwwC6bXP7EyhgGyVLv1ClybRH1YytDVoPunOzpsf0s,8822
 fractal_server/app/routes/api/v2/task.py,sha256=O7pquZhXIS4lRs5XqHvstiwe8BiCuS-B3ZKJI1g6EJU,6985
 fractal_server/app/routes/api/v2/task_collection.py,sha256=IDNF6sjDuU37HIQ0TuQA-TZIuf7nfHAQXUUNmkrlhLM,12706
-fractal_server/app/routes/api/v2/task_collection_custom.py,sha256=totsl0DOC2DFLw8vgqOFivvftpEk3KbFDeOHT0UVQUs,5997
+fractal_server/app/routes/api/v2/task_collection_custom.py,sha256=CRe23ehouz08tedJ3-pe9akqTVyxv7GfqhWTjx5E45Q,6677
 fractal_server/app/routes/api/v2/task_group.py,sha256=62zcVTdheXM5V3WmFuqisIqgETjXmZaRpNMcDX5bXS0,7408
 fractal_server/app/routes/api/v2/task_group_lifecycle.py,sha256=3o9bCC8ubMwffQPPaxQZy-CjH9IB2RkIReIecI6L2_w,9300
-fractal_server/app/routes/api/v2/verify_image_types.py,sha256=RBi6-3Sp1wYm_obDPRcEBtLvRfsRknufbZyhGGHVo6I,1924
 fractal_server/app/routes/api/v2/workflow.py,sha256=sW6Nm7dfzUY354hawyEkpQHy7rUvV2FCV8DPorH-TDU,10270
 fractal_server/app/routes/api/v2/workflow_import.py,sha256=INmnhlMEBJp-vHPR0f940DANPmIidts3OfcooeM_aNA,11205
 fractal_server/app/routes/api/v2/workflowtask.py,sha256=7_syX2EO7ibF6Xkm7HBPhsUYq6aYnKNeC5iSaafQhG4,11342
@@ -67,7 +67,7 @@ fractal_server/app/routes/aux/validate_user_settings.py,sha256=FLVi__8YFcm_6c_K5
 fractal_server/app/routes/pagination.py,sha256=L8F5JqekF39qz-LpeScdlhb57MQnSRXjK4ZEtsZqYLk,1210
 fractal_server/app/runner/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 fractal_server/app/runner/components.py,sha256=-Ii5l8d_V6f5DFOd-Zsr8VYmOsyqw0Hox9fEFQiuqxY,66
-fractal_server/app/runner/compress_folder.py,sha256=DX-4IYlSXlMd0EmXDD8M8FxisfKLbooSTrdNtzYAQAM,4876
+fractal_server/app/runner/compress_folder.py,sha256=PpamheggkacuAFhSiHkuW76x9zwawouqNySPBDyAuRI,5245
 fractal_server/app/runner/exceptions.py,sha256=JC5ufHyeA1hYD_rkZUscI30DD8D903ncag7Z3AArmUY,4215
 fractal_server/app/runner/executors/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 fractal_server/app/runner/executors/base_runner.py,sha256=4xxMpYycIeAOz5niaJj2xtVW_Cq-shCxP1qk4g-KwOM,5137
@@ -78,17 +78,17 @@ fractal_server/app/runner/executors/slurm_common/__init__.py,sha256=47DEQpj8HBSa
 fractal_server/app/runner/executors/slurm_common/_batching.py,sha256=ZY020JZlDS5mfpgpWTChQkyHU7iLE5kx2HVd57_C6XA,8850
 fractal_server/app/runner/executors/slurm_common/_job_states.py,sha256=nuV-Zba38kDrRESOVB3gaGbrSPZc4q7YGichQaeqTW0,238
 fractal_server/app/runner/executors/slurm_common/_slurm_config.py,sha256=_feRRnVVnvQa3AsOQqfULfOgaoj2o6Ze0-fwXwic8p4,15795
-fractal_server/app/runner/executors/slurm_common/base_slurm_runner.py,sha256=kmou-asQJ7SHBR0VPPiQrMLP9gv_NZG3s9t2yoszGhY,33870
+fractal_server/app/runner/executors/slurm_common/base_slurm_runner.py,sha256=bqFD9uZfwXqmVXsScrgANp09zvbuc_LSGCDAmBV0wqo,33379
 fractal_server/app/runner/executors/slurm_common/get_slurm_config.py,sha256=BW6fDpPyB0VH5leVxvwzkVH3r3hC7DuSyoWmRzHITWg,7305
-fractal_server/app/runner/executors/slurm_common/remote.py,sha256=L5llMccL6ctdFpDQvynJl5KbxtATX2wzpq13_3ppw-I,5929
+fractal_server/app/runner/executors/slurm_common/remote.py,sha256=WdzVSLYKOxLb9NmrvwZu0voXRPTO1gyKTUUuRJwu9Lc,5861
 fractal_server/app/runner/executors/slurm_common/slurm_job_task_models.py,sha256=RoxHLKOn0_wGjnY0Sv0a9nDSiqxYZHKRoMkT3p9_G1E,3607
 fractal_server/app/runner/executors/slurm_common/utils_executors.py,sha256=naPyJI0I3lD-sYHbSXbMFGUBK4h_SggA5V91Z1Ch1Xg,1416
 fractal_server/app/runner/executors/slurm_ssh/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 fractal_server/app/runner/executors/slurm_ssh/runner.py,sha256=yKK_cjskHDiasn_QQ-k14GhplP3tNaK7Kp4yiVn44Y0,9437
 fractal_server/app/runner/executors/slurm_sudo/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 fractal_server/app/runner/executors/slurm_sudo/_subprocess_run_as_user.py,sha256=BlOz4NElv3v7rUYefyeki33uaJxcSDk6rPuVZx9ocdw,2776
-fractal_server/app/runner/executors/slurm_sudo/runner.py,sha256=lPWkRT499mChP3dNLrdDjMT-nw7-LWv6g58kdF_sMRw,6290
-fractal_server/app/runner/extract_archive.py,sha256=I7UGIHXXuFvlgVPsP7GMWPu2-DiS1EiyBs7a1bvgkxI,2458
+fractal_server/app/runner/executors/slurm_sudo/runner.py,sha256=zT-DH61oPEq4dvo08EMnXv3QMYpwAZ3teY7R3MJ5G_8,6291
+fractal_server/app/runner/extract_archive.py,sha256=8h6ZX7Gy0Vqv5KmrEGbWGPuA0MvW207cQZ-8CPYjwXc,2800
 fractal_server/app/runner/filenames.py,sha256=lPnxKHtdRizr6FqG3zOdjDPyWA7GoaJGTtiuJV0gA8E,70
 fractal_server/app/runner/run_subprocess.py,sha256=c3JbYXq3hX2aaflQU19qJ5Xs6J6oXGNvnTEoAfv2bxc,959
 fractal_server/app/runner/set_start_and_last_task_index.py,sha256=-q4zVybAj8ek2XlbENKlfOAJ39hT_zoJoZkqzDqiAMY,1254
@@ -101,7 +101,7 @@ fractal_server/app/runner/v2/_slurm_sudo.py,sha256=TVihkQKMX6YWEWxXJjQo0WEQOjVy7
 fractal_server/app/runner/v2/db_tools.py,sha256=du5dKhMMFMErQXbGIgu9JvO_vtMensodyPsyDeqz1yQ,3324
 fractal_server/app/runner/v2/deduplicate_list.py,sha256=IVTE4abBU1bUprFTkxrTfYKnvkNTanWQ-KWh_etiT08,645
 fractal_server/app/runner/v2/merge_outputs.py,sha256=D1L4Taieq9i71SPQyNc1kMokgHh-sV_MqF3bv7QMDBc,907
-fractal_server/app/runner/v2/runner.py,sha256=UmUhAOOcwAT-8b28o5bWn5S9APtr5EbEvulxWJPo6r4,16269
+fractal_server/app/runner/v2/runner.py,sha256=tYG-LB9Dm9_Lz3uYgAhcIeeFLy5I6xMqCdVuUKNXyII,17717
 fractal_server/app/runner/v2/runner_functions.py,sha256=AzsE7VF6NMz_5qc0htQkfow5_2rr-wkx50vFJTndj8I,19250
 fractal_server/app/runner/v2/runner_functions_low_level.py,sha256=_h_OOffq3d7V0uHa8Uvs0mj31y1GSZBUXjDDF3WjVjY,3620
 fractal_server/app/runner/v2/submit_workflow.py,sha256=QywUGIoHAHnrWgfnyX8W9kVqKY-RvVyNLpzrbsXZOZ4,13075
@@ -130,14 +130,14 @@ fractal_server/app/schemas/v2/workflowtask.py,sha256=rVbmNihDAJL_Sckbt1hBK2JEcb-
 fractal_server/app/security/__init__.py,sha256=e2cveg5hQpieGD3bSPd5GTOMthvJ-HXH3buSb9WVfEU,14096
 fractal_server/app/security/signup_email.py,sha256=Xd6QYxcdmg0PHpDwmUE8XQmPcOj3Xjy5oROcIMhmltM,1472
 fractal_server/app/user_settings.py,sha256=OP1yiYKtPadxwM51_Q0hdPk3z90TCN4z1BLpQsXyWiU,1316
-fractal_server/config.py,sha256=83dHIuZMdMiu4LAtzVGBe_iD1nWEYOiKmeC-HHZ0nhw,28534
+fractal_server/config.py,sha256=A3j1bxa2inTNqIp5Ry0sY6DS72k7X_AKA3OUaDwoYbA,28439
 fractal_server/data_migrations/README.md,sha256=_3AEFvDg9YkybDqCLlFPdDmGJvr6Tw7HRI14aZ3LOIw,398
 fractal_server/data_migrations/tools.py,sha256=LeMeASwYGtEqd-3wOLle6WARdTGAimoyMmRbbJl-hAM,572
 fractal_server/gunicorn_fractal.py,sha256=u6U01TLGlXgq1v8QmEpLih3QnsInZD7CqphgJ_GrGzc,1230
 fractal_server/images/__init__.py,sha256=-_wjoKtSX02P1KjDxDP_EXKvmbONTRmbf7iGVTsyBpM,154
 fractal_server/images/models.py,sha256=jdGKMPi8WlO9Kvns4grIOU5LjujnvwIGjMFMC0wNy08,3501
 fractal_server/images/tools.py,sha256=-zFDzRv6cbbRo21OrD0eZY5qWcoMX8dxgEnfyI3tOcg,4140
-fractal_server/logger.py,sha256=5Z3rfsFwl8UysVljTOaaIvt8Pyp6CVH492ez3jE8WAw,5113
+fractal_server/logger.py,sha256=2QxBu5mB6xN3qWqj60nuxdrxcbxwzlx0xL47jKHB5PU,5385
 fractal_server/main.py,sha256=FD9KzTTsXTQnTW0z3Hu7y0Nj_oAkBeZEInKDXFd4hjE,4561
 fractal_server/migrations/env.py,sha256=nfyBpMIOT3kny6t-b-tUjyRjZ4k906bb1_wCQ7me1BI,1353
 fractal_server/migrations/naming_convention.py,sha256=htbKrVdetx3pklowb_9Cdo5RqeF0fJ740DNecY5de_M,265
@@ -187,14 +187,14 @@ fractal_server/tasks/utils.py,sha256=V7dj8o2AnoHhGSTYlqJHcRFhCIpmOrMOUhtiE_DvRVA
 fractal_server/tasks/v2/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 fractal_server/tasks/v2/local/__init__.py,sha256=9RVItnS7OyLsJOuJjWMCicaky4ASUPQEYD4SzDs0hOE,141
 fractal_server/tasks/v2/local/_utils.py,sha256=EvhmVwYjqaNyDCUMEsTWYOUXLgEwR1xr6bu32apCEI8,2491
-fractal_server/tasks/v2/local/collect.py,sha256=-ScW8Vm0cguwEDQarNAaZFtai0jFFVBrIaVgo2G9KCE,12204
-fractal_server/tasks/v2/local/deactivate.py,sha256=94s_RDND8aR5Y8RxFrRx61rZBMPGqOmFnBFLVKK1HVY,10038
-fractal_server/tasks/v2/local/reactivate.py,sha256=eBgFgq5xVKNr4DIDX7QU8xXerhwMrPaHDJ1wTth7aQc,6191
+fractal_server/tasks/v2/local/collect.py,sha256=w1s96nqyGRSY_zhII3HP8JaBdJmzxPdqTksUO-ZH9lo,12210
+fractal_server/tasks/v2/local/deactivate.py,sha256=o3lVnW35bjaRyRkjN6cpqe0WyE54sVydi-I5-tmLFNI,10044
+fractal_server/tasks/v2/local/reactivate.py,sha256=hMgRWEY30qi-0lCMdC2GmyGLeAXOZYJCzNgamiucjEM,6197
 fractal_server/tasks/v2/ssh/__init__.py,sha256=aSQbVi6Ummt9QzcSLWNmSqYjfdxrn9ROmqgH6bDpI7k,135
 fractal_server/tasks/v2/ssh/_utils.py,sha256=LjaEYVUJDChilu3YuhxuGWYRNnVJ_zqNE9SDHdRTIHY,2824
-fractal_server/tasks/v2/ssh/collect.py,sha256=gGFdKTawfi3xyUuslyB-QoNNtc7SXx23a7K7wNH8HhM,13562
-fractal_server/tasks/v2/ssh/deactivate.py,sha256=EAVH2HtyvmIFXqUwsGYhlJcAcVh_MvIOaKDY8AyBODw,11400
-fractal_server/tasks/v2/ssh/reactivate.py,sha256=8Rnbbny7TjMEAHhboqfgxBVZZK5UNNmh4Ud-0y3jaVM,7970
+fractal_server/tasks/v2/ssh/collect.py,sha256=I-a2st4jo8mzc0u0EScs54Ccr3tNm_as7RZmn1FTGiE,13568
+fractal_server/tasks/v2/ssh/deactivate.py,sha256=meiAVd4mvDR-ikclR0KXkSoJp3PbBYci-eZlOJbcI1c,11406
+fractal_server/tasks/v2/ssh/reactivate.py,sha256=T6YPbMwaWV7RUD33Dmpn3K9173BGuOv0s7Vpr_YOCH4,7976
 fractal_server/tasks/v2/templates/1_create_venv.sh,sha256=PK0jdHKtQpda1zULebBaVPORt4t6V17wa4N1ohcj5ac,548
 fractal_server/tasks/v2/templates/2_pip_install.sh,sha256=Md2LPt3BJ7IfN0USF2uivl4rP8OwbzJOUepGAr_Cicg,1836
 fractal_server/tasks/v2/templates/3_pip_freeze.sh,sha256=JldREScEBI4cD_qjfX4UK7V4aI-FnX9ZvVNxgpSOBFc,168
@@ -209,8 +209,8 @@ fractal_server/tasks/v2/utils_templates.py,sha256=Kc_nSzdlV6KIsO0CQSPs1w70zLyENP
 fractal_server/urls.py,sha256=QjIKAC1a46bCdiPMu3AlpgFbcv6a4l3ABcd5xz190Og,471
 fractal_server/utils.py,sha256=PMwrxWFxRTQRl1b9h-NRIbFGPKqpH_hXnkAT3NfZdpY,3571
 fractal_server/zip_tools.py,sha256=GjDgo_sf6V_DDg6wWeBlZu5zypIxycn_l257p_YVKGc,4876
-fractal_server-2.14.0a36.dist-info/LICENSE,sha256=QKAharUuhxL58kSoLizKJeZE3mTCBnX6ucmz8W0lxlk,1576
-fractal_server-2.14.0a36.dist-info/METADATA,sha256=5MOzziccWO5Ah9boFwgKLEMgJKoZbRLwHQvhcj4T0-w,4563
-fractal_server-2.14.0a36.dist-info/WHEEL,sha256=7dDg4QLnNKTvwIDR9Ac8jJaAmBC_owJrckbC0jjThyA,88
-fractal_server-2.14.0a36.dist-info/entry_points.txt,sha256=8tV2kynvFkjnhbtDnxAqImL6HMVKsopgGfew0DOp5UY,58
-fractal_server-2.14.0a36.dist-info/RECORD,,
+fractal_server-2.14.2.dist-info/LICENSE,sha256=QKAharUuhxL58kSoLizKJeZE3mTCBnX6ucmz8W0lxlk,1576
+fractal_server-2.14.2.dist-info/METADATA,sha256=LQDnOxlZL14XsZXYcQ42N-JR0NSMBfxN4qOP2raSiG4,4560
+fractal_server-2.14.2.dist-info/WHEEL,sha256=7dDg4QLnNKTvwIDR9Ac8jJaAmBC_owJrckbC0jjThyA,88
+fractal_server-2.14.2.dist-info/entry_points.txt,sha256=8tV2kynvFkjnhbtDnxAqImL6HMVKsopgGfew0DOp5UY,58
+fractal_server-2.14.2.dist-info/RECORD,,
fractal_server/app/routes/api/v2/verify_image_types.py DELETED
@@ -1,61 +0,0 @@
-from typing import Optional
-
-from fastapi import APIRouter
-from fastapi import Depends
-from fastapi import status
-
-from ._aux_functions import _get_dataset_check_owner
-from .images import ImageQuery
-from fractal_server.app.db import AsyncSession
-from fractal_server.app.db import get_async_db
-from fractal_server.app.models import UserOAuth
-from fractal_server.app.routes.auth import current_active_user
-from fractal_server.images.tools import aggregate_types
-from fractal_server.images.tools import filter_image_list
-
-router = APIRouter()
-
-
-@router.post(
-    "/project/{project_id}/dataset/{dataset_id}/images/verify-unique-types/",
-    status_code=status.HTTP_200_OK,
-)
-async def verify_unique_types(
-    project_id: int,
-    dataset_id: int,
-    query: Optional[ImageQuery] = None,
-    user: UserOAuth = Depends(current_active_user),
-    db: AsyncSession = Depends(get_async_db),
-) -> list[str]:
-    # Get dataset
-    output = await _get_dataset_check_owner(
-        project_id=project_id, dataset_id=dataset_id, user_id=user.id, db=db
-    )
-    dataset = output["dataset"]
-
-    # Filter images
-    if query is None:
-        filtered_images = dataset.images
-    else:
-        filtered_images = filter_image_list(
-            images=dataset.images,
-            attribute_filters=query.attribute_filters,
-            type_filters=query.type_filters,
-        )
-
-    # Get actual values for each available type
-    available_types = aggregate_types(filtered_images)
-    values_per_type: dict[str, set] = {
-        _type: set() for _type in available_types
-    }
-    for _img in filtered_images:
-        for _type in available_types:
-            values_per_type[_type].add(_img["types"].get(_type, False))
-
-    # Find types with non-unique value
-    non_unique_types = [
-        key for key, value in values_per_type.items() if len(value) > 1
-    ]
-    non_unique_types = sorted(non_unique_types)
-
-    return non_unique_types