fractal-server 2.14.10__py3-none-any.whl → 2.14.12__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
fractal_server/__init__.py
@@ -1 +1 @@
- __VERSION__ = "2.14.10"
+ __VERSION__ = "2.14.12"
fractal_server/app/routes/api/v2/history.py
@@ -1,5 +1,4 @@
  from copy import deepcopy
- from typing import Any

  from fastapi import APIRouter
  from fastapi import Depends
@@ -16,6 +15,7 @@ from ._aux_functions_history import get_history_run_or_404
  from ._aux_functions_history import get_history_unit_or_404
  from ._aux_functions_history import get_wftask_check_owner
  from ._aux_functions_history import read_log_file
+ from .images import ImagePage
  from .images import ImageQuery
  from fractal_server.app.db import AsyncSession
  from fractal_server.app.db import get_async_db
@@ -34,13 +34,11 @@ from fractal_server.app.schemas.v2 import HistoryUnitRead
  from fractal_server.app.schemas.v2 import HistoryUnitStatus
  from fractal_server.app.schemas.v2 import HistoryUnitStatusWithUnset
  from fractal_server.app.schemas.v2 import ImageLogsRequest
- from fractal_server.app.schemas.v2 import SingleImageWithStatus
- from fractal_server.images.image_status import enrich_image_list
- from fractal_server.images.image_status import IMAGE_STATUS_KEY
+ from fractal_server.images.status_tools import enrich_images_async
+ from fractal_server.images.status_tools import IMAGE_STATUS_KEY
  from fractal_server.images.tools import aggregate_attributes
  from fractal_server.images.tools import aggregate_types
  from fractal_server.images.tools import filter_image_list
- from fractal_server.images.tools import merge_type_filters
  from fractal_server.logger import set_logger


@@ -62,11 +60,6 @@ def check_historyrun_related_to_dataset_and_wftask(
          )


- class ImageWithStatusPage(PaginationResponse[SingleImageWithStatus]):
-     attributes: dict[str, list[Any]]
-     types: list[str]
-
-
  router = APIRouter()
  logger = set_logger(__name__)

@@ -299,11 +292,11 @@ async def get_history_images(
      dataset_id: int,
      workflowtask_id: int,
      request_body: ImageQuery,
-     unit_status: HistoryUnitStatusWithUnset | None = None,
      user: UserOAuth = Depends(current_active_user),
      db: AsyncSession = Depends(get_async_db),
      pagination: PaginationRequest = Depends(get_pagination_params),
- ) -> ImageWithStatusPage:
+ ) -> ImagePage:
+
      # Access control and object retrieval
      wftask = await get_wftask_check_owner(
          project_id=project_id,
@@ -320,82 +313,54 @@ async def get_history_images(
          db=db,
      )
      dataset = res["dataset"]
-     workflow = res["workflow"]

      # Setup prefix for logging
-     prefix = f"[DS{dataset.id}-WFT{wftask.id}-images]"
-
-     # (1) Get the type-filtered list of dataset images
-
-     # (1A) Reconstruct dataset type filters by starting from {} and making
-     # incremental updates with `output_types` of all previous tasks
-     inferred_dataset_type_filters = {}
-     for current_wftask in workflow.task_list[0 : wftask.order]:
-         inferred_dataset_type_filters.update(current_wftask.task.output_types)
-     logger.debug(f"{prefix} {inferred_dataset_type_filters=}")
-     # (1B) Compute type filters for the current wftask
-     type_filters_patch = merge_type_filters(
-         task_input_types=wftask.task.input_types,
-         wftask_type_filters=wftask.type_filters,
-     )
-     logger.debug(f"{prefix} {type_filters_patch=}")
-     # (1C) Combine dataset type filters (lower priority) and current-wftask
-     # filters (higher priority)
-     actual_filters = inferred_dataset_type_filters
-     actual_filters.update(type_filters_patch)
-     logger.debug(f"{prefix} {actual_filters=}")
-     # (1D) Get all matching images from the dataset
-
-     pre_filtered_dataset_images = filter_image_list(
+     prefix = f"[DS{dataset.id}-WFT{workflowtask_id}-images]"
+
+     # (1) Apply type filters
+     type_filtered_images = filter_image_list(
          images=dataset.images,
-         type_filters=inferred_dataset_type_filters,
+         type_filters=request_body.type_filters,
      )

-     full_images_list = await enrich_image_list(
+     # (2) Extract valid values for attributes and types
+     attributes = aggregate_attributes(type_filtered_images)
+     attributes[IMAGE_STATUS_KEY] = [
+         HistoryUnitStatusWithUnset.DONE,
+         HistoryUnitStatusWithUnset.SUBMITTED,
+         HistoryUnitStatusWithUnset.FAILED,
+         HistoryUnitStatusWithUnset.UNSET,
+     ]
+     types = aggregate_types(type_filtered_images)
+
+     # (3) Enrich images with status attribute
+     type_filtered_images_with_status = await enrich_images_async(
          dataset_id=dataset_id,
          workflowtask_id=workflowtask_id,
-         images=pre_filtered_dataset_images,
+         images=type_filtered_images,
          db=db,
      )

-     if unit_status is not None:
-         request_body.attribute_filters[IMAGE_STATUS_KEY] = unit_status
-
-     filtered_dataset_images = filter_image_list(
-         full_images_list,
-         type_filters=request_body.type_filters,
+     # (4) Apply attribute filters
+     final_images_with_status = filter_image_list(
+         type_filtered_images_with_status,
          attribute_filters=request_body.attribute_filters,
      )
-     logger.debug(f"{prefix} {len(dataset.images)=}")
-     logger.debug(f"{prefix} {len(filtered_dataset_images)=}")
-
-     attributes = aggregate_attributes(pre_filtered_dataset_images)
-     types = aggregate_types(pre_filtered_dataset_images)

-     # Final list of objects
+     logger.debug(f"{prefix} {len(dataset.images)=}")
+     logger.debug(f"{prefix} {len(final_images_with_status)=}")

-     total_count = len(filtered_dataset_images)
+     # (5) Apply pagination logic
+     total_count = len(final_images_with_status)
      page_size = pagination.page_size or total_count
      sorted_images_list = sorted(
-         filtered_dataset_images,
+         final_images_with_status,
          key=lambda image: image["zarr_url"],
      )
      paginated_images_list = sorted_images_list[
          (pagination.page - 1) * page_size : pagination.page * page_size
      ]

-     # FIXME: This is only for backwards-compatibility. To remove when we
-     # update the webclient
-     paginated_images_list = [
-         {
-             **img,
-             "status": (
-                 lambda x: None if x == HistoryUnitStatusWithUnset.UNSET else x
-             )(img["attributes"].pop(IMAGE_STATUS_KEY)),
-         }
-         for img in paginated_images_list
-     ]
-
      return dict(
          current_page=pagination.page,
          page_size=page_size,
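
Note on the hunk above: the `unit_status` query parameter is gone and the endpoint now returns the plain `ImagePage` from images.py. The status constraint instead travels inside the request body, via the reserved attribute key from `fractal_server.images.status_tools`. A minimal sketch of the new request shape (IDs, route, and values are hypothetical, not taken from this diff):

    IMAGE_STATUS_KEY = "__wftask_dataset_image_status__"

    request_body = {
        "type_filters": {"is_3D": False},
        "attribute_filters": {
            # Previously expressed as the `?unit_status=failed` query parameter
            IMAGE_STATUS_KEY: ["failed"],
            "well": ["A01", "B03"],
        },
    }
    # e.g., with an authenticated httpx/requests client (hypothetical route):
    # client.post(
    #     f"/api/v2/project/{project_id}/status/images/"
    #     f"?dataset_id={dataset_id}&workflowtask_id={workflowtask_id}",
    #     json=request_body,
    # )
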
fractal_server/app/routes/api/v2/images.py
@@ -1,5 +1,3 @@
- from typing import Any
-
  from fastapi import APIRouter
  from fastapi import Depends
  from fastapi import HTTPException
@@ -26,6 +24,7 @@ from fractal_server.images.tools import aggregate_types
  from fractal_server.images.tools import find_image_by_zarr_url
  from fractal_server.images.tools import match_filter
  from fractal_server.types import AttributeFilters
+ from fractal_server.types import ImageAttributeValue
  from fractal_server.types import TypeFilters

  router = APIRouter()
@@ -33,11 +32,19 @@ router = APIRouter()

  class ImagePage(PaginationResponse[SingleImage]):

-     attributes: dict[str, list[Any]]
+     attributes: dict[str, list[ImageAttributeValue]]
      types: list[str]


  class ImageQuery(BaseModel):
+     """
+     Query for a list of images.
+
+     Attributes:
+         type_filters:
+         attribute_filters:
+     """
+
      type_filters: TypeFilters = Field(default_factory=dict)
      attribute_filters: AttributeFilters = Field(default_factory=dict)
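
The `attributes` field of `ImagePage` is now typed with `ImageAttributeValue` instead of `Any`, and `ImageQuery` gains a docstring. A standalone sketch of how the two request fields behave (simplified stand-ins for the `fractal_server.types` aliases, assuming pydantic v2):

    from pydantic import BaseModel, Field

    # Illustrative stand-ins; the real aliases live in fractal_server.types.
    TypeFilters = dict[str, bool]
    AttributeFilters = dict[str, list[str | int | float | bool]]

    class ImageQuery(BaseModel):
        type_filters: TypeFilters = Field(default_factory=dict)
        attribute_filters: AttributeFilters = Field(default_factory=dict)

    # Both filter families default to "no filtering":
    assert ImageQuery().type_filters == {}
    query = ImageQuery(attribute_filters={"well": ["A01"]})
    assert query.attribute_filters["well"] == ["A01"]
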
fractal_server/app/runner/v2/runner.py
@@ -33,10 +33,23 @@ from fractal_server.app.runner.v2.db_tools import update_status_of_history_run
  from fractal_server.app.schemas.v2 import HistoryUnitStatus
  from fractal_server.app.schemas.v2 import TaskDumpV2
  from fractal_server.app.schemas.v2 import TaskGroupDumpV2
+ from fractal_server.images.status_tools import enrich_images_sync
+ from fractal_server.images.status_tools import IMAGE_STATUS_KEY
  from fractal_server.images.tools import merge_type_filters
  from fractal_server.types import AttributeFilters


+ def _remove_status_from_attributes(
+     images: list[dict[str, Any]],
+ ) -> list[dict[str, Any]]:
+     """
+     Drop attribute `IMAGE_STATUS_KEY` from all images.
+     """
+     images_copy = deepcopy(images)
+     [img["attributes"].pop(IMAGE_STATUS_KEY) for img in images_copy]
+     return images_copy
+
+
  def drop_none_attributes(attributes: dict[str, Any]) -> dict[str, Any]:
      # Unset attributes with `None` value
      non_none_attributes = {
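
The new `_remove_status_from_attributes` helper strips the transient status attribute before images are written back to the database; the bare list comprehension is used only for its `pop` side effect on the deep copy, leaving the caller's list intact. A self-contained illustration (fabricated image data):

    from copy import deepcopy

    IMAGE_STATUS_KEY = "__wftask_dataset_image_status__"  # as in status_tools.py

    images = [
        {
            "zarr_url": "/tmp/plate.zarr/A/01/0",  # fabricated
            "attributes": {"well": "A01", IMAGE_STATUS_KEY: "done"},
            "types": {"is_3D": False},
        }
    ]
    images_copy = deepcopy(images)
    [img["attributes"].pop(IMAGE_STATUS_KEY) for img in images_copy]

    assert IMAGE_STATUS_KEY not in images_copy[0]["attributes"]
    assert IMAGE_STATUS_KEY in images[0]["attributes"]  # original untouched
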
@@ -106,7 +119,11 @@ def execute_tasks_v2(
      tmp_images = deepcopy(dataset.images)
      current_dataset_type_filters = copy(job_type_filters)

-     for wftask in wf_task_list:
+     ENRICH_IMAGES_WITH_STATUS: bool = (
+         IMAGE_STATUS_KEY in job_attribute_filters.keys()
+     )
+
+     for ind_wftask, wftask in enumerate(wf_task_list):
          task = wftask.task
          task_name = task.name
          logger.debug(f'SUBMIT {wftask.order}-th task (name="{task_name}")')
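
`ENRICH_IMAGES_WITH_STATUS` records whether the submitted job filters on the reserved status key at all, so the extra status query runs only for jobs that need it:

    IMAGE_STATUS_KEY = "__wftask_dataset_image_status__"

    # Job that filters on status: enrichment runs before the first task.
    job_attribute_filters = {IMAGE_STATUS_KEY: ["failed"]}
    assert IMAGE_STATUS_KEY in job_attribute_filters.keys()

    # Ordinary job: images keep their stored attributes, no extra query.
    job_attribute_filters = {"well": ["A01"]}
    assert IMAGE_STATUS_KEY not in job_attribute_filters.keys()
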
@@ -122,15 +139,22 @@ def execute_tasks_v2(
                  wftask_type_filters=wftask.type_filters,
              )
              type_filters.update(type_filters_patch)
+
+             if ind_wftask == 0 and ENRICH_IMAGES_WITH_STATUS:
+                 # FIXME: Could this be done on `type_filtered_images`?
+                 tmp_images = enrich_images_sync(
+                     images=tmp_images,
+                     dataset_id=dataset.id,
+                     workflowtask_id=wftask.id,
+                 )
              type_filtered_images = filter_image_list(
                  images=tmp_images,
                  type_filters=type_filters,
-                 attribute_filters=None,
              )
              num_available_images = len(type_filtered_images)
+
              filtered_images = filter_image_list(
                  images=type_filtered_images,
-                 type_filters=None,
                  attribute_filters=job_attribute_filters,
              )
          else:
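
With the explicit `attribute_filters=None` / `type_filters=None` arguments dropped, each `filter_image_list` call now passes only the filter family it applies: first a type-based narrowing (whose size becomes `num_available_images`), then the job's attribute filters. A rough, database-free sketch of that two-step narrowing (illustrative matching logic only, not the package's `filter_image_list` from `fractal_server.images.tools`):

    from typing import Any

    def match_types(img: dict[str, Any], type_filters: dict[str, bool]) -> bool:
        # Assumption for this sketch: a missing type counts as False.
        return all(
            img["types"].get(k, False) == v for k, v in type_filters.items()
        )

    def match_attributes(
        img: dict[str, Any], attribute_filters: dict[str, list[Any]]
    ) -> bool:
        # Each entry lists the allowed values for one attribute.
        return all(
            img["attributes"].get(k) in allowed
            for k, allowed in attribute_filters.items()
        )

    images = [
        {"attributes": {"well": "A01"}, "types": {"is_3D": False}},
        {"attributes": {"well": "B03"}, "types": {"is_3D": True}},
    ]
    type_filtered = [i for i in images if match_types(i, {"is_3D": False})]
    num_available_images = len(type_filtered)  # counted before attribute filters
    filtered = [
        i for i in type_filtered if match_attributes(i, {"well": ["A01"]})
    ]
    assert (num_available_images, len(filtered)) == (1, 1)
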
@@ -379,7 +403,11 @@ def execute_tasks_v2(
          with next(get_sync_db()) as db:
              # Write current dataset images into the database.
              db_dataset = db.get(DatasetV2, dataset.id)
-             db_dataset.images = tmp_images
+             if ENRICH_IMAGES_WITH_STATUS:
+
+                 db_dataset.images = _remove_status_from_attributes(tmp_images)
+             else:
+                 db_dataset.images = tmp_images
              flag_modified(db_dataset, "images")
              db.merge(db_dataset)

fractal_server/app/schemas/v2/__init__.py
@@ -16,7 +16,6 @@ from .history import HistoryUnitRead # noqa F401
  from .history import HistoryUnitStatus # noqa F401
  from .history import HistoryUnitStatusWithUnset # noqa F401
  from .history import ImageLogsRequest # noqa F401
- from .history import SingleImageWithStatus # noqa F401
  from .job import JobCreateV2 # noqa F401
  from .job import JobReadV2 # noqa F401
  from .job import JobStatusTypeV2 # noqa F401
fractal_server/app/schemas/v2/history.py
@@ -6,8 +6,6 @@ from pydantic import AwareDatetime
  from pydantic import BaseModel
  from pydantic import field_serializer

- from ....images import SingleImage
-

  class HistoryUnitStatus(StrEnum):
      """
@@ -83,8 +81,3 @@ class ImageLogsRequest(BaseModel):
      workflowtask_id: int
      dataset_id: int
      zarr_url: str
-
-
- # FIXME: remove this when we update the webclient
- class SingleImageWithStatus(SingleImage):
-     status: HistoryUnitStatus | None = None
fractal_server/images/status_tools.py (new file)
@@ -0,0 +1,174 @@
+ import time
+ from copy import deepcopy
+ from typing import Any
+
+ from sqlalchemy import Select
+ from sqlalchemy.ext.asyncio import AsyncSession
+ from sqlmodel import select
+
+ from fractal_server.app.db import get_sync_db
+ from fractal_server.app.models.v2 import HistoryImageCache
+ from fractal_server.app.models.v2 import HistoryUnit
+ from fractal_server.app.schemas.v2 import HistoryUnitStatusWithUnset
+ from fractal_server.logger import set_logger
+ from fractal_server.types import ImageAttributeValue
+
+ logger = set_logger(__name__)
+
+
+ IMAGE_STATUS_KEY = "__wftask_dataset_image_status__"
+
+
+ def _enriched_image(*, img: dict[str, Any], status: str) -> dict[str, Any]:
+     img["attributes"][IMAGE_STATUS_KEY] = status
+     return img
+
+
+ def _prepare_query(
+     *,
+     dataset_id: int,
+     workflowtask_id: int,
+     zarr_urls: list[str],
+ ) -> Select:
+     stm = (
+         select(HistoryImageCache.zarr_url, HistoryUnit.status)
+         .join(HistoryUnit)
+         .where(HistoryImageCache.dataset_id == dataset_id)
+         .where(HistoryImageCache.workflowtask_id == workflowtask_id)
+         .where(HistoryImageCache.latest_history_unit_id == HistoryUnit.id)
+         .where(HistoryImageCache.zarr_url.in_(zarr_urls))
+         .order_by(HistoryImageCache.zarr_url)
+     )
+     return stm
+
+
+ async def enrich_images_async(
+     *,
+     images: list[dict[str, Any]],
+     dataset_id: int,
+     workflowtask_id: int,
+     db: AsyncSession,
+ ) -> list[dict[str, ImageAttributeValue]]:
+     """
+     Enrich images with a status-related attribute.
+
+     Args:
+         images: The input image list
+         dataset_id: The dataset ID
+         workflowtask_id: The workflow-task ID
+         db: An async db session
+
+     Returns:
+         The list of enriched images
+     """
+     t_0 = time.perf_counter()
+     logger.info(
+         f"[enrich_images_async] START, {dataset_id=}, {workflowtask_id=}"
+     )
+
+     zarr_url_to_image = {img["zarr_url"]: deepcopy(img) for img in images}
+
+     res = await db.execute(
+         _prepare_query(
+             dataset_id=dataset_id,
+             workflowtask_id=workflowtask_id,
+             zarr_urls=zarr_url_to_image.keys(),
+         )
+     )
+     list_processed_url_status = res.all()
+     t_1 = time.perf_counter()
+     logger.debug(f"[enrich_images_async] db-query, elapsed={t_1 - t_0:.3f} s")
+
+     set_processed_urls = set(item[0] for item in list_processed_url_status)
+     processed_images_with_status = [
+         _enriched_image(
+             img=zarr_url_to_image[item[0]],
+             status=item[1],
+         )
+         for item in list_processed_url_status
+     ]
+     t_2 = time.perf_counter()
+     logger.debug(
+         "[enrich_images_async] processed-images, " f"elapsed={t_2 - t_1:.3f} s"
+     )
+
+     non_processed_urls = zarr_url_to_image.keys() - set_processed_urls
+     non_processed_images_with_status = [
+         _enriched_image(
+             img=zarr_url_to_image[zarr_url],
+             status=HistoryUnitStatusWithUnset.UNSET,
+         )
+         for zarr_url in non_processed_urls
+     ]
+     t_3 = time.perf_counter()
+     logger.debug(
+         "[enrich_images_async] non-processed-images, "
+         f"elapsed={t_3 - t_2:.3f} s"
+     )
+
+     return processed_images_with_status + non_processed_images_with_status
+
+
+ def enrich_images_sync(
+     *,
+     images: list[dict[str, Any]],
+     dataset_id: int,
+     workflowtask_id: int,
+ ) -> list[dict[str, ImageAttributeValue]]:
+     """
+     Enrich images with a status-related attribute.
+
+     Args:
+         images: The input image list
+         dataset_id: The dataset ID
+         workflowtask_id: The workflow-task ID
+
+     Returns:
+         The list of enriched images
+     """
+     t_0 = time.perf_counter()
+     logger.info(
+         f"[enrich_images_sync] START, {dataset_id=}, {workflowtask_id=}"
+     )
+
+     zarr_url_to_image = {img["zarr_url"]: deepcopy(img) for img in images}
+     with next(get_sync_db()) as db:
+         res = db.execute(
+             _prepare_query(
+                 dataset_id=dataset_id,
+                 workflowtask_id=workflowtask_id,
+                 zarr_urls=zarr_url_to_image.keys(),
+             )
+         )
+         list_processed_url_status = res.all()
+     t_1 = time.perf_counter()
+     logger.debug(f"[enrich_images_sync] db-query, elapsed={t_1 - t_0:.3f} s")
+
+     set_processed_urls = set(item[0] for item in list_processed_url_status)
+     processed_images_with_status = [
+         _enriched_image(
+             img=zarr_url_to_image[item[0]],
+             status=item[1],
+         )
+         for item in list_processed_url_status
+     ]
+     t_2 = time.perf_counter()
+     logger.debug(
+         "[enrich_images_sync] processed-images, " f"elapsed={t_2 - t_1:.3f} s"
+     )
+
+     non_processed_urls = zarr_url_to_image.keys() - set_processed_urls
+     non_processed_images_with_status = [
+         _enriched_image(
+             img=zarr_url_to_image[zarr_url],
+             status=HistoryUnitStatusWithUnset.UNSET,
+         )
+         for zarr_url in non_processed_urls
+     ]
+     t_3 = time.perf_counter()
+     logger.debug(
+         "[enrich_images_sync] non-processed-images, "
+         f"elapsed={t_3 - t_2:.3f} s"
+     )
+
+     return processed_images_with_status + non_processed_images_with_status
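
Both new functions share the same bookkeeping: a single query over `HistoryImageCache`/`HistoryUnit` yields `(zarr_url, status)` pairs for already-processed images, and every other image falls back to the UNSET status. A database-free sketch of that set arithmetic (fabricated URLs, statuses as plain strings):

    from copy import deepcopy

    IMAGE_STATUS_KEY = "__wftask_dataset_image_status__"

    images = [{"zarr_url": f"/z/{i}", "attributes": {}} for i in range(3)]
    # Stand-in for the HistoryImageCache/HistoryUnit query result:
    query_result = [("/z/0", "done"), ("/z/1", "failed")]

    zarr_url_to_image = {img["zarr_url"]: deepcopy(img) for img in images}
    for url, status in query_result:
        zarr_url_to_image[url]["attributes"][IMAGE_STATUS_KEY] = status
    processed = set(url for url, _ in query_result)
    for url in zarr_url_to_image.keys() - processed:
        zarr_url_to_image[url]["attributes"][IMAGE_STATUS_KEY] = "unset"

    assert zarr_url_to_image["/z/2"]["attributes"][IMAGE_STATUS_KEY] == "unset"
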
fractal_server/images/tools.py
@@ -125,7 +125,7 @@ def merge_type_filters(
  def aggregate_attributes(images: list[dict[str, Any]]) -> dict[str, list[Any]]:
      """
      Given a list of images, this function returns a dictionary of all image
-     attributes, each mapped to a list of present values.
+     attributes, each mapped to a sorted list of existing values.
      """
      attributes = {}
      for image in images:
@@ -133,7 +133,10 @@ def aggregate_attributes(images: list[dict[str, Any]]) -> dict[str, list[Any]]:
          attributes.setdefault(k, []).append(v)
      for k, v in attributes.items():
          attributes[k] = list(set(v))
-     return attributes
+     sorted_attributes = {
+         key: sorted(value) for key, value in attributes.items()
+     }
+     return sorted_attributes


  def aggregate_types(images: list[dict[str, Any]]) -> list[str]:
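
The `aggregate_attributes` change makes the aggregated value lists deterministic: duplicates are still dropped via `set()`, and the surviving values are now returned sorted. A compact illustration:

    images = [
        {"attributes": {"well": "B03"}},
        {"attributes": {"well": "A01"}},
        {"attributes": {"well": "B03"}},
    ]
    attributes = {}
    for image in images:
        for k, v in image["attributes"].items():
            attributes.setdefault(k, []).append(v)
    for k, v in attributes.items():
        attributes[k] = list(set(v))  # dedup; ordering not guaranteed
    sorted_attributes = {key: sorted(value) for key, value in attributes.items()}
    assert sorted_attributes == {"well": ["A01", "B03"]}
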
fractal_server/tasks/v2/templates/2_pip_install.sh
@@ -24,31 +24,21 @@ write_log "END upgrade pip and install setuptools"
  echo

  # Install package
- write_log "START install ${INSTALL_STRING}"
+ write_log "START install with INSTALL_STRING=${INSTALL_STRING}"
  "$VENVPYTHON" -m pip install ${FRACTAL_PIP_CACHE_DIR_ARG} "$INSTALL_STRING"
- write_log "END install ${INSTALL_STRING}"
+ write_log "END install with INSTALL_STRING=${INSTALL_STRING}"
  echo

-
- # Optionally install pinned versions
+ # Install pinned packages (note: do not quote $PINNED_PACKAGE_LIST, since it may hold several space-separated items, e.g. "numpy==1.2.3 torch==3.2.1")
  if [ "$PINNED_PACKAGE_LIST" != "" ]; then
-     write_log "START installing pinned versions $PINNED_PACKAGE_LIST"
-     for PINNED_PKG_VERSION in $PINNED_PACKAGE_LIST; do
-
-         PKGNAME=$(echo "$PINNED_PKG_VERSION" | cut -d '=' -f 1)
-         write_log "INFO: package name $PKGNAME"
-         "$VENVPYTHON" -m pip show "$PKGNAME"
-
-     done
-
-     write_log "All packages in ${PINNED_PACKAGE_LIST} are already installed, proceed with specific versions."
-     "$VENVPYTHON" -m pip install ${FRACTAL_PIP_CACHE_DIR_ARG} "$PINNED_PACKAGE_LIST"
-     write_log "END installing pinned versions $PINNED_PACKAGE_LIST"
+     write_log "START install with PINNED_PACKAGE_LIST=${PINNED_PACKAGE_LIST}"
+     "$VENVPYTHON" -m pip install ${FRACTAL_PIP_CACHE_DIR_ARG} $PINNED_PACKAGE_LIST
+     write_log "END install with PINNED_PACKAGE_LIST=${PINNED_PACKAGE_LIST}"
+     echo
  else
      write_log "SKIP installing pinned versions $PINNED_PACKAGE_LIST (empty list)"
  fi

-
  # End
  TIME_END=$(date +%s)
  write_log "All good up to here."
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.3
2
2
  Name: fractal-server
3
- Version: 2.14.10
3
+ Version: 2.14.12
4
4
  Summary: Backend component of the Fractal analytics platform
5
5
  License: BSD-3-Clause
6
6
  Author: Tommaso Comparin
fractal_server-2.14.10.dist-info/RECORD → fractal_server-2.14.12.dist-info/RECORD
@@ -1,4 +1,4 @@
- fractal_server/__init__.py,sha256=4KxVhiHLgADdi86yBYpB-0c9NEhimJDPdBjyV6O1z1M,24
+ fractal_server/__init__.py,sha256=2Uq7ENBsUHaOd4e_XjB355CgrpEtZcAw2o31N0wSuJE,24
  fractal_server/__main__.py,sha256=rkM8xjY1KeS3l63irB8yCrlVobR-73uDapC4wvrIlxI,6957
  fractal_server/alembic.ini,sha256=MWwi7GzjzawI9cCAK1LW7NxIBQDUqD12-ptJoq5JpP0,3153
  fractal_server/app/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -36,8 +36,8 @@ fractal_server/app/routes/api/v2/_aux_functions_task_lifecycle.py,sha256=GpKfw9y
  fractal_server/app/routes/api/v2/_aux_functions_task_version_update.py,sha256=WLDOYCnb6fnS5avKflyx6yN24Vo1n5kJk5ZyiKbzb8Y,1175
  fractal_server/app/routes/api/v2/_aux_functions_tasks.py,sha256=MFYnyNPBACSHXTDLXe6cSennnpmlpajN84iivOOMW7Y,11599
  fractal_server/app/routes/api/v2/dataset.py,sha256=6u4MFqJ3YZ0Zq6Xx8CRMrTPKW55ZaR63Uno21DqFr4Q,8889
- fractal_server/app/routes/api/v2/history.py,sha256=OHy3Y4lreGyGXk9v5iud73tzoGV-YAT027gMWJH-5p4,16958
- fractal_server/app/routes/api/v2/images.py,sha256=tJn0ANv4Tz2KHyb41sPbBRmSCpt632m8HEcgC3u-rHk,7709
+ fractal_server/app/routes/api/v2/history.py,sha256=BEmf_ENF5HNMy8yXrxRdo4280rWuRUa1Jw4u8R9-LQQ,15477
+ fractal_server/app/routes/api/v2/images.py,sha256=TS1ltUhP0_SaViupdHrSh3MLDi5OVk-lOhE1VCVyZj0,7869
  fractal_server/app/routes/api/v2/job.py,sha256=8xRTwh_OCHmK9IfI_zUASa2ozewR0qu0zVBl_a4IvHw,6467
  fractal_server/app/routes/api/v2/pre_submission_checks.py,sha256=MmjvSQ0pNAWEw5BavR16zIZ4h13py302AmToaz8Vvco,4768
  fractal_server/app/routes/api/v2/project.py,sha256=ldMEyjtwGpX2teu85sCNWaubDFlw-En8U1SA7G1VaIw,4567
@@ -101,7 +101,7 @@ fractal_server/app/runner/v2/_slurm_sudo.py,sha256=Gvsh4tUlc1_3KdF3B7zEqs-YIntC_
  fractal_server/app/runner/v2/db_tools.py,sha256=du5dKhMMFMErQXbGIgu9JvO_vtMensodyPsyDeqz1yQ,3324
  fractal_server/app/runner/v2/deduplicate_list.py,sha256=IVTE4abBU1bUprFTkxrTfYKnvkNTanWQ-KWh_etiT08,645
  fractal_server/app/runner/v2/merge_outputs.py,sha256=D1L4Taieq9i71SPQyNc1kMokgHh-sV_MqF3bv7QMDBc,907
- fractal_server/app/runner/v2/runner.py,sha256=YkxHhWA25Mv7HzviCaYMTxkyuP-JFHj73H9aPO6Fxm0,17713
+ fractal_server/app/runner/v2/runner.py,sha256=_vmFdJCqNZVK9sQa7MoAIemkRX-hakLm5RLCBz8QxDg,18667
  fractal_server/app/runner/v2/runner_functions.py,sha256=Q9AVIR2NEBfRpfqW1wtQTTQfks_R1TnwRFBRro2fvjQ,18837
  fractal_server/app/runner/v2/submit_workflow.py,sha256=AMnXdozwIGlXD55ch0_SNAG-ntKBO-QRhkbInrvsShU,13140
  fractal_server/app/runner/v2/task_interface.py,sha256=V2TWBK6tbhycyMrJvFaoJ9IpuKlrLrvmjJbfNMsBBXo,2527
@@ -110,11 +110,11 @@ fractal_server/app/schemas/__init__.py,sha256=stURAU_t3AOBaH0HSUbV-GKhlPKngnnIMo
  fractal_server/app/schemas/user.py,sha256=t9nbyYjGCSOsxm9K97PDG3-9o27CsaFfhWb_L5nrjqA,1910
  fractal_server/app/schemas/user_group.py,sha256=x3-kqbo0q2wTP7QI0iZ7PU_9Dr957UYrFMKqS7BXLhE,1425
  fractal_server/app/schemas/user_settings.py,sha256=NpdC0Me0fgwwdfJuTSlFLCnLUjiWWzrJlPn_UPLjXnw,1862
- fractal_server/app/schemas/v2/__init__.py,sha256=M49RJ8SKcVoSfSTuiTCcbexSo8JMtLQTVFltCW4CuGQ,3103
+ fractal_server/app/schemas/v2/__init__.py,sha256=ft9gFmLLClNbWE8pwmG81fuLl1hlfdSsb8TvLoX3Sqk,3047
  fractal_server/app/schemas/v2/accounting.py,sha256=Wylt7uWTiDIFlHJOh4XEtYitk2FjFlmnodDrJDxcr0E,397
  fractal_server/app/schemas/v2/dataset.py,sha256=NKCjBwGBC7mPiSlXktZAcleJsvlLY6KfNKw7Wx4Zfqk,1728
  fractal_server/app/schemas/v2/dumps.py,sha256=o4RiWoSmQ8UPoWxgKoeORykGNIdczeNmm-ng-dBRD7k,2216
- fractal_server/app/schemas/v2/history.py,sha256=BCOii7GW7OvcDr1mLZl5-kYxtczzefQciuAxp95zrFk,1958
+ fractal_server/app/schemas/v2/history.py,sha256=pZiMKfh6nMWbTp5MUtrnGySPKbeRFf5tM1VLFaTgGcw,1784
  fractal_server/app/schemas/v2/job.py,sha256=fPay7dLSr-skKRdVRoZig8rf_sZwUdVdHZaJ4XM8vMI,3288
  fractal_server/app/schemas/v2/manifest.py,sha256=sZhj99iDgjE2MWXeTxnXSb6pFdKwRnFpCVQzcnpoTrI,6821
  fractal_server/app/schemas/v2/project.py,sha256=l96-3bCfB3knhITaLj1WSyBgbzP_k8CdtvgX_5jO_fU,657
@@ -133,9 +133,9 @@ fractal_server/data_migrations/README.md,sha256=_3AEFvDg9YkybDqCLlFPdDmGJvr6Tw7H
  fractal_server/data_migrations/tools.py,sha256=LeMeASwYGtEqd-3wOLle6WARdTGAimoyMmRbbJl-hAM,572
  fractal_server/gunicorn_fractal.py,sha256=u6U01TLGlXgq1v8QmEpLih3QnsInZD7CqphgJ_GrGzc,1230
  fractal_server/images/__init__.py,sha256=-_wjoKtSX02P1KjDxDP_EXKvmbONTRmbf7iGVTsyBpM,154
- fractal_server/images/image_status.py,sha256=2EUxvuYEFKSKoICEiQntT5P3Un7FS8gv2Ef1HZFUfF4,2667
  fractal_server/images/models.py,sha256=6WchcIzLLLwdkLNRfg71Dl4Y-9UFLPyrrzh1lWgjuP0,1245
- fractal_server/images/tools.py,sha256=XKhbdjfWZpTSe1akK1bSQl4gzEQlj9ETDbELkuwayVg,4066
+ fractal_server/images/status_tools.py,sha256=tLp-Sojlhf-eQ97O1hj-2fg2zmgHfED9EXkec3Jjz_0,5141
+ fractal_server/images/tools.py,sha256=92kmt2Fnyp8ycTbyuar9_U8kJTi0wKpBk8ZagARWl9Y,4177
  fractal_server/logger.py,sha256=QIeVn3QpZsiIL2jDdrKotr-MLyDcZYgiPiTluFU46lE,5317
  fractal_server/main.py,sha256=FD9KzTTsXTQnTW0z3Hu7y0Nj_oAkBeZEInKDXFd4hjE,4561
  fractal_server/migrations/env.py,sha256=nfyBpMIOT3kny6t-b-tUjyRjZ4k906bb1_wCQ7me1BI,1353
@@ -196,7 +196,7 @@ fractal_server/tasks/v2/ssh/collect.py,sha256=bClq8hB04igrUXk1Mgc7pRWQws77mpYX8K
  fractal_server/tasks/v2/ssh/deactivate.py,sha256=YO2PJ0VV-LhVW-6O-t-d6BQciO2fYAkYbz5Y9UBiXaA,12928
  fractal_server/tasks/v2/ssh/reactivate.py,sha256=1DIQduhqZLbrIeoVyyp54vemBWZu94tFDvjpmDsZZI0,8818
  fractal_server/tasks/v2/templates/1_create_venv.sh,sha256=PK0jdHKtQpda1zULebBaVPORt4t6V17wa4N1ohcj5ac,548
- fractal_server/tasks/v2/templates/2_pip_install.sh,sha256=Md2LPt3BJ7IfN0USF2uivl4rP8OwbzJOUepGAr_Cicg,1836
+ fractal_server/tasks/v2/templates/2_pip_install.sh,sha256=jMJPQJXHKznO6fxOOXtFXKPdCmTf1VLLWj_JL_ZdKxo,1644
  fractal_server/tasks/v2/templates/3_pip_freeze.sh,sha256=JldREScEBI4cD_qjfX4UK7V4aI-FnX9ZvVNxgpSOBFc,168
  fractal_server/tasks/v2/templates/4_pip_show.sh,sha256=qm1vPy6AkKhWDjCJGXS8LqCLYO3KsAyRK325ZsFcF6U,1747
  fractal_server/tasks/v2/templates/5_get_venv_size_and_file_number.sh,sha256=q-6ZUvA6w6FDVEoSd9O63LaJ9tKZc7qAFH72SGPrd_k,284
@@ -214,8 +214,8 @@ fractal_server/types/validators/_workflow_task_arguments_validators.py,sha256=HL
  fractal_server/urls.py,sha256=QjIKAC1a46bCdiPMu3AlpgFbcv6a4l3ABcd5xz190Og,471
  fractal_server/utils.py,sha256=FCY6HUsRnnbsWkT2kwQ2izijiHuCrCD3Kh50G0QudxE,3531
  fractal_server/zip_tools.py,sha256=tqz_8f-vQ9OBRW-4OQfO6xxY-YInHTyHmZxU7U4PqZo,4885
- fractal_server-2.14.10.dist-info/LICENSE,sha256=QKAharUuhxL58kSoLizKJeZE3mTCBnX6ucmz8W0lxlk,1576
- fractal_server-2.14.10.dist-info/METADATA,sha256=PYdArZinbL0-GpXR6qoaSlOrIz974JdmJN8cNXieP0Y,4244
- fractal_server-2.14.10.dist-info/WHEEL,sha256=7dDg4QLnNKTvwIDR9Ac8jJaAmBC_owJrckbC0jjThyA,88
- fractal_server-2.14.10.dist-info/entry_points.txt,sha256=8tV2kynvFkjnhbtDnxAqImL6HMVKsopgGfew0DOp5UY,58
- fractal_server-2.14.10.dist-info/RECORD,,
+ fractal_server-2.14.12.dist-info/LICENSE,sha256=QKAharUuhxL58kSoLizKJeZE3mTCBnX6ucmz8W0lxlk,1576
+ fractal_server-2.14.12.dist-info/METADATA,sha256=c2Sxzo4hw2jBpCe9rBhWVz4x0UCztabOGI9YBZqu7vU,4244
+ fractal_server-2.14.12.dist-info/WHEEL,sha256=7dDg4QLnNKTvwIDR9Ac8jJaAmBC_owJrckbC0jjThyA,88
+ fractal_server-2.14.12.dist-info/entry_points.txt,sha256=8tV2kynvFkjnhbtDnxAqImL6HMVKsopgGfew0DOp5UY,58
+ fractal_server-2.14.12.dist-info/RECORD,,
fractal_server/images/image_status.py (removed)
@@ -1,85 +0,0 @@
- import time
- from copy import deepcopy
- from typing import Any
-
- from fastapi import Depends
- from sqlmodel import select
-
- from fractal_server.app.db import AsyncSession
- from fractal_server.app.db import get_async_db
- from fractal_server.app.models.v2 import HistoryImageCache
- from fractal_server.app.models.v2 import HistoryUnit
- from fractal_server.app.schemas.v2 import HistoryUnitStatusWithUnset
- from fractal_server.logger import set_logger
-
- IMAGE_STATUS_KEY = "__wftask_dataset_image_status__"
-
- logger = set_logger(__name__)
-
-
- def _enriched_image(*, img: dict[str, Any], status: str) -> dict[str, Any]:
-     img["attributes"][IMAGE_STATUS_KEY] = status
-     return img
-
-
- async def enrich_image_list(
-     *,
-     images: list[dict[str, Any]],
-     dataset_id: int,
-     workflowtask_id: int,
-     db: AsyncSession = Depends(get_async_db),
- ) -> list[dict[str, Any]]:
-     start_time = time.perf_counter()
-     logger.info(
-         f"START {enrich_image_list.__name__} for {dataset_id=}, "
-         f"{workflowtask_id=}"
-     )
-
-     zarr_url_to_image = {img["zarr_url"]: deepcopy(img) for img in images}
-
-     stm = (
-         select(HistoryImageCache.zarr_url, HistoryUnit.status)
-         .join(HistoryUnit)
-         .where(HistoryImageCache.dataset_id == dataset_id)
-         .where(HistoryImageCache.workflowtask_id == workflowtask_id)
-         .where(HistoryImageCache.latest_history_unit_id == HistoryUnit.id)
-         .where(HistoryImageCache.zarr_url.in_(zarr_url_to_image.keys()))
-         .order_by(HistoryImageCache.zarr_url)
-     )
-     res = await db.execute(stm)
-     list_processed_url_status = res.all()
-     logger.debug(
-         f"POST db query, "
-         f"elapsed={time.perf_counter() - start_time:.3f} "
-         "seconds"
-     )
-
-     set_processed_urls = set(item[0] for item in list_processed_url_status)
-     processed_images_with_status = [
-         _enriched_image(
-             img=zarr_url_to_image[item[0]],
-             status=item[1],
-         )
-         for item in list_processed_url_status
-     ]
-     logger.debug(
-         f"POST processed_images_with_status, "
-         f"elapsed={time.perf_counter() - start_time:.3f} "
-         "seconds"
-     )
-
-     non_processed_urls = zarr_url_to_image.keys() - set_processed_urls
-     non_processed_images_with_status = [
-         _enriched_image(
-             img=zarr_url_to_image[zarr_url],
-             status=HistoryUnitStatusWithUnset.UNSET,
-         )
-         for zarr_url in non_processed_urls
-     ]
-     logger.debug(
-         f"POST non_processed_images_with_status, "
-         f"elapsed={time.perf_counter() - start_time:.3f} "
-         "seconds"
-     )
-
-     return processed_images_with_status + non_processed_images_with_status