senoquant 1.0.0b2__py3-none-any.whl → 1.0.0b4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- senoquant/__init__.py +6 -2
- senoquant/_reader.py +1 -1
- senoquant/_widget.py +9 -1
- senoquant/reader/core.py +201 -18
- senoquant/tabs/__init__.py +2 -0
- senoquant/tabs/batch/backend.py +76 -27
- senoquant/tabs/batch/frontend.py +127 -25
- senoquant/tabs/quantification/features/marker/dialog.py +26 -6
- senoquant/tabs/quantification/features/marker/export.py +97 -24
- senoquant/tabs/quantification/features/marker/rows.py +2 -2
- senoquant/tabs/quantification/features/spots/dialog.py +41 -11
- senoquant/tabs/quantification/features/spots/export.py +163 -10
- senoquant/tabs/quantification/frontend.py +2 -2
- senoquant/tabs/segmentation/frontend.py +46 -9
- senoquant/tabs/segmentation/models/cpsam/model.py +1 -1
- senoquant/tabs/segmentation/models/default_2d/model.py +22 -77
- senoquant/tabs/segmentation/models/default_3d/model.py +8 -74
- senoquant/tabs/segmentation/stardist_onnx_utils/_csbdeep/tools/create_zip_contents.py +0 -0
- senoquant/tabs/segmentation/stardist_onnx_utils/onnx_framework/inspect/probe.py +13 -13
- senoquant/tabs/segmentation/stardist_onnx_utils/onnx_framework/stardist_libs.py +171 -0
- senoquant/tabs/spots/frontend.py +96 -5
- senoquant/tabs/spots/models/rmp/details.json +3 -9
- senoquant/tabs/spots/models/rmp/model.py +341 -266
- senoquant/tabs/spots/models/ufish/details.json +32 -0
- senoquant/tabs/spots/models/ufish/model.py +327 -0
- senoquant/tabs/spots/ufish_utils/__init__.py +13 -0
- senoquant/tabs/spots/ufish_utils/core.py +387 -0
- senoquant/tabs/visualization/__init__.py +1 -0
- senoquant/tabs/visualization/backend.py +306 -0
- senoquant/tabs/visualization/frontend.py +1113 -0
- senoquant/tabs/visualization/plots/__init__.py +80 -0
- senoquant/tabs/visualization/plots/base.py +152 -0
- senoquant/tabs/visualization/plots/double_expression.py +187 -0
- senoquant/tabs/visualization/plots/spatialplot.py +156 -0
- senoquant/tabs/visualization/plots/umap.py +140 -0
- senoquant/utils.py +1 -1
- senoquant-1.0.0b4.dist-info/METADATA +162 -0
- {senoquant-1.0.0b2.dist-info → senoquant-1.0.0b4.dist-info}/RECORD +53 -30
- {senoquant-1.0.0b2.dist-info → senoquant-1.0.0b4.dist-info}/top_level.txt +1 -0
- ufish/__init__.py +1 -0
- ufish/api.py +778 -0
- ufish/model/__init__.py +0 -0
- ufish/model/loss.py +62 -0
- ufish/model/network/__init__.py +0 -0
- ufish/model/network/spot_learn.py +50 -0
- ufish/model/network/ufish_net.py +204 -0
- ufish/model/train.py +175 -0
- ufish/utils/__init__.py +0 -0
- ufish/utils/img.py +418 -0
- ufish/utils/log.py +8 -0
- ufish/utils/spot_calling.py +115 -0
- senoquant/tabs/spots/models/udwt/details.json +0 -103
- senoquant/tabs/spots/models/udwt/model.py +0 -482
- senoquant-1.0.0b2.dist-info/METADATA +0 -193
- {senoquant-1.0.0b2.dist-info → senoquant-1.0.0b4.dist-info}/WHEEL +0 -0
- {senoquant-1.0.0b2.dist-info → senoquant-1.0.0b4.dist-info}/entry_points.txt +0 -0
- {senoquant-1.0.0b2.dist-info → senoquant-1.0.0b4.dist-info}/licenses/LICENSE +0 -0
|
@@ -0,0 +1,387 @@
|
|
|
1
|
+
"""UFish-based spot enhancement utilities.
|
|
2
|
+
|
|
3
|
+
This module wraps UFish inference for SenoQuant spot detection workflows.
|
|
4
|
+
It handles:
|
|
5
|
+
|
|
6
|
+
- optional import of UFish from site-packages or vendored sources,
|
|
7
|
+
- ONNX Runtime execution-provider selection,
|
|
8
|
+
- model/weights caching between calls, and
|
|
9
|
+
- default ONNX weight retrieval from the SenoQuant Hugging Face model repo.
|
|
10
|
+
|
|
11
|
+
Notes
|
|
12
|
+
-----
|
|
13
|
+
Weight loading priority is:
|
|
14
|
+
|
|
15
|
+
1. explicit ``UFishConfig.weights_path``,
|
|
16
|
+
2. legacy ``UFishConfig.load_from_internet``, then
|
|
17
|
+
3. default ``ufish.onnx`` resolved via :func:`ensure_hf_model`.
|
|
18
|
+
"""
|
|
19
|
+
|
|
20
|
+
from __future__ import annotations
|
|
21
|
+
|
|
22
|
+
from dataclasses import dataclass
|
|
23
|
+
import logging
|
|
24
|
+
from pathlib import Path
|
|
25
|
+
import sys
|
|
26
|
+
from types import MethodType
|
|
27
|
+
from typing import TYPE_CHECKING, Any, cast
|
|
28
|
+
|
|
29
|
+
import numpy as np
|
|
30
|
+
|
|
31
|
+
from senoquant.tabs.segmentation.models.hf import (
|
|
32
|
+
DEFAULT_REPO_ID,
|
|
33
|
+
ensure_hf_model,
|
|
34
|
+
)
|
|
35
|
+
|
|
36
|
+
try:  # pragma: no cover - optional dependency
    from ufish.api import UFish
except ImportError:  # pragma: no cover - optional dependency
    # UFish is not installed in site-packages; fall back to a vendored copy
    # shipped under <repo>/_vendor/ufish, if present.
    # NOTE(review): parents[5] assumes this file sits exactly five levels
    # below the repository root — TODO confirm against the package layout.
    _repo_root = Path(__file__).resolve().parents[5]
    _vendored_root = _repo_root / "_vendor" / "ufish"
    if _vendored_root.exists():
        vendored_root = str(_vendored_root)
        # Prepend so the vendored copy wins over any partial installs.
        if vendored_root not in sys.path:
            sys.path.insert(0, vendored_root)
        try:
            from ufish.api import UFish
        except ImportError:
            # Vendored copy present but unimportable; callers see None and
            # _ensure_ufish_available() raises a helpful error later.
            UFish = None
    else:
        UFish = None

try:  # pragma: no cover - optional dependency
    import onnxruntime as ort
except ImportError:  # pragma: no cover - optional dependency
    # ONNX Runtime is optional; provider selection degrades gracefully.
    ort = None

if TYPE_CHECKING:  # pragma: no cover - typing only
    from ufish.api import UFish as UFishType
|
|
59
|
+
|
|
60
|
+
|
|
61
|
+
@dataclass(slots=True)
class UFishConfig:
    """Configuration for UFish enhancement.

    Parameters
    ----------
    weights_path : str or None, optional
        Explicit local path to ONNX/PyTorch weights. When provided, this path
        is used directly and takes precedence over all other loading modes.
    load_from_internet : bool, optional
        Legacy compatibility mode that calls
        ``UFish.load_weights_from_internet()`` directly.
    device : {"cuda", "dml", "mps"} or None, optional
        Preferred accelerator mode used to influence ONNX Runtime provider
        ordering when constructing UFish sessions.
    """

    # Highest-priority weight source; see module docstring for the order.
    weights_path: str | None = None
    # Legacy escape hatch; ignored when ``weights_path`` is set.
    load_from_internet: bool = False
    # Accelerator hint consumed by provider selection; None lets UFish decide.
    device: str | None = None
|
|
81
|
+
|
|
82
|
+
|
|
83
|
+
class _UFishState:
|
|
84
|
+
"""In-process cache for the UFish model and loaded weights."""
|
|
85
|
+
|
|
86
|
+
def __init__(self) -> None:
|
|
87
|
+
"""Initialize empty cached state."""
|
|
88
|
+
self.model: UFishType | None = None
|
|
89
|
+
self.weights_loaded = False
|
|
90
|
+
self.device: str | None = None
|
|
91
|
+
self.weights_path: str | None = None
|
|
92
|
+
|
|
93
|
+
|
|
94
|
+
# Shared cache so repeated enhance_image() calls reuse the model and weights.
_UFISH_STATE = _UFishState()
# Default ONNX weight filename in the SenoQuant Hugging Face model repo.
_UFISH_HF_FILENAME = "ufish.onnx"
_LOGGER = logging.getLogger(__name__)
|
|
97
|
+
|
|
98
|
+
|
|
99
|
+
def _ensure_ufish_available() -> None:
    """Raise a helpful error when UFish cannot be imported.

    Raises
    ------
    ImportError
        If the ``ufish`` package is unavailable from both normal and vendored
        import locations.
    """
    # Guard clause: import succeeded earlier, nothing to do.
    if UFish is not None:  # pragma: no cover - import guard
        return
    raise ImportError("ufish is required for spot enhancement.")
|
|
111
|
+
|
|
112
|
+
|
|
113
|
+
def _resolve_default_weights_path() -> Path:
    """Resolve the default UFish ONNX path.

    Returns
    -------
    pathlib.Path
        Local path to ``ufish.onnx``. The file is downloaded to the
        ``ufish_utils`` directory if it does not already exist.

    Raises
    ------
    RuntimeError
        If the Hugging Face download helper is unavailable or download fails.
    """
    # Cache the weights next to this module so repeat runs skip the download.
    cache_dir = Path(__file__).resolve().parent
    return ensure_hf_model(
        _UFISH_HF_FILENAME,
        cache_dir,
        repo_id=DEFAULT_REPO_ID,
    )
|
|
133
|
+
|
|
134
|
+
|
|
135
|
+
def _preferred_providers() -> list[str]:
    """Return ONNX Runtime providers ordered by GPU preference.

    Returns
    -------
    list[str]
        Providers available in the current runtime, ordered from most
        preferred accelerator to CPU fallback.
    """
    if ort is None:
        return []
    available = set(ort.get_available_providers())
    # Walk the preference order and keep only what this runtime offers.
    ordered: list[str] = []
    for candidate in (
        "CUDAExecutionProvider",
        "ROCMExecutionProvider",
        "DmlExecutionProvider",
        "DirectMLExecutionProvider",
        "CoreMLExecutionProvider",
        "CPUExecutionProvider",
    ):
        if candidate in available:
            ordered.append(candidate)
    if ordered:
        return ordered
    # Only unknown providers were reported: hand back whatever exists.
    return list(available)
|
|
157
|
+
|
|
158
|
+
|
|
159
|
+
def _select_onnx_providers(device: str | None) -> list[str]:
    """Choose execution providers for a requested device hint.

    Parameters
    ----------
    device : str or None
        Device hint from :class:`UFishConfig`.

    Returns
    -------
    list[str]
        Provider names to pass to ``onnxruntime.InferenceSession``.
    """
    preferred = _preferred_providers()
    # Map each device hint onto the provider families it may use; CPU is
    # always kept as a fallback.
    allowed_by_device = {
        "cuda": {"CUDAExecutionProvider", "CPUExecutionProvider"},
        "dml": {
            "DmlExecutionProvider",
            "DirectMLExecutionProvider",
            "CPUExecutionProvider",
        },
        "mps": {"CoreMLExecutionProvider", "CPUExecutionProvider"},
    }
    allowed = allowed_by_device.get(device) if device else None
    if allowed is None:
        # No hint, or an unrecognized hint: keep the full preference order.
        return preferred
    return [provider for provider in preferred if provider in allowed]
|
|
199
|
+
|
|
200
|
+
|
|
201
|
+
def _patch_onnx_loader(model: UFishType) -> None:
    """Monkey-patch UFish ONNX loader to use SenoQuant provider selection.

    Parameters
    ----------
    model : UFishType
        UFish instance whose private ``_load_onnx`` method will be replaced.
    """
    if ort is None:
        # Without onnxruntime there is no session to configure; leave the
        # instance untouched.
        return
    ort_any = cast("Any", ort)

    def _load_onnx(
        self: UFishType,
        onnx_path: str,
        providers: list[str] | None = None,
    ) -> None:
        # Fall back to SenoQuant's device-aware provider ordering when the
        # caller did not pin providers explicitly.
        providers = providers or _select_onnx_providers(
            getattr(self, "_device", None),
        )
        self.ort_session = ort_any.InferenceSession(
            str(onnx_path),
            providers=providers,
        )
        # NOTE(review): presumably clears any torch model so UFish dispatches
        # inference through the ONNX session — mirrors upstream _load_onnx.
        self.model = None

    # Bind the replacement as an instance method so ``self`` is supplied.
    model._load_onnx = MethodType(_load_onnx, model)  # noqa: SLF001
|
|
228
|
+
|
|
229
|
+
|
|
230
|
+
def _get_ufish(config: UFishConfig) -> UFishType:
    """Return a cached UFish instance for the requested configuration.

    Parameters
    ----------
    config : UFishConfig
        Runtime configuration used to determine whether the cached model can be
        reused or must be re-instantiated.

    Returns
    -------
    UFishType
        Ready-to-use UFish instance with patched ONNX loading behavior.
    """
    _ensure_ufish_available()
    # Rebuild only when there is no cached model or the device hint changed;
    # otherwise reuse the existing instance (and its already-loaded weights).
    if _UFISH_STATE.model is None or _UFISH_STATE.device != config.device:
        ufish_cls = cast("type[UFishType]", UFish)
        ufish_any = cast("Any", ufish_cls)
        if config.device:
            _UFISH_STATE.model = ufish_any(device=config.device)
        else:
            # No hint: let UFish pick its own default device.
            _UFISH_STATE.model = ufish_any()
        _patch_onnx_loader(cast("UFishType", _UFISH_STATE.model))
        # A fresh instance has no weights; force _ensure_weights to reload.
        _UFISH_STATE.weights_loaded = False
        _UFISH_STATE.device = config.device
        _UFISH_STATE.weights_path = None
    return cast("UFishType", _UFISH_STATE.model)
|
|
257
|
+
|
|
258
|
+
|
|
259
|
+
def _ensure_weights(model: UFishType, config: UFishConfig) -> None:
    """Ensure model weights are loaded according to configuration.

    Priority: explicit ``weights_path`` > legacy ``load_from_internet`` >
    default ONNX from Hugging Face > UFish's own ``load_weights()`` fallback.
    Each branch skips work when the cache already holds the same source.

    Parameters
    ----------
    model : UFishType
        Active UFish model instance.
    config : UFishConfig
        Weight source and device settings.

    Raises
    ------
    RuntimeError
        If neither Hugging Face/default loading nor fallback loading succeeds.
    """
    # 1. Explicit local path takes precedence over everything else.
    if config.weights_path:
        weights_path = Path(config.weights_path).expanduser().resolve()
        resolved_path = str(weights_path)
        if _UFISH_STATE.weights_loaded and _UFISH_STATE.weights_path == resolved_path:
            return
        model.load_weights(resolved_path)
        _UFISH_STATE.weights_loaded = True
        _UFISH_STATE.weights_path = resolved_path
        return

    # 2. Legacy mode: delegate entirely to UFish's own downloader.
    if config.load_from_internet:
        if _UFISH_STATE.weights_loaded and _UFISH_STATE.weights_path == "internet":
            return
        model.load_weights_from_internet()
        _UFISH_STATE.weights_loaded = True
        _UFISH_STATE.weights_path = "internet"
        return

    # 3. Default: resolve ufish.onnx via the Hugging Face helper.
    try:
        weights_path = _resolve_default_weights_path()
    except RuntimeError:
        # Keep legacy behavior when HF download dependencies are unavailable.
        if _UFISH_STATE.weights_loaded and _UFISH_STATE.weights_path == "default":
            return
        model.load_weights()
        _UFISH_STATE.weights_loaded = True
        _UFISH_STATE.weights_path = "default"
        return
    except Exception as exc:
        # Unexpected resolution failure: try UFish's built-in default once,
        # and only raise when that also fails.
        try:
            if _UFISH_STATE.weights_loaded and _UFISH_STATE.weights_path == "default":
                return
            model.load_weights()
            _UFISH_STATE.weights_loaded = True
            _UFISH_STATE.weights_path = "default"
            return
        except Exception:
            msg = (
                "Could not load UFish weights from local default or Hugging Face. "
                "Provide UFishConfig(weights_path=...) or ensure model download access."
            )
            # Chain to the original resolution error for diagnosis.
            raise RuntimeError(msg) from exc

    # Resolution succeeded: load the downloaded/cached ONNX file.
    resolved_path = str(weights_path)
    if _UFISH_STATE.weights_loaded and _UFISH_STATE.weights_path == resolved_path:
        return
    model.load_weights(resolved_path)
    _UFISH_STATE.weights_loaded = True
    _UFISH_STATE.weights_path = resolved_path
|
|
323
|
+
|
|
324
|
+
|
|
325
|
+
def enhance_image(
    image: np.ndarray,
    *,
    config: UFishConfig | None = None,
) -> np.ndarray:
    """Enhance an image using UFish.

    Parameters
    ----------
    image : numpy.ndarray
        Input image array. UFish supports 2D images and common 3D stack
        layouts handled by UFish internally.
    config : UFishConfig or None, optional
        Optional runtime configuration. If omitted, default behavior is used.

    Returns
    -------
    numpy.ndarray
        Enhanced image produced by UFish with the same dimensionality as the
        input image.

    Raises
    ------
    ImportError
        If UFish cannot be imported.
    RuntimeError
        If weights cannot be loaded from configured/default sources.
    """
    if config is None:
        config = UFishConfig()
    model = _get_ufish(config)
    _ensure_weights(model, config)
    image = np.asarray(image)
    model_any = cast("Any", model)
    # Prefer chunked prediction when this UFish build exposes it; fall back
    # to plain predict() otherwise.
    predict_chunks = getattr(model_any, "predict_chunks", None)

    def _run_inference() -> tuple[Any, Any]:
        # Both entry points return a (spots, enhanced) pair, unpacked below.
        if callable(predict_chunks):
            return predict_chunks(image)
        return model_any.predict(image)

    try:
        _pred_spots, enhanced = _run_inference()
    except Exception as exc:
        # If ONNX inference fails on GPU, retry once on CPU provider.
        weight_path = getattr(model_any, "weight_path", None)
        # Retry only makes sense for ONNX weights with a reloadable session
        # and an available CPU provider.
        can_retry_on_cpu = (
            ort is not None
            and isinstance(weight_path, str)
            and weight_path.endswith(".onnx")
            and hasattr(model_any, "_load_onnx")
            and "CPUExecutionProvider" in set(ort.get_available_providers())
        )
        if not can_retry_on_cpu:
            raise
        _LOGGER.warning(
            "UFish inference failed; retrying with ONNX CPUExecutionProvider. "
            "Original error: %s",
            exc,
        )
        # Rebuild the session pinned to CPU, then retry once.
        model_any._load_onnx(weight_path, providers=["CPUExecutionProvider"])
        _pred_spots, enhanced = _run_inference()
    return np.asarray(enhanced)
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
"""Visualization tab modules."""
|
|
@@ -0,0 +1,306 @@
|
|
|
1
|
+
"""Backend logic for the Visualization tab."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations

import logging
import shutil
import tempfile
from dataclasses import dataclass, field
from pathlib import Path
from typing import Iterable

from .plots import PlotConfig
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
@dataclass
class PlotExportResult:
    """Output metadata for a single plot export.

    Attributes
    ----------
    plot_id : str
        Stable identifier for the exported plot instance.
    plot_type : str
        Plot type name used for routing (e.g., ``"UMAP"``).
    temp_dir : Path
        Temporary directory where the plot wrote its outputs.
    outputs : list of Path
        Explicit file paths returned by the plot processor.
    """

    plot_id: str
    plot_type: str
    temp_dir: Path
    # Mutated in place by the backend: after routing this holds the final
    # destination paths instead of the temp-dir paths.
    outputs: list[Path] = field(default_factory=list)
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
@dataclass
class VisualizationResult:
    """Aggregated output information for a visualization run.

    Attributes
    ----------
    input_root : Path
        Root input directory.
    output_root : Path
        Root output directory for the run.
    temp_root : Path
        Temporary root directory used during processing.
    plot_outputs : list of PlotExportResult
        Per-plot export metadata for the run.
    """
    input_root: Path
    # Reassigned by VisualizationBackend.save_result when re-routing outputs.
    output_root: Path
    temp_root: Path
    plot_outputs: list[PlotExportResult]
|
|
55
|
+
|
|
56
|
+
|
|
57
|
+
class VisualizationBackend:
    """Backend orchestrator for visualization exports.

    Notes
    -----
    Feature export routines live with their feature implementations. The
    backend iterates through configured feature contexts, asks each feature
    handler to export into a temporary directory, and then routes those
    outputs into a final output structure.
    """

    # Logger replaces the previous ad-hoc debug ``print`` calls so output can
    # be silenced or redirected by the host application (matches the
    # ``logging`` convention used elsewhere in the package).
    _logger = logging.getLogger(__name__)

    def __init__(self) -> None:
        """Initialize the backend state.

        Attributes
        ----------
        metrics : list
            Placeholder container for computed metrics.
        """
        self.metrics: list[object] = []

    def process(
        self,
        plots: Iterable[object],
        input_path: Path,
        output_path: str,
        output_name: str,
        export_format: str,
        markers: list[str] | None = None,
        thresholds: dict[str, float] | None = None,
        save: bool = True,
        cleanup: bool = True,
    ) -> VisualizationResult:
        """Run plot exports and route their outputs.

        Parameters
        ----------
        plots : iterable of object
            Plot UI contexts with ``state`` and ``plot_handler``.
            Each handler should implement
            ``plot(temp_dir, input_path, export_format)``.
        input_path : Path
            Path to the input folder containing quantification files.
        output_path : str
            Base output folder path.
        output_name : str
            Folder name used to group exported outputs.
        export_format : str
            File format requested by the user (``"png"`` or ``"svg"``).
        markers : list of str, optional
            List of selected markers to include.
        thresholds : dict, optional
            Dictionary of {marker_name: threshold_value} for filtering.
        save : bool, optional
            Whether to save (route) the outputs to the final destination
            immediately.
        cleanup : bool, optional
            Whether to delete temporary export folders after routing.

        Returns
        -------
        VisualizationResult
            Output metadata for the completed run.

        Notes
        -----
        If a plot export does not return explicit output paths, the backend
        will move all files found in the plot's temp directory. This allows
        plot implementations to either return specific files or simply write
        into the provided temporary directory.
        """
        input_root = input_path.parent
        # Treat `output_path` as the folder and `output_name` as an optional
        # filename base. Resolve output_root without using output_name so
        # output_name can be applied as a file name rather than a subfolder.
        output_root = self._resolve_output_root(output_path, "")
        output_root.mkdir(parents=True, exist_ok=True)
        temp_root = Path(tempfile.mkdtemp(prefix="senoquant-plot-"))

        plot_outputs: list[PlotExportResult] = []
        for context in plots:
            plot = getattr(context, "state", None)
            handler = getattr(context, "plot_handler", None)
            if not isinstance(plot, PlotConfig):
                continue
            self._logger.debug(
                "Processing plot %s with handler %r (has plot method: %s)",
                plot.type_name,
                handler,
                hasattr(handler, "plot") if handler else False,
            )
            temp_dir = temp_root / plot.plot_id
            temp_dir.mkdir(parents=True, exist_ok=True)
            outputs: list[Path] = []
            if handler is not None and hasattr(handler, "plot"):
                self._logger.debug(
                    "Calling handler.plot() with input_path=%s, format=%s",
                    input_path,
                    export_format,
                )
                outputs = [
                    Path(path)
                    for path in handler.plot(
                        temp_dir,
                        input_path,
                        export_format,
                        markers=markers,
                        thresholds=thresholds,
                    )
                ]
                self._logger.debug(
                    "Handler returned %d outputs: %s", len(outputs), outputs
                )
            else:
                self._logger.debug(
                    "Skipping: handler is None or has no plot method"
                )
            plot_outputs.append(
                PlotExportResult(
                    plot_id=plot.plot_id,
                    plot_type=plot.type_name,
                    temp_dir=temp_dir,
                    outputs=outputs,
                )
            )

        if save:
            self._logger.debug("Routing %d plot outputs", len(plot_outputs))
            self._route_plot_outputs(output_root, plot_outputs, output_name)
            # Only discard temp files after a successful save; unsaved runs
            # keep them so ``save_result`` can route them later.
            if cleanup:
                shutil.rmtree(temp_root, ignore_errors=True)
        return VisualizationResult(
            input_root=input_root,
            output_root=output_root,
            temp_root=temp_root,
            plot_outputs=plot_outputs,
        )

    def save_result(
        self,
        result: VisualizationResult,
        output_path: str,
        output_name: str,
    ) -> None:
        """Save an existing visualization result to the specified output.

        This moves/copies files from the result's temporary directory (or
        previous location) to the new output path.

        Parameters
        ----------
        result : VisualizationResult
            Result whose outputs should be routed; its ``output_root`` is
            updated in place.
        output_path : str
            Base output folder path.
        output_name : str
            Optional filename base for the routed files.
        """
        output_root = self._resolve_output_root(output_path, "")
        output_root.mkdir(parents=True, exist_ok=True)
        result.output_root = output_root
        self._route_plot_outputs(output_root, result.plot_outputs, output_name)

    def _resolve_output_root(self, output_path: str, output_name: str) -> Path:
        """Resolve the final output root directory.

        Parameters
        ----------
        output_path : str
            Base output folder path.
        output_name : str
            Folder name used to group exported outputs.

        Returns
        -------
        Path
            Resolved output directory path.
        """
        if output_path and output_path.strip():
            base = Path(output_path)
        else:
            # Default to repository root (current working directory)
            base = Path.cwd()
        if output_name and output_name.strip():
            return base / output_name
        return base

    def _route_plot_outputs(
        self,
        output_root: Path,
        plot_outputs: Iterable[PlotExportResult],
        output_name: str = "",
    ) -> None:
        """Move plot outputs from temp folders to the final location.

        Parameters
        ----------
        output_root : Path
            Destination root folder.
        plot_outputs : iterable of PlotExportResult
            Export results to route.
        output_name : str, optional
            When non-blank, used as the base filename for routed files.

        Notes
        -----
        When a plot returns no explicit output list, all files present
        in the temporary directory are routed instead. Subdirectories are
        not traversed. Each ``PlotExportResult.outputs`` is rewritten to
        the final routed paths.
        """
        for plot_output in plot_outputs:
            self._logger.debug(
                "Routing %s to %s", plot_output.plot_type, output_root
            )
            final_paths: list[Path] = []
            outputs = plot_output.outputs
            # Choose source list: explicit outputs if provided, otherwise
            # files from the temp directory.
            if outputs:
                source_files = [p for p in outputs if Path(p).exists()]
            else:
                source_files = [
                    p for p in plot_output.temp_dir.glob("*") if p.is_file()
                ]

            if not source_files:
                self._logger.debug(
                    "No files to route for %s", plot_output.plot_type
                )
                plot_output.outputs = []
                continue

            # If the caller provided output_name, use it as the base filename.
            for idx, src in enumerate(source_files):
                src = Path(src)
                ext = src.suffix
                if output_name and output_name.strip():
                    # If multiple files, append an index to avoid collisions.
                    if len(source_files) == 1:
                        dest_name = f"{output_name}{ext}"
                    else:
                        dest_name = f"{output_name}_{idx + 1}{ext}"
                else:
                    # Fallback: prefix with plot type for clarity
                    safe_type = plot_output.plot_type.replace(' ', '_')
                    dest_name = f"{safe_type}_{src.name}"
                dest = output_root / dest_name
                self._logger.debug("Copying %s -> %s", src, dest)
                try:
                    shutil.copy2(str(src), dest)
                except shutil.SameFileError:
                    # Re-routing to the same location is a no-op, not an error.
                    self._logger.debug(
                        "Skipping copy: source and destination are the same (%s)",
                        dest,
                    )
                final_paths.append(dest)

            # Update plot_output.outputs to point at final routed files
            plot_output.outputs = final_paths

    def _plot_dir_name(self, plot_output: PlotExportResult) -> str:
        """Build a filesystem-friendly folder name for a plot.

        Parameters
        ----------
        plot_output : PlotExportResult
            Export result metadata.

        Returns
        -------
        str
            Directory name for the plot outputs.

        Notes
        -----
        Non-alphanumeric characters are replaced to avoid filesystem issues.
        """
        name = plot_output.plot_type.strip()
        safe = "".join(
            char if char.isalnum() or char in "-_ " else "_" for char in name
        )
        return safe.replace(" ", "_").lower()
|