swcgeom-0.19.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

Files changed (72)
  1. swcgeom/__init__.py +21 -0
  2. swcgeom/analysis/__init__.py +13 -0
  3. swcgeom/analysis/feature_extractor.py +454 -0
  4. swcgeom/analysis/features.py +218 -0
  5. swcgeom/analysis/lmeasure.py +750 -0
  6. swcgeom/analysis/sholl.py +201 -0
  7. swcgeom/analysis/trunk.py +183 -0
  8. swcgeom/analysis/visualization.py +191 -0
  9. swcgeom/analysis/visualization3d.py +81 -0
  10. swcgeom/analysis/volume.py +143 -0
  11. swcgeom/core/__init__.py +19 -0
  12. swcgeom/core/branch.py +129 -0
  13. swcgeom/core/branch_tree.py +65 -0
  14. swcgeom/core/compartment.py +107 -0
  15. swcgeom/core/node.py +130 -0
  16. swcgeom/core/path.py +155 -0
  17. swcgeom/core/population.py +341 -0
  18. swcgeom/core/swc.py +247 -0
  19. swcgeom/core/swc_utils/__init__.py +19 -0
  20. swcgeom/core/swc_utils/assembler.py +35 -0
  21. swcgeom/core/swc_utils/base.py +180 -0
  22. swcgeom/core/swc_utils/checker.py +107 -0
  23. swcgeom/core/swc_utils/io.py +204 -0
  24. swcgeom/core/swc_utils/normalizer.py +163 -0
  25. swcgeom/core/swc_utils/subtree.py +70 -0
  26. swcgeom/core/tree.py +384 -0
  27. swcgeom/core/tree_utils.py +277 -0
  28. swcgeom/core/tree_utils_impl.py +58 -0
  29. swcgeom/images/__init__.py +9 -0
  30. swcgeom/images/augmentation.py +149 -0
  31. swcgeom/images/contrast.py +87 -0
  32. swcgeom/images/folder.py +217 -0
  33. swcgeom/images/io.py +578 -0
  34. swcgeom/images/loaders/__init__.py +8 -0
  35. swcgeom/images/loaders/pbd.cpython-311-x86_64-linux-gnu.so +0 -0
  36. swcgeom/images/loaders/pbd.pyx +523 -0
  37. swcgeom/images/loaders/raw.cpython-311-x86_64-linux-gnu.so +0 -0
  38. swcgeom/images/loaders/raw.pyx +183 -0
  39. swcgeom/transforms/__init__.py +20 -0
  40. swcgeom/transforms/base.py +136 -0
  41. swcgeom/transforms/branch.py +223 -0
  42. swcgeom/transforms/branch_tree.py +74 -0
  43. swcgeom/transforms/geometry.py +270 -0
  44. swcgeom/transforms/image_preprocess.py +107 -0
  45. swcgeom/transforms/image_stack.py +219 -0
  46. swcgeom/transforms/images.py +206 -0
  47. swcgeom/transforms/mst.py +183 -0
  48. swcgeom/transforms/neurolucida_asc.py +498 -0
  49. swcgeom/transforms/path.py +56 -0
  50. swcgeom/transforms/population.py +36 -0
  51. swcgeom/transforms/tree.py +265 -0
  52. swcgeom/transforms/tree_assembler.py +161 -0
  53. swcgeom/utils/__init__.py +18 -0
  54. swcgeom/utils/debug.py +23 -0
  55. swcgeom/utils/download.py +119 -0
  56. swcgeom/utils/dsu.py +58 -0
  57. swcgeom/utils/ellipse.py +131 -0
  58. swcgeom/utils/file.py +90 -0
  59. swcgeom/utils/neuromorpho.py +581 -0
  60. swcgeom/utils/numpy_helper.py +70 -0
  61. swcgeom/utils/plotter_2d.py +134 -0
  62. swcgeom/utils/plotter_3d.py +35 -0
  63. swcgeom/utils/renderer.py +145 -0
  64. swcgeom/utils/sdf.py +324 -0
  65. swcgeom/utils/solid_geometry.py +154 -0
  66. swcgeom/utils/transforms.py +367 -0
  67. swcgeom/utils/volumetric_object.py +483 -0
  68. swcgeom-0.19.4.dist-info/METADATA +86 -0
  69. swcgeom-0.19.4.dist-info/RECORD +72 -0
  70. swcgeom-0.19.4.dist-info/WHEEL +6 -0
  71. swcgeom-0.19.4.dist-info/licenses/LICENSE +201 -0
  72. swcgeom-0.19.4.dist-info/top_level.txt +1 -0
swcgeom/images/io.py ADDED
@@ -0,0 +1,578 @@
+
+ # SPDX-FileCopyrightText: 2022 - 2025 Zexin Yuan <pypi@yzx9.xyz>
+ #
+ # SPDX-License-Identifier: Apache-2.0
+
+ """Read and write image stack."""
+
+ import os
+ import re
+ import warnings
+ from abc import ABC, abstractmethod
+ from collections.abc import Callable, Iterable
+ from functools import cache, lru_cache
+ from typing import Any, Generic, Literal, TypeVar, cast, overload
+
+ import nrrd
+ import numpy as np
+ import numpy.typing as npt
+ import tifffile
+ from typing_extensions import deprecated
+
+ from swcgeom.images.loaders import PBD, Raw
+
+ __all__ = ["read_imgs", "save_tiff", "read_images"]
+
+ Vec3i = tuple[int, int, int]
+ ScalarType = TypeVar("ScalarType", bound=np.generic, covariant=True)
+
+ RE_TERAFLY_ROOT = re.compile(r"^RES\((\d+)x(\d+)x(\d+)\)$")
+ RE_TERAFLY_NAME = re.compile(r"^\d+(_\d+)?(_\d+)?")
+
+ UINT_MAX = {
+     np.dtype(np.uint8): (2**8) - 1,
+     np.dtype(np.uint16): (2**16) - 1,
+     np.dtype(np.uint32): (2**32) - 1,
+     np.dtype(np.uint64): (2**64) - 1,
+ }
+
+ AXES_ORDER = {
+     "X": 0,
+     "Y": 1,
+     "Z": 2,
+     "C": 3,
+     "I": 2,  # vaa3d compatibility
+ }
+
+
+ class ImageStack(ABC, Generic[ScalarType]):
+     """Image stack."""
+
+     # fmt: off
+     @overload
+     @abstractmethod
+     def __getitem__(self, key: int) -> npt.NDArray[ScalarType]: ...  # array of shape (Y, Z, C)
+     @overload
+     @abstractmethod
+     def __getitem__(self, key: tuple[int, int]) -> npt.NDArray[ScalarType]: ...  # array of shape (Z, C)
+     @overload
+     @abstractmethod
+     def __getitem__(self, key: tuple[int, int, int]) -> npt.NDArray[ScalarType]: ...  # array of shape (C,)
+     @overload
+     @abstractmethod
+     def __getitem__(self, key: tuple[int, int, int, int]) -> ScalarType: ...  # value
+     @overload
+     @abstractmethod
+     def __getitem__(
+         self, key: slice | tuple[slice, slice] | tuple[slice, slice, slice] | tuple[slice, slice, slice, slice],
+     ) -> npt.NDArray[ScalarType]: ...  # array of shape (X, Y, Z, C)
+     @overload
+     @abstractmethod
+     def __getitem__(self, key: npt.NDArray[np.integer[Any]]) -> npt.NDArray[ScalarType]: ...
+     # fmt: on
+     @abstractmethod
+     def __getitem__(self, key):
+         """Get pixel/patch of image stack.
+
+         Returns:
+             value: NDArray whose shape depends on key; a tuple of four ints yields a single scalar value.
+         """
+         raise NotImplementedError()
+
+     def get_full(self) -> npt.NDArray[ScalarType]:
+         """Get full image stack.
+
+         NOTE: this will load the full image stack into memory.
+         """
+         return self[:, :, :, :]
+
+     @property
+     def shape(self) -> tuple[int, int, int, int]:
+         raise NotImplementedError()
+
+
+ @overload
+ def read_imgs(fname: str, *, dtype: ScalarType, **kwargs) -> ImageStack[ScalarType]: ...
+ @overload
+ def read_imgs(fname: str, *, dtype: None = ..., **kwargs) -> ImageStack[np.float32]: ...
+ def read_imgs(fname: str, **kwargs):  # type: ignore
+     """Read image stack.
+
+     Args:
+         fname: The path of image stack.
+         dtype: Casting data to specified dtype.
+             If integer and float conversions occur, they will be scaled (assuming floats
+             are between 0 and 1). Defaults to `np.float32`.
+         **kwargs: Forwarding to the corresponding reader.
+     """
+     kwargs.setdefault("dtype", np.float32)
+     if not os.path.exists(fname):
+         raise ValueError(f"image stack does not exist: {fname}")
+
+     # match file extension
+     match os.path.splitext(fname)[-1]:
+         case ".tif" | ".tiff":
+             return TiffImageStack(fname, **kwargs)
+         case ".nrrd":
+             return NrrdImageStack(fname, **kwargs)
+         case ".v3dpbd":
+             return V3dpbdImageStack(fname, **kwargs)
+         case ".v3draw":
+             return V3drawImageStack(fname, **kwargs)
+         case ".npy":
+             return NDArrayImageStack(np.load(fname), **kwargs)
+
+     # try to read as terafly
+     if TeraflyImageStack.is_root(fname):
+         return TeraflyImageStack(fname, **kwargs)
+
+     raise ValueError("unsupported image stack")
+
+
+ def save_tiff(
+     data: npt.NDArray | ImageStack,
+     fname: str,
+     *,
+     dtype: np.unsignedinteger | np.floating | None = None,
+     compression: str | Literal[False] = "zlib",
+     **kwargs,
+ ) -> None:
+     """Save image stack as tiff.
+
+     Args:
+         data: The image stack.
+         fname: The output file path.
+         dtype: Casting data to specified dtype.
+             If integer and float conversions occur, they will be scaled (assuming
+             floats are between 0 and 1).
+         compression: Compression algorithm, forwarding to `tifffile.imwrite`.
+             If no algorithm is specified, the zlib algorithm with compression
+             level 6 is used by default.
+         **kwargs: Forwarding to `tifffile.imwrite`
+     """
+     if isinstance(data, ImageStack):
+         data = data.get_full()  # TODO: avoid loading full imgs into memory
+
+     if data.ndim == 3:
+         data = np.expand_dims(data, -1)  # (_, _, _) -> (_, _, _, C), C === 1
+
+     axes = "ZXYC"
+     assert data.ndim == 4, "should be an array of shape (X, Y, Z, C)"
+     assert data.shape[-1] in [1, 3], "support 'minisblack' or 'rgb'"
+
+     if dtype is not None:
+         if np.issubdtype(data.dtype, np.floating) and np.issubdtype(
+             dtype, np.unsignedinteger
+         ):
+             scaler_factor = UINT_MAX[np.dtype(dtype)]
+         elif np.issubdtype(data.dtype, np.unsignedinteger) and np.issubdtype(
+             dtype, np.floating
+         ):
+             scaler_factor = 1 / UINT_MAX[np.dtype(data.dtype)]
+         else:
+             scaler_factor = 1
+
+         data = (data * scaler_factor).astype(dtype)
+
+     if compression is not False:
+         kwargs.setdefault("compression", compression)
+         if compression == "zlib":
+             kwargs.setdefault("compressionargs", {"level": 6})
+
+     data = np.moveaxis(data, 2, 0)  # (_, _, Z, _) -> (Z, _, _, _)
+     kwargs.setdefault("photometric", "rgb" if data.shape[-1] == 3 else "minisblack")
+     metadata = kwargs.get("metadata", {})
+     metadata.setdefault("axes", axes)
+     kwargs.update(metadata=metadata)
+     tifffile.imwrite(fname, data, **kwargs)
+
+
+ class NDArrayImageStack(ImageStack[ScalarType]):
+     """NDArray image stack."""
+
+     def __init__(
+         self, imgs: npt.NDArray[Any], *, dtype: ScalarType | None = None
+     ) -> None:
+         super().__init__()
+
+         if imgs.ndim == 3:  # (_, _, _) -> (_, _, _, C)
+             imgs = np.expand_dims(imgs, -1)
+         assert imgs.ndim == 4, "Should be shape of (X, Y, Z, C)"
+
+         if dtype is not None:
+             dtype_raw = imgs.dtype
+             if np.issubdtype(dtype, np.floating) and np.issubdtype(
+                 dtype_raw, np.unsignedinteger
+             ):
+                 scalar_factor = 1.0 / UINT_MAX[dtype_raw]
+                 imgs = scalar_factor * imgs.astype(dtype)
+             elif np.issubdtype(dtype, np.unsignedinteger) and np.issubdtype(
+                 dtype_raw, np.floating
+             ):
+                 scalar_factor = UINT_MAX[dtype]
+                 imgs = (scalar_factor * imgs).astype(dtype)
+             else:
+                 imgs = imgs.astype(dtype)
+
+         self.imgs = imgs
+
+     def __getitem__(self, key):
+         return self.imgs.__getitem__(key)
+
+     def get_full(self) -> npt.NDArray[ScalarType]:
+         return self.imgs
+
+     @property
+     def shape(self) -> tuple[int, int, int, int]:
+         return cast(tuple[int, int, int, int], self.imgs.shape)
+
+
+ class TiffImageStack(NDArrayImageStack[ScalarType]):
+     """Tiff image stack."""
+
+     def __init__(self, fname: str, *, dtype: ScalarType, **kwargs) -> None:
+         with tifffile.TiffFile(fname, **kwargs) as f:
+             s = f.series[0]
+             imgs, axes = s.asarray(), s.axes
+
+         if len(axes) != imgs.ndim or any(c not in AXES_ORDER for c in axes):
+             axes_raw = axes
+             axes = "ZXYC" if imgs.ndim == 4 else "ZXY"
+             warnings.warn(f"reset unexpected axes `{axes_raw}` to `{axes}` in: {fname}")
+
+         orders = [AXES_ORDER[c] for c in axes]
+         imgs = imgs.transpose(np.argsort(orders))
+         super().__init__(imgs, dtype=dtype)
+
+
+ class NrrdImageStack(NDArrayImageStack[ScalarType]):
+     """Nrrd image stack."""
+
+     def __init__(self, fname: str, *, dtype: ScalarType, **kwargs) -> None:
+         imgs, header = nrrd.read(fname, **kwargs)
+         super().__init__(imgs, dtype=dtype)
+         self.header = header
+
+
+ class V3dImageStack(NDArrayImageStack[ScalarType]):
+     """v3d image stack."""
+
+     def __init_subclass__(cls, loader: Raw | PBD) -> None:
+         super().__init_subclass__()
+         cls._loader = loader
+
+     def __init__(self, fname: str, *, dtype: ScalarType, **kwargs) -> None:
+         r = self._loader()
+         imgs = r.load(fname)
+         super().__init__(imgs, dtype=dtype, **kwargs)
+
+
+ class V3drawImageStack(V3dImageStack[ScalarType], loader=Raw):
+     """v3draw image stack."""
+
+
+ class V3dpbdImageStack(V3dImageStack[ScalarType], loader=PBD):
+     """v3dpbd image stack."""
+
+
+ class TeraflyImageStack(ImageStack[ScalarType]):
+     """TeraFly image stack.
+
+     TeraFly is a file format for terabytes of multidimensional volumetric
+     images, as described in [1]_.
+
+     NOTE: TeraFly and Vaa3d use a special right-handed coordinate system
+     (with the origin at the top-left and the z-axis pointing front), but we
+     flip the y-axis to make it a left-handed coordinate system (with the
+     origin at the bottom-left and the z-axis pointing front). If you need
+     to use its coordinate system, remember to FLIP THE Y-AXIS BACK.
+
+     References:
+         .. [1] Bria, Alessandro, Giulio Iannello, Leonardo Onofri, and Hanchuan Peng.
+            “TeraFly: Real-Time Three-Dimensional Visualization and Annotation of Terabytes
+            of Multidimensional Volumetric Images.” Nature Methods 13, no. 3 (March 2016):
+            192-94. https://doi.org/10.1038/nmeth.3767.
+     """
+
+     _listdir: Callable[[str], list[str]]
+     _read_patch: Callable[[str], npt.NDArray]
+
+     def __init__(
+         self, root: str, *, dtype: ScalarType, lru_maxsize: int | None = 128
+     ) -> None:
+         r"""
+         Args:
+             root: The root of terafly which contains directories named as `RES(YxXxZ)`.
+             dtype: np.dtype
+             lru_maxsize: Forwarding to `functools.lru_cache`.
+                 A decompressed array of size (256, 256, 256, 1), which is the typical
+                 size of a terafly patch, takes about 256 * 256 * 256 * 1 * 4B = 64MB.
+                 A cache size of 128 requires about 8GB memory.
+         """
+
+         super().__init__()
+         self.root = root
+         self.dtype = dtype
+         self.res, self.res_dirs, self.res_patch_sizes = self.get_resolutions(root)
+
+         @cache
+         def listdir(path: str) -> list[str]:
+             return os.listdir(path)
+
+         @lru_cache(maxsize=lru_maxsize)
+         def read_patch(path: str) -> npt.NDArray[ScalarType]:
+             match os.path.splitext(path)[-1]:
+                 case ".raw":
+                     # Treat it as a v3draw file
+                     return V3drawImageStack(path, dtype=dtype).get_full()
+                 case _:
+                     return read_imgs(path, dtype=dtype).get_full()
+
+         self._listdir, self._read_patch = listdir, read_patch
+
+     def __getitem__(self, key):
+         """Get images in max resolution.
+
+         >>> imgs[0, 0, 0, 0]  # get value  # doctest: +SKIP
+         >>> imgs[0:64, 0:64, 0:64, :]  # get patch  # doctest: +SKIP
+         """
+         if not isinstance(key, tuple):
+             raise IndexError(
+                 "Potential memory issue, you are loading large images "
+                 "into memory, if sure, load it explicitly with "
+                 "`get_full`"
+             )
+
+         if not isinstance(key[0], slice):
+             offset = [key[i] for i in range(3)]
+             return self.get_patch(offset, np.add(offset, 1)).item()
+
+         slices = [k.indices(self.res[-1][i]) for i, k in enumerate(key)]
+         starts, ends, strides = np.array(slices).transpose()
+         return self.get_patch(starts, ends, strides)
+
+     def get_patch(
+         self, starts, ends, strides: int | Vec3i = 1, res_level=-1
+     ) -> npt.NDArray[ScalarType]:
+         """Get patch of image stack.
+
+         Returns:
+             patch: array of shape (X, Y, Z, C)
+         """
+         if isinstance(strides, int):
+             strides = (strides, strides, strides)
+
+         starts, ends = np.array(starts), np.array(ends)
+         self._check_params(res_level, starts, np.subtract(ends, 1))
+         assert np.equal(strides, [1, 1, 1]).all()  # TODO: support stride
+
+         shape_out = np.concatenate([ends - starts, [1]])
+         out = np.zeros(shape_out, dtype=self.dtype)
+         self._get_range(starts, ends, res_level, out=out)
+
+         # flip y-axis to make it a left-handed coordinate system
+         out = np.flip(out, axis=1)
+         return out
+
+     def find_correspond_imgs(self, p, res_level=-1):
+         """Find the image which contains this point.
+
+         Returns:
+             patch: array of shape (X, Y, Z, C)
+             patch_offset: (int, int, int)
+         """
+         p = np.array(p)
+         self._check_params(res_level, p)
+         return self._find_correspond_imgs(p, res_level)
+
+     def get_correspond_coord(self, p, in_res_level: int, out_res_level: int):
+         raise NotImplementedError()  # TODO
+
+     @property
+     def shape(self) -> tuple[int, int, int, int]:
+         res_max = self.res[-1]
+         return res_max[0], res_max[1], res_max[2], 1
+
+     @classmethod
+     def get_resolutions(cls, root: str) -> tuple[list[Vec3i], list[str], list[Vec3i]]:
+         """Get all resolutions.
+
+         Returns:
+             resolutions: Sequence of sorted resolutions (from small to large).
+             roots: Sequence of the root directory of each resolution.
+             patch_sizes: Sequence of the patch size of each resolution.
+         """
+
+         roots = list(cls.get_resolution_dirs(root))
+         assert len(roots) > 0, "no resolution detected"
+
+         res = [RE_TERAFLY_ROOT.search(d) for d in roots]
+         res = [[int(a) for a in d.groups()] for d in res if d is not None]
+         res = np.array(res)
+         res[:, [0, 1]] = res[:, [1, 0]]  # (Y, X, _) -> (X, Y, _)
+
+         def listdir(d: str):
+             return filter(RE_TERAFLY_NAME.match, os.listdir(d))
+
+         def get_patch_size(src: str):
+             y0 = next(listdir(src))
+             x0 = next(listdir(os.path.join(src, y0)))
+             z0 = next(listdir(os.path.join(src, y0, x0)))
+             patch = read_imgs(os.path.join(src, y0, x0, z0))
+             return patch.shape
+
+         patch_sizes = [get_patch_size(os.path.join(root, d)) for d in roots]
+
+         # sort
+         indices = np.argsort(np.prod(res, axis=1, dtype=np.longlong))
+         res = res[indices]
+         roots = np.take(roots, indices)
+         patch_sizes = np.take(patch_sizes, indices)
+         return res, roots, patch_sizes  # type: ignore
+
+     @staticmethod
+     def is_root(root: str) -> bool:
+         return os.path.isdir(root) and any(
+             RE_TERAFLY_ROOT.match(d) for d in os.listdir(root)
+         )
+
+     @staticmethod
+     def get_resolution_dirs(root: str) -> Iterable[str]:
+         return filter(RE_TERAFLY_ROOT.match, os.listdir(root))
+
+     def _check_params(self, res_level, *coords):
+         assert res_level == -1  # TODO: support multi-resolutions
+
+         res_level = len(self.res) + res_level if res_level < 0 else res_level
+         assert 0 <= res_level < len(self.res), "invalid resolution level"
+
+         res = self.res[res_level]
+         for p in coords:
+             assert np.less([0, 0, 0], p).all(), (
+                 f"indices ({p[0]}, {p[1]}, {p[2]}) out of range (0, 0, 0)"
+             )
+
+             assert np.greater(res, p).all(), (
+                 f"indices ({p[0]}, {p[1]}, {p[2]}) out of range ({res[0]}, {res[1]}, {res[2]})"
+             )
+
+     def _get_range(self, starts, ends, res_level, out):
+         # pylint: disable=too-many-locals
+         shape = ends - starts
+         patch, offset = self._find_correspond_imgs(starts, res_level=res_level)
+         if patch is not None:
+             coords = starts - offset
+             lens = np.min([patch.shape[:3] - coords, shape], axis=0)
+             out[: lens[0], : lens[1], : lens[2]] = patch[
+                 coords[0] : coords[0] + lens[0],
+                 coords[1] : coords[1] + lens[1],
+                 coords[2] : coords[2] + lens[2],
+             ]
+         else:
+             size = self.res_patch_sizes[res_level]
+             lens = (np.floor(starts / size).astype(np.int64) + 1) * size - starts
+
+         if shape[0] > lens[0]:
+             starts_x = starts + [lens[0], 0, 0]
+             ends_x = ends
+             self._get_range(starts_x, ends_x, res_level, out[lens[0] :, :, :])
+
+         if shape[1] > lens[1]:
+             starts_y = starts + [0, lens[1], 0]
+             ends_y = np.array([starts[0], ends[1], ends[2]])
+             ends_y += [min(shape[0], lens[0]), 0, 0]
+             self._get_range(starts_y, ends_y, res_level, out[:, lens[1] :, :])
+
+         if shape[2] > lens[2]:
+             starts_z = starts + [0, 0, lens[2]]
+             ends_z = np.array([starts[0], starts[1], ends[2]])
+             ends_z += [min(shape[0], lens[0]), min(shape[1], lens[1]), 0]
+             self._get_range(starts_z, ends_z, res_level, out[:, :, lens[2] :])
+
+     def _find_correspond_imgs(self, p, res_level):
+         # pylint: disable=too-many-locals
+         x, y, z = p
+         cur = os.path.join(self.root, self.res_dirs[res_level])
+
+         def get_v(f: str):
+             return float(os.path.splitext(f.split("_")[-1])[0])
+
+         for v in [y, x, z]:
+             # extract v from `y/`, `y_x/`, `y_x_z.tif`
+             dirs = [d for d in self._listdir(cur) if RE_TERAFLY_NAME.match(d)]
+             diff = np.array([get_v(d) for d in dirs])
+             if (invalid := diff > 10 * v).all():
+                 return None, None
+
+             diff[invalid] = -np.inf  # remove values which are greater than v
+
+             # find the index of the value smaller than v and closest to v
+             idx = np.argmax(diff)
+             cur = os.path.join(cur, dirs[idx])
+
+         patch = self._read_patch(cur)
+         name = os.path.splitext(os.path.basename(cur))[0]
+         offset = [int(int(i) / 10) for i in name.split("_")]
+         offset[0], offset[1] = offset[1], offset[0]  # (Y, X, _) -> (X, Y, _)
+         if np.less_equal(np.add(offset, patch.shape[:3]), p).any():
+             return None, None
+
+         return patch, offset
+
+
+ # Legacy
+
+
+ class GrayImageStack:
+     """Gray Image stack."""
+
+     imgs: ImageStack
+
+     def __init__(self, imgs: ImageStack) -> None:
+         self.imgs = imgs
+
+     @overload
+     def __getitem__(self, key: Vec3i) -> np.float32: ...
+     @overload
+     def __getitem__(self, key: npt.NDArray[np.integer[Any]]) -> np.float32: ...
+     @overload
+     def __getitem__(
+         self, key: slice | tuple[slice, slice] | tuple[slice, slice, slice]
+     ) -> npt.NDArray[np.float32]: ...
+     def __getitem__(self, key):
+         """Get pixel/patch of image stack."""
+         v = self.imgs[key]
+         if not isinstance(v, np.ndarray):
+             return v
+         if v.ndim == 4:
+             return v[:, :, :, 0]
+         if v.ndim == 3:
+             return v[:, :, 0]
+         if v.ndim == 2:
+             return v[:, 0]
+         if v.ndim == 1:
+             return v[0]
+         raise ValueError("unsupported key")
+
+     def get_full(self) -> npt.NDArray[np.float32]:
+         """Get full image stack.
+
+         NOTE: this will load the full image stack into memory.
+         """
+         return self.imgs.get_full()[:, :, :, 0]
+
+     @property
+     def shape(self) -> tuple[int, int, int]:
+         return self.imgs.shape[:-1]
+
+
+ @deprecated("Use `read_imgs` instead")
+ def read_images(*args, **kwargs) -> GrayImageStack:
+     """Read images.
+
+     .. deprecated:: 0.16.0
+        Use :meth:`read_imgs` instead.
+     """
+
+     return GrayImageStack(read_imgs(*args, **kwargs))
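
For orientation, a minimal usage sketch of the reader/writer added in this file. The file names are hypothetical; it only relies on `read_imgs`, `save_tiff`, and the slicing behavior shown above, with data cast to float32 in [0, 1] by default.

    import numpy as np
    from swcgeom.images.io import read_imgs, save_tiff

    imgs = read_imgs("neuron.tif")           # hypothetical path; returns an ImageStack
    print(imgs.shape)                        # (X, Y, Z, C)

    patch = imgs[0:64, 0:64, 0:64, :]        # slice a sub-volume as an ndarray

    # Float data in [0, 1] is rescaled to 0..255 when casting to uint8.
    save_tiff(patch, "patch.tif", dtype=np.uint8)
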
swcgeom/images/loaders/__init__.py ADDED
@@ -0,0 +1,8 @@
+ # SPDX-FileCopyrightText: 2022 - 2025 Zexin Yuan <pypi@yzx9.xyz>
+ #
+ # SPDX-License-Identifier: Apache-2.0
+
+ from swcgeom.images.loaders.pbd import PBD  # noqa: F401
+ from swcgeom.images.loaders.raw import Raw  # noqa: F401
+
+ __all__ = ["Raw", "PBD"]
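
As a brief illustration of how these Cython loaders are consumed by `V3dImageStack` in swcgeom/images/io.py above, a hedged sketch (the paths are hypothetical; only the `load` call visible in that code is assumed):

    from swcgeom.images.loaders import PBD, Raw

    raw_imgs = Raw().load("stack.v3draw")    # hypothetical path; decodes a Vaa3D raw stack to an ndarray
    pbd_imgs = PBD().load("stack.v3dpbd")    # hypothetical path; decodes the PBD-compressed variant
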