swcgeom-0.19.4-cp311-cp311-win_amd64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- swcgeom/__init__.py +21 -0
- swcgeom/analysis/__init__.py +13 -0
- swcgeom/analysis/feature_extractor.py +454 -0
- swcgeom/analysis/features.py +218 -0
- swcgeom/analysis/lmeasure.py +750 -0
- swcgeom/analysis/sholl.py +201 -0
- swcgeom/analysis/trunk.py +183 -0
- swcgeom/analysis/visualization.py +191 -0
- swcgeom/analysis/visualization3d.py +81 -0
- swcgeom/analysis/volume.py +143 -0
- swcgeom/core/__init__.py +19 -0
- swcgeom/core/branch.py +129 -0
- swcgeom/core/branch_tree.py +65 -0
- swcgeom/core/compartment.py +107 -0
- swcgeom/core/node.py +130 -0
- swcgeom/core/path.py +155 -0
- swcgeom/core/population.py +341 -0
- swcgeom/core/swc.py +247 -0
- swcgeom/core/swc_utils/__init__.py +19 -0
- swcgeom/core/swc_utils/assembler.py +35 -0
- swcgeom/core/swc_utils/base.py +180 -0
- swcgeom/core/swc_utils/checker.py +107 -0
- swcgeom/core/swc_utils/io.py +204 -0
- swcgeom/core/swc_utils/normalizer.py +163 -0
- swcgeom/core/swc_utils/subtree.py +70 -0
- swcgeom/core/tree.py +384 -0
- swcgeom/core/tree_utils.py +277 -0
- swcgeom/core/tree_utils_impl.py +58 -0
- swcgeom/images/__init__.py +9 -0
- swcgeom/images/augmentation.py +149 -0
- swcgeom/images/contrast.py +87 -0
- swcgeom/images/folder.py +217 -0
- swcgeom/images/io.py +578 -0
- swcgeom/images/loaders/__init__.py +8 -0
- swcgeom/images/loaders/pbd.cp311-win_amd64.pyd +0 -0
- swcgeom/images/loaders/pbd.pyx +523 -0
- swcgeom/images/loaders/raw.cp311-win_amd64.pyd +0 -0
- swcgeom/images/loaders/raw.pyx +183 -0
- swcgeom/transforms/__init__.py +20 -0
- swcgeom/transforms/base.py +136 -0
- swcgeom/transforms/branch.py +223 -0
- swcgeom/transforms/branch_tree.py +74 -0
- swcgeom/transforms/geometry.py +270 -0
- swcgeom/transforms/image_preprocess.py +107 -0
- swcgeom/transforms/image_stack.py +219 -0
- swcgeom/transforms/images.py +206 -0
- swcgeom/transforms/mst.py +183 -0
- swcgeom/transforms/neurolucida_asc.py +498 -0
- swcgeom/transforms/path.py +56 -0
- swcgeom/transforms/population.py +36 -0
- swcgeom/transforms/tree.py +265 -0
- swcgeom/transforms/tree_assembler.py +161 -0
- swcgeom/utils/__init__.py +18 -0
- swcgeom/utils/debug.py +23 -0
- swcgeom/utils/download.py +119 -0
- swcgeom/utils/dsu.py +58 -0
- swcgeom/utils/ellipse.py +131 -0
- swcgeom/utils/file.py +90 -0
- swcgeom/utils/neuromorpho.py +581 -0
- swcgeom/utils/numpy_helper.py +70 -0
- swcgeom/utils/plotter_2d.py +134 -0
- swcgeom/utils/plotter_3d.py +35 -0
- swcgeom/utils/renderer.py +145 -0
- swcgeom/utils/sdf.py +324 -0
- swcgeom/utils/solid_geometry.py +154 -0
- swcgeom/utils/transforms.py +367 -0
- swcgeom/utils/volumetric_object.py +483 -0
- swcgeom-0.19.4.dist-info/METADATA +86 -0
- swcgeom-0.19.4.dist-info/RECORD +72 -0
- swcgeom-0.19.4.dist-info/WHEEL +5 -0
- swcgeom-0.19.4.dist-info/licenses/LICENSE +201 -0
- swcgeom-0.19.4.dist-info/top_level.txt +1 -0
swcgeom/transforms/tree.py
ADDED
@@ -0,0 +1,265 @@
# SPDX-FileCopyrightText: 2022 - 2025 Zexin Yuan <pypi@yzx9.xyz>
#
# SPDX-License-Identifier: Apache-2.0

"""Transformations on trees."""

from collections.abc import Callable

import numpy as np
from typing_extensions import deprecated, override

from swcgeom.core import Branch, BranchTree, DictSWC, Path, Tree, cut_tree, to_subtree
from swcgeom.core.swc_utils import SWCTypes, get_types
from swcgeom.transforms.base import Transform
from swcgeom.transforms.branch import BranchConvSmoother, BranchIsometricResampler
from swcgeom.transforms.branch_tree import BranchTreeAssembler
from swcgeom.transforms.geometry import Normalizer

__all__ = [
    "ToBranchTree",
    "ToLongestPath",
    "TreeSmoother",
    "TreeNormalizer",
    "CutByType",
    "CutAxonTree",
    "CutDendriteTree",
    "CutByFurcationOrder",
    "CutShortTipBranch",
    "IsometricResampler",
]


# pylint: disable=too-few-public-methods
class ToBranchTree(Transform[Tree, BranchTree]):
    """Transform a tree into a branch tree."""

    @override
    def __call__(self, x: Tree) -> BranchTree:
        return BranchTree.from_tree(x)


class ToLongestPath(Transform[Tree, Path[DictSWC]]):
    """Transform a tree into its longest path."""

    def __init__(self, *, detach: bool = True) -> None:
        self.detach = detach

    @override
    def __call__(self, x: Tree) -> Path[DictSWC]:
        paths = x.get_paths()
        idx = np.argmax([p.length() for p in paths])
        path = paths[idx]
        if self.detach:
            path = path.detach()
        return path  # type: ignore


class TreeSmoother(Transform[Tree, Tree]):  # pylint: disable=missing-class-docstring
    def __init__(self, n_nodes: int = 5) -> None:
        super().__init__()
        self.n_nodes = n_nodes
        self.trans = BranchConvSmoother(n_nodes=n_nodes)

    @override
    def __call__(self, x: Tree) -> Tree:
        x = x.copy()
        for br in x.get_branches():
            # TODO: works but is weird
            smoothed = self.trans(br)
            x.ndata["x"][br.origin_id()] = smoothed.x()
            x.ndata["y"][br.origin_id()] = smoothed.y()
            x.ndata["z"][br.origin_id()] = smoothed.z()

        return x

    @override
    def extra_repr(self) -> str:
        return f"n_nodes={self.n_nodes}"


@deprecated("Use `Normalizer` instead")
class TreeNormalizer(Normalizer[Tree]):
    """Normalize coordinates and radius to 0-1.

    .. deprecated:: 0.6.0
        Use :class:`Normalizer` instead.
    """


class CutByType(Transform[Tree, Tree]):
    """Cut tree by type.

    To preserve the tree structure, all ancestors of a preserved node are also
    preserved.

    NOTE: Not all preserved nodes are of the specified type.
    """

    def __init__(self, type: int) -> None:  # pylint: disable=redefined-builtin
        super().__init__()
        self.type = type

    @override
    def __call__(self, x: Tree) -> Tree:
        removals = set(x.id()[x.type() != self.type])

        def leave(n: Tree.Node, keep_children: list[bool]) -> bool:
            if n.id in removals and any(keep_children):
                removals.remove(n.id)
            return n.id not in removals

        x.traverse(leave=leave)
        y = to_subtree(x, removals)
        return y

    @override
    def extra_repr(self) -> str:
        return f"type={self.type}"


class CutAxonTree(CutByType):
    """Cut axon tree."""

    def __init__(self, types: SWCTypes | None = None) -> None:
        types = get_types(types)
        super().__init__(type=types.axon)


class CutDendriteTree(CutByType):
    """Cut dendrite tree."""

    def __init__(self, types: SWCTypes | None = None) -> None:
        types = get_types(types)
        super().__init__(type=types.basal_dendrite)  # TODO: apical dendrite


class CutByFurcationOrder(Transform[Tree, Tree]):
    """Cut tree by furcation order."""

    max_furcation_order: int

    def __init__(self, max_bifurcation_order: int) -> None:
        self.max_furcation_order = max_bifurcation_order

    @override
    def __call__(self, x: Tree) -> Tree:
        return cut_tree(x, enter=self._enter)

    def __repr__(self) -> str:
        return f"CutByBifurcationOrder-{self.max_furcation_order}"

    def _enter(self, n: Tree.Node, parent_level: int | None) -> tuple[int, bool]:
        if parent_level is None:
            level = 0
        elif n.is_furcation():
            level = parent_level + 1
        else:
            level = parent_level
        return (level, level >= self.max_furcation_order)


@deprecated("Use CutByFurcationOrder instead")
class CutByBifurcationOrder(CutByFurcationOrder):
    """Cut tree by bifurcation order.

    NOTE: Deprecated due to the wrong spelling of furcation. For now, it is just an
    alias of `CutByFurcationOrder` and raises a warning. It will be changed to raise
    an error in the future.
    """

    max_furcation_order: int

    def __init__(self, max_bifurcation_order: int) -> None:
        super().__init__(max_bifurcation_order)

    def __repr__(self) -> str:
        return f"CutByBifurcationOrder-{self.max_furcation_order}"


class CutShortTipBranch(Transform[Tree, Tree]):
    """Cut off terminal branches that are too short.

    This method is usually applied in the post-processing of manual reconstruction.
    When the user draws lines, a short line head is often left at the junction of
    two lines.
    """

    thre: float
    callbacks: list[Callable[[Tree.Branch], None]]

    def __init__(
        self, thre: float = 5, callback: Callable[[Tree.Branch], None] | None = None
    ) -> None:
        self.thre = thre
        self.callbacks = []

        if callback is not None:
            self.callbacks.append(callback)

    @override
    def __call__(self, x: Tree) -> Tree:
        removals: list[int] = []
        self.callbacks.append(lambda br: removals.append(br[1].id))
        x.traverse(leave=self._leave)
        self.callbacks.pop()
        return to_subtree(x, removals)

    @override
    def extra_repr(self) -> str:
        return f"threshold={self.thre}"

    def _leave(
        self, n: Tree.Node, children: list[tuple[float, Tree.Node] | None]
    ) -> tuple[float, Tree.Node] | None:
        if len(children) == 0:  # tip
            return 0, n

        if len(children) == 1 and children[0] is not None:  # elongation
            dis, child = children[0]
            dis += n.distance(child)
            return dis, n

        for c in children:
            if c is None:
                continue

            dis, child = c
            if dis + n.distance(child) > self.thre:
                continue

            path = [n.id]  # n is not deleted, but is included in the callback
            while child is not None:  # TODO: perf
                path.append(child.id)
                child = cc[0] if len((cc := child.children())) > 0 else None

            br = Tree.Branch(n.attach, path)
            for cb in self.callbacks:
                cb(br)

        return None


class Resampler(Transform[Tree, Tree]):
    def __init__(self, branch_resampler: Transform[Branch, Branch]) -> None:
        super().__init__()
        self.resampler = branch_resampler
        self.assembler = BranchTreeAssembler()

    @override
    def __call__(self, x: Tree) -> Tree:
        t = BranchTree.from_tree(x)
        t.branches = {
            k: [self.resampler(br) for br in brs] for k, brs in t.branches.items()
        }
        return self.assembler(t)


class IsometricResampler(Resampler):
    def __init__(
        self, distance: float, *, adjust_last_gap: bool = True, **kwargs
    ) -> None:
        branch_resampler = BranchIsometricResampler(
            distance, adjust_last_gap=adjust_last_gap, **kwargs
        )
        super().__init__(branch_resampler)
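For orientation, here is a minimal usage sketch of the transforms defined above. It is not taken from the package documentation: `Tree.from_swc` as the loader and the `neuron.swc` path are assumptions, and the threshold and spacing values are arbitrary.

# Minimal sketch (assumptions noted above): chain a few of the tree transforms.
from swcgeom.core import Tree
from swcgeom.transforms.tree import CutShortTipBranch, IsometricResampler, ToBranchTree

t = Tree.from_swc("neuron.swc")          # assumed loader, placeholder path
t = CutShortTipBranch(thre=5)(t)         # drop terminal branches shorter than 5 (SWC units)
t = IsometricResampler(distance=2.0)(t)  # resample each branch to roughly 2-unit spacing
bt = ToBranchTree()(t)                   # collapse the result to a branch-level tree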
swcgeom/transforms/tree_assembler.py
ADDED
@@ -0,0 +1,161 @@
# SPDX-FileCopyrightText: 2022 - 2025 Zexin Yuan <pypi@yzx9.xyz>
#
# SPDX-License-Identifier: Apache-2.0

"""Assemble a tree."""

from collections.abc import Iterable
from copy import copy

import numpy as np
import pandas as pd
from typing_extensions import override

from swcgeom.core import Tree
from swcgeom.core.swc_utils import (
    SWCNames,
    get_names,
    link_roots_to_nearest_,
    sort_nodes_,
)
from swcgeom.transforms.base import Transform

EPS = 1e-5


class LinesToTree(Transform[list[pd.DataFrame], Tree]):
    """Assemble lines into an SWC tree."""

    def __init__(self, *, thre: float = 0.2, undirected: bool = True):
        """
        Args:
            thre: Connection threshold.
            undirected: Both ends of a line can be considered as connection points.
                If `False`, only the starting point.
        """
        super().__init__()
        self.thre = thre
        self.undirected = undirected

    @override
    def __call__(
        self, lines: Iterable[pd.DataFrame], *, names: SWCNames | None = None
    ):  # TODO check this
        return self.assemble(lines, names=names)

    def assemble(
        self,
        lines: Iterable[pd.DataFrame],
        *,
        undirected: bool = True,
        names: SWCNames | None = None,
    ) -> pd.DataFrame:
        """Assemble lines into a tree.

        Assemble all the lines into a set of subtrees, and then connect them.

        Args:
            lines: An array of tables, each containing a line; columns should follow
                the SWC format.
            undirected: Forwarded to `self.try_assemble`.
            names: Forwarded to `self.try_assemble`.

        Returns:
            tree: ~pd.DataFrame

        See Also:
            self.try_assemble
        """
        tree, lines = self.try_assemble(
            lines, sort_nodes=False, undirected=undirected, names=names
        )
        while len(lines) > 0:
            t, lines = self.try_assemble(
                lines,
                id_offset=len(tree),
                sort_nodes=False,
                undirected=undirected,
                names=names,
            )
            tree = pd.concat([tree, t])

        tree = tree.reset_index()
        link_roots_to_nearest_(tree)
        sort_nodes_(tree)
        return tree

    def try_assemble(
        self,
        lines: Iterable[pd.DataFrame],
        *,
        id_offset: int = 0,
        undirected: bool = True,
        sort_nodes: bool = True,
        names: SWCNames | None = None,
    ) -> tuple[pd.DataFrame, list[pd.DataFrame]]:
        """Try to assemble lines into a tree.

        Treat the first line as a tree, find a line whose shortest distance to the
        tree is less than the threshold, merge it into the tree, and repeat until
        there are no lines left to merge. Return the tree and the remaining lines.

        Args:
            lines: An array of tables, each containing a line; columns should follow
                the SWC format.
            id_offset: The offset of the line node id.
            undirected: Both ends of a line can be considered as connection points.
                If `False`, only the starting point.
            sort_nodes: Sort nodes of the subtree.
            names: SWCNames, optional

        Returns:
            tree: ~pandas.DataFrame
            remaining_lines: List of ~pandas.DataFrame
        """
        names = get_names(names)
        lines = copy(list(lines))

        tree = lines[0]
        tree[names.id] = id_offset + np.arange(len(tree))
        tree[names.pid] = tree[names.id] - 1
        tree.at[0, names.pid] = -1
        del lines[0]

        while True:
            for i, line in enumerate(lines):
                for p in [0, -1] if undirected else [0]:
                    xyz = [names.x, names.y, names.z]
                    vs = tree[xyz] - line.iloc[p][xyz]
                    dis = np.linalg.norm(vs, axis=1)
                    ind = np.argmin(dis)
                    if dis[ind] > self.thre:
                        continue

                    if dis[ind] < EPS:
                        line = line.drop((p + len(line)) % len(line)).reset_index(
                            drop=True
                        )

                    line[names.id] = id_offset + len(tree) + np.arange(len(line))
                    line[names.pid] = line[names.id] + (-1 if p == 0 else 1)
                    line.at[(p + len(line)) % len(line), names.pid] = tree.iloc[ind][
                        names.id
                    ]
                    tree = pd.concat([tree, line])
                    del lines[i]
                    break
                else:
                    continue

                break
            else:
                break

        if sort_nodes:
            sort_nodes_(tree)

        return tree, lines

    @override
    def extra_repr(self) -> str:
        return f"thre={self.thre}, undirected={self.undirected}"
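A small illustration of how `LinesToTree` might be driven, not taken from the package docs: the two hand-built DataFrames and their column names (`id`, `type`, `x`, `y`, `z`, `r`, `pid`, assumed to match the defaults returned by `get_names()`) are assumptions. The start point of the second line sits within the default 0.2 threshold of the first, so the two lines are merged into one SWC table.

# Sketch under the assumptions above: assemble two nearby polylines into one table.
import pandas as pd
from swcgeom.transforms.tree_assembler import LinesToTree

line_a = pd.DataFrame({"id": [0, 1], "type": [3, 3],
                       "x": [0.0, 1.0], "y": [0.0, 0.0], "z": [0.0, 0.0],
                       "r": [1.0, 1.0], "pid": [-1, 0]})
line_b = pd.DataFrame({"id": [0, 1], "type": [3, 3],
                       "x": [1.1, 2.0], "y": [0.0, 0.0], "z": [0.0, 0.0],
                       "r": [1.0, 1.0], "pid": [-1, 0]})

swc = LinesToTree(thre=0.2)([line_a, line_b])  # returns a single pandas DataFrame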
swcgeom/utils/__init__.py
ADDED
@@ -0,0 +1,18 @@
# SPDX-FileCopyrightText: 2022 - 2025 Zexin Yuan <pypi@yzx9.xyz>
#
# SPDX-License-Identifier: Apache-2.0

"""Utils."""

from swcgeom.utils.debug import *  # noqa: F403
from swcgeom.utils.dsu import *  # noqa: F403
from swcgeom.utils.ellipse import *  # noqa: F403
from swcgeom.utils.file import *  # noqa: F403
from swcgeom.utils.neuromorpho import *  # noqa: F403
from swcgeom.utils.numpy_helper import *  # noqa: F403
from swcgeom.utils.plotter_2d import *  # noqa: F403
from swcgeom.utils.renderer import *  # noqa: F403
from swcgeom.utils.sdf import *  # noqa: F403
from swcgeom.utils.solid_geometry import *  # noqa: F403
from swcgeom.utils.transforms import *  # noqa: F403
from swcgeom.utils.volumetric_object import *  # noqa: F403
swcgeom/utils/debug.py
ADDED
@@ -0,0 +1,23 @@
# SPDX-FileCopyrightText: 2022 - 2025 Zexin Yuan <pypi@yzx9.xyz>
#
# SPDX-License-Identifier: Apache-2.0

"""Debug helpers"""

import time
from functools import wraps

__all__ = ["func_timer"]


def func_timer(function):
    @wraps(function)
    def function_timer(*args, **kwargs):
        print(f"[Function: {function.__name__} start...]")
        t0 = time.time()
        result = function(*args, **kwargs)
        t1 = time.time()
        print(f"[Function: {function.__name__} finished, spent time: {t1 - t0:.2f}s]")
        return result

    return function_timer
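Since `func_timer` is an ordinary decorator, a usage sketch is straightforward; the decorated function below is made up for illustration.

# Sketch: wrap any callable to print its start message and elapsed time.
from swcgeom.utils.debug import func_timer

@func_timer
def slow_sum(n: int) -> int:
    return sum(range(n))

slow_sum(10_000_000)  # prints the start and finish lines with the elapsed seconds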
swcgeom/utils/download.py
ADDED
@@ -0,0 +1,119 @@
# SPDX-FileCopyrightText: 2022 - 2025 Zexin Yuan <pypi@yzx9.xyz>
#
# SPDX-License-Identifier: Apache-2.0

"""Download helpers.

NOTE: All dependencies need to be installed, try:

```sh
pip install swcgeom[all]
```
"""

import itertools
import logging
import multiprocessing
import os
from functools import partial
from urllib.parse import urljoin

__all__ = ["download", "fetch_page", "clone_index_page"]


def download(dst: str, url: str) -> None:
    """Download a file."""
    from urllib3 import connection_from_url

    conn = connection_from_url(url)
    r = conn.request("GET", url)

    dirname = os.path.dirname(dst)
    if dirname != "" and not os.path.exists(dirname):
        os.makedirs(dirname)

    with open(dst, "wb") as file:
        file.write(r.data)


def fetch_page(url: str):
    """Fetch page content."""
    from bs4 import BeautifulSoup
    from urllib3 import connection_from_url

    conn = connection_from_url(url)
    r = conn.request("GET", url)
    data = r.data.decode("utf-8")
    return BeautifulSoup(data, features="html.parser")


def clone_index_page(
    index_url: str, dist_dir: str, override: bool = False, multiprocess: int = 4
) -> None:
    """Download directory from index page.

    E.g: `https://download.brainimagelibrary.org/biccn/zeng/luo/fMOST/cells/`

    Args:
        index_url: URL of index page.
        dist_dir: Directory of dist.
        override: Override existing file, skip file if `False`.
        multiprocess: How many processes are available for download.
    """
    files = get_urls_in_index_page(index_url)
    logging.info("downloader: search `%s`, found %s files.", index_url, len(files))

    task = partial(
        _clone_index_page, index_url=index_url, dist_dir=dist_dir, override=override
    )
    with multiprocessing.Pool(multiprocess) as p:
        p.map(task, files)


def _clone_index_page(url: str, index_url: str, dist_dir: str, override: bool) -> None:
    from urllib3.exceptions import HTTPError

    filepath = url.removeprefix(index_url)
    dist = os.path.join(dist_dir, filepath)
    if os.path.exists(dist):
        if not override:
            logging.info("downloader: file `%s` exists, skipped.", dist)
            return

        logging.info("downloader: file `%s` exists, deleted.", dist)
        os.remove(dist)

    try:
        logging.info("downloader: downloading `%s` to `%s`", url, dist)
        download(dist, url)
        logging.info("downloader: downloaded `%s` to `%s`", url, dist)
    except HTTPError as ex:
        logging.info("downloader: fails to download `%s`, except `%s`", url, ex)


def get_urls_in_index_page(url: str) -> list[str]:
    """Get all file links by dfs."""
    soup = fetch_page(url)
    links = [el.attrs["href"] for el in soup.find_all("a")]
    files = [urljoin(url, a) for a in links if not a.endswith("/")]
    dirs = [urljoin(url, a) for a in links if a != "../" and a.endswith("/")]
    files.extend(itertools.chain(*[get_urls_in_index_page(dir) for dir in dirs]))
    return files


if __name__ == "__main__":
    import argparse

    parser = argparse.ArgumentParser(description="Download files from index page.")
    parser.add_argument("url", type=str, help="URL of index page.")
    parser.add_argument("dist", type=str, help="Directory of dist.")
    parser.add_argument(
        "--override", type=bool, default=False, help="Override existing file."
    )
    parser.add_argument(
        "--multiprocess", type=int, default=4, help="How many processes are available."
    )
    args = parser.parse_args()

    logging.basicConfig(level=logging.INFO)
    clone_index_page(args.url, args.dist, args.override, args.multiprocess)
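The `__main__` block above already doubles as a CLI example; the equivalent programmatic call might look like the sketch below, where the URL is the example given in the `clone_index_page` docstring and `./cells` is a placeholder output directory. Note that, per the module docstring, the optional dependencies (`pip install swcgeom[all]`) must be installed.

# Sketch: mirror the CLI entry point from Python code.
import logging
from swcgeom.utils.download import clone_index_page

logging.basicConfig(level=logging.INFO)
clone_index_page(
    "https://download.brainimagelibrary.org/biccn/zeng/luo/fMOST/cells/",
    "./cells",          # placeholder output directory
    override=False,
    multiprocess=4,
)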
swcgeom/utils/dsu.py
ADDED
@@ -0,0 +1,58 @@
# SPDX-FileCopyrightText: 2022 - 2025 Zexin Yuan <pypi@yzx9.xyz>
#
# SPDX-License-Identifier: Apache-2.0

"""Disjoint Set Union Impl."""

__all__ = ["DisjointSetUnion"]


class DisjointSetUnion:
    """Disjoint Set Union.

    DSU with path compression and union by rank.

    >>> dsu = DisjointSetUnion(3)
    >>> dsu.is_same_set(0, 1)
    False
    >>> dsu.union_sets(0, 1)
    >>> dsu.is_same_set(0, 1)
    True
    >>> dsu.is_same_set(0, 2)
    False
    >>> dsu.union_sets(1, 2)
    >>> dsu.is_same_set(0, 2)
    True
    """

    def __init__(self, node_number: int):
        self.element_parent = [i for i in range(node_number)]
        self.rank = [0 for _ in range(node_number)]

    def find_parent(self, node_id: int) -> int:
        if node_id != self.element_parent[node_id]:
            self.element_parent[node_id] = self.find_parent(
                self.element_parent[node_id]
            )
        return self.element_parent[node_id]

    def union_sets(self, node_a: int, node_b: int) -> None:
        assert self.validate_node(node_a) and self.validate_node(node_b)

        root_a = self.find_parent(node_a)
        root_b = self.find_parent(node_b)
        if root_a != root_b:
            # union by rank
            if self.rank[root_a] < self.rank[root_b]:
                self.element_parent[root_a] = root_b
            elif self.rank[root_a] > self.rank[root_b]:
                self.element_parent[root_b] = root_a
            else:
                self.element_parent[root_b] = root_a
                self.rank[root_a] += 1

    def is_same_set(self, node_a: int, node_b: int) -> bool:
        return self.find_parent(node_a) == self.find_parent(node_b)

    def validate_node(self, node_id: int) -> bool:
        return 0 <= node_id < len(self.element_parent)