swcgeom-0.14.0-py3-none-any.whl → swcgeom-0.16.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- swcgeom/_version.py +2 -2
- swcgeom/analysis/lmeasure.py +821 -0
- swcgeom/analysis/sholl.py +31 -2
- swcgeom/core/__init__.py +4 -0
- swcgeom/core/branch.py +9 -4
- swcgeom/core/branch_tree.py +2 -3
- swcgeom/core/{segment.py → compartment.py} +14 -9
- swcgeom/core/node.py +0 -8
- swcgeom/core/path.py +21 -6
- swcgeom/core/population.py +42 -3
- swcgeom/core/swc_utils/assembler.py +20 -138
- swcgeom/core/swc_utils/base.py +12 -5
- swcgeom/core/swc_utils/checker.py +12 -2
- swcgeom/core/swc_utils/subtree.py +2 -2
- swcgeom/core/tree.py +53 -49
- swcgeom/core/tree_utils.py +27 -5
- swcgeom/core/tree_utils_impl.py +22 -6
- swcgeom/images/augmentation.py +6 -1
- swcgeom/images/contrast.py +107 -0
- swcgeom/images/folder.py +111 -29
- swcgeom/images/io.py +79 -40
- swcgeom/transforms/__init__.py +2 -0
- swcgeom/transforms/base.py +41 -21
- swcgeom/transforms/branch.py +5 -5
- swcgeom/transforms/geometry.py +42 -18
- swcgeom/transforms/image_preprocess.py +100 -0
- swcgeom/transforms/image_stack.py +46 -28
- swcgeom/transforms/images.py +76 -6
- swcgeom/transforms/mst.py +10 -18
- swcgeom/transforms/neurolucida_asc.py +495 -0
- swcgeom/transforms/population.py +2 -2
- swcgeom/transforms/tree.py +12 -14
- swcgeom/transforms/tree_assembler.py +85 -19
- swcgeom/utils/__init__.py +1 -0
- swcgeom/utils/neuromorpho.py +425 -300
- swcgeom/utils/numpy_helper.py +14 -4
- swcgeom/utils/plotter_2d.py +130 -0
- swcgeom/utils/renderer.py +28 -139
- swcgeom/utils/sdf.py +5 -1
- {swcgeom-0.14.0.dist-info → swcgeom-0.16.0.dist-info}/METADATA +3 -3
- swcgeom-0.16.0.dist-info/RECORD +67 -0
- {swcgeom-0.14.0.dist-info → swcgeom-0.16.0.dist-info}/WHEEL +1 -1
- swcgeom-0.14.0.dist-info/RECORD +0 -62
- {swcgeom-0.14.0.dist-info → swcgeom-0.16.0.dist-info}/LICENSE +0 -0
- {swcgeom-0.14.0.dist-info → swcgeom-0.16.0.dist-info}/top_level.txt +0 -0
swcgeom/transforms/neurolucida_asc.py ADDED
@@ -0,0 +1,495 @@
+"""Neurolucida related transformation."""
+
+import os
+import re
+from enum import Enum, auto
+from io import TextIOBase
+from typing import Any, List, NamedTuple, Optional, cast
+
+import numpy as np
+
+from swcgeom.core import Tree
+from swcgeom.core.swc_utils import SWCNames, SWCTypes, get_names, get_types
+from swcgeom.transforms.base import Transform
+
+__all__ = ["NeurolucidaAscToSwc"]
+
+
+class NeurolucidaAscToSwc(Transform[str, Tree]):
+    """Convert neurolucida asc format to swc format."""
+
+    def __call__(self, x: str) -> Tree:
+        return self.convert(x)
+
+    @classmethod
+    def convert(cls, fname: str) -> Tree:
+        with open(fname, "r") as f:
+            tree = cls.from_stream(f, source=os.path.abspath(fname))
+
+        return tree
+
+    @classmethod
+    def from_stream(cls, x: TextIOBase, *, source: str = "") -> Tree:
+        parser = Parser(x, source=source)
+        ast = parser.parse()
+        tree = cls.from_ast(ast)
+        return tree
+
+    @staticmethod
+    def from_ast(
+        ast: "AST",
+        *,
+        names: Optional[SWCNames] = None,
+        types: Optional[SWCTypes] = None,
+    ) -> Tree:
+        names = get_names(names)
+        types = get_types(types)
+        ndata = {n: [] for n in names.cols()}
+
+        next_id = 0
+        typee = [types.undefined]
+
+        def walk_ast(root: ASTNode, pid: int = -1) -> None:
+            nonlocal next_id, typee
+            match root.type:
+                case ASTType.ROOT:
+                    for n in root.children:
+                        walk_ast(n)
+
+                case ASTType.TREE:
+                    match root.value:
+                        case "AXON":
+                            typee.append(types.axon)
+                        case "DENDRITE":
+                            typee.append(types.basal_dendrite)
+
+                    for n in root.children:
+                        walk_ast(n)
+
+                    typee.pop()
+
+                case ASTType.NODE:
+                    x, y, z, r = root.value
+                    idx = next_id
+                    next_id += 1
+
+                    ndata[names.id].append(idx)
+                    ndata[names.type].append(typee[-1])
+                    ndata[names.x].append(x)
+                    ndata[names.y].append(y)
+                    ndata[names.z].append(z)
+                    ndata[names.r].append(r)
+                    ndata[names.pid].append(pid)
+
+                    for n in root.children:
+                        walk_ast(n, pid=idx)
+
+        walk_ast(ast)
+        tree = Tree(
+            next_id,
+            source=ast.source,
+            names=names,
+            **ndata, # type: ignore
+        )
+        return tree
+
+
+# -----------------
+# ASC format parser
+# -----------------
+
+# AST
+
+
+class ASTType(Enum):
+    ROOT = auto()
+    TREE = auto()
+    NODE = auto()
+    COLOR = auto()
+    COMMENT = auto()
+
+
+class ASTNode:
+    parent: "ASTNode | None" = None
+
+    def __init__(
+        self,
+        type: ASTType,
+        value: Any = None,
+        tokens: Optional[List["Token"]] = None,
+        children: Optional[List["ASTNode"]] = None,
+    ):
+        self.type = type
+        self.value = value
+        self.tokens = tokens or []
+        self.children = children or []
+        for child in self.children:
+            child.parent = self
+
+    def add_child(self, child: "ASTNode") -> None:
+        self.children.append(child)
+        child.parent = self
+        if child.tokens is not None:
+            self.tokens.extend(child.tokens)
+
+    def __eq__(self, __value: object) -> bool:
+        """
+        Compare two ASTNode objects.
+
+        Notes
+        -----
+        The `parent`, `tokens` attribute is not compared.
+        """
+        return (
+            isinstance(__value, ASTNode)
+            and self.type == __value.type
+            and self.value == __value.value
+            and self.children == __value.children
+        )
+
+
+class AST(ASTNode):
+    def __init__(self, children: Optional[List[ASTNode]] = None, source: str = ""):
+        super().__init__(ASTType.ROOT, children=children)
+        self.source = source
+
+
+# ASC values
+
+
+class ASCNode(NamedTuple):
+    x: float
+    y: float
+    z: float
+    r: float
+
+
+class ASCColor(NamedTuple):
+    color: str
+
+    def __eq__(self, __value: object) -> bool:
+        return (
+            isinstance(__value, ASCColor)
+            and self.color.upper() == __value.color.upper()
+        )
+
+
+class ASCComment(NamedTuple):
+    comment: str
+
+
+# Error
+
+
+class TokenTypeError(ValueError):
+    def __init__(self, token: "Token", expected: str):
+        super().__init__(
+            f"Unexpected token {token.type.name} `{token.value}` at {token.lineno}:{token.column}, expected {expected}"
+        )
+
+
+class LiteralTokenError(ValueError):
+    def __init__(self, token: "Token", expected: str):
+        super().__init__(
+            f"Unexpected LITERAL token {token.value} at {token.lineno}:{token.column}, expected {expected}"
+        )
+
+
+class AssertionTokenTypeError(Exception):
+    pass
+
+
+# Parser
+
+
+class Parser:
+    def __init__(self, r: TextIOBase, *, source: str = ""):
+        self.lexer = Lexer(r)
+        self.next_token = None
+        self.source = source
+        self._read_token()
+
+    def parse(self) -> AST:
+        try:
+            return self._parse()
+        except AssertionTokenTypeError as assertion_err:
+            msg = (
+                f"Error parsing {self.source}" if self.source != "" else "Error parsing"
+            )
+            original_error = assertion_err.__cause__
+            err = ValueError(msg)
+            if original_error is None:
+                raise err
+
+            ignores = ["_assert_and_cunsume", "_assert"]
+            current = assertion_err.__traceback__
+            while current is not None:
+                if (
+                    current.tb_next is not None
+                    and current.tb_next.tb_frame.f_code.co_name in ignores
+                ):
+                    current.tb_next = None
+                else:
+                    current = current.tb_next
+
+            original_error.__traceback__ = assertion_err.__traceback__
+
+            raise err from original_error
+        except Exception as original_error:
+            msg = f"Error parsing {self.source}" if self.source else "Error parsing"
+            raise ValueError(msg) from original_error
+
+    def _parse(self) -> AST:
+        root = AST(source=self.source)
+
+        token = self._assert_and_cunsume(TokenType.BRACKET_LEFT)
+        root.tokens.append(token)
+
+        while (token := self.next_token) is not None:
+            if token.type == TokenType.BRACKET_RIGHT:
+                break
+
+            if token.type != TokenType.BRACKET_LEFT:
+                raise TokenTypeError(token, "BRACKET_LEFT, BRACKET_RIGHT")
+
+            root.tokens.append(token)
+            self._consume()
+
+            token = self._assert(self.next_token, TokenType.LITERAL)
+            match str.upper(token.value):
+                case "AXON" | "DENDRITE":
+                    self._parse_tree(root)
+
+                case "COLOR":
+                    self._parse_color(root) # TODO: bug
+
+                case _:
+                    raise LiteralTokenError(token, "AXON, DENDRITE, COLOR")
+
+        token = self._assert(self.next_token, TokenType.BRACKET_RIGHT)
+        token = self._assert_and_cunsume(TokenType.BRACKET_RIGHT)
+        root.tokens.append(token)
+        return root
+
+    def _parse_tree(self, root: ASTNode) -> None:
+        t1 = self._assert_and_cunsume(TokenType.LITERAL)
+        node = ASTNode(ASTType.TREE, str.upper(t1.value), tokens=[t1])
+
+        t2 = self._assert_and_cunsume(TokenType.BRACKET_RIGHT)
+        node.tokens.append(t2)
+
+        t3 = self._assert_and_cunsume(TokenType.BRACKET_LEFT)
+        node.tokens.append(t3)
+
+        self._parse_subtree(node)
+        root.add_child(node)
+
+    def _parse_subtree(self, root: ASTNode) -> None:
+        flag = True # flag to check if the brachet_left can be consumed
+        current = root
+        while (token := self.next_token) is not None:
+            match token.type:
+                case TokenType.BRACKET_LEFT:
+                    self._read_token()
+                    if flag:
+                        flag = False
+                    else:
+                        self._parse_subtree(current)
+
+                case TokenType.BRACKET_RIGHT:
+                    break
+
+                case TokenType.FLOAT:
+                    current = self._parse_node(current)
+                    flag = True
+
+                case TokenType.LITERAL:
+                    match str.upper(token.value):
+                        case "COLOR":
+                            self._parse_color(current)
+                        case _:
+                            raise LiteralTokenError(token, "COLOR")
+
+                    flag = True
+
+                case TokenType.OR:
+                    current = root
+                    self._read_token()
+                    flag = True
+
+                case TokenType.COMMENT:
+                    self._parse_comment(current)
+
+                case _:
+                    excepted = (
+                        "BRACKET_LEFT, BRACKET_RIGHT, LITERAL, FLOAT, OR, COMMENT"
+                    )
+                    raise TokenTypeError(token, excepted)
+
+        current.tokens.append(token)
+
+    def _parse_node(self, root: ASTNode) -> ASTNode:
+        # FLOAT FLOAT FLOAT FLOAT )
+        t1 = self._assert_and_cunsume(TokenType.FLOAT)
+        t2 = self._assert(self.next_token, TokenType.FLOAT)
+        self._read_token()
+        t3 = self._assert(self.next_token, TokenType.FLOAT)
+        self._read_token()
+        t4 = self._assert(self.next_token, TokenType.FLOAT)
+        self._read_token()
+        t5 = self._assert_and_cunsume(TokenType.BRACKET_RIGHT)
+
+        x, y, z, r = t1.value, t2.value, t3.value, t4.value
+        node = ASTNode(ASTType.NODE, ASCNode(x, y, z, r), tokens=[t1, t2, t3, t4, t5])
+        root.add_child(node)
+        return node
+
+    def _parse_color(self, root: ASTNode) -> ASTNode:
+        # COLOR COLOR_VALUE )
+        t1 = self._assert_and_cunsume(TokenType.LITERAL)
+        t2 = self._assert_and_cunsume(TokenType.LITERAL)
+        t3 = self._assert_and_cunsume(TokenType.BRACKET_RIGHT)
+
+        node = ASTNode(ASTType.COLOR, ASCColor(t2.value), tokens=[t1, t2, t3])
+        root.add_child(node)
+        return node
+
+    def _parse_comment(self, root: ASTNode) -> ASTNode:
+        # ; COMMENT
+        t1 = self._assert_and_cunsume(TokenType.COMMENT)
+        node = ASTNode(ASTType.COMMENT, ASCComment(t1.value), tokens=[t1])
+        root.add_child(node) # ? where the comment should be added
+        return node

+    def _read_token(self) -> None:
+        self.next_token = next(self.lexer, None)
+
+    def _assert_and_cunsume(self, type: "TokenType") -> "Token":
+        token = self._consume()
+        token = self._assert(token, type)
+        return cast(Token, token)
+
+    def _assert(self, token: "Token | None", type: "TokenType") -> "Token":
+        if token is None:
+            raise AssertionTokenTypeError() from ValueError("Unexpected EOF")
+
+        if token.type != type:
+            raise AssertionTokenTypeError() from TokenTypeError(token, type.name)
+
+        return token
+
+    def _consume(self) -> "Token | None":
+        token = self.next_token
+        self._read_token()
+        return token
+
+
+# -----------------
+# ASC format lexer
+# -----------------
+
+
+class TokenType(Enum):
+    BRACKET_LEFT = auto()
+    BRACKET_RIGHT = auto()
+    COMMENT = auto()
+    OR = auto()
+    FLOAT = auto()
+    LITERAL = auto()
+
+
+class Token:
+    def __init__(self, type: TokenType, value: Any, lineno: int, column: int):
+        self.type = type
+        self.value = value
+        self.lineno = lineno
+        self.column = column
+
+    def __repr__(self) -> str:
+        return f"Token({self.type.name}, {self.value}, Position={self.lineno}:{self.column})"
+
+
+RE_FLOAT = re.compile(r"[-+]?[0-9]*\.?[0-9]+([eE][-+]?[0-9]+)?")
+
+
+class Lexer:
+    def __init__(self, r: TextIOBase):
+        self.r = r
+        self.lineno = 1
+        self.column = 1
+        self.next_char = self.r.read(1)
+
+    def __iter__(self):
+        return self
+
+    def __next__(self) -> Token:
+        match (word := self._read_word()):
+            case "":
+                raise StopIteration
+
+            case "(":
+                return self._token(TokenType.BRACKET_LEFT, word)
+
+            case ")":
+                return self._token(TokenType.BRACKET_RIGHT, word)
+
+            case ";":
+                return self._token(TokenType.COMMENT, self._read_line())
+
+            case "|":
+                return self._token(TokenType.OR, word)
+
+            case _ if RE_FLOAT.match(word) is not None:
+                return self._token(TokenType.FLOAT, float(word))
+
+            case _:
+                return self._token(TokenType.LITERAL, word)
+
+    def _read_char(self) -> bool:
+        self.next_char = self.r.read(1)
+        if self.next_char == "":
+            return False
+
+        if self.next_char == "\n":
+            self.lineno += 1
+            self.column = 1
+        else:
+            self.column += 1
+        return True
+
+    def _read_word(self) -> str:
+        # skip leading spaces
+        while self.next_char != "" and self.next_char in " \t\n":
+            self._read_char()
+
+        token = ""
+        while self.next_char != "" and self.next_char not in " \t\n();|":
+            token += self.next_char
+            self._read_char()
+
+        if token != "":
+            return token
+
+        if self.next_char == "":
+            return ""
+
+        ch = self.next_char
+        self._read_char()
+        return ch
+
+    def _read_line(self) -> str:
+        if self.next_char != "\n":
+            line = self.r.readline()
+            line = self.next_char + line
+            if line.endswith("\n"):
+                line = line[:-1]
+        else:
+            line = ""
+
+        self.lineno += 1
+        self.column = 1
+        self.next_char = self.r.read(1)
+        return line
+
+    def _token(self, type: TokenType, value: Any) -> Token:
+        return Token(type, value, self.lineno, self.column)
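For orientation, a minimal usage sketch of the new converter. The top-level import path and the file name are assumptions (the `swcgeom/transforms/__init__.py +2 -0` change suggests the class is re-exported there; if not, import it from `swcgeom.transforms.neurolucida_asc`):

    from swcgeom.transforms import NeurolucidaAscToSwc  # assumed re-export, see note above

    converter = NeurolucidaAscToSwc()
    tree = converter("neuron.asc")  # placeholder path; equivalent to NeurolucidaAscToSwc.convert("neuron.asc")
    # `tree` is a swcgeom.core.Tree assembled by from_ast() from the parsed ASC file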
swcgeom/transforms/population.py CHANGED
swcgeom/transforms/tree.py CHANGED
@@ -64,12 +64,16 @@ class TreeSmoother(Transform[Tree, Tree]): # pylint: disable=missing-class-docs
 
         return x
 
-    def
-        return f"
+    def extra_repr(self):
+        return f"n_nodes={self.n_nodes}"
 
 
 class TreeNormalizer(Normalizer[Tree]):
-    """Noramlize coordinates and radius to 0-1.
+    """Noramlize coordinates and radius to 0-1.
+
+    .. deprecated:: 0.6.0
+        Use :cls:`Normalizer` instead.
+    """
 
     def __init__(self, *args, **kwargs) -> None:
         warnings.warn(
@@ -107,8 +111,8 @@ class CutByType(Transform[Tree, Tree]):
         y = to_subtree(x, removals)
         return y
 
-    def
-        return f"
+    def extra_repr(self):
+        return f"type={self.type}"
 
 
 class CutAxonTree(CutByType):
@@ -118,9 +122,6 @@ class CutAxonTree(CutByType):
         types = get_types(types)
         super().__init__(type=types.axon)
 
-    def __repr__(self) -> str:
-        return "CutAxonTree"
-
 
 class CutDendriteTree(CutByType):
     """Cut dendrite tree."""
@@ -129,9 +130,6 @@ class CutDendriteTree(CutByType):
         types = get_types(types)
         super().__init__(type=types.basal_dendrite) # TODO: apical dendrite
 
-    def __repr__(self) -> str:
-        return "CutDenriteTree"
-
 
 class CutByBifurcationOrder(Transform[Tree, Tree]):
     """Cut tree by bifurcation order."""
@@ -177,9 +175,6 @@ class CutShortTipBranch(Transform[Tree, Tree]):
         if callback is not None:
             self.callbacks.append(callback)
 
-    def __repr__(self) -> str:
-        return f"CutShortTipBranch-{self.thre}"
-
     def __call__(self, x: Tree) -> Tree:
         removals: List[int] = []
         self.callbacks.append(lambda br: removals.append(br[1].id))
@@ -187,6 +182,9 @@ class CutShortTipBranch(Transform[Tree, Tree]):
         self.callbacks.pop()
         return to_subtree(x, removals)
 
+    def extra_repr(self):
+        return f"threshold={self.thre}"
+
     def _leave(
         self, n: Tree.Node, children: List[Tuple[float, Tree.Node] | None]
     ) -> Tuple[float, Tree.Node] | None:
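These hunks drop per-class `__repr__` overrides in favour of `extra_repr`, which fits the reworked base class in this release (`swcgeom/transforms/base.py +41 -21`). Below is a minimal sketch of that pattern, not the actual swcgeom base-class code; all names and the default threshold are illustrative:

    class TransformSketch:
        # Stand-in for the real Transform base: __repr__ composes the class
        # name with whatever the subclass reports via extra_repr().
        def extra_repr(self) -> str:
            return ""

        def __repr__(self) -> str:
            return f"{type(self).__name__}({self.extra_repr()})"


    class CutShortTipBranchSketch(TransformSketch):
        def __init__(self, thre: float = 5.0):
            self.thre = thre

        def extra_repr(self) -> str:
            return f"threshold={self.thre}"


    print(repr(CutShortTipBranchSketch()))  # CutShortTipBranchSketch(threshold=5.0)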