swcgeom-0.19.4-cp313-cp313-win_amd64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of swcgeom might be problematic.

Files changed (72)
  1. swcgeom/__init__.py +21 -0
  2. swcgeom/analysis/__init__.py +13 -0
  3. swcgeom/analysis/feature_extractor.py +454 -0
  4. swcgeom/analysis/features.py +218 -0
  5. swcgeom/analysis/lmeasure.py +750 -0
  6. swcgeom/analysis/sholl.py +201 -0
  7. swcgeom/analysis/trunk.py +183 -0
  8. swcgeom/analysis/visualization.py +191 -0
  9. swcgeom/analysis/visualization3d.py +81 -0
  10. swcgeom/analysis/volume.py +143 -0
  11. swcgeom/core/__init__.py +19 -0
  12. swcgeom/core/branch.py +129 -0
  13. swcgeom/core/branch_tree.py +65 -0
  14. swcgeom/core/compartment.py +107 -0
  15. swcgeom/core/node.py +130 -0
  16. swcgeom/core/path.py +155 -0
  17. swcgeom/core/population.py +341 -0
  18. swcgeom/core/swc.py +247 -0
  19. swcgeom/core/swc_utils/__init__.py +19 -0
  20. swcgeom/core/swc_utils/assembler.py +35 -0
  21. swcgeom/core/swc_utils/base.py +180 -0
  22. swcgeom/core/swc_utils/checker.py +107 -0
  23. swcgeom/core/swc_utils/io.py +204 -0
  24. swcgeom/core/swc_utils/normalizer.py +163 -0
  25. swcgeom/core/swc_utils/subtree.py +70 -0
  26. swcgeom/core/tree.py +384 -0
  27. swcgeom/core/tree_utils.py +277 -0
  28. swcgeom/core/tree_utils_impl.py +58 -0
  29. swcgeom/images/__init__.py +9 -0
  30. swcgeom/images/augmentation.py +149 -0
  31. swcgeom/images/contrast.py +87 -0
  32. swcgeom/images/folder.py +217 -0
  33. swcgeom/images/io.py +578 -0
  34. swcgeom/images/loaders/__init__.py +8 -0
  35. swcgeom/images/loaders/pbd.cp313-win_amd64.pyd +0 -0
  36. swcgeom/images/loaders/pbd.pyx +523 -0
  37. swcgeom/images/loaders/raw.cp313-win_amd64.pyd +0 -0
  38. swcgeom/images/loaders/raw.pyx +183 -0
  39. swcgeom/transforms/__init__.py +20 -0
  40. swcgeom/transforms/base.py +136 -0
  41. swcgeom/transforms/branch.py +223 -0
  42. swcgeom/transforms/branch_tree.py +74 -0
  43. swcgeom/transforms/geometry.py +270 -0
  44. swcgeom/transforms/image_preprocess.py +107 -0
  45. swcgeom/transforms/image_stack.py +219 -0
  46. swcgeom/transforms/images.py +206 -0
  47. swcgeom/transforms/mst.py +183 -0
  48. swcgeom/transforms/neurolucida_asc.py +498 -0
  49. swcgeom/transforms/path.py +56 -0
  50. swcgeom/transforms/population.py +36 -0
  51. swcgeom/transforms/tree.py +265 -0
  52. swcgeom/transforms/tree_assembler.py +161 -0
  53. swcgeom/utils/__init__.py +18 -0
  54. swcgeom/utils/debug.py +23 -0
  55. swcgeom/utils/download.py +119 -0
  56. swcgeom/utils/dsu.py +58 -0
  57. swcgeom/utils/ellipse.py +131 -0
  58. swcgeom/utils/file.py +90 -0
  59. swcgeom/utils/neuromorpho.py +581 -0
  60. swcgeom/utils/numpy_helper.py +70 -0
  61. swcgeom/utils/plotter_2d.py +134 -0
  62. swcgeom/utils/plotter_3d.py +35 -0
  63. swcgeom/utils/renderer.py +145 -0
  64. swcgeom/utils/sdf.py +324 -0
  65. swcgeom/utils/solid_geometry.py +154 -0
  66. swcgeom/utils/transforms.py +367 -0
  67. swcgeom/utils/volumetric_object.py +483 -0
  68. swcgeom-0.19.4.dist-info/METADATA +86 -0
  69. swcgeom-0.19.4.dist-info/RECORD +72 -0
  70. swcgeom-0.19.4.dist-info/WHEEL +5 -0
  71. swcgeom-0.19.4.dist-info/licenses/LICENSE +201 -0
  72. swcgeom-0.19.4.dist-info/top_level.txt +1 -0
swcgeom/core/swc_utils/io.py
@@ -0,0 +1,204 @@
+
+# SPDX-FileCopyrightText: 2022 - 2025 Zexin Yuan <pypi@yzx9.xyz>
+#
+# SPDX-License-Identifier: Apache-2.0
+
+"""Read and write the SWC format."""
+
+import re
+import warnings
+from collections.abc import Callable, Iterable
+from typing import Literal
+
+import numpy as np
+import numpy.typing as npt
+import pandas as pd
+
+from swcgeom.core.swc_utils.base import SWCNames, get_names
+from swcgeom.core.swc_utils.checker import is_single_root
+from swcgeom.core.swc_utils.normalizer import (
+    link_roots_to_nearest_,
+    mark_roots_as_somas_,
+    reset_index_,
+    sort_nodes_,
+)
+from swcgeom.utils import FileReader, PathOrIO
+
+__all__ = ["read_swc", "to_swc"]
+
+
+def read_swc(
+    swc_file: PathOrIO,
+    extra_cols: Iterable[str] | None = None,
+    fix_roots: Literal["somas", "nearest", False] = False,
+    sort_nodes: bool = False,
+    reset_index: bool = True,
+    *,
+    encoding: Literal["detect"] | str = "utf-8",
+    names: SWCNames | None = None,
+) -> tuple[pd.DataFrame, list[str]]:
+    """Read an SWC file.
+
+    NOTE: the ids should be consecutively incremented.
+
+    Args:
+        extra_cols: Read additional columns from the SWC file.
+        fix_roots: Fix multiple roots.
+        sort_nodes: Sort the indices of the neuron tree.
+            After sorting the nodes, the index of each parent is always less than
+            those of its children.
+        reset_index: Reset node index to start with zero.
+            DO NOT set to false if you are not sure what will happen.
+        encoding: The name of the encoding used to decode the file.
+            If `detect`, we will try to detect the character encoding.
+
+    Returns:
+        df: ~pandas.DataFrame
+        comments: List of strings
+    """
+    names = get_names(names)
+    df, comments = parse_swc(
+        swc_file, names=names, extra_cols=extra_cols, encoding=encoding
+    )
+
+    # fix swc
+    if fix_roots is not False and np.count_nonzero(df[names.pid] == -1) > 1:
+        match fix_roots:
+            case "somas":
+                mark_roots_as_somas_(df)
+            case "nearest":
+                link_roots_to_nearest_(df)
+            case _:
+                raise ValueError(f"unknown fix type `{fix_roots}`")
+
+    if sort_nodes:
+        sort_nodes_(df)
+    elif reset_index:
+        reset_index_(df)
+
+    # check swc
+    if not is_single_root(df, names=names):
+        warnings.warn(f"not a simple tree in `{swc_file}`")
+
+    if (df[names.pid] == -1).argmax() != 0:
+        warnings.warn(f"root is not the first node in `{swc_file}`")
+
+    if (df[names.r] <= 0).any():
+        warnings.warn(f"non-positive radius in `{swc_file}`")
+
+    return df, comments
+
+
+def to_swc(
+    get_ndata: Callable[[str], npt.NDArray],
+    *,
+    extra_cols: Iterable[str] | None = None,
+    id_offset: int = 1,
+    comments: Iterable[str] | None = None,
+    names: SWCNames | None = None,
+) -> Iterable[str]:
+    """Convert to the SWC format."""
+
+    if comments is not None:
+        for c in comments:
+            if not c.isspace():
+                yield f"# {c.lstrip()}\n"
+            else:
+                yield "#"
+
+    names = get_names(names)
+    cols = names.cols() + (list(extra_cols) if extra_cols is not None else [])
+    yield f"# {' '.join(cols)}\n"
+
+    def get_v(k: str, idx: int) -> str:
+        vs = get_ndata(k)
+        v = vs[idx]
+        if np.issubdtype(vs.dtype, np.floating):
+            return f"{v:.4f}"
+
+        if k == names.id or (k == names.pid and v != -1):
+            v += id_offset
+
+        return str(v)
+
+    for idx in get_ndata(names.id):
+        yield " ".join(get_v(k, idx) for k in cols) + "\n"
+
+
+RE_COMMENT = re.compile(r"^\s*#")
+RE_FLOAT = r"([+-]?(?:\d+(?:[.]\d*)?(?:[eE][+-]?\d+)?|[.]\d+(?:[eE][+-]?\d+)?))"
+
+
+def parse_swc(
+    fname: PathOrIO,
+    *,
+    names: SWCNames,
+    extra_cols: Iterable[str] | None = None,
+    encoding: Literal["detect"] | str = "utf-8",
+) -> tuple[pd.DataFrame, list[str]]:
+    """Parse an SWC file.
+
+    Args:
+        encoding: The name of the encoding used to decode the file.
+            If `detect`, we will try to detect the character encoding.
+
+    Returns:
+        df: ~pandas.DataFrame
+        comments: List of strings
+    """
+    # pylint: disable=too-many-locals
+    extras = list(extra_cols) if extra_cols else []
+
+    keys = names.cols() + extras
+    vals = [[] for _ in keys]
+    transforms = [int, int, float, float, float, float, int] + [float for _ in extras]
+
+    re_swc_cols = [
+        r"([0-9]+)",  # id
+        r"([0-9]+)",  # type
+        RE_FLOAT,  # x
+        RE_FLOAT,  # y
+        RE_FLOAT,  # z
+        RE_FLOAT,  # r
+        r"(-?[0-9]+)",  # pid
+    ] + [
+        RE_FLOAT
+        for _ in extras  # assert float
+    ]
+
+    re_swc_cols_str = r"\s+".join(re_swc_cols)
+    # Leading spaces are allowed, as this is part of the data in
+    # neuromorpho.org. More fields at the end are allowed, such as
+    # reading eswc as swc, but with a warning.
+    re_swc = re.compile(rf"^\s*{re_swc_cols_str}\s*([\s+-.0-9]*)$")
+
+    last_group = 7 + len(extras) + 1
+    ignored_comment = f"# {' '.join(names.cols())}"
+    flag = True
+
+    comments = []
+    with FileReader(fname, encoding=encoding) as f:
+        try:
+            for i, line in enumerate(f):
+                if (match := re_swc.search(line)) is not None:
+                    if flag and match.group(last_group):
+                        warnings.warn(
+                            f"some fields are ignored in row {i + 1} of `{fname}`"
+                        )
+                        flag = False
+
+                    for i, trans in enumerate(transforms):
+                        vals[i].append(trans(match.group(i + 1)))
+                elif match := RE_COMMENT.match(line):
+                    comment = line[len(match.group(0)) :].removesuffix("\n")
+                    if not comment.startswith(ignored_comment):
+                        comments.append(comment)
+                elif not line.isspace():
+                    raise ValueError(f"invalid row {i + 1} in `{fname}`")
+        except UnicodeDecodeError as e:
+            raise ValueError(
+                "decode failed, try to enable auto detect `encoding='detect'`"
+            ) from e
+
+    df = pd.DataFrame.from_dict(dict(zip(keys, vals)))
+    return df, comments
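
A minimal usage sketch of the `read_swc`/`to_swc` pair defined above. The file names and the round-trip workflow are illustrative assumptions, not part of the package's documentation; only the signatures shown in the diff are relied on.

from swcgeom.core.swc_utils.io import read_swc, to_swc

# Read an SWC file (hypothetical path); re-link extra roots to their
# nearest nodes and reset ids so they start at zero.
df, comments = read_swc("neuron.swc", fix_roots="nearest", reset_index=True)
print(df.columns.tolist())  # default column names: id, type, x, y, z, r, pid

# to_swc is a generator of text lines; it expects a callable that returns
# column data by name, so a pandas DataFrame can be adapted with a lambda.
with open("roundtrip.swc", "w", encoding="utf-8") as f:
    f.writelines(to_swc(lambda k: df[k].to_numpy(), comments=comments))
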
swcgeom/core/swc_utils/normalizer.py
@@ -0,0 +1,163 @@
+
+# SPDX-FileCopyrightText: 2022 - 2025 Zexin Yuan <pypi@yzx9.xyz>
+#
+# SPDX-License-Identifier: Apache-2.0
+
+"""SWC format utils.
+
+Methods ending with an underscore imply an in-place transformation.
+"""
+
+from collections.abc import Callable
+from typing import Literal
+
+import numpy as np
+import numpy.typing as npt
+import pandas as pd
+
+from swcgeom.core.swc_utils.base import SWCNames, Topology, get_dsu, get_names
+
+__all__ = [
+    "mark_roots_as_somas",
+    "mark_roots_as_somas_",
+    "link_roots_to_nearest",
+    "link_roots_to_nearest_",
+    "sort_nodes",
+    "sort_nodes_",
+    "sort_nodes_impl",
+    "reset_index",
+    "reset_index_",
+]
+
+
+def mark_roots_as_somas(
+    df: pd.DataFrame,
+    update_type: int | Literal[False] = 1,
+    *,
+    names: SWCNames | None = None,
+) -> pd.DataFrame:
+    return _copy_and_apply(
+        mark_roots_as_somas_, df, update_type=update_type, names=names
+    )
+
+
+def mark_roots_as_somas_(
+    df: pd.DataFrame,
+    update_type: int | Literal[False] = 1,
+    *,
+    names: SWCNames | None = None,
+) -> None:
+    """Merge multiple roots in an SWC table.
+
+    The first root is preserved and the others are linked to it.
+    """
+    names = get_names(names)
+    roots = df[names.pid] == -1
+    root_loc = roots.argmax()
+    root_id = df.loc[root_loc, names.id]  # type:ignore
+    df[names.pid] = np.where(df[names.pid] != -1, df[names.pid], root_id)
+    if update_type is not False:
+        df[names.type] = np.where(df[names.pid] != -1, df[names.type], update_type)
+    df.loc[root_loc, names.pid] = -1  # type:ignore
+
+
+def link_roots_to_nearest(
+    df: pd.DataFrame, *, names: SWCNames | None = None
+) -> pd.DataFrame:
+    return _copy_and_apply(link_roots_to_nearest_, df, names=names)
+
+
+def link_roots_to_nearest_(df: pd.DataFrame, *, names: SWCNames | None = None) -> None:
+    """Merge multiple roots in an SWC table.
+
+    The first root is preserved, and each remaining root is linked to its
+    nearest node in another tree.
+    """
+    names = get_names(names)
+    dsu = get_dsu(df)
+    roots = df[df[names.pid] == -1].iterrows()
+    next(roots)  # type: ignore # skip the first one
+    for i, row in roots:
+        vs = df[[names.x, names.y, names.z]] - row[[names.x, names.y, names.z]]
+        dis = np.linalg.norm(vs.to_numpy(), axis=1)
+        subtree = dsu == dsu[i]  # type: ignore
+        dis = np.where(subtree, np.inf, dis)  # avoid linking to the same tree
+        dsu = np.where(subtree, dsu[dis.argmin()], dsu)  # merge set
+        df.loc[i, names.pid] = df[names.id].iloc[dis.argmin()]
+
+
+def sort_nodes(df: pd.DataFrame, *, names: SWCNames | None = None) -> pd.DataFrame:
+    """Sort the indices of the neuron tree.
+
+    The index of a parent is always less than those of its children.
+
+    See Also:
+        ~.core.swc_utils.checker.is_sorted
+    """
+    return _copy_and_apply(sort_nodes_, df, names=names)
+
+
+def sort_nodes_(df: pd.DataFrame, *, names: SWCNames | None = None) -> None:
+    """Sort the indices of the neuron tree.
+
+    The index of a parent is always less than those of its children.
+
+    See Also:
+        ~.core.swc_utils.checker.is_sorted
+    """
+    names = get_names(names)
+    ids, pids = df[names.id].to_numpy(), df[names.pid].to_numpy()
+    (new_ids, new_pids), indices = sort_nodes_impl((ids, pids))
+    for col in df.columns:
+        df[col] = df[col][indices].to_numpy()
+
+    df[names.id], df[names.pid] = new_ids, new_pids
+
+
+def sort_nodes_impl(topology: Topology) -> tuple[Topology, npt.NDArray[np.int32]]:
+    """Sort the indices of the neuron tree.
+
+    Returns:
+        new_topology: Topology
+        id_map: Map from new id to original id.
+    """
+    old_ids, old_pids = topology
+    assert np.count_nonzero(old_pids == -1) == 1, "should be single root"
+
+    id_map = np.full_like(old_ids, fill_value=-3)  # new_id to old_id
+    new_pids = np.full_like(old_ids, fill_value=-3)
+    new_id = 0
+    first_root = old_ids[(old_pids == -1).argmax()]
+    s: list[tuple[npt.NDArray[np.int32], int]] = [(first_root, -1)]
+    while len(s) != 0:
+        old_id, new_pid = s.pop()
+        id_map[new_id] = old_id
+        new_pids[new_id] = new_pid
+        s.extend((j, new_id) for j in old_ids[old_pids == old_id])  # (old_id, new_pid)
+        new_id = new_id + 1
+
+    id2idx = dict(zip(old_ids, range(len(old_ids))))  # old_id to old_idx
+    indices = np.array([id2idx[i] for i in id_map], dtype=np.int32)  # new_id to old_idx
+    new_ids = np.arange(len(new_pids))
+    return (new_ids, new_pids), indices
+
+
+def reset_index(df: pd.DataFrame, *, names: SWCNames | None = None) -> pd.DataFrame:
+    """Reset node index to start with zero."""
+    return _copy_and_apply(reset_index_, df, names=names)
+
+
+def reset_index_(df: pd.DataFrame, *, names: SWCNames | None = None) -> None:
+    """Reset node index to start with zero."""
+    names = get_names(names)
+    roots = df[names.pid] == -1
+    root_loc = roots.argmax()
+    root_id = df.loc[root_loc, names.id]  # type:ignore
+    df[names.id] = df[names.id] - root_id
+    df[names.pid] = df[names.pid] - root_id
+    df.loc[root_loc, names.pid] = -1  # type:ignore
+
+
+def _copy_and_apply(fn: Callable, df: pd.DataFrame, *args, **kwargs):
+    df = df.copy()
+    fn(df, *args, **kwargs)
+    return df
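
The copy/in-place pairing above (e.g. `link_roots_to_nearest` vs `link_roots_to_nearest_`) can be exercised on a toy table. A minimal sketch, assuming the default `SWCNames` columns are `id`, `type`, `x`, `y`, `z`, `r` and `pid`; the coordinates are made up for illustration.

import pandas as pd

from swcgeom.core.swc_utils.normalizer import link_roots_to_nearest, sort_nodes

# Two disconnected roots: node 0 and node 2.
df = pd.DataFrame({
    "id": [0, 1, 2, 3],
    "type": [1, 3, 1, 3],
    "x": [0.0, 1.0, 5.0, 6.0],
    "y": [0.0, 0.0, 0.0, 0.0],
    "z": [0.0, 0.0, 0.0, 0.0],
    "r": [1.0, 0.5, 1.0, 0.5],
    "pid": [-1, 0, -1, 2],
})

# The copy variant returns a new DataFrame; the trailing-underscore
# variant would modify `df` in place instead.
fixed = link_roots_to_nearest(df)
assert (fixed["pid"] == -1).sum() == 1  # node 2 is now attached to node 1

# Reorder so every parent index precedes its children.
sorted_df = sort_nodes(fixed)
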
swcgeom/core/swc_utils/subtree.py
@@ -0,0 +1,70 @@
+
+# SPDX-FileCopyrightText: 2022 - 2025 Zexin Yuan <pypi@yzx9.xyz>
+#
+# SPDX-License-Identifier: Apache-2.0
+
+"""Cut subtrees.
+
+This module provides a series of low-level topological subtree methods, but in most
+cases you can use the high-level methods provided in `tree_utils`, which wrap the
+methods in this module and provide a high-level API.
+"""
+
+from typing import cast
+
+import numpy as np
+import numpy.typing as npt
+
+from swcgeom.core.swc_utils.base import Topology, traverse
+
+__all__ = ["REMOVAL", "to_sub_topology", "propagate_removal"]
+
+REMOVAL = -2  # marker placed in the ids to flag a node for removal
+
+
+def to_sub_topology(sub: Topology) -> tuple[Topology, npt.NDArray[np.int32]]:
+    """Create a subtree from the origin tree.
+
+    Mark the nodes to be removed, then use this method to get the child structure.
+
+    Returns:
+        sub_topology: Topology
+        mapping: Map from new id to old id.
+
+    See Also:
+        propagate_removal:
+            If a removed node is not a leaf node, you need to use it
+            to mark all of its child nodes as well.
+    """
+    sub_id = np.array(sub[0], dtype=np.int32)
+    sub_pid = np.array(sub[1], dtype=np.int32)
+
+    # remove nodes
+    keeped_id = cast(npt.NDArray[np.bool_], sub_id != REMOVAL)
+    sub_id, sub_pid = sub_id[keeped_id], sub_pid[keeped_id]
+
+    old2new = {idx: i for i, idx in enumerate(sub_id)}  # old idx to new id
+    new_id = np.arange(0, sub_id.shape[0], dtype=np.int32)
+    new_pid = np.array([old2new[i] if i != -1 else -1 for i in sub_pid], dtype=np.int32)
+
+    return (new_id, new_pid), sub_id
+
+
+def propagate_removal(topology: Topology) -> Topology:
+    """Mark all children when a parent is marked as removed.
+
+    Returns:
+        new_topology: Topology
+    """
+
+    new_ids, pids = topology
+    ids = np.arange(0, pids.shape[0])
+
+    def propagate(n: int, parent: bool | None) -> bool:
+        if remove := bool(parent) or (new_ids[n] == REMOVAL):
+            new_ids[n] = REMOVAL
+
+        return remove
+
+    traverse((ids, pids), enter=propagate)
+    return (new_ids, pids.copy())
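
These two helpers compose as follows; a minimal sketch in which the five-node topology is made up for illustration and `REMOVAL` is the sentinel defined at the top of this module.

import numpy as np

from swcgeom.core.swc_utils.subtree import REMOVAL, propagate_removal, to_sub_topology

# Tree 0 -> 1 -> {2 -> 3, 4}; node 2 is marked for removal by writing
# the REMOVAL sentinel into its slot of the id array.
ids = np.array([0, 1, REMOVAL, 3, 4], dtype=np.int32)
pids = np.array([-1, 0, 1, 2, 1], dtype=np.int32)

# Node 2 is not a leaf, so its descendants must be marked as well.
marked = propagate_removal((ids, pids))

# Compact the remaining nodes and keep the mapping back to the old ids.
(new_ids, new_pids), old_ids = to_sub_topology(marked)
print(new_ids.tolist(), new_pids.tolist(), old_ids.tolist())
# [0, 1, 2] [-1, 0, 1] [0, 1, 4]
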