swcgeom 0.18.3__py3-none-any.whl → 0.19.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of swcgeom might be problematic.
- swcgeom/analysis/feature_extractor.py +22 -24
- swcgeom/analysis/features.py +18 -40
- swcgeom/analysis/lmeasure.py +227 -323
- swcgeom/analysis/sholl.py +17 -23
- swcgeom/analysis/trunk.py +23 -28
- swcgeom/analysis/visualization.py +37 -44
- swcgeom/analysis/visualization3d.py +16 -25
- swcgeom/analysis/volume.py +33 -47
- swcgeom/core/__init__.py +1 -6
- swcgeom/core/branch.py +10 -17
- swcgeom/core/branch_tree.py +3 -2
- swcgeom/core/compartment.py +1 -1
- swcgeom/core/node.py +3 -6
- swcgeom/core/path.py +11 -16
- swcgeom/core/population.py +32 -51
- swcgeom/core/swc.py +25 -16
- swcgeom/core/swc_utils/__init__.py +4 -6
- swcgeom/core/swc_utils/assembler.py +5 -12
- swcgeom/core/swc_utils/base.py +40 -31
- swcgeom/core/swc_utils/checker.py +3 -8
- swcgeom/core/swc_utils/io.py +32 -47
- swcgeom/core/swc_utils/normalizer.py +17 -23
- swcgeom/core/swc_utils/subtree.py +13 -20
- swcgeom/core/tree.py +61 -51
- swcgeom/core/tree_utils.py +36 -49
- swcgeom/core/tree_utils_impl.py +4 -6
- swcgeom/images/augmentation.py +23 -39
- swcgeom/images/contrast.py +22 -46
- swcgeom/images/folder.py +32 -34
- swcgeom/images/io.py +108 -126
- swcgeom/transforms/base.py +28 -19
- swcgeom/transforms/branch.py +31 -41
- swcgeom/transforms/branch_tree.py +3 -1
- swcgeom/transforms/geometry.py +13 -4
- swcgeom/transforms/image_preprocess.py +2 -0
- swcgeom/transforms/image_stack.py +40 -35
- swcgeom/transforms/images.py +31 -24
- swcgeom/transforms/mst.py +27 -40
- swcgeom/transforms/neurolucida_asc.py +13 -13
- swcgeom/transforms/path.py +4 -0
- swcgeom/transforms/population.py +4 -0
- swcgeom/transforms/tree.py +16 -11
- swcgeom/transforms/tree_assembler.py +37 -54
- swcgeom/utils/download.py +7 -14
- swcgeom/utils/dsu.py +12 -0
- swcgeom/utils/ellipse.py +26 -14
- swcgeom/utils/file.py +8 -13
- swcgeom/utils/neuromorpho.py +78 -92
- swcgeom/utils/numpy_helper.py +15 -12
- swcgeom/utils/plotter_2d.py +10 -16
- swcgeom/utils/plotter_3d.py +7 -9
- swcgeom/utils/renderer.py +16 -8
- swcgeom/utils/sdf.py +12 -23
- swcgeom/utils/solid_geometry.py +58 -2
- swcgeom/utils/transforms.py +164 -100
- swcgeom/utils/volumetric_object.py +29 -53
- {swcgeom-0.18.3.dist-info → swcgeom-0.19.1.dist-info}/METADATA +6 -5
- swcgeom-0.19.1.dist-info/RECORD +67 -0
- {swcgeom-0.18.3.dist-info → swcgeom-0.19.1.dist-info}/WHEEL +1 -1
- swcgeom-0.18.3.dist-info/RECORD +0 -67
- {swcgeom-0.18.3.dist-info → swcgeom-0.19.1.dist-info/licenses}/LICENSE +0 -0
- {swcgeom-0.18.3.dist-info → swcgeom-0.19.1.dist-info}/top_level.txt +0 -0
@@ -17,10 +17,10 @@

 from collections.abc import Iterable
 from copy import copy
-from typing import Optional

 import numpy as np
 import pandas as pd
+from typing_extensions import override

 from swcgeom.core import Tree
 from swcgeom.core.swc_utils import (

@@ -39,20 +39,18 @@ class LinesToTree(Transform[list[pd.DataFrame], Tree]):

     def __init__(self, *, thre: float = 0.2, undirected: bool = True):
         """
-
-
-
-
-        undirected : bool, default `True`
-            Both ends of a line can be considered connection point. If
-            `False`, only the starting point.
+        Args:
+            thre: Connection threshold.
+            undirected: Both ends of a line can be considered connection point.
+                If `False`, only the starting point.
         """
         super().__init__()
         self.thre = thre
         self.undirected = undirected

+    @override
     def __call__(
-        self, lines: Iterable[pd.DataFrame], *, names:
+        self, lines: Iterable[pd.DataFrame], *, names: SWCNames | None = None
     ): # TODO check this
         return self.assemble(lines, names=names)

@@ -61,30 +59,22 @@ class LinesToTree(Transform[list[pd.DataFrame], Tree]):
         lines: Iterable[pd.DataFrame],
         *,
         undirected: bool = True,
-        names:
+        names: SWCNames | None = None,
     ) -> pd.DataFrame:
         """Assemble lines to a tree.

-        Assemble all the lines into a set of subtrees, and then connect
-
-
-
-
-
-
-
-
-
-
-
-
-        Returns
-        -------
-        tree : ~pd.DataFrame
-
-        See Also
-        --------
-        self.try_assemble
+        Assemble all the lines into a set of subtrees, and then connect them.
+
+        Args:
+            lines: An array of tables containing a line, columns should following the swc.
+            undirected: Forwarding to `self.try_assemble`.
+            names: Forwarding to `self.try_assemble`.
+
+        Returns:
+            tree: ~pd.DataFrame
+
+        See Also:
+            self.try_assemble
         """

         tree, lines = self.try_assemble(

@@ -112,33 +102,25 @@ class LinesToTree(Transform[list[pd.DataFrame], Tree]):
         id_offset: int = 0,
         undirected: bool = True,
         sort_nodes: bool = True,
-        names:
+        names: SWCNames | None = None,
     ) -> tuple[pd.DataFrame, list[pd.DataFrame]]:
         """Trying assemble lines to a tree.

-        Treat the first line as a tree, find a line whose shortest distance
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-        sort_nodes : bool, default `True`
-            sort nodes of subtree.
-        names : SWCNames, optional
-
-        Returns
-        -------
-        tree : ~pandas.DataFrame
-        remaining_lines : List of ~pandas.DataFrame
+        Treat the first line as a tree, find a line whose shortest distance between
+        the tree and the line is less than threshold, merge it into the tree, repeat
+        until there are no line to merge, return tree and the remaining lines.
+
+        Args:
+            lines: An array of tables containing a line, columns should following the swc.
+            id_offset: The offset of the line node id.
+            undirected: Both ends of a line can be considered connection point.
+                If `False`, only the starting point.
+            sort_nodes: sort nodes of subtree.
+            names: SWCNames, optional
+
+        Returns:
+            tree: ~pandas.DataFrame
+            remaining_lines: List of ~pandas.DataFrame
         """

         names = get_names(names)

@@ -185,5 +167,6 @@ class LinesToTree(Transform[list[pd.DataFrame], Tree]):

         return tree, lines

+    @override
     def extra_repr(self) -> str:
         return f"thre={self.thre}, undirected={self.undirected}"
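The reworked docstrings above describe the assembly algorithm: `try_assemble` seeds a tree with the first line and greedily merges any remaining line whose closest point lies within `thre`, while `assemble` repeats this until every line is attached. A minimal usage sketch, assuming the class lives in `swcgeom.transforms.tree_assembler` (the file path is not shown in these hunks) and that each `DataFrame` uses conventional SWC column names (`id`, `type`, `x`, `y`, `z`, `r`, `pid`):

```python
import pandas as pd

# Import path assumed from the surrounding hunks; adjust if the class is re-exported elsewhere.
from swcgeom.transforms.tree_assembler import LinesToTree

# Two hypothetical poly-lines with SWC-style columns; the second starts 0.1 away
# from the end of the first, i.e. within the default threshold thre=0.2.
line_a = pd.DataFrame({
    "id": [1, 2], "type": [3, 3],
    "x": [0.0, 1.0], "y": [0.0, 0.0], "z": [0.0, 0.0],
    "r": [1.0, 1.0], "pid": [-1, 1],
})
line_b = pd.DataFrame({
    "id": [1, 2], "type": [3, 3],
    "x": [1.1, 2.0], "y": [0.0, 0.0], "z": [0.0, 0.0],
    "r": [1.0, 1.0], "pid": [-1, 1],
})

transform = LinesToTree(thre=0.2, undirected=True)
tree_df = transform.assemble([line_a, line_b])             # single merged SWC table
tree_df2, rest = transform.try_assemble([line_a, line_b])  # tree plus still-unmerged lines
```

With `undirected=True` either endpoint of a line may serve as the connection point; with `False`, only its starting node is considered, as the docstring states.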
swcgeom/utils/download.py  CHANGED

@@ -15,9 +15,7 @@

 """Download helpers.

-
------
-All denpendencies need to be installed, try:
+NOTE: All denpendencies need to be installed, try:

 ```sh
 pip install swcgeom[all]

@@ -67,16 +65,11 @@ def clone_index_page(

     E.g: `https://download.brainimagelibrary.org/biccn/zeng/luo/fMOST/cells/`

-
-
-
-
-
-        Directory of dist.
-    override : bool, default `False`
-        Override existing file, skip file if `False`.
-    multiprocess : int, default `4`
-        How many process are available for download.
+    Args:
+        index_url: URL of index page.
+        dist_dir: Directory of dist.
+        override: Override existing file, skip file if `False`.
+        multiprocess: How many process are available for download.
     """
     files = get_urls_in_index_page(index_url)
     logging.info("downloader: search `%s`, found %s files.", index_url, len(files))

@@ -95,7 +88,7 @@ def _clone_index_page(url: str, index_url: str, dist_dir: str, override: bool) -
     dist = os.path.join(dist_dir, filepath)
     if os.path.exists(dist):
         if not override:
-            logging.info("downloader: file `%s` exits,
+            logging.info("downloader: file `%s` exits, skipped.", dist)
             return

         logging.info("downloader: file `%s` exits, deleted.", dist)
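The new `Args` block documents the full `clone_index_page` signature; a hedged usage sketch, reusing the index URL cited in the docstring and the documented defaults (`override=False`, `multiprocess=4`):

```python
from swcgeom.utils.download import clone_index_page  # defined in this module per the hunk header

# Mirror every file linked from the index page into ./fmost_cells, skipping files
# that already exist (override=False) and downloading with 4 worker processes.
clone_index_page(
    "https://download.brainimagelibrary.org/biccn/zeng/luo/fMOST/cells/",
    "./fmost_cells",  # dist_dir: local destination (placeholder path)
    override=False,
    multiprocess=4,
)
```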
swcgeom/utils/dsu.py  CHANGED

@@ -22,6 +22,18 @@ class DisjointSetUnion:
     """Disjoint Set Union.

     DSU with path compression and union by rank.
+
+    >>> dsu = DisjointSetUnion(3)
+    >>> dsu.is_same_set(0, 1)
+    False
+    >>> dsu.union_sets(0, 1)
+    >>> dsu.is_same_set(0, 1)
+    True
+    >>> dsu.is_same_set(0, 2)
+    False
+    >>> dsu.union_sets(1, 2)
+    >>> dsu.is_same_set(0, 2)
+    True
     """

     def __init__(self, node_number: int):
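The added doctest only exercises the public API. For readers unfamiliar with the structure, here is a generic disjoint set union with path compression and union by rank, a sketch of the technique named in the class docstring rather than swcgeom's actual implementation:

```python
class DSU:
    """Generic disjoint set union: path compression (path halving) and union by rank."""

    def __init__(self, n: int):
        self.parent = list(range(n))  # each node starts as its own root
        self.rank = [0] * n           # upper bound on tree height

    def find(self, x: int) -> int:
        # Path halving: re-point each visited node at its grandparent,
        # which keeps the trees shallow over repeated queries.
        while self.parent[x] != x:
            self.parent[x] = self.parent[self.parent[x]]
            x = self.parent[x]
        return x

    def is_same_set(self, a: int, b: int) -> bool:
        return self.find(a) == self.find(b)

    def union_sets(self, a: int, b: int) -> None:
        ra, rb = self.find(a), self.find(b)
        if ra == rb:
            return
        # Union by rank: attach the shallower tree under the deeper one.
        if self.rank[ra] < self.rank[rb]:
            ra, rb = rb, ra
        self.parent[rb] = ra
        if self.rank[ra] == self.rank[rb]:
            self.rank[ra] += 1
```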
swcgeom/utils/ellipse.py  CHANGED

@@ -85,29 +85,41 @@ class Ellipse:


 def mvee(points: npt.NDArray[np.floating], tol: float = 1e-3) -> Ellipse:
-
-    return Ellipse(A, centroid)
+    """Finds the Minimum Volume Enclosing Ellipsoid.

+    >>> # Create a set of 2D points
+    >>> points = np.array([[0, 0], [1, 0], [0, 1], [1, 1]], dtype=np.float64)
+    >>> ellipse = mvee(points)

-
-
-
-    """Finds the Minimum Volume Enclosing Ellipsoid.
+    >>> # Check centroid is at center of points
+    >>> np.allclose(ellipse.centroid, [0.5, 0.5])
+    True

-
-
-
-
-    centroid : array of shape (d,)
-        The center coordinates of the ellipse.
+    >>> # Check ellipse properties
+    >>> rx, ry = ellipse.radii
+    >>> np.allclose([rx, ry], [np.sqrt(2) / 2, np.sqrt(2) / 2], rtol=1e-5)
+    True

-    Reference
-    ---------
+    Reference:
     1. http://stackoverflow.com/questions/14016898/port-matlab-bounding-ellipsoid-code-to-python
     2. http://stackoverflow.com/questions/1768197/bounding-ellipse/1768440#1768440
     3. https://minillinim.github.io/GroopM/dev_docs/groopm.ellipsoid-pysrc.html
+
+    Args:
+        points: Array of shape (N, d) where N is number of points and d is dimension
+        tol: Tolerance for convergence
+
+    Returns:
+        Ellipse: An Ellipse object containing the minimum volume enclosing ellipse
     """

+    A, centroid = _mvee(points, tol=tol)
+    return Ellipse(A, centroid)
+
+
+def _mvee(
+    points: npt.NDArray[np.floating], tol: float = 1e-3
+) -> tuple[npt.NDArray[np.float64], npt.NDArray[np.float64]]:
     N, d = points.shape
     Q = np.column_stack((points, np.ones(N))).T
     err = tol + 1.0
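The new `_mvee` helper, whose first lines are visible above (the lifted matrix `Q` and an `err`/`tol` loop), matches the standard Khachiyan iteration for the minimum volume enclosing ellipsoid. A self-contained sketch of that iteration, under the assumption that this is indeed the algorithm used; it returns the shape matrix `A` and centroid `c` with `(x - c)^T A (x - c) <= 1` for every input point:

```python
import numpy as np


def mvee_khachiyan(points: np.ndarray, tol: float = 1e-3):
    """Khachiyan-style iteration for the minimum volume enclosing ellipsoid (sketch)."""
    N, d = points.shape
    Q = np.column_stack((points, np.ones(N))).T  # (d+1, N) lifted points
    u = np.full(N, 1.0 / N)                      # uniform initial weights
    err = tol + 1.0
    while err > tol:
        X = Q @ np.diag(u) @ Q.T                 # (d+1, d+1) weighted scatter
        # M[i] = q_i^T X^{-1} q_i for every lifted point q_i
        M = np.einsum("ij,ji->i", Q.T, np.linalg.solve(X, Q))
        j = int(np.argmax(M))
        step = (M[j] - d - 1.0) / ((d + 1) * (M[j] - 1.0))
        new_u = (1.0 - step) * u
        new_u[j] += step
        err = float(np.linalg.norm(new_u - u))
        u = new_u
    c = points.T @ u                             # centroid
    A = np.linalg.inv(points.T @ np.diag(u) @ points - np.outer(c, c)) / d
    return A, c


square = np.array([[0, 0], [1, 0], [0, 1], [1, 1]], dtype=np.float64)
A, c = mvee_khachiyan(square)
assert np.allclose(c, [0.5, 0.5], atol=1e-2)
```

For the unit square used in the new doctest this converges to the circumscribed circle of radius sqrt(2)/2 centred at (0.5, 0.5), which is what the `ellipse.radii` check expects.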
swcgeom/utils/file.py  CHANGED

@@ -15,10 +15,7 @@

 """File related utils.

-
------
-If character coding is enabled, all denpendencies need to be installed,
-try:
+NOTE: If character coding is enabled, all denpendencies need to be installed, try:

 ```sh
 pip install swcgeom[all]

@@ -45,15 +42,13 @@ class FileReader:
     ) -> None:
         """Read file.

-
-
-
-
-            The
-
-
-            Used for detect character endocing, raising warning when
-            parsing with low confidence.
+        Args:
+            fname: PathOrIO
+            encoding: The name of the encoding used to decode the file.
+                If is `detect`, we will try to detect the character encoding.
+            low_confidence: The confidence threshold for character encoding detection.
+                Used for detect character endocing, raising warning when parsing with
+                low confidence.
         """
         # TODO: support StringIO
         self.fname, self.fb, self.f = "", None, None
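Only the constructor parameters and, further down in this diff, context-manager usage of `FileReader` are visible. A minimal, hedged sketch of how it is meant to be called (the placeholder path and the body of the `with` block are illustrative, not from the source):

```python
from swcgeom.utils.file import FileReader  # defined in this module per the hunk header

# "neuron.swc" is a placeholder path; encoding="detect" asks FileReader to guess the
# character encoding and warn when detection confidence falls below `low_confidence`.
with FileReader("neuron.swc", encoding="detect") as f:
    ...  # the read API itself is not shown in this diff
```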
swcgeom/utils/neuromorpho.py  CHANGED

@@ -15,10 +15,7 @@

 """NeuroMorpho.org.

-
---------
-
-Metadata:
+Metadata Example:

 ```json
 {

@@ -80,9 +77,7 @@ Metadata:
 }
 ```

-
------
-All denpendencies need to be installed, try:
+NOTE: All denpendencies need to be installed, try:

 ```sh
 pip install swcgeom[all]

@@ -97,7 +92,7 @@ import math
 import os
 import urllib.parse
 from collections.abc import Callable, Iterable
-from typing import Any, Literal
+from typing import Any, Literal

 from tqdm import tqdm

@@ -140,7 +135,7 @@ DOWNLOAD_CONFIGS: dict[RESOURCES, tuple[str, int]] = {
     "log_source": (URL_LOG_SOURCE, 512 * GB),
 }

-# fmt:off
+# fmt: off
 # Test version: 8.5.25 (2023-08-01)
 # No ETAs for future version
 invalid_ids = [

@@ -166,7 +161,7 @@ def neuromorpho_is_valid(metadata: dict[str, Any]) -> bool:


 def neuromorpho_convert_lmdb_to_swc(
-    root: str, dest:
+    root: str, dest: str | None = None, *, verbose: bool = False, **kwargs
 ) -> None:
     nmo = NeuroMorpho(root, verbose=verbose)
     nmo.convert_lmdb_to_swc(dest, **kwargs)

@@ -182,11 +177,9 @@ class NeuroMorpho:
         self, root: str, *, url_base: str = URL_BASE, verbose: bool = False
     ) -> None:
         """
-
-
-
-        verbose : bool, default False
-            Show verbose log.
+        Args:
+            root: str
+            verbose: Show verbose log.
         """

         super().__init__()

@@ -252,34 +245,15 @@ class NeuroMorpho:
     # pylint: disable-next=too-many-locals
     def convert_lmdb_to_swc(
         self,
-        dest:
+        dest: str | None = None,
         *,
-        group_by:
-        where:
+        group_by: str | Callable[[dict[str, Any]], str | None] | None = None,
+        where: Callable[[dict[str, Any]], bool] | None = None,
         encoding: str | None = "utf-8",
     ) -> None:
         r"""Convert lmdb format to SWCs.

-
-        ----------
-        path : str
-        dest : str, optional
-            If None, use `path/swc`.
-        group_by : str | (metadata: dict[str, Any]) -> str | None, optional
-            Group neurons by metadata. If a None is returned then no
-            grouping. If a string is entered, use it as a metadata
-            attribute name for grouping, e.g.: `archive`, `species`.
-        where : (metadata: dict[str, Any]) -> bool, optional
-            Filter neurons by metadata.
-        encoding : str | None, default to `utf-8`
-            Change swc encoding, part of the original data is not utf-8
-            encoded. If is None, keep the original encoding format.
-        verbose : bool, default False
-            Print verbose info.
-
-        Notes
-        -----
-        We are asserting the following folder.
+        NOTE: We are asserting the following folder.

         ```text
         |- root

@@ -289,10 +263,23 @@ class NeuroMorpho:
         | | |- groups # output of groups if grouped
         ```

-
-
-
-
+        Args:
+            path: str
+            dest: If None, use `path/swc`.
+            group_by: Group neurons by metadata.
+                If None, no grouping. If a string is entered, use it as a metadata
+                attribute name for grouping, e.g.: `archive`, `species`. If a callable
+                is entered, use it as a function `(metadata: dict[str, Any]) -> str | None\
+                to get the group name.
+            where: Filter neurons by metadata.
+                (metadata: dict[str, Any]) -> bool
+            encoding: Change swc encoding, part of the original data is not utf-8 encoded.
+                If is None, keep the original encoding format.default to `utf-8`
+            verbose: Print verbose info.
+
+        See Also:
+            neuromorpho_is_valid:
+                Recommended filter function, try `where=neuromorpho_is_valid`
         """

         import lmdb

@@ -302,9 +289,19 @@ class NeuroMorpho:
         where = where or (lambda _: True)
         if isinstance(group_by, str):
             key = group_by
-
+
+            def group_by_key(v):
+                return v[key]
+
+            group_by = group_by_key
+
         elif group_by is None:
-
+
+            def no_group(v):
+                return None
+
+            group_by = no_group
+
         items = []
         for k, v in tx_m.cursor():
             metadata = json.loads(v)

@@ -336,9 +333,9 @@ class NeuroMorpho:

             if encoding is None:
                 with open(fs, "wb") as f:
-                    f.write(bs)
+                    f.write(bs)
             else:
-                bs = io.BytesIO(bs)
+                bs = io.BytesIO(bs)
                 with (
                     open(fs, "w", encoding=encoding) as fw,
                     FileReader(bs, encoding="detect") as fr,

@@ -355,27 +352,20 @@ class NeuroMorpho:
         self,
         path: str,
         *,
-        pages:
+        pages: Iterable[int] | None = None,
         page_size: int = API_PAGE_SIZE_MAX,
         **kwargs,
     ) -> list[int]:
         r"""Download all neuron metadata.

-
-
-
-
-
-
-
-
-        **kwargs :
-            Forwarding to `get`.
-
-        Returns
-        -------
-        err_pages : List of int
-            Failed pages.
+        Args:
+            path: Path to save data.
+            pages: If is None, download all pages.
+            verbose: Show verbose log.
+            **kwargs: Forwarding to `get`.
+
+        Returns:
+            err_pages: Failed pages.
         """

         # TODO: how to cache between versions?

@@ -410,32 +400,24 @@ class NeuroMorpho:
         path: str,
         path_metadata: str,
         *,
-        keys:
+        keys: Iterable[bytes] | None = None,
         override: bool = False,
         map_size: int = 512 * GB,
         **kwargs,
     ) -> list[bytes]:
         """Download files.

-
-
-
-
-
-
-
-
-
-
-
-        map_size : int, default 512GB
-        **kwargs :
-            Forwarding to `get`.
-
-        Returns
-        -------
-        err_keys : List of str
-            Failed keys.
+        Args:
+            url: URL of file.
+            path: Path to save data.
+            path_metadata: Path to lmdb of metadata.
+            keys: If exist, ignore `override` option. If None, download all key.
+            override: Override even exists, default to False
+            map_size: int, default 512GB
+            **kwargs: Forwarding to `get`.
+
+        Returns:
+            err_keys: Failed keys.
         """

         import lmdb

@@ -445,16 +427,16 @@ class NeuroMorpho:
         if keys is None:
             with env_m.begin() as tx_m:
                 if override:
-                    keys = [k for k,
+                    keys = [k for k, _ in tx_m.cursor()]
                 else:
                     with env_c.begin() as tx:
-                        keys = [k for k,
+                        keys = [k for k, _ in tx_m.cursor() if tx.get(k) is None]

         err_keys = []
         for k in tqdm(keys) if self.verbose else keys:
             try:
                 with env_m.begin() as tx:
-                    metadata = json.loads(tx.get(k).decode("utf-8"))
+                    metadata = json.loads(tx.get(k).decode("utf-8"))

                 swc = self._get_file(url, metadata, **kwargs)
                 with env_c.begin(write=True) as tx:

@@ -485,10 +467,8 @@ class NeuroMorpho:
     def _get_file(self, url: str, metadata: dict[str, Any], **kwargs) -> bytes:
         """Get file.

-        Returns
-
-        bs : bytes
-            Bytes of morphology file, encoding is NOT FIXED.
+        Returns:
+            bs: Bytes of morphology file, encoding is NOT FIXED.
         """

         archive = urllib.parse.quote(metadata["archive"].lower())

@@ -502,7 +482,7 @@ class NeuroMorpho:
         return self._get(url, **kwargs)

     def _get(
-        self, url: str, *, timeout: int = 2 * 60, proxy:
+        self, url: str, *, timeout: int = 2 * 60, proxy: str | None = None
     ) -> bytes:
         if not url.startswith("http://") and not url.startswith("https://"):
             url = urllib.parse.urljoin(self.url_base, url)

@@ -529,9 +509,15 @@ class NeuroMorpho:
         self.ssl_context = ssl_context
         super().__init__(**kwargs)

-    def init_poolmanager(
+    def init_poolmanager(
+        self, connections, maxsize, block=False, **pool_kwargs
+    ):
         super().init_poolmanager(
-            connections,
+            connections,
+            maxsize,
+            block,
+            ssl_context=self.ssl_context,
+            **pool_kwargs,
         )

     def proxy_manager_for(self, proxy, **proxy_kwargs):
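Putting the updated signatures together, a hedged end-to-end sketch of the lmdb-to-SWC conversion described above (the `root` directory is a placeholder and must follow the layout asserted in the `convert_lmdb_to_swc` docstring):

```python
from swcgeom.utils.neuromorpho import (  # names defined in this module per the hunks above
    NeuroMorpho,
    neuromorpho_convert_lmdb_to_swc,
    neuromorpho_is_valid,
)

root = "./neuromorpho"  # placeholder directory holding the lmdb databases

# One-call helper shown in the diff; it wraps NeuroMorpho(root).convert_lmdb_to_swc(...).
neuromorpho_convert_lmdb_to_swc(root, verbose=True, where=neuromorpho_is_valid)

# Equivalent explicit form, grouping output folders by the `species` metadata field.
nmo = NeuroMorpho(root, verbose=True)
nmo.convert_lmdb_to_swc(dest=None, group_by="species", where=neuromorpho_is_valid)
```

As the docstring notes, `neuromorpho_is_valid` is the recommended `where` filter, and `group_by` accepts either a metadata attribute name such as `archive` or `species` or a callable returning the group name.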
swcgeom/utils/numpy_helper.py  CHANGED

@@ -32,18 +32,21 @@ def padding1d(
 ) -> npt.NDArray:
     """Padding x to array of shape (n,).

-
-
-
-
-
-
-
-
-
-
-
-
+    >>> padding1d(5, [1, 2, 3])
+    array([1., 2., 3., 0., 0.], dtype=float32)
+    >>> padding1d(5, [1, 2, 3], padding_value=6)
+    array([1., 2., 3., 6., 6.], dtype=float32)
+    >>> padding1d(5, [1, 2, 3], dtype=np.int64)
+    array([1, 2, 3, 0, 0])
+
+    Args:
+        n: Size of vector.
+        v: Input vector.
+        padding_value: Padding value.
+            If x.shape[0] is less than n, the rest will be filled with padding value.
+        dtype: Data type of array.
+            If specify, cast x to dtype, else dtype of x will used, otherwise defaults
+            to `~numpy.float32`.
     """

     if not isinstance(v, np.ndarray):