pyNIBS 0.2024.8__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pyNIBS-0.2024.8.dist-info/LICENSE +623 -0
- pyNIBS-0.2024.8.dist-info/METADATA +723 -0
- pyNIBS-0.2024.8.dist-info/RECORD +107 -0
- pyNIBS-0.2024.8.dist-info/WHEEL +5 -0
- pyNIBS-0.2024.8.dist-info/top_level.txt +1 -0
- pynibs/__init__.py +34 -0
- pynibs/coil.py +1367 -0
- pynibs/congruence/__init__.py +15 -0
- pynibs/congruence/congruence.py +1108 -0
- pynibs/congruence/ext_metrics.py +257 -0
- pynibs/congruence/stimulation_threshold.py +318 -0
- pynibs/data/configuration_exp0.yaml +59 -0
- pynibs/data/configuration_linear_MEP.yaml +61 -0
- pynibs/data/configuration_linear_RT.yaml +61 -0
- pynibs/data/configuration_sigmoid4.yaml +68 -0
- pynibs/data/network mapping configuration/configuration guide.md +238 -0
- pynibs/data/network mapping configuration/configuration_TEMPLATE.yaml +42 -0
- pynibs/data/network mapping configuration/configuration_for_testing.yaml +43 -0
- pynibs/data/network mapping configuration/configuration_modelTMS.yaml +43 -0
- pynibs/data/network mapping configuration/configuration_reg_isi_05.yaml +43 -0
- pynibs/data/network mapping configuration/output_documentation.md +185 -0
- pynibs/data/network mapping configuration/recommendations_for_accuracy_threshold.md +77 -0
- pynibs/data/neuron/models/L23_PC_cADpyr_biphasic_v1.csv +1281 -0
- pynibs/data/neuron/models/L23_PC_cADpyr_monophasic_v1.csv +1281 -0
- pynibs/data/neuron/models/L4_LBC_biphasic_v1.csv +1281 -0
- pynibs/data/neuron/models/L4_LBC_monophasic_v1.csv +1281 -0
- pynibs/data/neuron/models/L4_NBC_biphasic_v1.csv +1281 -0
- pynibs/data/neuron/models/L4_NBC_monophasic_v1.csv +1281 -0
- pynibs/data/neuron/models/L4_SBC_biphasic_v1.csv +1281 -0
- pynibs/data/neuron/models/L4_SBC_monophasic_v1.csv +1281 -0
- pynibs/data/neuron/models/L5_TTPC2_cADpyr_biphasic_v1.csv +1281 -0
- pynibs/data/neuron/models/L5_TTPC2_cADpyr_monophasic_v1.csv +1281 -0
- pynibs/expio/Mep.py +1518 -0
- pynibs/expio/__init__.py +8 -0
- pynibs/expio/brainsight.py +979 -0
- pynibs/expio/brainvis.py +71 -0
- pynibs/expio/cobot.py +239 -0
- pynibs/expio/exp.py +1876 -0
- pynibs/expio/fit_funs.py +287 -0
- pynibs/expio/localite.py +1987 -0
- pynibs/expio/signal_ced.py +51 -0
- pynibs/expio/visor.py +624 -0
- pynibs/freesurfer.py +502 -0
- pynibs/hdf5_io/__init__.py +10 -0
- pynibs/hdf5_io/hdf5_io.py +1857 -0
- pynibs/hdf5_io/xdmf.py +1542 -0
- pynibs/mesh/__init__.py +3 -0
- pynibs/mesh/mesh_struct.py +1394 -0
- pynibs/mesh/transformations.py +866 -0
- pynibs/mesh/utils.py +1103 -0
- pynibs/models/_TMS.py +211 -0
- pynibs/models/__init__.py +0 -0
- pynibs/muap.py +392 -0
- pynibs/neuron/__init__.py +2 -0
- pynibs/neuron/neuron_regression.py +284 -0
- pynibs/neuron/util.py +58 -0
- pynibs/optimization/__init__.py +5 -0
- pynibs/optimization/multichannel.py +278 -0
- pynibs/optimization/opt_mep.py +152 -0
- pynibs/optimization/optimization.py +1445 -0
- pynibs/optimization/workhorses.py +698 -0
- pynibs/pckg/__init__.py +0 -0
- pynibs/pckg/biosig/biosig4c++-1.9.5.src_fixed.tar.gz +0 -0
- pynibs/pckg/libeep/__init__.py +0 -0
- pynibs/pckg/libeep/pyeep.so +0 -0
- pynibs/regression/__init__.py +11 -0
- pynibs/regression/dual_node_detection.py +2375 -0
- pynibs/regression/regression.py +2984 -0
- pynibs/regression/score_types.py +0 -0
- pynibs/roi/__init__.py +2 -0
- pynibs/roi/roi.py +895 -0
- pynibs/roi/roi_structs.py +1233 -0
- pynibs/subject.py +1009 -0
- pynibs/tensor_scaling.py +144 -0
- pynibs/tests/data/InstrumentMarker20200225163611937.xml +19 -0
- pynibs/tests/data/TriggerMarkers_Coil0_20200225163443682.xml +14 -0
- pynibs/tests/data/TriggerMarkers_Coil1_20200225170337572.xml +6373 -0
- pynibs/tests/data/Xdmf.dtd +89 -0
- pynibs/tests/data/brainsight_niiImage_nifticoord.txt +145 -0
- pynibs/tests/data/brainsight_niiImage_nifticoord_largefile.txt +1434 -0
- pynibs/tests/data/brainsight_niiImage_niifticoord_mixedtargets.txt +47 -0
- pynibs/tests/data/create_subject_testsub.py +332 -0
- pynibs/tests/data/data.hdf5 +0 -0
- pynibs/tests/data/geo.hdf5 +0 -0
- pynibs/tests/test_coil.py +474 -0
- pynibs/tests/test_elements2nodes.py +100 -0
- pynibs/tests/test_hdf5_io/test_xdmf.py +61 -0
- pynibs/tests/test_mesh_transformations.py +123 -0
- pynibs/tests/test_mesh_utils.py +143 -0
- pynibs/tests/test_nnav_imports.py +101 -0
- pynibs/tests/test_quality_measures.py +117 -0
- pynibs/tests/test_regressdata.py +289 -0
- pynibs/tests/test_roi.py +17 -0
- pynibs/tests/test_rotations.py +86 -0
- pynibs/tests/test_subject.py +71 -0
- pynibs/tests/test_util.py +24 -0
- pynibs/tms_pulse.py +34 -0
- pynibs/util/__init__.py +4 -0
- pynibs/util/dosing.py +233 -0
- pynibs/util/quality_measures.py +562 -0
- pynibs/util/rotations.py +340 -0
- pynibs/util/simnibs.py +763 -0
- pynibs/util/util.py +727 -0
- pynibs/visualization/__init__.py +2 -0
- pynibs/visualization/para.py +4372 -0
- pynibs/visualization/plot_2D.py +137 -0
- pynibs/visualization/render_3D.py +347 -0
pynibs/mesh/utils.py
ADDED
|
@@ -0,0 +1,1103 @@
|
|
|
1
|
+
import h5py
|
|
2
|
+
import math
|
|
3
|
+
import warnings
|
|
4
|
+
import numpy as np
|
|
5
|
+
import pandas as pd
|
|
6
|
+
from tqdm import tqdm
|
|
7
|
+
import multiprocessing
|
|
8
|
+
from functools import partial
|
|
9
|
+
from numpy import cross as cycross
|
|
10
|
+
from scipy.spatial import Delaunay
|
|
11
|
+
|
|
12
|
+
import pynibs
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
def calc_tet_volume(points, abs=True):
    """
    Calculate tetrahedra volumes via the homogeneous-coordinate determinant.

    Parameters
    ----------
    points : np.ndarray
        shape: (n_tets, 4, 3)

        .. code-block:: sh

            [[[Ax, Ay, Az],
              [Bx, By, Bz],
              [Cx, Cy, Cz],
              [Dx, Dy, Dy]],
              ...
            ]

        A single tetrahedron may also be passed as shape (4, 3).

    Returns
    -------
    volume : np.ndarray
        shape: ``(n_tets)``

    Other Parameters
    ----------------
    abs : bool, default: true
        Return magnitude instead of the signed (orientation-dependent) volume.
    """
    # promote a single (4, 3) tetrahedron to a batch of one
    if points.ndim == 2:
        points = np.atleast_3d(points).reshape(1, 4, 3)
    if points.ndim != 3:
        raise ValueError(f"Wrong dimensions for points: ({points.shape}). Expected: (n_tets,4,3))")

    # homogeneous coordinates: append a column of ones, then transpose so
    # each 4x4 matrix has rows (x, y, z, 1) per vertex column
    homog = np.zeros((points.shape[0], 4, 4))
    homog[:, :, 3] = 1
    homog[:, :, :3] = points
    homog = homog.swapaxes(1, 2)

    signed_volume = 1 / 6 * np.linalg.det(homog)
    return np.abs(signed_volume) if abs else signed_volume
|
|
62
|
+
|
|
63
|
+
|
|
64
|
+
def calc_tri_surface(points):
    """
    Calculate triangle surface areas using Heron's formula.

    Parameters
    ----------
    points : np.ndarray
        (n_triangles, 3, 3) vertex coordinates per triangle.

    Returns
    -------
    triangle_area : np.ndarray
        (n_triangles,) surface area of each triangle.
    """
    # edge lengths of every triangle
    len_ab = np.linalg.norm(points[:, 0] - points[:, 1], axis=1)
    len_bc = np.linalg.norm(points[:, 1] - points[:, 2], axis=1)
    len_ac = np.linalg.norm(points[:, 0] - points[:, 2], axis=1)

    # semi-perimeter, then Heron's formula
    semi = (len_ab + len_bc + len_ac) / 2
    return (semi * (semi - len_ab) * (semi - len_bc) * (semi - len_ac)) ** 0.5
|
|
82
|
+
|
|
83
|
+
|
|
84
|
+
def get_sphere(mesh=None, mesh_fn=None, target=None, radius=None, roi_idx=None, roi=None, elmtype='tris', domain=None):
    """
    Return element idx of elements within a certain distance to provided target.
    Element indices are 0-based (tris and tets start at 0, 'pynibs' style).
    Elements might be 'tris' (default) or 'tets'.

    If roi object / idx and mesh fn is provided, the roi is expected to have midlayer information and the roi
    geometry is used.

    Parameters
    ----------
    mesh : pynibs.mesh.mesh_struct.TetrahedraLinear, optional
    mesh_fn : str, optional
        Filename to SimNIBS .msh or pyNIBS .hdf5 mesh file.
    target : np.ndarray of float or list of float
        (3,) X, Y, Z coordinates of target.
    radius : float
        Sphere radius im mm.
    roi_idx : str or int, optional
        ROI name.
    roi : optional
        ROI object; must be passed together with ``roi_idx`` (or loaded from ``mesh_fn`` via ``roi_idx``).
    elmtype : str, default: 'tris'
        Return triangles or tetrahedra in sphere around target. One of ('tris', 'tets').
    domain : list of int, optional
        Only used for ``elmtype='tets'``: restrict hits to these tissue regions.

    Returns
    -------
    elms_in_sphere : np.ndarray
        (n_elements): Indices of elements found in ROI
    """
    # sanity-check the parameter combination
    assert target is not None
    assert mesh is not None or mesh_fn is not None

    # normalize elmtype, accepting e.g. 'triangles' / 'tetrahedra'
    lowered = elmtype.lower()
    if lowered.startswith('tri'):
        elmtype = "tris"
    elif lowered.startswith('tet'):
        elmtype = "tets"

    if mesh_fn is not None:
        if mesh is not None:
            raise ValueError("Either provide mesh or mesh_fn")
        if mesh_fn.endswith('.hdf5'):
            mesh = pynibs.load_mesh_hdf5(mesh_fn)
        elif mesh_fn.endswith('.msh'):
            mesh = pynibs.load_mesh_msh(mesh_fn)

    # load ROI from file when only its index is given
    if roi is None and roi_idx is not None:
        if mesh_fn is None:
            raise ValueError("Provide mesh_fn to load roi from.")
        roi = pynibs.load_roi_surface_obj_from_hdf5(mesh_fn)[roi_idx]
    # roi and roi_idx must be provided together (or not at all)
    if roi is None and roi_idx is not None or roi is not None and roi_idx is None:
        raise ValueError("Provide either roi and roi_idx or none of them.")

    if elmtype == 'tris':
        return tris_in_sphere(mesh=mesh, target=target, radius=radius, roi=roi)
    if elmtype == 'tets':
        return tets_in_sphere(mesh=mesh, target=target, radius=radius, roi=roi, domain=domain)
    raise ValueError(f"Unknown elmtype '{elmtype}'")
|
|
140
|
+
|
|
141
|
+
|
|
142
|
+
def tets_in_sphere(mesh, target, radius, roi, domain=None):
    """
    Worker function for get_sphere().

    Returns element idx of tetrahedra within a certain distance to provided target.
    If an roi is provided, it is expected to have midlayer information and the roi geometry is used.
    If radius is None or 0, the nearest element is returned.

    Parameters
    ----------
    mesh : pynibs.TetrahedraLinear, optional
    target : np.ndarray of float, optional
        (3,) X, Y, Z coordinates of target.
    radius : float, optional
        Sphere radius im mm.
    roi : pynibs.mesh.ROI, optional
        Region of interest.
    domain : list of int, optional
        Tissue regions to keep; defaults to [1, 2, 3, 4, 5].

    Returns
    -------
    tets_in_sphere : np.ndarray
        (n_tets): Indices of elements found in ROI
    """
    if roi is None:
        dists = np.linalg.norm(mesh.tetrahedra_center - target, axis=1)

        # no radius -> single closest tetrahedron
        if radius is None or radius == 0:
            return np.where(dists == np.min(dists))[0]

        if domain is None:
            domain = [1, 2, 3, 4, 5]
        candidates = np.where(dists <= radius)[0]
        # keep only tetrahedra belonging to the requested tissue domains
        return np.array([idx for idx in candidates if mesh.tetrahedra_regions[idx] in domain])

    warnings.warn("Sphere 'tets' extraction from ROI untested! Consider extracting 'tris' instead.")
    mid_dists = np.linalg.norm(roi.tri_center_coord_mid - target, axis=1)
    if radius is not None and radius > 0:
        tri_target_idx = np.where(mid_dists <= radius)[0]
    else:
        tri_target_idx = np.where(mid_dists == np.min(mid_dists))[0]
    # map midlayer triangles back to their tetrahedra
    return roi.tet_idx_tri_center_mid[tri_target_idx]
|
|
188
|
+
|
|
189
|
+
|
|
190
|
+
def tris_in_sphere(mesh, target, radius, roi):
    """
    Worker function for get_sphere().

    Returns triangle idx of elements within a certain distance to provided target.
    If an roi is provided, it is expected to have midlayer information and the roi geometry is used.
    If radius is None or 0, the nearest element is returned.

    Parameters
    ----------
    mesh : pynibs.mesh.TetrahedraLinear, optional
    target : np.ndarray of float or list of float
        (3,) X, Y, Z coordinates of target.
    radius : float
        Sphere radius im mm.
    roi : pynibs.mesh.mesh_struct.ROI, optional
        ROI.

    Returns
    -------
    tris_in_sphere : np.ndarray
        (n_triangles): Indices of elements found in sphere
    """
    # choose geometry: full-mesh triangle centers or ROI midlayer centers
    if roi is None:
        centers = mesh.triangles_center
    else:
        centers = roi.tri_center_coord_mid

    dists = np.linalg.norm(centers - target, axis=1)

    if roi is None:
        # mesh path: radius of None/0 means "nearest element only"
        if radius is None or radius == 0:
            return np.where(dists == np.min(dists))[0]
        return np.where(dists <= radius)[0]

    # roi path: any positive radius selects a sphere, otherwise the nearest element
    if radius is not None and radius > 0:
        return np.where(dists <= radius)[0]
    return np.where(dists == np.min(dists))[0]
|
|
229
|
+
|
|
230
|
+
|
|
231
|
+
def sample_sphere(n_points, r):
    """
    Creates n_points evenly spread on a sphere of radius r (Fibonacci / golden-spiral sampling).

    Parameters
    ----------
    n_points : int
        Number of points to be spread, must be odd.
    r : float
        Radius of sphere.

    Returns
    -------
    points : np.ndarray of float
        (N x 3), Evenly spread points on the sphere surface.
    """
    assert n_points % 2 == 1, "The number of points must be odd"

    golden_ratio = (1 + math.sqrt(5)) / 2.
    half = int((n_points - 1) / 2)

    coords = []
    # symmetric index range -half..half keeps the sampling balanced around the equator
    for k in range(-half, half + 1):
        lat = math.asin(2 * k / n_points)
        lon = 2 * math.pi * k / golden_ratio
        coords.append((r * math.cos(lat) * math.cos(lon),
                       r * math.cos(lat) * math.sin(lon),
                       r * math.sin(lat)))

    return np.array(coords, dtype=float)
|
|
266
|
+
|
|
267
|
+
|
|
268
|
+
def get_indices_discontinuous_data(data, con, neighbor=False, deviation_factor=2,
                                   min_val=None, not_fitted_elms=None, crit='median', neigh_style='point'):
    """
    Get element indices (and the best neighbor index), where the data is discontinuous.

    .. note:: ``data`` is modified in place: zero entries are replaced by ``1e-12`` to avoid
              divisions by zero. Pass a copy if the caller still needs the original values.

    Parameters
    ----------
    data : np.ndarray of float [n_data]
        Data array to analyze given in the element center.
    con : np.ndarray of float [n_data, 3 or 4]
        Connectivity matrix.
    neighbor : bool, default: False
        Return also the element index of the "best" neighbor (w.r.t. median of data).
    deviation_factor : float
        Allows data deviation from 1/deviation_factor < data[i]/median < deviation_factor.
    min_val : float, optional
        If given, only return elements which have a neighbor with data higher than min_val.
    not_fitted_elms : np.ndarray, optional
        If given, these elements are skipped and not used as neighbors.
    crit : str, default: 'median'
        Criterion for best neighbor. One of 'median', 'max', 'randmax'.
    neigh_style : str, default: 'point'
        Should neighbors share a 'point' or an 'edge'.

    Returns
    -------
    idx_disc : list of int [n_disc]
        Index list containing the indices of the discontinuous elements.
    idx_neighbor : list of int [n_disc]
        Only if ``neighbor=True``: indices of the "best" neighbors of the discontinuous elements.
    """
    n_ele = con.shape[0]
    idx_disc, idx_neighbor = [], []

    # avoid division by zero in the data_i / median ratios below
    data[data == 0] = 1e-12

    # neighborhood definition: sharing at least one node ('point') or a full edge
    if neigh_style == 'point':
        def get_neigh(m):
            return np.logical_and(0 < m, m < 3)
    elif neigh_style == 'edge':
        def get_neigh(m):
            return m == 2
    else:
        raise NotImplementedError(f"neigh_style {neigh_style} unknown.")

    # criterion closures; they read the per-iteration variables of the loop below
    if crit == 'median':
        def is_neigh():
            # flag elements whose value deviates too much from the neighborhood median
            if not (1 / deviation_factor < data_i / median < deviation_factor):
                neighbor_indices = np.where(mask_neighbor)[0]
                best_neigh = neighbor_indices[(np.abs(data[neighbor_indices] - median)).argmin()]
                if min_val is None or data[best_neigh] > min_val:
                    idx_disc.append(elm_i)
                    idx_neighbor.append(best_neigh)
    elif crit == 'max':
        def is_neigh():
            # flag only elements that are too *low* compared to the neighborhood median
            if data_i / median < 1 / deviation_factor:
                neighbor_indices = np.where(mask_neighbor)[0]
                best_neigh = neighbor_indices[(data[neighbor_indices]).argmax()]
                if min_val is None or data[best_neigh] > min_val:
                    idx_disc.append(elm_i)
                    idx_neighbor.append(best_neigh)
    elif crit == 'randmax':
        def is_neigh():
            # like 'max', but pick a random positive-valued neighbor (non-deterministic)
            if data_i / median < 1 / deviation_factor:
                neighbor_indices = np.where(mask_neighbor)[0]
                best_neigh = np.random.choice(neighbor_indices[(data[neighbor_indices]) > 0], 1)
                if min_val is None or data[best_neigh] > min_val:
                    idx_disc.append(elm_i)
                    idx_neighbor.append(best_neigh)
    else:
        raise NotImplementedError(f"Criterium {crit} unknown. ")

    for elm_i, data_i in zip(range(n_ele), data):
        # BUG FIX: guard against not_fitted_elms=None (previously raised
        # "TypeError: argument of type 'NoneType' is not iterable")
        if not_fitted_elms is not None and elm_i in not_fitted_elms:
            continue

        # find neighbors: count shared nodes between this element and all others
        mask = np.sum(np.isin(con, con[elm_i, :]), axis=1)
        mask_neighbor = get_neigh(mask)

        # best_values are set to 0 for bad elements and unfittable ones. do not use these as neighbors
        if not_fitted_elms is not None and len(not_fitted_elms) != 0:
            mask_neighbor[not_fitted_elms] = False

        # if the element is lonely floating and has no neighbors ... continue
        if not np.sum(mask_neighbor):
            continue

        # check if current value does not fit to neighbors
        median = np.median(data[mask_neighbor])
        if not median:
            median = 1e-12

        is_neigh()

    if neighbor:
        return idx_disc, idx_neighbor
    else:
        return idx_disc
|
|
393
|
+
|
|
394
|
+
|
|
395
|
+
def find_nearest(array, value):
    """
    Given an "array", and given a "value" , returns an index j such that "value" is between array[j]
    and array[j+1]. "array" must be monotonic increasing. j=-1 or j=len(array) is returned
    to indicate that "value" is out of range below and above respectively.

    Parameters
    ----------
    array : np.ndarray of float
        Monotonic increasing array.
    value : float
        Target value the nearest neighbor index in ``array`` is computed for.

    Returns
    -------
    idx : int
        Index j such that "value" is between array[j] and array[j+1].
    """
    n = len(array)

    # out-of-range sentinels
    if value < array[0]:
        return -1
    if value > array[n - 1]:
        return n

    # classic bisection: shrink [lo, hi] until they are adjacent
    lo, hi = 0, n - 1
    while hi - lo > 1:
        mid = (hi + lo) >> 1
        if value >= array[mid]:
            lo = mid
        else:
            hi = mid

    # exact hits on the boundaries
    if value == array[0]:
        return 0
    if value == array[n - 1]:
        return n - 1
    return lo
|
|
434
|
+
|
|
435
|
+
|
|
436
|
+
def in_hull(points, hull):
    """
    Test if points in `points` are in `hull`.

    `points` should be a [N x K] coordinates of N points in K dimensions.
    `hull` is either a scipy.spatial.Delaunay object or the [M x K] array of the
    coordinates of M points in K dimensions for which Delaunay triangulation
    will be computed.

    Parameters
    ----------
    points : np.ndarray
        (N_points x 3) Set of floating point data to test whether they are lying inside the hull or not.
    hull : scipy.spatial.Delaunay or np.ndarray
        (M x K) Surface data.

    Returns
    -------
    inside : np.ndarray of bool
        TRUE: point inside the hull
        FALSE: point outside the hull
    """
    # build the triangulation lazily when raw points were passed
    triangulation = hull if isinstance(hull, Delaunay) else Delaunay(hull)
    # find_simplex returns -1 for points outside every simplex
    return triangulation.find_simplex(points) >= 0
|
|
461
|
+
|
|
462
|
+
|
|
463
|
+
def calc_tetrahedra_volume_cross(P1, P2, P3, P4):
    """
    Calculates volume of tetrahedra specified by the 4 points P1...P4
    multiple tetrahedra can be defined by P1...P4 as 2-D np.ndarrays
    using the cross and vector dot product (scalar triple product).

    .. math::
        P1=\\begin{bmatrix}
        x_{{tet}_1} & y_{{tet}_1} & z_{{tet}_1} \\\\
        x_{{tet}_2} & y_{{tet}_2} & z_{{tet}_2} \\\\
        ... & ... & ... \\\\
        x_{{tet}_N} & y_{{tet}_N} & z_{{tet}_N} \\\\
        \\end{bmatrix}

    Parameters
    ----------
    P1 : np.ndarray of float [N_tet x 3]
        Coordinates of first point of tetrahedra
    P2 : np.ndarray of float [N_tet x 3]
        Coordinates of second point of tetrahedra
    P3 : np.ndarray of float [N_tet x 3]
        Coordinates of third point of tetrahedra
    P4 : np.ndarray of float [N_tet x 3]
        Coordinates of fourth point of tetrahedra

    Returns
    -------
    tetrahedra_volume : np.ndarray of float [N_tet x 1]
        (Signed) volumes of tetrahedra
    """
    # edge vectors emanating from P1
    edge_a = P2 - P1
    edge_b = P3 - P1
    edge_c = P4 - P1

    # V = 1/6 * (a x b) . c  (signed scalar triple product)
    triple = np.sum(np.multiply(cycross(edge_a, edge_b), edge_c), 1)
    return (1.0 / 6 * triple)[:, np.newaxis]
|
|
498
|
+
|
|
499
|
+
|
|
500
|
+
def calc_tetrahedra_volume_det(P1, P2, P3, P4):
    """
    Calculate volume of tetrahedron specified by 4 points P1...P4
    multiple tetrahedra can be defined by P1...P4 as 2-D np.arrays
    using the determinant of homogeneous point coordinates.

    .. math::
        P1=\\begin{bmatrix}
        x_{{tet}_1} & y_{{tet}_1} & z_{{tet}_1} \\\\
        x_{{tet}_2} & y_{{tet}_2} & z_{{tet}_2} \\\\
        ... & ... & ... \\\\
        x_{{tet}_N} & y_{{tet}_N} & z_{{tet}_N} \\\\
        \\end{bmatrix}

    Parameters
    ----------
    P1 : np.ndarray of float [N_tet x 3]
        Coordinates of first point of tetrahedra
    P2 : np.ndarray of float [N_tet x 3]
        Coordinates of second point of tetrahedra
    P3 : np.ndarray of float [N_tet x 3]
        Coordinates of third point of tetrahedra
    P4 : np.ndarray of float [N_tet x 3]
        Coordinates of fourth point of tetrahedra

    Returns
    -------
    tetrahedra_volume : np.ndarray of float [N_tet x 1]
        (Signed) volumes of tetrahedra
    """
    n_tets = P1.shape[0] if P1.ndim > 1 else 1
    ones = np.ones((n_tets, 1))

    # one 4x4 matrix per tetrahedron; column k holds (1, x, y, z) of point k
    j = np.zeros((n_tets, 4, 4))
    for col, vertex in enumerate((P1, P2, P3, P4)):
        j[:, :, col] = np.hstack((ones, vertex))

    # signed volume from the homogeneous-coordinate determinant
    volume = 1.0 / 6.0 * np.linalg.det(j)
    return volume[:, np.newaxis]
|
|
550
|
+
|
|
551
|
+
|
|
552
|
+
def calc_gradient_surface(phi, points, triangles):
    """
    Calculate gradient of potential phi on surface (i.e. tangential component) given in vertices of a triangular
    mesh forming a 2D surface.

    Parameters
    ----------
    phi : np.ndarray of float [N_points x 1]
        Potential in nodes
    points : np.ndarray of float [N_points x 3]
        Coordinates of nodes (x,y,z)
    triangles : np.ndarray of int32 [N_tri x 3]
        Connectivity of triangular mesh

    Returns
    -------
    grad_phi : np.ndarray of float [N_tri x 3]
        Gradient of potential phi on surface
    """
    n_tri = triangles.shape[0]
    grad_phi = np.zeros((n_tri, 3))

    for tri in range(n_tri):
        v0, v1, v2 = triangles[tri]

        # two triangle edges relative to the third vertex ...
        edges = np.array([points[v0] - points[v2],
                          points[v1] - points[v2]])
        # ... and the matching potential differences
        dphi = np.array([phi[v0] - phi[v2],
                         phi[v1] - phi[v2]])

        # least-squares (pseudo-inverse) solution of edges . grad = dphi
        grad_phi[tri, :] = np.dot(np.linalg.pinv(edges), dphi).T

    return grad_phi
|
|
588
|
+
|
|
589
|
+
|
|
590
|
+
def determine_e_midlayer_workhorse(fn_e_results, subject, mesh_idx, midlayer_fun, fn_mesh_hdf5, roi_idx, phi_scaling=1.,
                                   verbose=False):
    """
    Compute the midlayer E-field components (magnitude, tangential, normal) for a list of FEM result
    files and write them back into each results .hdf5 file under
    ``data/midlayer/roi_surface/<roi_idx>/``.

    Parameters
    ----------
    fn_e_results : list of str
        Results filenames (without the ".hdf5" extension).
    subject : pynibs.Subject
        Subject object (used for midlayer_fun == "simnibs").
    mesh_idx : int
        Mesh index.
    midlayer_fun : str
        Method to determine the midlayer e-fields ("pynibs" or "simnibs").
    fn_mesh_hdf5 : str
        Filename of corresponding mesh file.
    roi_idx : int
        ROI index.
    phi_scaling : float, default: 1.
        Scaling of the scalar potential:
        simnibs < 3.0 : 1000.
        simnibs >= 3.0 : 1. (Default)
    verbose : bool, default: False
        Print progress information.

    Returns
    -------
    <File> .hdf5 file
        Adds midlayer e-field results (E_mag, E_tan, E_norm) to each results file.

    Raises
    ------
    ValueError
        If ``midlayer_fun`` is neither "pynibs" nor "simnibs".
    """
    if verbose:
        print(f"Loading Mesh and ROI {roi_idx} from {fn_mesh_hdf5}")

    msh = pynibs.load_mesh_hdf5(fn_mesh_hdf5)
    roi = pynibs.load_roi_surface_obj_from_hdf5(fn_mesh_hdf5)

    for fn_e in fn_e_results:

        with h5py.File(fn_e + ".hdf5", 'r') as f:
            phi = f['data/nodes/v'][:][:, np.newaxis]
            dadt = f['data/nodes/D'][:]

        # determine e_norm and e_tan for every simulation
        if verbose:
            print(f"Determine midlayer E-field for {fn_e}.hdf5")

        # choose which function to use for midlayer computation
        if midlayer_fun == "pynibs":
            e_norm, e_tan = msh.calc_E_on_GM_WM_surface3(phi=phi * phi_scaling,
                                                         dAdt=dadt,
                                                         roi=roi[roi_idx],
                                                         verbose=False,
                                                         mode='magnitude')
            # sign flip of the normal component (pynibs convention)
            e_norm = e_norm.flatten() * -1
            e_tan = e_tan.flatten()
        elif midlayer_fun == "simnibs":
            e_norm, e_tan = msh.calc_E_on_GM_WM_surface_simnibs_KW(phi=phi * phi_scaling,
                                                                   dAdt=dadt,
                                                                   roi=roi[roi_idx],
                                                                   verbose=False,
                                                                   subject=subject,
                                                                   mesh_idx=mesh_idx)
            e_norm = e_norm.flatten()
            e_tan = e_tan.flatten()
        else:
            raise ValueError(f"midlayer_fun {midlayer_fun} not implemented.")

        # total magnitude from the two orthogonal components
        e_mag = np.linalg.norm(np.vstack([e_norm, e_tan]).transpose(), axis=1).flatten()

        del phi, dadt

        with h5py.File(fn_e + ".hdf5", 'a') as f:
            # remove stale datasets before re-writing
            try:
                del f['data/midlayer/roi_surface/{}/E_mag'.format(roi_idx)]
                del f['data/midlayer/roi_surface/{}/E_tan'.format(roi_idx)]
                del f['data/midlayer/roi_surface/{}/E_norm'.format(roi_idx)]
            except KeyError:
                pass

            # BUG FIX: previously the '*_simnibs' result variables were written
            # unconditionally, raising a NameError when midlayer_fun == "pynibs".
            f.create_dataset('data/midlayer/roi_surface/{}/E_mag'.format(roi_idx), data=e_mag)
            f.create_dataset('data/midlayer/roi_surface/{}/E_tan'.format(roi_idx), data=e_tan)
            f.create_dataset('data/midlayer/roi_surface/{}/E_norm'.format(roi_idx), data=e_norm)

        if verbose:
            print("\tAdding results to {}".format(fn_e + ".hdf5"))
|
|
659
|
+
|
|
660
|
+
|
|
661
|
+
def determine_e_midlayer(fn_e_results, fn_mesh_hdf5, subject, mesh_idx, roi_idx, n_cpu=4, midlayer_fun="simnibs",
                         phi_scaling=1., verbose=False):
    """
    Parallel version to determine the midlayer e-fields from a list of .hdf5 results files.

    Parameters
    ----------
    fn_e_results : list of str
        List of results filenames (.hdf5 format).
    fn_mesh_hdf5 : str
        Filename of corresponding mesh file.
    subject : pynibs.Subject
        Subject object.
    mesh_idx : int
        Mesh index.
    roi_idx : int
        ROI index.
    n_cpu : int, default: 4
        Number of parallel computations (capped at the number of available cores).
    midlayer_fun : str, default: "simnibs"
        Method to determine the midlayer e-fields ("pynibs" or "simnibs").
    phi_scaling : float, default: 1.0
        Scaling factor of scalar potential to change between "m" and "mm".
    verbose : bool, default: False
        Print verbosity information (forwarded to the workhorse).

    Returns
    -------
    <File> .hdf5 file
        Adds midlayer e-field results to ROI.
    """
    # never spawn more workers than physically available cores
    n_cpu = min(n_cpu, multiprocessing.cpu_count())

    workhorse_partial = partial(determine_e_midlayer_workhorse,
                                subject=subject,
                                mesh_idx=mesh_idx,
                                midlayer_fun=midlayer_fun,
                                fn_mesh_hdf5=fn_mesh_hdf5,
                                roi_idx=roi_idx,
                                phi_scaling=phi_scaling,
                                verbose=verbose)

    # each worker processes one chunk of result files
    fn_e_results_chunks = pynibs.compute_chunks(fn_e_results, n_cpu)

    # Bugfix: ensure worker processes are reaped even if a worker raises.
    # The original leaked the pool on error because close()/join() were never reached.
    pool = multiprocessing.Pool(n_cpu)
    try:
        pool.map(workhorse_partial, fn_e_results_chunks)
    finally:
        pool.close()
        pool.join()
|
|
710
|
+
|
|
711
|
+
|
|
712
|
+
def find_element_idx_by_points(nodes, con, points, eps=1e-2):
    """
    Find the tetrahedral element index whose vertices coincide with the given points.

    Each row of ``points`` must coincide (within ``eps``) with exactly one node in ``nodes``;
    the element whose (sorted) node indices equal the matched node indices is returned.
    Note that this matches elements by their vertex coordinates -- it does *not* locate
    arbitrary points lying inside an element.

    Parameters
    ----------
    nodes : np.ndarray [N_nodes x 3]
        Coordinates (x, y, z) of the nodes.
    con : np.ndarray [N_tet x 4]
        Connectivity matrix.
    points : np.ndarray [N_points x 3]
        Vertex coordinates of the element to find (one row per element node).
    eps : float, default: 1e-2
        Maximum Euclidean distance for a point to be matched to a node.

    Returns
    -------
    ele_idx : np.ndarray [N_points]
        Element indices of tetrahedra whose vertices match 'points'.
    """
    # match every query point to the index of its coinciding node
    node_idx = [np.where(np.linalg.norm(nodes - point, axis=1) < eps)[0] for point in points]

    # an element matches iff its sorted node indices equal the sorted matched indices
    target = np.sort(np.array(node_idx).flatten())
    ele_idx = np.where(np.all(np.sort(con, axis=1) == target, axis=1))[0]
    return ele_idx
|
|
738
|
+
|
|
739
|
+
|
|
740
|
+
def check_islands_for_single_elm(source_elm, connectivity=None, adjacency=None, island_crit=1):
    """
    Check whether ``source_elm`` is attached to an island in the mesh.

    An island is a set of elements that is connected only via a single node (or single
    edge) to the rest of the mesh. Such islands usually crash the FEM solver and should
    be removed.

    Strategy:

    1. Collect candidate neighbors of ``source_elm`` according to ``island_crit``.
    2. Starting at ``source_elm``, recursively visit all neighbors sharing at least
       ``island_crit`` nodes (flood fill).
    3. Report which candidates were (not) reached this way.

    Parameters
    ----------
    source_elm : int
        The source element to check.
    connectivity : np.ndarray, optional
        Connectivity ('node_number_list') starting with 0. Can be triangles or tetrahedra
        (n_elms, 3) or (n_elms, 4).
    adjacency : np.ndarray, optional
        Adjacency matrix (n_elm, n_elm). Weights are supposed to be number of shared nodes.
        Computed from ``connectivity`` if not provided.
    island_crit : str
        How many nodes to define islands?
        'any' -> Elements connected via a single node or single edge are defined as an island.
        'node' -> Elements connected via a single _node_ are defined as an island.
        'edge' -> Elements connected via a single _edge_ are defined as an island.
        NOTE(review): the signature default (1, an int) is not a valid value and raises
        ValueError -- callers must pass one of the strings above.

    Returns
    -------
    n_visited : int
    n_not_visited : int
    neighbors_visited : dict
        Which neighbors have been visited and which have not.
    """
    if adjacency is not None and connectivity is not None:
        raise ValueError(f"Provide either neighbors or connectivity, not both.")

    if adjacency is None:
        assert connectivity is not None
        # pairwise number of shared nodes (self-weight equals nodes per element)
        adjacency = np.array([np.sum(np.isin(connectivity, elm), axis=1) for elm in connectivity])

    source_links = adjacency[source_elm]

    # candidate neighbors to track, and the minimum number of shared nodes that
    # counts as a traversable connection
    if island_crit == 'any':
        candidates = np.where(source_links >= 1)[0]
        min_shared = 1
    elif island_crit == 'node':
        candidates = np.where(source_links == 1)[0]
        min_shared = 1
    elif island_crit == 'edge':
        candidates = np.where(source_links == 2)[0]
        min_shared = 2
    else:
        raise ValueError
    neighbors_visited = {elm: False for elm in candidates}

    # NOTE(review): the flood fill below also walks across links of exactly `min_shared`
    # shared nodes, so every candidate is directly reachable by construction and
    # n_not_visited appears to always end up 0 -- presumably the traversal was meant to
    # require `min_shared + 1` shared nodes. TODO confirm against the intended island
    # definition before changing.

    # worklist flood fill over all elements reachable via >= min_shared shared nodes
    worklist = set(np.where(source_links >= min_shared)[0].tolist())
    worklist.add(source_elm)
    while worklist:
        current = worklist.pop()
        neighbors_visited[current] = True

        # neighbors of `current` with >= min_shared shared nodes: take the strongest
        # links first (descending argsort); bincount tells us how many qualify
        row = adjacency[current]
        strongest_first = (-row).argsort()
        n_linked = np.bincount(row)[min_shared:].sum()
        for neighbor in strongest_first[:n_linked]:
            if not neighbors_visited.get(neighbor, False):
                worklist.add(neighbor)

    flags = list(neighbors_visited.values())
    return np.sum(flags), np.sum([not flag for flag in flags]), neighbors_visited
|
|
819
|
+
|
|
820
|
+
|
|
821
|
+
def find_islands(connectivity=None, adjacency=None, island_crit='any', verbose=False, largest=False):
    """
    Identify islands in a mesh. An island is a set of elements that is only connected
    via a single node to another set of elements.
    These islands usually crash the FEM solver and should be removed.

    For each element, :py:func:`check_islands_for_single_elm` is run and the per-element
    visit statistics are accumulated.

    .. figure:: ../../doc/images/find_islands.png
       :scale: 50 %
       :alt: Island detection

       Islands are groups of elements that are only connected via a single node/edge to another group.

    Parameters
    ----------
    connectivity : np.ndarray, optional
        Connectivity ('node_number_list') starting with 0. Can be triangles or tetrahedra
        (n_elms, 3) or (n_elms, 4).
    adjacency : np.ndarray, optional
        Adjacency matrix (n_elm, n_elm). Weights are supposed to be number of shared nodes.
        Computed from connectivity if not provided.
    island_crit : int or str, default: 'any'
        How many nodes to define islands?
        'any' -> Elements connected via a single node or single edge are defined as an island.
        'node' -> Elements connected via a single _node_ are defined as an island.
        'edge' -> Elements connected via a single _edge_ are defined as an island.
    largest : bool, default: False
        Only return largest island, speeds up computation quite a bit if only one large,
        and many small islands exist.
    verbose : bool, optional
        Print some verbosity information. Default: False

    Returns
    -------
    elms_with_island : list
        Elements with neighboring islands.
    counter_visited : np.ndarray
        shape = (n_elms). How often has each element been visited.
    counter_not_visited : np.ndarray
        shape = (n_elms). How often has each element not been visited.
    """
    if adjacency is not None and connectivity is not None:
        raise ValueError(f"Provide either neighbors or connectivity, not both.")
    if adjacency is None and connectivity is None:
        raise ValueError(f"Provide either neighbors or connectivity")
    if adjacency is None:
        assert connectivity is not None
        adjacency = np.array([np.sum(np.isin(connectivity, elm), axis=1) for elm in connectivity])

    n_elms = adjacency.shape[0]
    counter_visited = np.zeros(n_elms)
    counter_not_visited = np.zeros(n_elms)
    elms_with_island = []
    largest_size = 0
    seen = set()

    for elm_source in tqdm(range(n_elms), desc="Checking for islands."):
        # with largest=True, skip elements already known to belong to a visited component
        if largest and elm_source in seen:
            continue
        _, n_not_visited, visited = check_islands_for_single_elm(elm_source,
                                                                 adjacency=adjacency,
                                                                 island_crit=island_crit)

        reached = [elm for elm, was_reached in visited.items() if was_reached]

        if largest:
            seen.update(reached)
            # keep counters only for the biggest component found so far
            if len(reached) > largest_size:
                largest_size = len(reached)
                unreached = [elm for elm, was_reached in visited.items() if not was_reached]
                counter_visited = np.zeros(n_elms)
                counter_not_visited = np.zeros(n_elms)
                counter_visited[reached] += 1
                counter_not_visited[unreached] += 1
        else:
            # accumulate per-element visit statistics over all source elements
            counter_visited[reached] += 1
            unreached = [elm for elm, was_reached in visited.items() if not was_reached]
            counter_not_visited[unreached] += 1

        # any unreached 1-node-neighbor marks this element as island-adjacent
        if n_not_visited:
            if verbose:
                print(f"\nElement {elm_source: >4}: {n_not_visited} 1-node-neighbors not visited. ")
            elms_with_island.append(elm_source)

    return elms_with_island, counter_visited, counter_not_visited
|
|
912
|
+
|
|
913
|
+
|
|
914
|
+
def find_island_elms(connectivity=None, adjacency=None, verbose=False, island_crit='edge', decision='cumulative'):
    """
    Search for islands in a mesh and return element indices of island elements.

    An island is defined as a set of elements which shares only a single node and/or a
    single edge with the rest of the mesh.

    Parameters
    ----------
    connectivity : np.ndarray, optional
        Connectivity ('node_number_list') starting with 0. Can be triangles or tetrahedra
        (n_elms, 3) or (n_elms, 4).
    adjacency : np.ndarray, optional
        Adjacency matrix (n_elm, n_elm). Weights are supposed to be number of shared nodes.
        Computed from connectivity if not provided.
    island_crit : str, default: 'edge'
        How many nodes to define islands?
        'node' -> Elements connected via a single _node_ are defined as an island.
        'edge' -> Elements connected via a single _edge_ are defined as an island.
    decision : str, default: 'cumulative'
        'cumulative' -> Return all element indices that are not visited any times.
        'smallest' -> Return smallest island.
    verbose : bool, optional
        Print some verbosity information. Default: False

    Returns
    -------
    island : list of island-elms

    Raises
    ------
    ValueError
        If both (or neither) of ``connectivity``/``adjacency`` are given, or if
        ``decision`` is unknown.
    """
    if adjacency is not None and connectivity is not None:
        raise ValueError(f"Provide either neighbors or connectivity, not both.")
    if adjacency is None:
        assert connectivity is not None
        adjacency = np.array([np.sum(np.isin(connectivity, elm), axis=1) for elm in connectivity])

    # Bugfix: forward the (possibly precomputed) adjacency matrix instead of `connectivity`.
    # `connectivity` is None when the caller supplied `adjacency`, which made find_islands()
    # raise; forwarding adjacency also avoids recomputing the O(n_elm^2) matrix there.
    all_islands, counter_visited, counter_not_visited = find_islands(adjacency=adjacency,
                                                                     verbose=verbose,
                                                                     island_crit=island_crit)

    if decision == 'smallest':
        # NOTE(review): with the documented string values for island_crit ('edge'/'node'),
        # this branch compares `adjacency[k] == island_crit` against a string and computes
        # `island_crit + 1`, which raises TypeError. It apparently expects a numeric
        # island_crit that check_islands_for_single_elm() does not accept either --
        # confirm intended usage before relying on this path.

        # find the size of the islands
        one_node_neighs = {k: np.where(adjacency[k] == island_crit)[0] for k in all_islands}

        visited, not_visited = {}, {}
        for island, one_node_neigh in one_node_neighs.items():
            # accumulate visit counts over all single-node neighbors of this island seed
            n_visited_i, n_not_visited_i = 0, 0
            for island_i in one_node_neigh:
                n_visited, n_not_visited, _ = check_islands_for_single_elm(island_i, adjacency=adjacency,
                                                                           island_crit=island_crit + 1)
                n_visited_i += n_visited
                n_not_visited_i += n_not_visited
            visited[island] = n_visited_i
            not_visited[island] = n_not_visited_i

        # choose the smallest island and get all 2-neighbors
        smallest_island_idx = np.argmin(list(visited.values()))
        smallest_island = list(visited.keys())[smallest_island_idx]

        _, _, elm_idx_from_smallest_island = check_islands_for_single_elm(smallest_island,
                                                                          island_crit=island_crit + 1,
                                                                          adjacency=adjacency)
        if verbose:
            print(f"Island with {len(list(elm_idx_from_smallest_island.keys()))} elements found.")
        return list(elm_idx_from_smallest_island.keys())

    elif decision == 'cumulative':
        # every element that was never reached from any source element
        return np.argwhere(counter_not_visited > 0)

    else:
        # the original silently returned None here, hiding typos in `decision`
        raise ValueError(f"Unknown decision '{decision}', use 'smallest' or 'cumulative'.")
|
|
981
|
+
|
|
982
|
+
|
|
983
|
+
def cortical_depth(mesh_fn, geo_fn=None, write_xdmf=True, skin_surface_id=1005, verbose=False):
    """
    Compute skin-cortex-distance (SCD) for surface and volume data in ``mesh_fn``.

    .. figure:: ../../doc/images/cortical_depth.png
       :scale: 50 %
       :alt: Visualized cortical depth.

       Cortical depth computed against skin surface.

    Parameters
    ----------
    mesh_fn : str
        :py:class:`~pynibs.mesh.mesh_struct.TetrahedraLinear` mesh file.
    geo_fn : str, optional
        :py:class:`~pynibs.mesh.mesh_struct.TetrahedraLinear` mesh file with geometric data. If provided, geometric
        information is read from here.
    write_xdmf : bool, default: True
        Write .xdmf or not.
    skin_surface_id : int, default: 1005
        Which tissue type nr to compute distance against.
    verbose : bool, default: False
        Print some verbosity information.

    Returns
    -------
    <file> : .hdf5
        ``mesh_fn`` or ``geo_fn`` with SCD information in ``/data/tris/Cortex_dist`` and ``/data/tets/Cortex_dist``.
    <file> : .xdmf
        Only if ``write_xdmf == True``.
    """
    if geo_fn is None:
        geo_fn = mesh_fn

    # pull the skin triangles out of the mesh file
    with h5py.File(mesh_fn, 'r') as f:
        skin_tri_idx = f['/mesh/elm/tri_tissue_type'][:] == skin_surface_id
        tri_nodes = f['/mesh/elm/triangle_number_list'][:][skin_tri_idx]

    # keep only the largest edge-connected component of the skin surface;
    # counter_visited > 0 marks its members
    elms_with_island, counter_visited, counter_not_visited = pynibs.find_islands(connectivity=tri_nodes,
                                                                                 verbose=True,
                                                                                 island_crit='edge',
                                                                                 largest=True)
    hdf5 = pynibs.load_mesh_hdf5(mesh_fn)

    with h5py.File(geo_fn, 'r') as geo:
        # get indices for skin elements
        skin_tri_idx = np.squeeze(np.argwhere((geo['mesh/elm/tri_tissue_type'][:] == skin_surface_id)))
        skin_positions = hdf5.triangles_center[skin_tri_idx[counter_visited > 0]]

    def fun(row):
        # distance of one element center to the closest skin triangle center
        return np.min(np.linalg.norm(row - skin_positions, axis=1))

    if verbose:
        print("Computing triangles")
    distances_tri = np.apply_along_axis(fun, axis=1, arr=hdf5.triangles_center)

    if verbose:
        print("Computing tetrahedra")
    distances_tets = np.apply_along_axis(fun, axis=1, arr=hdf5.tetrahedra_center)

    with h5py.File(mesh_fn, 'a') as f:
        # Bugfix: delete the old datasets independently. The original wrapped both
        # `del`s in a single try/except, so when only the first dataset was missing
        # the second was never deleted and create_dataset() below raised.
        for key in ('data/tris/Cortex_dist', 'data/tets/Cortex_dist'):
            try:
                del f[key]
            except KeyError:
                pass
        f.create_dataset(name='data/tris/Cortex_dist', data=distances_tri)
        f.create_dataset(name='data/tets/Cortex_dist', data=distances_tets)

    if write_xdmf:
        pynibs.write_xdmf(overwrite_xdmf=True, hdf5_geo_fn=geo_fn, hdf5_fn=mesh_fn)
|
|
1052
|
+
|
|
1053
|
+
|
|
1054
|
+
def calc_distances(coords, mesh_fn, tissues=None):
    """
    Calculate the distances between ``coords`` and tissue types.

    For every coordinate and tissue the distance is the minimum (over elements of that
    tissue) of the mean node-to-coordinate distance of the element.

    Parameters
    ----------
    coords : list of list or list or np.ndarray
        Coordinates (X, Y, Z) to compute depths for.
    mesh_fn : str
        pynibs.Mesh hdf5 filename.
    tissues : list of int, optional
        Which tissue types to compute depths for. If None, distances to all tissue types are computed.

    Returns
    -------
    distances : pd.DataFrame
        columns: coord, tissue_type, distance
    """
    coords = np.atleast_2d(coords)
    print("Coordinate | tissue type | Distance")
    print("=" * 40)
    res = {'coord': [],
           'tissue_type': [],
           'distance': []}
    with h5py.File(mesh_fn, 'r') as f:
        # Performance: read each (potentially large) dataset once instead of once per
        # coordinate/tissue iteration as the original did.
        node_number_list = f['mesh/elm/node_number_list'][:]
        tag2 = f['mesh/elm/tag2'][:]
        elm_type = f['mesh/elm/elm_type'][:]
        node_coord = f['mesh/nodes/node_coord'][:]

    if tissues is None:
        tissues = np.unique(tag2)

    # connectivity must be 0-based for direct node indexing below (loop-invariant check,
    # hoisted out of the per-coordinate loop)
    assert np.min(node_number_list) == 0

    for coord in coords:
        for tissue in tissues:
            if tissue > 100:
                # tissue types > 100 are surfaces: triangles (elm_type 2, 3 nodes)
                elmtype = 2
                node_list = node_number_list[(tag2 == tissue) & (elm_type == elmtype)]
                node_list = node_list[:, :3]
            else:
                # volume tissues: tetrahedra (elm_type 4)
                elmtype = 4
                node_list = node_number_list[(tag2 == tissue) & (elm_type == elmtype)]
            node_coords = node_coord[node_list]
            # mean node distance per element, then the closest element of this tissue
            distances = np.mean(np.linalg.norm(coord - node_coords, axis=2), axis=1).min()
            res['coord'].append(coord)
            res['tissue_type'].append(tissue)
            res['distance'].append(distances)
            print(f"{coord} | {tissue: >4} | {distances.round(2): >6} mm")
        print("-" * 40)
    return pd.DataFrame.from_dict(res)
|