pyNIBS-0.2024.8-py3-none-any.whl

This diff represents the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (107)
  1. pyNIBS-0.2024.8.dist-info/LICENSE +623 -0
  2. pyNIBS-0.2024.8.dist-info/METADATA +723 -0
  3. pyNIBS-0.2024.8.dist-info/RECORD +107 -0
  4. pyNIBS-0.2024.8.dist-info/WHEEL +5 -0
  5. pyNIBS-0.2024.8.dist-info/top_level.txt +1 -0
  6. pynibs/__init__.py +34 -0
  7. pynibs/coil.py +1367 -0
  8. pynibs/congruence/__init__.py +15 -0
  9. pynibs/congruence/congruence.py +1108 -0
  10. pynibs/congruence/ext_metrics.py +257 -0
  11. pynibs/congruence/stimulation_threshold.py +318 -0
  12. pynibs/data/configuration_exp0.yaml +59 -0
  13. pynibs/data/configuration_linear_MEP.yaml +61 -0
  14. pynibs/data/configuration_linear_RT.yaml +61 -0
  15. pynibs/data/configuration_sigmoid4.yaml +68 -0
  16. pynibs/data/network mapping configuration/configuration guide.md +238 -0
  17. pynibs/data/network mapping configuration/configuration_TEMPLATE.yaml +42 -0
  18. pynibs/data/network mapping configuration/configuration_for_testing.yaml +43 -0
  19. pynibs/data/network mapping configuration/configuration_modelTMS.yaml +43 -0
  20. pynibs/data/network mapping configuration/configuration_reg_isi_05.yaml +43 -0
  21. pynibs/data/network mapping configuration/output_documentation.md +185 -0
  22. pynibs/data/network mapping configuration/recommendations_for_accuracy_threshold.md +77 -0
  23. pynibs/data/neuron/models/L23_PC_cADpyr_biphasic_v1.csv +1281 -0
  24. pynibs/data/neuron/models/L23_PC_cADpyr_monophasic_v1.csv +1281 -0
  25. pynibs/data/neuron/models/L4_LBC_biphasic_v1.csv +1281 -0
  26. pynibs/data/neuron/models/L4_LBC_monophasic_v1.csv +1281 -0
  27. pynibs/data/neuron/models/L4_NBC_biphasic_v1.csv +1281 -0
  28. pynibs/data/neuron/models/L4_NBC_monophasic_v1.csv +1281 -0
  29. pynibs/data/neuron/models/L4_SBC_biphasic_v1.csv +1281 -0
  30. pynibs/data/neuron/models/L4_SBC_monophasic_v1.csv +1281 -0
  31. pynibs/data/neuron/models/L5_TTPC2_cADpyr_biphasic_v1.csv +1281 -0
  32. pynibs/data/neuron/models/L5_TTPC2_cADpyr_monophasic_v1.csv +1281 -0
  33. pynibs/expio/Mep.py +1518 -0
  34. pynibs/expio/__init__.py +8 -0
  35. pynibs/expio/brainsight.py +979 -0
  36. pynibs/expio/brainvis.py +71 -0
  37. pynibs/expio/cobot.py +239 -0
  38. pynibs/expio/exp.py +1876 -0
  39. pynibs/expio/fit_funs.py +287 -0
  40. pynibs/expio/localite.py +1987 -0
  41. pynibs/expio/signal_ced.py +51 -0
  42. pynibs/expio/visor.py +624 -0
  43. pynibs/freesurfer.py +502 -0
  44. pynibs/hdf5_io/__init__.py +10 -0
  45. pynibs/hdf5_io/hdf5_io.py +1857 -0
  46. pynibs/hdf5_io/xdmf.py +1542 -0
  47. pynibs/mesh/__init__.py +3 -0
  48. pynibs/mesh/mesh_struct.py +1394 -0
  49. pynibs/mesh/transformations.py +866 -0
  50. pynibs/mesh/utils.py +1103 -0
  51. pynibs/models/_TMS.py +211 -0
  52. pynibs/models/__init__.py +0 -0
  53. pynibs/muap.py +392 -0
  54. pynibs/neuron/__init__.py +2 -0
  55. pynibs/neuron/neuron_regression.py +284 -0
  56. pynibs/neuron/util.py +58 -0
  57. pynibs/optimization/__init__.py +5 -0
  58. pynibs/optimization/multichannel.py +278 -0
  59. pynibs/optimization/opt_mep.py +152 -0
  60. pynibs/optimization/optimization.py +1445 -0
  61. pynibs/optimization/workhorses.py +698 -0
  62. pynibs/pckg/__init__.py +0 -0
  63. pynibs/pckg/biosig/biosig4c++-1.9.5.src_fixed.tar.gz +0 -0
  64. pynibs/pckg/libeep/__init__.py +0 -0
  65. pynibs/pckg/libeep/pyeep.so +0 -0
  66. pynibs/regression/__init__.py +11 -0
  67. pynibs/regression/dual_node_detection.py +2375 -0
  68. pynibs/regression/regression.py +2984 -0
  69. pynibs/regression/score_types.py +0 -0
  70. pynibs/roi/__init__.py +2 -0
  71. pynibs/roi/roi.py +895 -0
  72. pynibs/roi/roi_structs.py +1233 -0
  73. pynibs/subject.py +1009 -0
  74. pynibs/tensor_scaling.py +144 -0
  75. pynibs/tests/data/InstrumentMarker20200225163611937.xml +19 -0
  76. pynibs/tests/data/TriggerMarkers_Coil0_20200225163443682.xml +14 -0
  77. pynibs/tests/data/TriggerMarkers_Coil1_20200225170337572.xml +6373 -0
  78. pynibs/tests/data/Xdmf.dtd +89 -0
  79. pynibs/tests/data/brainsight_niiImage_nifticoord.txt +145 -0
  80. pynibs/tests/data/brainsight_niiImage_nifticoord_largefile.txt +1434 -0
  81. pynibs/tests/data/brainsight_niiImage_niifticoord_mixedtargets.txt +47 -0
  82. pynibs/tests/data/create_subject_testsub.py +332 -0
  83. pynibs/tests/data/data.hdf5 +0 -0
  84. pynibs/tests/data/geo.hdf5 +0 -0
  85. pynibs/tests/test_coil.py +474 -0
  86. pynibs/tests/test_elements2nodes.py +100 -0
  87. pynibs/tests/test_hdf5_io/test_xdmf.py +61 -0
  88. pynibs/tests/test_mesh_transformations.py +123 -0
  89. pynibs/tests/test_mesh_utils.py +143 -0
  90. pynibs/tests/test_nnav_imports.py +101 -0
  91. pynibs/tests/test_quality_measures.py +117 -0
  92. pynibs/tests/test_regressdata.py +289 -0
  93. pynibs/tests/test_roi.py +17 -0
  94. pynibs/tests/test_rotations.py +86 -0
  95. pynibs/tests/test_subject.py +71 -0
  96. pynibs/tests/test_util.py +24 -0
  97. pynibs/tms_pulse.py +34 -0
  98. pynibs/util/__init__.py +4 -0
  99. pynibs/util/dosing.py +233 -0
  100. pynibs/util/quality_measures.py +562 -0
  101. pynibs/util/rotations.py +340 -0
  102. pynibs/util/simnibs.py +763 -0
  103. pynibs/util/util.py +727 -0
  104. pynibs/visualization/__init__.py +2 -0
  105. pynibs/visualization/para.py +4372 -0
  106. pynibs/visualization/plot_2D.py +137 -0
  107. pynibs/visualization/render_3D.py +347 -0
pynibs/mesh/transformations.py
@@ -0,0 +1,866 @@
+ import os
+ import meshio
+ import trimesh
+ import warnings
+ import nibabel
+ import numpy as np
+ from tqdm import tqdm
+ from scipy.interpolate import griddata
+ from vtkmodules.util import numpy_support  # don't import vtk due to opengl issues when running headless
+ from vtkmodules.util.vtkConstants import VTK_TRIANGLE
+ from vtkmodules.vtkIOXML import vtkXMLUnstructuredGridWriter
+ from vtkmodules.vtkCommonDataModel import vtkUnstructuredGrid
+ from vtkmodules.vtkCommonCore import vtkPoints, vtkDoubleArray
+ from vtkmodules.vtkFiltersCore import vtkCellDataToPointData, vtkPointDataToCellData
+
+ import pynibs
+
+
+ def cell_data_to_point_data(tris, data_tris, nodes, method='nearest'):
+     """
+     A wrapper for scipy.interpolate.griddata to interpolate cell data to node data.
+
+     Parameters
+     ----------
+     tris : np.ndarray
+         (n_tri, 3) Element number list.
+     data_tris : np.ndarray
+         (n_tri, 3) Data in tris.
+     nodes : np.ndarray
+         (n_nodes, 3) Node coordinates.
+     method : str, default: 'nearest'
+         Which method to use for interpolation. The default uses NearestNDInterpolator.
+
+     Returns
+     -------
+     data_nodes : np.ndarray
+         Data in nodes.
+     """
+     elms_center = np.mean(nodes[tris], axis=1)
+     return griddata(elms_center, data_tris, nodes, method=method)
+
+
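A minimal usage sketch for cell_data_to_point_data on a toy mesh (the arrays below are invented for illustration; the module path follows the file shown in this diff):

import numpy as np
from pynibs.mesh.transformations import cell_data_to_point_data

nodes = np.array([[0., 0., 0.], [1., 0., 0.], [0., 1., 0.], [1., 1., 0.]])
tris = np.array([[0, 1, 2], [1, 3, 2]])  # two triangles sharing an edge
data_tris = np.array([1.0, 3.0])         # one scalar per triangle
# with method='nearest', each node inherits the value of the closest triangle center
data_nodes = cell_data_to_point_data(tris, data_tris, nodes)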
+ def data_nodes2elements(data, con):
+     """
+     Transforms data in nodes to elements (triangles or tetrahedra).
+
+     Parameters
+     ----------
+     data : np.ndarray of float
+         (N_nodes, N_data) Data given in the nodes.
+     con : np.ndarray of int
+         triangles: (N_elements, 3).
+         tetrahedra: (N_elements, 4).
+         Connectivity index list forming the elements.
+
+     Returns
+     -------
+     out : np.ndarray of float
+         (N_elements, N_data) Data given in the element centers.
+     """
+     return np.average(data[con], axis=1)
+
+
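The node-to-element direction is plain averaging over each element's corner values; a quick numeric sketch (made-up values):

import numpy as np

data = np.array([0.0, 3.0, 6.0])  # one value per node
con = np.array([[0, 1, 2]])       # a single triangle
# data_nodes2elements(data, con) == np.average(data[con], axis=1) -> array([3.0])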
+ def data_elements2nodes(data, con, precise=False):
+     """
+     Transforms data in elements (triangles or tetrahedra) to nodes.
+     Data can be a list of multiple data arrays.
+
+     Parameters
+     ----------
+     data : np.ndarray of float or list of np.ndarray
+         (N_elements, N_data) Data given in the elements (multiple datasets that fit to con may be passed in a list).
+     con : np.ndarray of int
+         triangles: (N_elements, 3).
+         tetrahedra: (N_elements, 4).
+         Connectivity index list forming the elements.
+     precise : bool, default: False
+         Compute the data transformation precisely but slowly. Better for near-zero values.
+
+     Returns
+     -------
+     out : np.ndarray of float or list of np.ndarray
+         (N_nodes, N_data) Data in nodes.
+     """
+     # check if a single dataset or a list of multiple datasets is passed
+     if type(data) is not list:
+         single_array_input = True
+         data = [data]
+     else:
+         single_array_input = False
+
+     n_elements = data[0].shape[0]
+     n_nodes = con.max() - con.min() + 1
+     if con.min() != 0:
+         warnings.warn("Node number list is not zero based")
+
+     # build connectivity matrix
+     if not precise:
+         try:
+             c = np.zeros([n_elements, n_nodes])
+
+             for i in range(n_elements):
+                 c[i, (con[i])] = 1.0 / con.shape[1]
+
+             # filter out NaN from dataset
+             for i in range(len(data)):
+                 data[i][np.isnan(data[i])] = 0
+
+             # determine pseudo-inverse of connectivity matrix
+             cinv = np.linalg.pinv(c)
+             # transform data from element centers to element nodes
+             data_in_nodes = [np.dot(cinv, d) for d in data]
+
+             # if a single array was provided, return an array as well
+             if single_array_input:
+                 data_in_nodes = np.array(data_in_nodes)
+
+             return data_in_nodes
+
+         except MemoryError:  # numpy's private _ArrayMemoryError subclasses MemoryError
+             warnings.warn("Cannot allocate enough RAM to do fast data->nodes conversion. "
+                           "Falling back to (slow) iterative mapping.")
+
+     data_in_nodes = []
+
+     con_flat = con.flatten()
+     n_dims = con.shape[1]
+
+     for d in data:
+         data_flat = np.repeat(d, n_dims)
+         # data_nodes = np.zeros(n_nodes, )
+         # for i in tqdm(range(n_nodes), desc="Mapping elements to node data"):
+         #     data_nodes[i] = d[np.argwhere(con == i)[:, 0]].mean()
+
+         data_in_nodes.append(np.array([data_flat[con_flat == i].mean() for i in range(n_nodes)]))
+
+     # if a single array was provided, return an array as well
+     if single_array_input:
+         data_in_nodes = np.array(data_in_nodes)
+     return data_in_nodes
+
+
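For the fast path, the function builds an averaging matrix C with C[i, con[i]] = 1/3 (triangles) and applies its pseudo-inverse, so element-to-node followed by node-to-element roughly round-trips. A sketch with invented values:

import numpy as np
from pynibs.mesh.transformations import data_elements2nodes, data_nodes2elements

con = np.array([[0, 1, 2], [1, 3, 2]])
data_elms = np.array([1.0, 3.0])
data_nodes = data_elements2nodes(data_elms, con)         # least-squares node values
back = data_nodes2elements(np.squeeze(data_nodes), con)  # ~ array([1.0, 3.0])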
+ def project_on_scalp_hdf5(coords, mesh, scalp_tag=1005):
+     """
+     Find the node in the scalp closest to each coordinate.
+
+     Parameters
+     ----------
+     coords : np.ndarray
+         (n, 3) Vectors to be transformed.
+     mesh : str or pynibs.TetrahedraLinear
+         Filename of mesh in .hdf5 format or mesh structure.
+     scalp_tag : int, default: 1005
+         Tag in the mesh where the scalp is to be set.
+
+     Returns
+     -------
+     points_closest : np.ndarray
+         (n, 3) Coordinates projected on the scalp (closest skin points).
+     """
+     # read head mesh and extract skin surface
+     if isinstance(mesh, str):
+         mesh = pynibs.load_mesh_hdf5(mesh)
+
+     if coords.ndim == 1:
+         coords = coords[np.newaxis, :]
+
+     # crop to skin surface
+     triangles_skin = mesh.triangles[mesh.triangles_regions == scalp_tag]
+     point_idx_skin = np.unique(triangles_skin)
+     points_skin = mesh.points[point_idx_skin]
+
+     # find points with smallest Euclidean distance
+     points_closest = np.zeros(coords.shape)
+     for i, c in enumerate(coords):
+         points_closest[i, :] = points_skin[np.argmin(np.linalg.norm(points_skin - c, axis=1)), :]
+
+     return points_closest
+
+
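A usage sketch; the mesh filename and coordinate are placeholders, and tag 1005 is SimNIBS' skin surface region:

import numpy as np
from pynibs.mesh.transformations import project_on_scalp_hdf5

coil_center = np.array([[-40.0, 10.0, 80.0]])  # a point somewhere above the head
on_scalp = project_on_scalp_hdf5(coil_center, "subject_mesh.hdf5", scalp_tag=1005)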
+ def project_on_scalp(coords, mesh, scalp_tag=1005):
+     """
+     Find the node in the scalp closest to each coordinate.
+
+     Parameters
+     ----------
+     coords : np.ndarray
+         (n, 3) Vectors to be transformed.
+     mesh : pynibs.TetrahedraLinear or simnibs.msh.mesh_io.Msh
+         Mesh structure in simnibs or pynibs format.
+     scalp_tag : int, default: 1005
+         Tag in the mesh where the scalp is to be set.
+
+     Returns
+     -------
+     points_closest : np.ndarray
+         (n, 3) Coordinates projected on the scalp (closest skin points).
+     """
+     from simnibs.msh.transformations import project_on_scalp as project_on_scalp_msh
+     from simnibs.msh.mesh_io import Msh
+
+     if isinstance(mesh, pynibs.TetrahedraLinear):
+         points_closest = project_on_scalp_hdf5(coords=coords, mesh=mesh, scalp_tag=scalp_tag)
+     elif isinstance(mesh, Msh):
+         points_closest = project_on_scalp_msh(coords=coords, mesh=mesh, scalp_tag=scalp_tag, distance=0.)
+     else:
+         raise ValueError(f"Unknown mesh type: {type(mesh)}.")
+
+     return points_closest
+
+
+ def refine_surface(fn_surf, fn_surf_refined, center, radius, repair=True, remesh=True, verbose=True):
+     """
+     Refines a surface (.stl) in a spherical ROI and saves it as an .stl file.
+
+     Parameters
+     ----------
+     fn_surf : str
+         Input filename (.stl).
+     fn_surf_refined : str
+         Output filename (.stl).
+     center : np.ndarray of float
+         (3,) Center of the spherical ROI (x, y, z).
+     radius : float
+         Radius of the ROI.
+     repair : bool, default: True
+         Repair the surface mesh to ensure that it is watertight and forms a volume.
+     remesh : bool, default: True
+         Perform remeshing with meshfix (also removes possibly overlapping facets and intersections).
+     verbose : bool, default: True
+         Print output messages.
+
+     Returns
+     -------
+     <file> : .stl file
+         Refined surface mesh, written to fn_surf_refined.
+     """
+     radius_ = radius + 2
+     refine = True
+
+     while refine:
+         if verbose:
+             print(f"Loading {fn_surf} ...")
+         # read original .stl file
+         wm = trimesh.load(fn_surf)
+
+         tris = wm.faces
+         tris_center = wm.triangles_center
+         points = wm.vertices
+
+         # split elements in the ROI by adding edge midpoints as new points
+         mask_roi = np.linalg.norm(tris_center - center, axis=1) < radius
+         ele_idx_roi = np.where(np.linalg.norm(tris_center - center, axis=1) < radius)[0]
+         points_refine = points
+         tris_refine = tris
+
+         if verbose:
+             print("Splitting elements ...")
+
+         for ele_idx in tqdm(ele_idx_roi):
+             points_idx_ele = tris[ele_idx, :]
+             p_0 = points[points_idx_ele[0], :]
+             p_1 = points[points_idx_ele[1], :]
+             p_2 = points[points_idx_ele[2], :]
+             p_01 = p_0 + 0.5 * (p_1 - p_0)
+             p_02 = p_0 + 0.5 * (p_2 - p_0)
+             p_12 = p_1 + 0.5 * (p_2 - p_1)
+
+             points_refine = np.vstack((points_refine, p_01, p_02, p_12))
+
+             mask_roi = np.hstack((mask_roi, False, False, False, False))
+
+             # indices of the corner points and the three new edge midpoints
+             p_0_idx = points_idx_ele[0]
+             p_1_idx = points_idx_ele[1]
+             p_2_idx = points_idx_ele[2]
+             p_01_idx = points_refine.shape[0] - 3
+             p_02_idx = points_refine.shape[0] - 2
+             p_12_idx = points_refine.shape[0] - 1
+
+             # adding 4 elements
+             tris_refine = np.vstack((tris_refine, np.array([[p_0_idx, p_01_idx, p_02_idx],
+                                                             [p_01_idx, p_1_idx, p_12_idx],
+                                                             [p_02_idx, p_12_idx, p_2_idx],
+                                                             [p_01_idx, p_12_idx, p_02_idx]])))
+
+         ele_idx_del = []
+
+         if radius != np.inf:
+             if verbose:
+                 print("Adding triangles in surrounding elements ...")
+             # add triangles in surrounding elements
+             ele_sur_idx = np.where(np.logical_and(np.linalg.norm(tris_center - center, axis=1) < radius_,
+                                                   np.linalg.norm(tris_center - center, axis=1) >= radius))[0]
+
+             for ele_sur in tqdm(ele_sur_idx):
+                 points_idx_ele = tris[ele_sur, :]
+                 p_0 = points[points_idx_ele[0], :]
+                 p_1 = points[points_idx_ele[1], :]
+                 p_2 = points[points_idx_ele[2], :]
+                 p_01 = p_0 + 0.5 * (p_1 - p_0)
+                 p_02 = p_0 + 0.5 * (p_2 - p_0)
+                 p_12 = p_1 + 0.5 * (p_2 - p_1)
+
+                 p_0_idx = points_idx_ele[0]
+                 p_1_idx = points_idx_ele[1]
+                 p_2_idx = points_idx_ele[2]
+
+                 p_on_02 = False
+                 p_on_12 = False
+                 p_on_01 = False
+
+                 if (np.sum(p_01 == points_refine, axis=1) == 3).any():
+                     p_on_01 = True
+
+                 if (np.sum(p_02 == points_refine, axis=1) == 3).any():
+                     p_on_02 = True
+
+                 if (np.sum(p_12 == points_refine, axis=1) == 3).any():
+                     p_on_12 = True
+
+                 # no edge with a refined point
+                 if not p_on_01 and not p_on_02 and not p_on_12:
+                     pass
+
+                 # one edge with a refined point
+                 elif p_on_01 and not p_on_02 and not p_on_12:
+                     ele_idx_del.append(ele_sur)
+                     p_01_idx = np.where(np.sum(points_refine == p_01, axis=1) == 3)[0][0]
+                     tris_refine = np.vstack((tris_refine, np.array([[p_0_idx, p_01_idx, p_2_idx],
+                                                                     [p_01_idx, p_1_idx, p_2_idx]])))
+
+                 elif p_on_02 and not p_on_01 and not p_on_12:
+                     ele_idx_del.append(ele_sur)
+                     p_02_idx = np.where(np.sum(points_refine == p_02, axis=1) == 3)[0][0]
+                     tris_refine = np.vstack((tris_refine, np.array([[p_0_idx, p_1_idx, p_02_idx],
+                                                                     [p_02_idx, p_1_idx, p_2_idx]])))
+
+                 elif p_on_12 and not p_on_02 and not p_on_01:
+                     ele_idx_del.append(ele_sur)
+                     p_12_idx = np.where(np.sum(points_refine == p_12, axis=1) == 3)[0][0]
+                     tris_refine = np.vstack((tris_refine, np.array([[p_0_idx, p_1_idx, p_12_idx],
+                                                                     [p_0_idx, p_12_idx, p_2_idx]])))
+
+                 # two edges with refined points
+                 elif p_on_02 and p_on_12 and not p_on_01:
+                     ele_idx_del.append(ele_sur)
+                     p_12_idx = np.where(np.sum(points_refine == p_12, axis=1) == 3)[0][0]
+                     p_02_idx = np.where(np.sum(points_refine == p_02, axis=1) == 3)[0][0]
+                     tris_refine = np.vstack((tris_refine, np.array([[p_0_idx, p_1_idx, p_02_idx],
+                                                                     [p_1_idx, p_12_idx, p_02_idx],
+                                                                     [p_02_idx, p_12_idx, p_2_idx]])))
+
+                 elif p_on_02 and p_on_01 and not p_on_12:
+                     ele_idx_del.append(ele_sur)
+                     p_01_idx = np.where(np.sum(points_refine == p_01, axis=1) == 3)[0][0]
+                     p_02_idx = np.where(np.sum(points_refine == p_02, axis=1) == 3)[0][0]
+                     tris_refine = np.vstack((tris_refine, np.array([[p_0_idx, p_01_idx, p_02_idx],
+                                                                     [p_01_idx, p_2_idx, p_02_idx],
+                                                                     [p_01_idx, p_1_idx, p_2_idx]])))
+
+                 elif p_on_01 and p_on_12 and not p_on_02:
+                     ele_idx_del.append(ele_sur)
+                     p_01_idx = np.where(np.sum(points_refine == p_01, axis=1) == 3)[0][0]
+                     p_12_idx = np.where(np.sum(points_refine == p_12, axis=1) == 3)[0][0]
+                     tris_refine = np.vstack((tris_refine, np.array([[p_0_idx, p_01_idx, p_2_idx],
+                                                                     [p_01_idx, p_12_idx, p_2_idx],
+                                                                     [p_01_idx, p_1_idx, p_12_idx]])))
+
+         if verbose:
+             print("Deleting old triangles ...")
+
+         # delete old triangles
+         ele_idx_roi = np.where(mask_roi)[0]
+         ele_idx_lst_del = ele_idx_del + list(ele_idx_roi)
+         tris_refine = np.delete(tris_refine, ele_idx_lst_del, 0)
+
+         points_refine = np.round(points_refine, 5)
+
+         # # delete duplicate points
+         # p_added = points_refine[points.shape[0]:, :]
+         #
+         # point_idx_del = np.array([])
+         # for i_p, p in tqdm(enumerate(p_added)):
+         #
+         #     p_idx = np.where(np.sum(p == points_refine, axis=1) == 3)[0]
+         #
+         #     if len(p_idx) > 1:
+         #         if p_idx[1] not in point_idx_del:
+         #             point_idx_del = np.hstack((point_idx_del, p_idx[1:]))
+         #
+         #             # loop over point_idx_del and replace with first point idx
+         #             for p_d_idx in p_idx[1:]:
+         #                 tris_refine[tris_refine == p_d_idx] = p_idx[0]
+         #
+         # point_idx_keep = [i for i in range(points_refine.shape[0]) if i not in point_idx_del]
+         # point_idx_new = [i for i in range(len(point_idx_keep))]
+         # points_refine = points_refine[point_idx_keep, :]
+         #
+         # # renumber
+         # for p_idx_keep, p_idx_new in zip(point_idx_keep[points.shape[0]:], point_idx_new[points.shape[0]:]):
+         #     tris_refine[tris_refine == p_idx_keep] = p_idx_new
+
+         # create new trimesh
+         mesh = trimesh.Trimesh(vertices=points_refine,
+                                faces=tris_refine)
+
+         if repair:
+             if mesh.is_watertight:
+                 if verbose:
+                     print("Surface is watertight ...")
+                 mesh_ok = True
+             else:
+                 if verbose:
+                     print("Surface is NOT watertight ... trying to repair mesh ...")
+                 # repair mesh
+                 trimesh.repair.fill_holes(mesh)
+
+                 if mesh.is_watertight:
+                     if verbose:
+                         print("Surface repaired ...")
+                     mesh_ok = True
+
+                 else:
+                     mesh_ok = False
+                     radius -= 1
+                     radius_ = radius + 2
+
+                     if verbose:
+                         print(f"WARNING: Could not repair refined surface ... "
+                               f"shrinking radius by 1 mm to {radius} mm")
+         else:
+             mesh_ok = True
+
+         if mesh_ok:
+             if verbose:
+                 print(f"Saving {fn_surf_refined} ...")
+             mesh.export(fn_surf_refined, file_type='stl_ascii')
+
+             if remesh:
+                 # remesh surface with meshfix
+                 print(f"Remeshing {fn_surf_refined} ...")
+                 command = f"meshfix {fn_surf_refined} -a 2.0 -u 1 -q --shells 9 " \
+                           f"--stl -o {fn_surf_refined}"
+                 os.popen(command).read()
+
+             refine = False
+
+
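A call sketch for the refinement loop above (file names, ROI center, and radius are placeholders; the remesh step shells out to meshfix, which must be on the PATH):

import numpy as np
from pynibs.mesh.transformations import refine_surface

refine_surface(fn_surf="wm.stl",
               fn_surf_refined="wm_refined.stl",
               center=np.array([-30.0, -20.0, 70.0]),  # ROI center (x, y, z) in mesh units
               radius=20.0,                            # ROI radius, mm for head meshes
               repair=True, remesh=True, verbose=True)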
+ def map_data_to_surface(datasets, points_datasets, con_datasets, fname_fsl_gm, fname_fsl_wm, fname_midlayer=None,
+                         delta=0.5, input_data_in_center=True, return_data_in_center=True, data_substitute=-1):
+     """
+     Maps data from an ROI of a FreeSurfer surface (wm, gm, or midlayer) to a given FreeSurfer brain surface
+     (wm, gm, inflated).
+
+     Parameters
+     ----------
+     datasets : np.ndarray of float or list of np.ndarray
+         (N_points, N_data) Data in nodes or centers of triangles in the ROI (specify this in input_data_in_center).
+     points_datasets : np.ndarray of float or list of np.ndarray
+         (N_points, 3) Point coordinates (x, y, z) of the ROI where the data in the datasets list is given; the
+         points have to be a subset of the GM/WM surface (has to be provided for each dataset).
+     con_datasets : np.ndarray of int or list of np.ndarray
+         (N_tri, 3) Connectivity matrix of dataset points (has to be provided for each dataset).
+     fname_fsl_gm : str or list of str or list of None
+         Filename of pial surface FreeSurfer file(s) (one or two hemispheres),
+         e.g. in mri2mesh: .../fs_ID/surf/lh.pial.
+     fname_fsl_wm : str or list of str or list of None
+         Filename of wm surface FreeSurfer file(s) (one or two hemispheres),
+         e.g. in mri2mesh: .../fs_ID/surf/lh.white.
+     fname_midlayer : str or list of str, optional
+         Filename of midlayer surface FreeSurfer file(s) (one or two hemispheres),
+         e.g. in headreco: .../fs_ID/surf/lh.central.
+     delta : float, default: 0.5
+         Distance parameter where the gm-wm surface was generated, between 0 and 1:
+         0 -> WM surface,
+         1 -> GM surface.
+     input_data_in_center : bool, default: True
+         Flag if the data in datasets is given in triangle centers or in points.
+     return_data_in_center : bool, default: True
+         Flag if the data should be returned in triangle centers or in points.
+     data_substitute : float, default: -1
+         Substitute value for all points on the target brain surface that do not belong to the given dataset.
+
+     Returns
+     -------
+     data_mapped : np.ndarray of float
+         (N_points_target, N_data) Data mapped to the target brain surface, in points or elements.
+     """
+
+     if type(fname_fsl_gm) is not list:
+         fname_fsl_gm = [fname_fsl_gm]
+
+     if type(fname_fsl_wm) is not list:
+         fname_fsl_wm = [fname_fsl_wm]
+
+     if type(fname_midlayer) is not list:
+         fname_midlayer = [fname_midlayer]
+
+     if fname_midlayer[0] is None:
+         # load all FreeSurfer surfaces of gm and wm (hemispheres) and create the midlayer
+         points_gm = []
+         con_target = []
+         points_wm = []
+         con_idx = 0
+
+         for f_gm, f_wm in zip(fname_fsl_gm, fname_fsl_wm):
+             p_gm, c_tar = nibabel.freesurfer.read_geometry(f_gm)
+             p_wm, _ = nibabel.freesurfer.read_geometry(f_wm)
+
+             points_gm.append(p_gm)
+             points_wm.append(p_wm)
+             con_target.append(c_tar + con_idx)
+             con_idx += np.max(c_tar) + 1  # c_tar.shape[0]
+
+         points_gm = np.vstack(points_gm)
+         points_wm = np.vstack(points_wm)
+         con_target = np.vstack(con_target)
+
+         # regenerate the gm-wm surface w/o cropping in order to find congruent points
+         wm_gm_vector = points_gm - points_wm
+
+         # determine wm-gm surface (midlayer)
+         points = points_wm + wm_gm_vector * delta
+
+     else:
+         # directly load all FreeSurfer midlayer surfaces (hemispheres)
+         points = []
+         con_target = []
+         con_idx = 0
+
+         for f_mid in fname_midlayer:
+             if f_mid.endswith('.gii'):
+                 img = nibabel.gifti.giftiio.read(f_mid)
+                 p_mid = img.agg_data('pointset')
+                 c_tar = img.agg_data('triangle')
+             else:
+                 p_mid, c_tar = nibabel.freesurfer.read_geometry(f_mid)
+
+             points.append(p_mid)
+             con_target.append(c_tar + con_idx)
+             con_idx += np.max(c_tar) + 1  # c_tar.shape[0]
+
+         points = np.vstack(points)
+         con_target = np.vstack(con_target)
+
+     # check datasets
+     if type(datasets) is not list:
+         datasets = [datasets]
+
+     for i in range(len(datasets)):
+         if datasets[i].ndim == 1:
+             datasets[i] = datasets[i][:, np.newaxis]
+         elif datasets[i].shape[0] < datasets[i].shape[1]:
+             raise Warning(f"Dataset #{i} shape[0] dimension is smaller than shape[1] (fewer points than dataset "
+                           f"components). Input dimension should be (N_points, N_data).")
+
+     if type(points_datasets) is not list:
+         points_datasets = [points_datasets]
+
+     if type(con_datasets) is not list:
+         con_datasets = [con_datasets]
+
+     # check if all points and all con are the same (if so, just map once and reuse the results)
+     all_points_equal = all([(points_datasets[i] == points_datasets[i + 1]).all()
+                             for i in range(len(points_datasets) - 1)])
+
+     all_con_equal = all([(con_datasets[i] == con_datasets[i + 1]).all()
+                          for i in range(len(con_datasets) - 1)])
+
+     if all_points_equal and all_con_equal:
+         n_main_iter = 1
+         n_sub_iter = len(datasets)
+     else:
+         n_main_iter = len(datasets)
+         n_sub_iter = 1
+
+     # check if indexation starts with a value greater than zero
+     if np.min(con_target) > 0:
+         con_target = con_target - np.min(con_target)
+
+     n_points = points.shape[0]
+
+     data_mapped = []
+
+     # for i, data in enumerate(datasets):
+     for i in range(n_main_iter):
+         n_data = datasets[i].shape[1] if datasets[i].ndim > 1 else 1
+
+         # n_points_cropped = points_datasets[i].shape[0]
+
+         # check if indexation starts with a value greater than zero
+         if np.min(con_datasets[i]) > 0:
+             con_datasets[i] = con_datasets[i] - np.min(con_datasets[i])
+
+         if datasets[i].ndim == 1:
+             datasets[i] = datasets[i][:, np.newaxis]
+
+         if input_data_in_center and return_data_in_center:
+             # determine triangle centers of dataset
+             triangle_center_datasets = np.average(points_datasets[i][con_datasets[i]], axis=1)
+
+             # determine triangle centers of whole surface
+             triangle_center_surface = np.average(points[con_target], axis=1)
+
+             # loop over all points to get index list
+             point_idx_target = np.zeros(datasets[i].shape[0])
+             point_idx_data = np.arange(datasets[i].shape[0])
+             for j in tqdm(range(datasets[i].shape[0]), desc="Mapping ROI to surface"):
+                 point_idx_target[j] = np.where(np.all(np.isclose(triangle_center_datasets[j, :],
+                                                                  triangle_center_surface), axis=1))[0]
+             point_idx_target = point_idx_target.astype(int).tolist()
+             point_idx_data = point_idx_data.astype(int).tolist()
+
+             # run subiterations (if all points and cons are equal, we save a lot of time here);
+             # exactly one of i and k varies, so i + k always indexes the current dataset
+             for k in range(n_sub_iter):
+                 data_mapped.append(np.zeros([triangle_center_surface.shape[0], n_data]) + data_substitute * 1.0)
+                 data_mapped[i + k][point_idx_target, :] = datasets[i + k][point_idx_data, :]
+
+         else:
+             # loop over all points to get index list
+             point_idx_target = []
+             point_idx_data = list(range(datasets[i].shape[0]))
+
+             for j in range(datasets[i].shape[0]):
+                 point_idx_target.append(np.where(np.all(np.isclose(points_datasets[i][j, :], points), axis=1))[0])
+
+             point_idx_target = [int(p) for p in point_idx_target]
+             point_idx_data = [int(p) for p in point_idx_data]
+
+             # run subiterations (if all points and cons are equal, we save a lot of time here);
+             # exactly one of i and k varies, so i + k always indexes the current dataset
+             for k in range(n_sub_iter):
+                 # transform data from triangle centers to triangle nodes if necessary
+                 if input_data_in_center:
+                     data_nodes = data_elements2nodes(datasets[i + k], con_datasets[i + k])
+                 else:
+                     data_nodes = datasets[i + k]
+
+                 # find and map data points
+                 data_mapped.append(np.zeros([n_points, n_data]) + data_substitute * 1.0)
+                 data_mapped[i + k][point_idx_target] = data_nodes[point_idx_data]
+
+                 # return data in elements instead of points
+                 if return_data_in_center:
+                     data_mapped[i + k] = data_nodes2elements(data_mapped[i + k], con_target)
+
+     return data_mapped
+
+
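A call sketch for the midlayer variant (e_mag_roi, points_roi, and con_roi are hypothetical ROI arrays, and the surface file names are placeholders); see the docstring above for the gm/wm variant:

from pynibs.mesh.transformations import map_data_to_surface

data_mapped = map_data_to_surface(datasets=e_mag_roi,          # (N_tri_roi, 1) data in ROI triangle centers
                                  points_datasets=points_roi,  # (N_points_roi, 3) ROI node coordinates
                                  con_datasets=con_roi,        # (N_tri_roi, 3) ROI connectivity
                                  fname_fsl_gm=None,
                                  fname_fsl_wm=None,
                                  fname_midlayer=["lh.central", "rh.central"],
                                  data_substitute=-1)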
+ def midlayer_2_surf(midlayer_data, coords_target, coords_midlayer, midlayer_con=None, midlayer_data_in_nodes=False,
+                     max_dist=5, outside_roi_val=0, precise_map=True):
+     """
+     Convert midlayer data to whole-brain surface data, e.g. grey matter.
+     Output is returned as data in nodes.
+
+     Parameters
+     ----------
+     midlayer_data : np.ndarray of float
+         (n_elm_midlayer,) or (n_nodes_midlayer,) The data in the midlayer.
+     coords_target : np.ndarray of float
+         (n_nodes_target, 3) Coordinates of the nodes of the target surface.
+     coords_midlayer : np.ndarray of float
+         (n_nodes_midlayer, 3) Coordinates of the nodes of the midlayer surface.
+     midlayer_con : np.ndarray of int, optional
+         (n_elm_midlayer, 3) Connectivity of the midlayer elements. Required if midlayer_data_in_nodes is False.
+     midlayer_data_in_nodes : bool, default: False
+         Set to True if the midlayer data is given per node; otherwise provide midlayer_con so the element data
+         can be mapped to the nodes.
+     max_dist : float, default: 5
+         Maximum distance between target and midlayer nodes to pull data from midlayer_data for.
+     outside_roi_val : float, default: 0
+         Areas outside of max_dist are filled with outside_roi_val.
+     precise_map : bool, default: True
+         If an elements-to-nodes mapping is done, perform it precisely (slow) or not.
+
+     Returns
+     -------
+     data_target : np.ndarray
+         (n_nodes_target,) The data in the nodes of the target surface.
+     """
+     if not midlayer_data_in_nodes:
+         assert midlayer_con is not None, "Provide midlayer_con to map element data to nodes."
+         midlayer_data = np.squeeze(pynibs.data_elements2nodes(midlayer_data, midlayer_con, precise=precise_map))
+
+     data_target = np.zeros((coords_target.shape[0]))
+     for i in tqdm(range(data_target.shape[0]), desc='Mapping midlayer2surface'):
+         idx = np.linalg.norm(coords_target[i] - coords_midlayer, axis=1).argmin()
+         if np.linalg.norm(coords_target[i] - coords_midlayer[idx]) > max_dist:
+             data_target[i] = outside_roi_val
+         else:
+             data_target[i] = midlayer_data[idx]
+
+     return data_target
+
+
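A sketch of pulling midlayer element data onto a full GM surface; all arrays are hypothetical, and the top-level pynibs re-export is assumed (the function itself calls pynibs.data_elements2nodes the same way):

import pynibs

data_gm = pynibs.midlayer_2_surf(midlayer_data=e_mag_midlayer,    # (n_elm_midlayer,) element data
                                 coords_target=gm_nodes,          # (n_nodes_target, 3)
                                 coords_midlayer=midlayer_nodes,  # (n_nodes_midlayer, 3)
                                 midlayer_con=midlayer_con,       # (n_elm_midlayer, 3)
                                 max_dist=5)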
+ def point_data_to_cell_data_vtk(mesh=None, nodes=None, con=None, point_data=None, fn=None):
+     """
+     Convert point data to cell data in a VTK unstructured grid and save the result to a file.
+
+     Parameters
+     ----------
+     mesh : meshio.Mesh, optional
+         The mesh object containing points and cells.
+     nodes : np.ndarray of float, optional
+         (N_points, 3) Coordinates of the nodes.
+     con : np.ndarray of int, optional
+         (N_elements, 3) Connectivity index list forming the elements.
+     point_data : dict, optional
+         Point data to be transformed to cell data.
+     fn : str, optional
+         If provided, a .vtu file is written to this filename.
+
+     Returns
+     -------
+     cell_data : dict
+         All data sets from the mesh transformed to cell data.
+     """
+     if mesh is not None:
+         assert nodes is None and con is None, "Provide either mesh or nodes and con."
+     else:
+         assert nodes is not None and con is not None, "Provide either mesh or nodes and con."
+         mesh = meshio.Mesh(points=nodes, cells=[("triangle", con)], point_data=point_data)
+
+     # Create VTK mesh instance from meshio object
+     vtk_unstrgrid = vtkUnstructuredGrid()
+     points = vtkPoints()
+     number_of_points = mesh.points.shape[0]
+     number_of_cells = mesh.cells[0].data.shape[0]
+     cell_data = {}
+
+     # Insert points into VTK points object
+     for idx, p in tqdm(enumerate(mesh.points), desc='Processing points', leave=False):
+         points.InsertPoint(idx, p)
+     vtk_unstrgrid.SetPoints(points)
+     del points
+
+     # Allocate and insert cells into VTK unstructured grid
+     vtk_unstrgrid.Allocate(number_of_cells)
+     assert len(mesh.cells) == 1, f"Only one cell block is supported, {len(mesh.cells)} present in mesh."
+     for cell in tqdm(mesh.cells[0].data, desc='Processing cells', leave=False):
+         vtk_unstrgrid.InsertNextCell(VTK_TRIANGLE, 3, cell)
+
+     for arr_name, arr_data_points_meshio in mesh.point_data.items():
+         # Create and set point data array
+         if len(arr_data_points_meshio.shape) == 1:
+             arr_data_points_meshio = arr_data_points_meshio[:, np.newaxis]
+         # arr_data_points_meshio = np.atleast_2d(arr_data_points_meshio)
+         n_comps = arr_data_points_meshio.shape[1]
+         arr_data_points_vtk = vtkDoubleArray()
+         arr_data_points_vtk.SetNumberOfComponents(n_comps)
+         arr_data_points_vtk.SetNumberOfTuples(number_of_points)
+         arr_data_points_vtk.SetName(arr_name)
+
+         for idx, data in tqdm(enumerate(arr_data_points_meshio), total=arr_data_points_meshio.shape[0],
+                               desc=f"Processing data {arr_name}", leave=False):
+             arr_data_points_vtk.SetTuple(idx, data)
+         vtk_unstrgrid.GetPointData().AddArray(arr_data_points_vtk)
+
+         # Convert point data to cell data
+         p2c_conv = vtkPointDataToCellData()
+         p2c_conv.SetInputData(vtk_unstrgrid)
+         p2c_conv.Update()
+         ptdata_unstrgrid = p2c_conv.GetOutput()
+         arr_data_cell_vtk = ptdata_unstrgrid.GetCellData().GetArray(arr_name)
+
+         # Add cell data array to the unstructured grid
+         vtk_unstrgrid.GetCellData().AddArray(arr_data_cell_vtk)
+         cell_data[arr_name] = numpy_support.vtk_to_numpy(arr_data_cell_vtk)
+
+     # Write the VTK mesh to a file
+     if fn is not None:
+         write_vtu(fn, vtk_unstrgrid)
+
+     return cell_data
+
+
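A self-contained round trip on a toy triangle mesh (values invented; nothing is written to disk unless fn is given):

import numpy as np
from pynibs.mesh.transformations import point_data_to_cell_data_vtk

nodes = np.array([[0., 0., 0.], [1., 0., 0.], [0., 1., 0.], [1., 1., 0.]])
con = np.array([[0, 1, 2], [1, 3, 2]])
cell_data = point_data_to_cell_data_vtk(nodes=nodes, con=con,
                                        point_data={"v": np.array([0., 1., 2., 3.])})
# cell_data["v"] now holds one averaged value per triangle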
+ def cell_data_to_point_data_vtk(mesh=None, nodes=None, con=None, cell_data=None, fn=None):
+     """
+     Convert cell data to point data in a VTK unstructured grid and save the result to a file.
+
+     Parameters
+     ----------
+     mesh : meshio.Mesh, optional
+         The mesh object containing points and cells.
+     nodes : np.ndarray of float, optional
+         (N_points, 3) Coordinates of the nodes.
+     con : np.ndarray of int, optional
+         (N_elements, 3) Connectivity index list forming the elements.
+     cell_data : dict, optional
+         Cell data to be transformed to point data. keys: str, values: np.ndarray.
+     fn : str, optional
+         If provided, a .vtu file is written to this filename.
+
+     Returns
+     -------
+     point_data : dict
+         All data sets from the mesh transformed to point data.
+     """
+     if mesh is not None:
+         assert nodes is None and con is None, "Provide either mesh or nodes and con."
+     else:
+         assert nodes is not None and con is not None, "Provide either mesh or nodes and con."
+         mesh = meshio.Mesh(points=nodes, cells=[("triangle", con)], cell_data=cell_data)
+
+     # Create VTK mesh instance from meshio object
+     vtk_unstrgrid = vtkUnstructuredGrid()
+     points = vtkPoints()
+     # number_of_points = mesh.points.shape[0]
+     number_of_cells = mesh.cells[0].data.shape[0]
+     point_data = {}
+
+     # Insert points into VTK points object
+     for idx, p in tqdm(enumerate(mesh.points), desc='Processing points', leave=False):
+         points.InsertPoint(idx, p)
+     vtk_unstrgrid.SetPoints(points)
+     del points
+
+     # Allocate and insert cells into VTK unstructured grid
+     vtk_unstrgrid.Allocate(number_of_cells)
+     assert len(mesh.cells) == 1, f"Only one cell block is supported, {len(mesh.cells)} present in mesh."
+     for cell in tqdm(mesh.cells[0].data, desc='Processing cells', leave=False):
+         vtk_unstrgrid.InsertNextCell(VTK_TRIANGLE, 3, cell)
+
+     for arr_name, arr_data_cells_meshio in mesh.cell_data.items():
+         if isinstance(arr_data_cells_meshio, list):
+             if len(arr_data_cells_meshio) > 1:
+                 raise ValueError(f"Only one cell data block per array is supported, "
+                                  f"'{arr_name}' has {len(arr_data_cells_meshio)}.")
+             arr_data_cells_meshio = arr_data_cells_meshio[0]
+         # Create and set cell data array
+         if len(arr_data_cells_meshio.shape) == 1:
+             arr_data_cells_meshio = arr_data_cells_meshio[:, np.newaxis]
+         # arr_data_cells_meshio = np.atleast_2d(arr_data_cells_meshio)
+         n_comps = arr_data_cells_meshio.shape[1]
+         arr_data_cells_vtk = vtkDoubleArray()
+         arr_data_cells_vtk.SetNumberOfComponents(n_comps)
+         arr_data_cells_vtk.SetNumberOfTuples(number_of_cells)
+         arr_data_cells_vtk.SetName(arr_name)
+
+         for idx, data in tqdm(enumerate(arr_data_cells_meshio), total=arr_data_cells_meshio.shape[0],
+                               desc=f"Processing data {arr_name}", leave=False):
+             arr_data_cells_vtk.SetTuple(idx, data)
+         vtk_unstrgrid.GetCellData().AddArray(arr_data_cells_vtk)
+
+         # Convert cell data to point data
+         c2p_conv = vtkCellDataToPointData()
+         c2p_conv.SetInputData(vtk_unstrgrid)
+         c2p_conv.Update()
+         ptdata_unstrgrid = c2p_conv.GetOutput()
+         arr_data_point_vtk = ptdata_unstrgrid.GetPointData().GetArray(arr_name)
+
+         # Add point data array to the unstructured grid
+         vtk_unstrgrid.GetPointData().AddArray(arr_data_point_vtk)
+         point_data[arr_name] = numpy_support.vtk_to_numpy(arr_data_point_vtk)
+
+     # Write the VTK mesh to a file
+     if fn is not None:
+         write_vtu(fn, vtk_unstrgrid)
+
+     return point_data
+
+
+ def write_vtu(fn, vtk_grid):
+     """Write a vtkUnstructuredGrid to a .vtu file using vtkXMLUnstructuredGridWriter."""
+     writer = vtkXMLUnstructuredGridWriter()
+     writer.SetFileName(fn)
+     writer.SetInputData(vtk_grid)
+     writer.Write()
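And the reverse direction, with an optional .vtu export through write_vtu (toy data again; the output file name is a placeholder). Note that meshio expects cell data as one array per cell block, hence the single-element list:

import numpy as np
from pynibs.mesh.transformations import cell_data_to_point_data_vtk

nodes = np.array([[0., 0., 0.], [1., 0., 0.], [0., 1., 0.], [1., 1., 0.]])
con = np.array([[0, 1, 2], [1, 3, 2]])
point_data = cell_data_to_point_data_vtk(nodes=nodes, con=con,
                                         cell_data={"v": [np.array([1.0, 3.0])]},
                                         fn="toy.vtu")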