pyNIBS 0.2024.8 (pyNIBS-0.2024.8-py3-none-any.whl)
- pyNIBS-0.2024.8.dist-info/LICENSE +623 -0
- pyNIBS-0.2024.8.dist-info/METADATA +723 -0
- pyNIBS-0.2024.8.dist-info/RECORD +107 -0
- pyNIBS-0.2024.8.dist-info/WHEEL +5 -0
- pyNIBS-0.2024.8.dist-info/top_level.txt +1 -0
- pynibs/__init__.py +34 -0
- pynibs/coil.py +1367 -0
- pynibs/congruence/__init__.py +15 -0
- pynibs/congruence/congruence.py +1108 -0
- pynibs/congruence/ext_metrics.py +257 -0
- pynibs/congruence/stimulation_threshold.py +318 -0
- pynibs/data/configuration_exp0.yaml +59 -0
- pynibs/data/configuration_linear_MEP.yaml +61 -0
- pynibs/data/configuration_linear_RT.yaml +61 -0
- pynibs/data/configuration_sigmoid4.yaml +68 -0
- pynibs/data/network mapping configuration/configuration guide.md +238 -0
- pynibs/data/network mapping configuration/configuration_TEMPLATE.yaml +42 -0
- pynibs/data/network mapping configuration/configuration_for_testing.yaml +43 -0
- pynibs/data/network mapping configuration/configuration_modelTMS.yaml +43 -0
- pynibs/data/network mapping configuration/configuration_reg_isi_05.yaml +43 -0
- pynibs/data/network mapping configuration/output_documentation.md +185 -0
- pynibs/data/network mapping configuration/recommendations_for_accuracy_threshold.md +77 -0
- pynibs/data/neuron/models/L23_PC_cADpyr_biphasic_v1.csv +1281 -0
- pynibs/data/neuron/models/L23_PC_cADpyr_monophasic_v1.csv +1281 -0
- pynibs/data/neuron/models/L4_LBC_biphasic_v1.csv +1281 -0
- pynibs/data/neuron/models/L4_LBC_monophasic_v1.csv +1281 -0
- pynibs/data/neuron/models/L4_NBC_biphasic_v1.csv +1281 -0
- pynibs/data/neuron/models/L4_NBC_monophasic_v1.csv +1281 -0
- pynibs/data/neuron/models/L4_SBC_biphasic_v1.csv +1281 -0
- pynibs/data/neuron/models/L4_SBC_monophasic_v1.csv +1281 -0
- pynibs/data/neuron/models/L5_TTPC2_cADpyr_biphasic_v1.csv +1281 -0
- pynibs/data/neuron/models/L5_TTPC2_cADpyr_monophasic_v1.csv +1281 -0
- pynibs/expio/Mep.py +1518 -0
- pynibs/expio/__init__.py +8 -0
- pynibs/expio/brainsight.py +979 -0
- pynibs/expio/brainvis.py +71 -0
- pynibs/expio/cobot.py +239 -0
- pynibs/expio/exp.py +1876 -0
- pynibs/expio/fit_funs.py +287 -0
- pynibs/expio/localite.py +1987 -0
- pynibs/expio/signal_ced.py +51 -0
- pynibs/expio/visor.py +624 -0
- pynibs/freesurfer.py +502 -0
- pynibs/hdf5_io/__init__.py +10 -0
- pynibs/hdf5_io/hdf5_io.py +1857 -0
- pynibs/hdf5_io/xdmf.py +1542 -0
- pynibs/mesh/__init__.py +3 -0
- pynibs/mesh/mesh_struct.py +1394 -0
- pynibs/mesh/transformations.py +866 -0
- pynibs/mesh/utils.py +1103 -0
- pynibs/models/_TMS.py +211 -0
- pynibs/models/__init__.py +0 -0
- pynibs/muap.py +392 -0
- pynibs/neuron/__init__.py +2 -0
- pynibs/neuron/neuron_regression.py +284 -0
- pynibs/neuron/util.py +58 -0
- pynibs/optimization/__init__.py +5 -0
- pynibs/optimization/multichannel.py +278 -0
- pynibs/optimization/opt_mep.py +152 -0
- pynibs/optimization/optimization.py +1445 -0
- pynibs/optimization/workhorses.py +698 -0
- pynibs/pckg/__init__.py +0 -0
- pynibs/pckg/biosig/biosig4c++-1.9.5.src_fixed.tar.gz +0 -0
- pynibs/pckg/libeep/__init__.py +0 -0
- pynibs/pckg/libeep/pyeep.so +0 -0
- pynibs/regression/__init__.py +11 -0
- pynibs/regression/dual_node_detection.py +2375 -0
- pynibs/regression/regression.py +2984 -0
- pynibs/regression/score_types.py +0 -0
- pynibs/roi/__init__.py +2 -0
- pynibs/roi/roi.py +895 -0
- pynibs/roi/roi_structs.py +1233 -0
- pynibs/subject.py +1009 -0
- pynibs/tensor_scaling.py +144 -0
- pynibs/tests/data/InstrumentMarker20200225163611937.xml +19 -0
- pynibs/tests/data/TriggerMarkers_Coil0_20200225163443682.xml +14 -0
- pynibs/tests/data/TriggerMarkers_Coil1_20200225170337572.xml +6373 -0
- pynibs/tests/data/Xdmf.dtd +89 -0
- pynibs/tests/data/brainsight_niiImage_nifticoord.txt +145 -0
- pynibs/tests/data/brainsight_niiImage_nifticoord_largefile.txt +1434 -0
- pynibs/tests/data/brainsight_niiImage_niifticoord_mixedtargets.txt +47 -0
- pynibs/tests/data/create_subject_testsub.py +332 -0
- pynibs/tests/data/data.hdf5 +0 -0
- pynibs/tests/data/geo.hdf5 +0 -0
- pynibs/tests/test_coil.py +474 -0
- pynibs/tests/test_elements2nodes.py +100 -0
- pynibs/tests/test_hdf5_io/test_xdmf.py +61 -0
- pynibs/tests/test_mesh_transformations.py +123 -0
- pynibs/tests/test_mesh_utils.py +143 -0
- pynibs/tests/test_nnav_imports.py +101 -0
- pynibs/tests/test_quality_measures.py +117 -0
- pynibs/tests/test_regressdata.py +289 -0
- pynibs/tests/test_roi.py +17 -0
- pynibs/tests/test_rotations.py +86 -0
- pynibs/tests/test_subject.py +71 -0
- pynibs/tests/test_util.py +24 -0
- pynibs/tms_pulse.py +34 -0
- pynibs/util/__init__.py +4 -0
- pynibs/util/dosing.py +233 -0
- pynibs/util/quality_measures.py +562 -0
- pynibs/util/rotations.py +340 -0
- pynibs/util/simnibs.py +763 -0
- pynibs/util/util.py +727 -0
- pynibs/visualization/__init__.py +2 -0
- pynibs/visualization/para.py +4372 -0
- pynibs/visualization/plot_2D.py +137 -0
- pynibs/visualization/render_3D.py +347 -0
pynibs/hdf5_io/hdf5_io.py

@@ -0,0 +1,1857 @@
+"""This module contains functions to read and write .hdf5 files."""
+import os
+import re
+import h5py
+import nibabel
+import datetime
+import warnings
+import numpy as np
+import pandas as pd
+import multiprocessing
+from subprocess import call
+from functools import partial
+import pynibs
+
+
+def split_hdf5(hdf5_in_fn, hdf5_geo_out_fn='', hdf5_data_out_fn=None):
+    """
+    Splits one hdf5 into one with spatial data and one with statistical data.
+    If coil data is present in ``hdf5_in``, it is saved in ``hdf5_data_out_fn``.
+    If new spatial data is added to the file (curve, inflated, whatever), add this to the geogroups variable.
+
+    Parameters
+    ----------
+    hdf5_in_fn : str
+        Filename of .hdf5 input file.
+    hdf5_geo_out_fn : str
+        Filename of .hdf5 .geo output file.
+    hdf5_data_out_fn : str
+        Filename of .hdf5 .data output file (if None, remove data from hdf5_in).
+
+    Returns
+    -------
+    <File> : .hdf5 file
+        hdf5_geo_out_fn (spatial data).
+    <File> : .hdf5 file
+        hdf5_data_out_fn (data).
+    """
+    hdf5_in = h5py.File(hdf5_in_fn, 'r+')
+
+    if hdf5_data_out_fn is not None:
+        hdf5_geo_out = None
+        hdf5_data_out = None
+        try:
+            hdf5_geo_out = h5py.File(hdf5_geo_out_fn, 'x')
+            hdf5_data_out = h5py.File(hdf5_data_out_fn, 'x')
+        except IOError:
+            print(hdf5_geo_out_fn + " or " + hdf5_data_out_fn + " already exists. Quitting.")
+            quit()
+
+        print("Writing " + hdf5_geo_out_fn)
+        geogroups = ["/mesh/", "/nodes/"]
+        for group in geogroups:
+            if group in hdf5_in:
+                hdf5_in.copy(group, hdf5_geo_out)
+            else:
+                print(group + " not found in " + hdf5_in_fn)
+
+        print("Writing " + hdf5_data_out_fn)
+        datagroups = ["/data/", "/fields/", "/coil/"]
+        for group in datagroups:
+            if group in hdf5_in:
+                hdf5_in.copy(group, hdf5_data_out)
+            else:
+                print(group + " not found in " + hdf5_in_fn)
+
+        # sometimes there is /mesh/fields. move to /field/
+        if "/mesh/fields" in hdf5_geo_out:
+            print("Moving datarrays " +
+                  os.path.basename(hdf5_geo_out_fn) + "/mesh/fields/* to " +
+                  os.path.basename(hdf5_data_out_fn) + "/data/fields*")
+            if "/data/" not in hdf5_data_out:
+                hdf5_data_out.create_group("/data/")
+            hdf5_geo_out.copy("/mesh/fields/", hdf5_data_out)
+            del hdf5_geo_out['/mesh/fields/']
+            for field in hdf5_data_out['/fields/']:
+                hdf5_data_out.move('/fields/' + field, '/data/' + field)
+            del hdf5_data_out['/fields']
+
+    else:  # remove spatial data from hdf5_in
+        if "/mesh/fields" in hdf5_in:
+            if "/data/" not in hdf5_in:
+                hdf5_in.create_group("/data/")
+            for dataset in hdf5_in["/mesh/fields"]:
+                hdf5_in.move("/mesh/fields/" + dataset, "/data/" + dataset)
+        for group in hdf5_in['/']:
+            if group != "data" and group != "coil":
+                del hdf5_in['/' + group]
+        hdf5_in.close()
+        print("repacking .hdf5")
+        os.rename(hdf5_in_fn, hdf5_in_fn + '_temp')
+        # call("ptrepack -o --chunkshape=auto --propindexes " + hdf5_in_fn + '_temp ' + hdf5_in_fn)
+
+        command = ["ptrepack", "-o", "--chunkshape=auto", "--propindexes",
+                   hdf5_in_fn + '_temp',
+                   hdf5_in_fn]
+        call(command)
+        os.remove(hdf5_in_fn + '_temp')
+
+
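
A minimal usage sketch (not part of the package diff; file names are hypothetical, and the two output files must not exist yet because they are opened in 'x' mode):

    import h5py
    import numpy as np
    from pynibs.hdf5_io.hdf5_io import split_hdf5

    # build a toy input file holding both spatial and statistical groups
    with h5py.File("combined.hdf5", "w") as f:
        f.create_dataset("mesh/nodes/node_coord", data=np.random.rand(10, 3))
        f.create_dataset("data/potential", data=np.random.rand(10))

    # split into a .geo file (spatial data) and a .data file (statistical data)
    split_hdf5("combined.hdf5",
               hdf5_geo_out_fn="combined_geo.hdf5",
               hdf5_data_out_fn="combined_data.hdf5")
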
+def read_data_hdf5(fname):
+    """
+    Reads phi and dA/dt data from .hdf5 file (phi and dAdt are given in the nodes).
+
+    Parameters
+    ----------
+    fname : str
+        Filename of .hdf5 data file.
+
+    Returns
+    -------
+    phi : np.ndarray of float [N_nodes]
+        Electric potential in the nodes of the mesh.
+    da_dt : np.ndarray of float [N_nodes x 3]
+        Magnetic vector potential in the nodes of the mesh.
+    """
+    with h5py.File(fname, 'r') as f:
+        phi = np.array(f['data/potential'])  # [N_nodes]
+        # [3*N_nodes x 1]
+        da_dt = np.array(f['data/dAdt'])
+        da_dt = np.reshape(da_dt, (phi.shape[0], 3), order='C')  # [N_nodes x 3]
+    return phi, da_dt
+
+
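
A matching write/read sketch with synthetic values (hypothetical file name); note that ``data/dAdt`` is stored flat with 3*N_nodes entries and reshaped to (N_nodes, 3) on read:

    import h5py
    import numpy as np
    from pynibs.hdf5_io.hdf5_io import read_data_hdf5

    n_nodes = 5
    with h5py.File("fem_result.hdf5", "w") as f:
        f.create_dataset("data/potential", data=np.random.rand(n_nodes))   # phi per node
        f.create_dataset("data/dAdt", data=np.random.rand(3 * n_nodes))    # flat dA/dt

    phi, da_dt = read_data_hdf5("fem_result.hdf5")
    assert phi.shape == (n_nodes,) and da_dt.shape == (n_nodes, 3)
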
+def load_mesh_msh(fname):
+    """
+    Load a mesh from a .msh file and return a :py:class:`~pynibs.mesh.mesh_struct.TetrahedraLinear` object.
+
+    Parameters
+    ----------
+    fname : str
+        .msh filename (incl. path)
+
+    Returns
+    -------
+    obj : pynibs.mesh.mesh_struct.TetrahedraLinear
+    """
+    import simnibs
+    msh_msh = simnibs.read_msh(fname)
+
+    points = msh_msh.nodes.node_coord
+    triangles = msh_msh.elm.node_number_list[msh_msh.elm.elm_type == 2, 0:3]
+    tetrahedra = msh_msh.elm.node_number_list[msh_msh.elm.elm_type == 4, 0:4]
+
+    if tetrahedra.shape[0] == 0:
+        offset_idx = np.min(triangles)
+        tetrahedra_regions, tetrahedra = np.array(()), np.array(())
+        triangles = triangles - offset_idx  # set start index to 0
+    elif triangles.shape[0] == 0:
+        triangles_regions, triangles = np.array(()), np.array(())
+        tetrahedra_regions = msh_msh.elm.tag2[msh_msh.elm.elm_type == 4]
+    else:
+        offset_idx = np.min(np.array([np.min(triangles), np.min(tetrahedra)]))
+        tetrahedra = tetrahedra - offset_idx  # set start index to 0
+        tetrahedra_regions = msh_msh.elm.tag2[msh_msh.elm.elm_type == 4]
+        triangles = triangles - offset_idx  # set start index to 0
+
+    triangles_regions = msh_msh.elm.tag2[msh_msh.elm.elm_type == 2]
+
+    obj = pynibs.mesh_struct.TetrahedraLinear(points, triangles, triangles_regions, tetrahedra, tetrahedra_regions)
+
+    return obj
+
+
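
The simnibs import is deferred into the function body, so the rest of the module stays importable without SimNIBS. A sketch, assuming a SimNIBS installation and an existing head mesh ``subject.msh`` (hypothetical path):

    from pynibs.hdf5_io.hdf5_io import load_mesh_msh

    mesh = load_mesh_msh("subject.msh")  # requires simnibs to be importable
    print(mesh.points.shape)             # (N_nodes, 3) node coordinates
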
+def load_mesh_hdf5(fname):
+    """
+    Load a mesh from a .hdf5 file and set up a :py:class:`~pynibs.mesh.mesh_struct.TetrahedraLinear` object.
+
+    Parameters
+    ----------
+    fname : str
+        Name of .hdf5 file (incl. path)
+
+    Returns
+    -------
+    obj : pynibs.mesh.mesh_struct.TetrahedraLinear
+        :py:class:`~pynibs.mesh.mesh_struct.TetrahedraLinear` object
+
+    Example
+    -------
+    .hdf5 file format and contained groups. The content of .hdf5 files can be shown using the tool HDFView
+    (https://support.hdfgroup.org/products/java/hdfview/)
+
+    .. code-block:: sh
+
+        mesh
+        I---/elm
+        I    I--/elm_number        [1,2,3,...,N_ele]          Running index over all elements starting at 1,
+        I    I                                                triangles and tetrahedra
+        I    I--/elm_type          [2,2,2,...,4,4]            Element type: 2 triangles, 4 tetrahedra
+        I    I--/node_number_list  [1,5,6,0;... ;1,4,8,9]     Connectivity of triangles [X, X, X, 0] and tetrahedra
+        I    I                                                [X, X, X, X]
+        I    I--/tag1              [1001,1001, ..., 4,4,4]    Surface (100X) and domain (X) indices with 1000 offset
+        I    I                                                for surfaces
+        I    I--/tag2              [   1,   1, ..., 4,4,4]    Surface (X) and domain (X) indices w/o offset
+        I
+        I---/nodes
+        I    I--/node_coord        [1.254, 1.762, 1.875;...]  Node coordinates in (mm)
+        I    I--/node_number       [1,2,3,...,N_nodes]        Running index over all points starting at 1
+        I    I--/units             ["mm"]                     .value is unit of geometry
+        I
+        I---/fields
+        I    I--/E/value           [E_x_1, E_y_1, E_z_1;...]  Electric field in all elms, triangles and tetrahedra
+        I    I--/J/value           [J_x_1, J_y_1, J_z_1;...]  Current density in all elms, triangles and tetrahedra
+        I    I--/normE/value       [normE_1,..., normE_N_ele] Magnitude of electric field in all elements,
+        I    I                                                triangles and tetrahedra
+        I    I--/normJ/value       [normJ_1,..., normJ_N_ele] Magnitude of current density in all elements,
+        I    I                                                triangles and tetrahedra
+
+        /data
+        I---/potential             [phi_1, ..., phi_N_nodes]  Scalar electric potential in nodes (size N_nodes)
+        I---/dAdt                  [A_x_1, A_y_1, A_z_1,...]  Magnetic vector potential (size 3xN_nodes)
+    """
+    with h5py.File(fname, 'r') as f:
+        if 'mesh' in f.keys():
+            points = np.array(f['mesh/nodes/node_coord'])
+            # node_number_list = np.array(f['mesh/elm/node_number_list'])
+            # elm_type = np.array(f['mesh/elm/elm_type'])
+            # regions = np.array(f['mesh/elm/tag1'])
+            triangles = np.array(f['mesh/elm/triangle_number_list'])  # node_number_list[elm_type == 2, 0:3]
+            tetrahedra = np.array(f['mesh/elm/tetrahedra_number_list'])  # node_number_list[elm_type == 4, ]
+            triangles_regions = np.array(f['mesh/elm/tri_tissue_type'])
+            tetrahedra_regions = np.array(f['mesh/elm/tet_tissue_type'])
+        else:
+            points = np.array(f['nodes/node_coord'])
+            # node_number_list = np.array(f['elm/node_number_list'])
+            # elm_type = np.array(f['elm/elm_type'])
+            # regions = np.array(f['elm/tag1'])
+            triangles = np.array(f['elm/triangle_number_list'])  # node_number_list[elm_type == 2, 0:3]
+            tetrahedra = np.array(f['elm/tetrahedra_number_list'])  # node_number_list[elm_type == 4, ]
+            triangles_regions = np.array(f['elm/tri_tissue_type'])
+            tetrahedra_regions = np.array(f['elm/tet_tissue_type'])
+    obj = pynibs.mesh_struct.TetrahedraLinear(points, triangles, triangles_regions, tetrahedra, tetrahedra_regions)
+    return obj
+
+
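
A self-contained sketch that writes the minimal datasets this reader expects (a single tetrahedron with its four faces; file name hypothetical):

    import h5py
    import numpy as np
    from pynibs.hdf5_io.hdf5_io import load_mesh_hdf5

    points = np.array([[0., 0., 0.], [1., 0., 0.], [0., 1., 0.], [0., 0., 1.]])
    tris = np.array([[0, 1, 2], [0, 1, 3], [0, 2, 3], [1, 2, 3]])
    tets = np.array([[0, 1, 2, 3]])

    with h5py.File("toy_geo.hdf5", "w") as f:
        f.create_dataset("mesh/nodes/node_coord", data=points)
        f.create_dataset("mesh/elm/triangle_number_list", data=tris)
        f.create_dataset("mesh/elm/tetrahedra_number_list", data=tets)
        f.create_dataset("mesh/elm/tri_tissue_type", data=np.ones(4, dtype=int))
        f.create_dataset("mesh/elm/tet_tissue_type", data=np.ones(1, dtype=int))

    mesh = load_mesh_hdf5("toy_geo.hdf5")  # returns a TetrahedraLinear object
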
+def write_geo_hdf5(out_fn, msh, roi_dict=None, hdf5_path='/mesh'):
+    """
+    Creates a .hdf5 file with geometry data from mesh including region of interest(s).
+
+    Parameters
+    ----------
+    out_fn : str
+        Output hdf5 filename for the mesh's geometry information.
+    msh : pynibs.mesh.mesh_struct.TetrahedraLinear
+        Mesh to write to file.
+    roi_dict : dict of (:py:class:`~pynibs.roi.RegionOfInterestSurface` or :py:class:`~pynibs.RegionOfInterestVolume`)
+        Region of interest (surface and/or volume) information.
+    hdf5_path : str, default: '/mesh'
+        Path in output file to store geometry information.
+
+    Returns
+    -------
+    <File> : .hdf5 file
+        File containing the geometry information
+
+    Example
+    -------
+    File structure of .hdf5 geometry file
+
+    .. code-block:: sh
+
+        mesh
+        I---/elm
+        I    I--/elm_number               [1,2,3,...,N_ele]          Running index over all elements starting at 1
+        I    I                                                       (triangles and tetrahedra)
+        I    I--/elm_type                 [2,2,2,...,4,4]            Element type: 2 triangles, 4 tetrahedra
+        I    I--/tag1                     [1001,1001, ..., 4,4,4]    Surface (100X) and domain (X) indices with 1000
+        I    I                                                       offset for surfaces
+        I    I--/tag2                     [   1,   1, ..., 4,4,4]    Surface (X) and domain (X) indices w/o offset
+        I    I--/triangle_number_list     [1,5,6;... ;1,4,8]         Connectivity of triangles [X, X, X]
+        I    I--/tri_tissue_type          [1,1, ..., 3,3,3]          Surface indices to differentiate between surfaces
+        I    I--/tetrahedra_number_list   [1,5,6,7;... ;1,4,8,12]    Connectivity of tetrahedra [X, X, X, X]
+        I    I--/tet_tissue_type          [1,1, ..., 3,3,3]          Volume indices to differentiate between volumes
+        I    I--/node_number_list         [1,5,6,0;... ;1,4,8,9]     Connectivity of triangles [X, X, X, 0] and
+        I    I                                                       tetrahedra [X, X, X, X]
+        I
+        I---/nodes
+        I    I--/node_coord               [1.254, 1.762, 1.875;...]  Node coordinates in (mm)
+        I    I--/node_number              [1,2,3,...,N_nodes]        Running index over all points starting at 1
+        I    I--/units                    ['mm']                     .value is unit of geometry
+
+        roi_surface
+        I---/0                                                       Region of Interest number
+        I    I--/node_coord_up            [1.254, 1.762, 1.875;...]  Coordinates of upper surface points
+        I    I--/node_coord_mid           [1.254, 1.762, 1.875;...]  Coordinates of middle surface points
+        I    I--/node_coord_low           [1.254, 1.762, 1.875;...]  Coordinates of lower surface points
+        I    I--/tri_center_coord_up      [1.254, 1.762, 1.875;...]  Coordinates of upper triangle centers
+        I    I--/tri_center_coord_mid     [1.254, 1.762, 1.875;...]  Coordinates of middle triangle centers
+        I    I--/tri_center_coord_low     [1.254, 1.762, 1.875;...]  Coordinates of lower triangle centers
+        I    I--/node_number_list         [1,5,6,0;... ;1,4,8,9]     Connectivity of triangles [X, X, X]
+        I    I--/delta                    0.5                        Distance parameter between GM and WM surface
+        I    I--/tet_idx_tri_center_up    [183, 913, 56, ...]        Tetrahedra indices where triangle center of
+        I    I                                                       upper surface are lying in
+        I    I--/tet_idx_tri_center_mid   [185, 911, 58, ...]        Tetrahedra indices where triangle center of
+        I    I                                                       middle surface are lying in
+        I    I--/tet_idx_tri_center_low   [191, 912, 59, ...]        Tetrahedra indices where triangle center of
+        I    I                                                       lower surface are lying in
+        I    I--/tet_idx_node_coord_mid   [12, 15, 43, ...]          Tetrahedra indices where the node_coords_mid
+        I    I                                                       are lying in
+        I    I--/gm_surf_fname            .../surf/lh.pial           Filename of GM surface from segmentation
+        I    I--/wm_surf_fname            .../surf/lh.white          Filename of WM surface from segmentation
+        I    I--/layer                    3                          Number of layers
+        I    I--/fn_mask                  .../simnibs/mask.mgh       Filename of region of interest mask
+        I    I--/X_ROI                    [-10, 15]                  X limits of region of interest box
+        I    I--/Y_ROI                    [-10, 15]                  Y limits of region of interest box
+        I    I--/Z_ROI                    [-10, 15]                  Z limits of region of interest box
+        I
+        I---/1
+        I    I ...
+
+        roi_volume
+        I---/0                                                       Region of Interest number
+        I    I--/node_coord               [1.254, 1.762, 1.875;...]  Coordinates (x,y,z) of ROI nodes
+        I    I--/tet_node_number_list     [1,5,6,7;... ;1,4,8,9]     Connectivity matrix of ROI tetrahedra
+        I    I--/tri_node_number_list     [1,5,6;... ;1,4,8]         Connectivity matrix of ROI triangles
+        I    I--/tet_idx_node_coord       [183, 913, 56, ...]        Tetrahedra indices where ROI nodes are
+        I    I--/tet_idx_tetrahedra_center [12, 15, 43, ...]         Tetrahedra indices where center points of
+        I    I                                                       ROI tetrahedra are
+        I    I--/tet_idx_triangle_center  [12, 15, 43, ...]          Tetrahedra indices where center points of
+        I    I                                                       ROI triangles are
+        I
+        I---/1
+        I    I ...
+    """
+    if os.path.exists(out_fn):
+        os.remove(out_fn)
+
+    with h5py.File(out_fn, 'w') as f:
+        f.create_dataset(hdf5_path + '/elm/elm_number', data=np.arange(msh.N_tet + msh.N_tri) + 1)
+        f.create_dataset(hdf5_path + '/elm/elm_type', data=np.array([2] * msh.N_tri + [4] * msh.N_tet))
+        f.create_dataset(hdf5_path + '/elm/tag1',
+                         data=np.hstack((msh.triangles_regions + 1000, msh.tetrahedra_regions)).flatten())
+        f.create_dataset(hdf5_path + '/elm/tag2',
+                         data=np.hstack((msh.triangles_regions, msh.tetrahedra_regions)).flatten())
+        f.create_dataset(hdf5_path + '/elm/triangle_number_list', data=msh.triangles)
+        f.create_dataset(hdf5_path + '/elm/tri_tissue_type', data=msh.triangles_regions.flatten())
+        f.create_dataset(hdf5_path + '/elm/tetrahedra_number_list', data=msh.tetrahedra)
+        f.create_dataset(hdf5_path + '/elm/tet_tissue_type', data=msh.tetrahedra_regions.flatten())
+        if msh.tetrahedra.size != 0:
+            if msh.triangles.size != 0:
+                f.create_dataset(hdf5_path + '/elm/node_number_list', data=np.vstack(
+                    (np.hstack((msh.triangles, np.zeros((msh.N_tri, 1)))),
+                     msh.tetrahedra)).astype(int))
+            else:
+                f.create_dataset(hdf5_path + '/elm/node_number_list', data=msh.tetrahedra.astype(int))
+        else:
+            f.create_dataset(hdf5_path + '/elm/node_number_list',
+                             data=np.hstack((msh.triangles, np.zeros((msh.N_tri, 1)))).astype(int))
+        f.create_dataset(hdf5_path + '/nodes/node_coord', data=msh.points)
+        f.create_dataset(hdf5_path + '/nodes/node_number', data=np.arange(msh.N_points) + 1)
+        # f.create_dataset(hdf5_path + '/nodes/units', data=['mm'])
+        f.create_dataset(hdf5_path + '/elm/tet_elm_type', data=np.array([4] * msh.N_tet))
+        f.create_dataset(hdf5_path + '/elm/tri_elm_type', data=np.array([2] * msh.N_tri))
+
+        if roi_dict is not None:
+            for roi_key in roi_dict.keys():
+                # save roi surface information
+                if roi_dict[roi_key].__class__.__name__ == 'RegionOfInterestSurface':
+
+                    f.create_dataset(f'roi_surface/{roi_key}/node_coord_up',
+                                     data=np.array(roi_dict[roi_key].node_coord_up))
+                    f.create_dataset(f'roi_surface/{roi_key}/node_coord_mid',
+                                     data=np.array(roi_dict[roi_key].node_coord_mid))
+                    f.create_dataset(f'roi_surface/{roi_key}/node_coord_low',
+                                     data=np.array(roi_dict[roi_key].node_coord_low))
+                    f.create_dataset(f'roi_surface/{roi_key}/tri_center_coord_up',
+                                     data=np.array(roi_dict[roi_key].tri_center_coord_up))
+                    f.create_dataset(f'roi_surface/{roi_key}/tri_center_coord_mid',
+                                     data=np.array(roi_dict[roi_key].tri_center_coord_mid))
+                    f.create_dataset(f'roi_surface/{roi_key}/tri_center_coord_low',
+                                     data=np.array(roi_dict[roi_key].tri_center_coord_low))
+                    f.create_dataset(f'roi_surface/{roi_key}/node_number_list',
+                                     data=np.array(roi_dict[roi_key].node_number_list))
+                    f.create_dataset(f'roi_surface/{roi_key}/delta',
+                                     data=np.array(roi_dict[roi_key].delta))
+
+                    if roi_dict[roi_key].tet_idx_tri_center_up is not None:
+                        f.create_dataset(f'roi_surface/{roi_key}/tet_idx_tri_center_up',
+                                         data=np.array(roi_dict[roi_key].tet_idx_tri_center_up).astype(int))
+
+                    f.create_dataset(f'roi_surface/{roi_key}/tet_idx_tri_center_mid',
+                                     data=np.array(roi_dict[roi_key].tet_idx_tri_center_mid).astype(int))
+
+                    if roi_dict[roi_key].tet_idx_tri_center_low is not None:
+                        f.create_dataset(f'roi_surface/{roi_key}/tet_idx_tri_center_low',
+                                         data=np.array(roi_dict[roi_key].tet_idx_tri_center_low).astype(int))
+
+                    f.create_dataset(f'roi_surface/{roi_key}/tet_idx_node_coord_mid',
+                                     data=np.array(roi_dict[roi_key].tet_idx_node_coord_mid).astype(int))
+                    f.create_dataset(f'roi_surface/{roi_key}/gm_surf_fname',
+                                     data=np.array(roi_dict[roi_key].gm_surf_fname).astype("S"))
+                    f.create_dataset(f'roi_surface/{roi_key}/wm_surf_fname',
+                                     data=np.array(roi_dict[roi_key].wm_surf_fname).astype("S"))
+                    f.create_dataset(f'roi_surface/{roi_key}/midlayer_surf_fname',
+                                     data=np.array(roi_dict[roi_key].midlayer_surf_fname).astype("S"))
+                    f.create_dataset(f'roi_surface/{roi_key}/layer',
+                                     data=roi_dict[roi_key].layer)
+                    f.create_dataset(f'roi_surface/{roi_key}/refine',
+                                     data=roi_dict[roi_key].refine)
+
+                    if roi_dict[roi_key].fn_mask is not None:
+                        f.create_dataset(f'roi_surface/{roi_key}/fn_mask',
+                                         data=np.array(roi_dict[roi_key].fn_mask).astype("S"))
+
+                    if roi_dict[roi_key].X_ROI is not None:
+                        f.create_dataset(f'roi_surface/{roi_key}/X_ROI',
+                                         data=np.array(roi_dict[roi_key].X_ROI))
+
+                    if roi_dict[roi_key].Y_ROI is not None:
+                        f.create_dataset(f'roi_surface/{roi_key}/Y_ROI',
+                                         data=np.array(roi_dict[roi_key].Y_ROI))
+
+                    if roi_dict[roi_key].Z_ROI is not None:
+                        f.create_dataset(f'roi_surface/{roi_key}/Z_ROI',
+                                         data=np.array(roi_dict[roi_key].Z_ROI))
+
+                    if roi_dict[roi_key].layers is not None:
+                        for layer in roi_dict[roi_key].layers:
+                            f.create_dataset(f'roi_surface/{roi_key}/layers/{layer.id}/node_coord',
+                                             data=np.array(layer.surface.nodes.node_coord))
+                            f.create_dataset(f'roi_surface/{roi_key}/layers/{layer.id}/node_number_list',
+                                             data=np.array(layer.surface.elm.node_number_list[:, :3] - 1))
+
+                # save roi volume information
+                if roi_dict[roi_key].__class__.__name__ == 'RegionOfInterestVolume':
+
+                    f.create_dataset(f'roi_volume/{roi_key}/node_coord',
+                                     data=np.array(roi_dict[roi_key].node_coord))
+                    f.create_dataset(f'roi_volume/{roi_key}/tet_node_number_list',
+                                     data=np.array(roi_dict[roi_key].tet_node_number_list))
+                    f.create_dataset(f'roi_volume/{roi_key}/tri_node_number_list',
+                                     data=np.array(roi_dict[roi_key].tri_node_number_list))
+
+                    if roi_dict[roi_key].tet_idx_node_coord is not None:
+                        f.create_dataset(f'roi_volume/{roi_key}/tet_idx_node_coord',
+                                         data=np.array(roi_dict[roi_key].tet_idx_node_coord))
+
+                    f.create_dataset(f'roi_volume/{roi_key}/tet_idx_tetrahedra_center',
+                                     data=np.array(roi_dict[roi_key].tet_idx_tetrahedra_center))
+                    f.create_dataset(f'roi_volume/{roi_key}/tet_idx_triangle_center',
+                                     data=np.array(roi_dict[roi_key].tet_idx_triangle_center))
+
+
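
The inverse direction, writing a mesh object out (a sketch; the ``TetrahedraLinear`` constructor signature is taken from its use in ``load_mesh_msh`` above, file name hypothetical):

    import numpy as np
    import pynibs
    from pynibs.hdf5_io.hdf5_io import write_geo_hdf5

    points = np.array([[0., 0., 0.], [1., 0., 0.], [0., 1., 0.], [0., 0., 1.]])
    tris = np.array([[0, 1, 2], [0, 1, 3], [0, 2, 3], [1, 2, 3]])
    tets = np.array([[0, 1, 2, 3]])
    msh = pynibs.mesh_struct.TetrahedraLinear(points, tris, np.ones(4, dtype=int),
                                              tets, np.ones(1, dtype=int))

    write_geo_hdf5("toy_geo.hdf5", msh)  # pass roi_dict to store ROIs as well
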
+def write_geo_hdf5_surf(out_fn, points, con, replace=False, hdf5_path='/mesh'):
+    """
+    Creates a .hdf5 file with geometry data from midlayer.
+
+    Parameters
+    ----------
+    out_fn : str
+        Filename of output .hdf5 file containing the geometry information.
+    points : np.ndarray
+        (N_points, 3) Coordinates of nodes (x,y,z).
+    con : np.ndarray
+        (N_tri, 3) Connectivity list of triangles.
+    replace : bool
+        Replace .hdf5 geometry file (True / False).
+    hdf5_path : str, default: '/mesh'
+        Folder in .hdf5 geometry file where the geometry information is saved.
+
+    Returns
+    -------
+    <File> : .hdf5 file
+        File containing the geometry information.
+
+    Example
+    -------
+    File structure of .hdf5 geometry file:
+
+    .. code-block:: sh
+
+        mesh
+        |---/elm
+        |    |--/triangle_number_list  [1,5,6;... ;1,4,8]    Connectivity of triangles [X, X, X]
+        |    |--/tri_tissue_type       [1,1, ..., 3,3,3]     Surface indices to differentiate between surfaces
+        |
+        |---/nodes
+        |    |--/node_coord            [1.2, 1.7, 1.8; ...]  Node coordinates in (mm)
+    """
+    assert out_fn.endswith('.hdf5') or out_fn.endswith('.h5')
+    os.makedirs(os.path.split(out_fn)[0], exist_ok=True)
+
+    if os.path.exists(out_fn):
+        os.remove(out_fn)
+
+    if con.min() != 0:
+        warnings.warn("Minimum connectivity is not 0.")
+
+    with h5py.File(out_fn, 'w') as h5:
+        h5.create_dataset(hdf5_path + '/nodes/' + 'node_coord',
+                          data=points)
+        h5.create_dataset(hdf5_path + '/elm/triangle_number_list',
+                          data=con)
+        h5.create_dataset(hdf5_path + '/elm/tri_tissue_type',
+                          data=np.zeros((points.shape[0], 1)).astype(int))
+
+
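
A sketch for a single-triangle midlayer surface; note the 0-based connectivity the warning above checks for, and a path with an explicit directory part so the os.makedirs call succeeds (file name hypothetical):

    import numpy as np
    from pynibs.hdf5_io.hdf5_io import write_geo_hdf5_surf

    points = np.array([[0., 0., 0.], [1., 0., 0.], [0., 1., 0.]])
    con = np.array([[0, 1, 2]])  # 0-based triangle connectivity

    write_geo_hdf5_surf("./midlayer_geo.hdf5", points, con)
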
+def write_geo_hdf5_vol(out_fn, points, con, replace=False, hdf5_path='/mesh'):
+    """
+    Creates a .hdf5 file with volume geometry data.
+
+    Parameters
+    ----------
+    out_fn : str
+        Filename of output .hdf5 file containing the geometry information.
+    points : np.ndarray
+        (N_points, 3) Coordinates of nodes (x,y,z).
+    con : np.ndarray
+        (N_tet, 4) Connectivity list of tetrahedra.
+    replace : bool
+        Replace .hdf5 geometry file (True / False).
+    hdf5_path : str, default: '/mesh'
+        Folder in .hdf5 geometry file where the geometry information is saved.
+
+    Returns
+    -------
+    <File> : .hdf5 file
+        File containing the geometry information.
+
+    Example
+    -------
+    File structure of .hdf5 geometry file:
+
+    .. code-block:: sh
+
+        mesh
+        |---/elm
+        |    |--/tetrahedra_number_list  [1,5,6,7;... ;1,4,8,9]  Connectivity of tetrahedra [X, X, X, X]
+        |    |--/tet_tissue_type         [1,1, ..., 3,3,3]       Volume indices to differentiate between volumes
+        |
+        |---/nodes
+        |    |--/node_coord              [1.2, 1.7, 1.8; ...]    Node coordinates in (mm)
+    """
+    if os.path.exists(out_fn):
+        os.remove(out_fn)
+
+    with h5py.File(out_fn, 'w') as h5:
+        h5.create_dataset(hdf5_path + '/nodes/' + 'node_coord',
+                          data=points)
+        h5.create_dataset(hdf5_path + '/elm/tetrahedra_number_list',
+                          data=con)
+        h5.create_dataset(hdf5_path + '/elm/tet_tissue_type',
+                          data=np.zeros((points.shape[0], 1)).astype(int))
+
+
+def write_data_hdf5(out_fn, data, data_names, hdf5_path='/data', mode="a"):
+    """
+    Creates a .hdf5 file with data.
+
+    Parameters
+    ----------
+    out_fn : str
+        Filename of output .hdf5 file containing the data.
+    data : np.ndarray or list of np.ndarray of float
+        Data to save in hdf5 data file.
+    data_names : str or list of str
+        Labels of data.
+    hdf5_path : str, default: '/data'
+        Folder in .hdf5 data file where the data is saved.
+    mode : str, default: "a"
+        Mode: "a" append, "w" write (overwrite).
+
+    Returns
+    -------
+    <File> : .hdf5 file
+        File containing the stored data.
+
+    Example
+    -------
+    File structure of .hdf5 data file
+
+    .. code-block:: sh
+
+        data
+        |---/data_names[0]    [data[0]]      First dataset
+        |---/ ...             ...            ...
+        |---/data_names[N-1]  [data[N-1]]    Last dataset
+    """
+    if type(data) is not list:
+        data = [data]
+
+    if type(data_names) is not list:
+        data_names = [data_names]
+
+    assert len(data_names) == len(data), f"Different number of data_names ({len(data_names)}) and data ({len(data)})."
+    with h5py.File(out_fn, mode) as f:
+        for i, data_name in enumerate(data_names):
+            if isinstance(data[i], np.ndarray):
+                f.create_dataset(hdf5_path + '/' + data_name, data=data[i], dtype="float64")
+            else:
+                f.create_dataset(hdf5_path + '/' + data_name, data=data[i])
+
+
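
A generic-writer sketch (hypothetical names); mode="w" overwrites, while the default "a" appends to an existing file:

    import numpy as np
    from pynibs.hdf5_io.hdf5_io import write_data_hdf5

    write_data_hdf5("results.hdf5",
                    data=[np.random.rand(100), np.random.rand(100)],
                    data_names=["E_mag", "E_tan"],
                    mode="w")  # creates /data/E_mag and /data/E_tan
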
+def write_data_hdf5_surf(data, data_names, data_hdf_fn_out, geo_hdf_fn,
+                         replace=False, replace_array_in_file=True, datatype='tris'):
+    """
+    Saves surface data to a .hdf5 data file and generates a corresponding .xdmf file linking both.
+    The directory of data_hdf_fn_out and geo_hdf_fn should be the same, as only basenames of files are stored
+    in the .xdmf file.
+
+    Parameters
+    ----------
+    data : np.ndarray or list
+        (N_points_ROI, N_components) Data to map on surfaces.
+    data_names : str or list
+        Names for datasets.
+    data_hdf_fn_out : str
+        Filename of .hdf5 data file.
+    geo_hdf_fn : str
+        Filename of .hdf5 geo file containing the geometry information (has to exist).
+    replace : bool, default: False
+        Replace existing .hdf5 and .xdmf file completely.
+    replace_array_in_file : bool, default: True
+        Replace existing array in file.
+    datatype : str, default: 'tris'
+        Triangle ('tris') or node ('nodes') data.
+
+    Returns
+    -------
+    <File> : .hdf5 file
+        data_hdf_fn_out.hdf5 containing data
+    <File> : .xdmf file
+        data_hdf_fn_out.xdmf containing information about the .hdf5 file structure for Paraview
+
+    Example
+    -------
+    File structure of .hdf5 data file
+
+    .. code-block:: sh
+
+        /data
+        |---/tris
+        |    |---dataset_0    [dataset_0]    (size: N_dataset_0 x M_dataset_0)
+        |    |--- ...
+        |    |---dataset_K    [dataset_K]    (size: N_dataset_K x M_dataset_K)
+    """
+    assert datatype in ['tris', 'nodes']
+
+    # Check if files already exist
+    if data_hdf_fn_out[-4:] != 'hdf5':
+        data_hdf_fn_out += '.hdf5'
+
+    data_xdmf_fn = data_hdf_fn_out[:-4] + 'xdmf'
+    if os.path.exists(data_hdf_fn_out):
+        if replace:
+            os.remove(data_hdf_fn_out)
+        elif not replace and replace_array_in_file:
+            pass
+        else:
+            raise FileExistsError(data_hdf_fn_out + " already exists")
+    if os.path.exists(data_xdmf_fn):
+        if replace:
+            os.remove(data_xdmf_fn)
+        elif not replace and replace_array_in_file:
+            pass
+        else:
+            raise FileExistsError(data_xdmf_fn + " already exists")
+
+    # Check for correct data and data_names
+    if type(data) is np.ndarray:
+        data = [data]
+    elif type(data) is list:
+        for dat in data:
+            if type(dat) is not np.ndarray:
+                raise NotImplementedError
+    else:
+        raise NotImplementedError
+
+    if type(data_names) is str:
+        data_names = [data_names]
+    elif type(data_names) is not list:
+        raise NotImplementedError
+
+    if len(data) != len(data_names):
+        raise ValueError(f'Dimension mismatch, data (len: {len(data)}) <-> data_names (len: {len(data_names)})')
+
+    with h5py.File(data_hdf_fn_out, 'a') as h5_data:
+
+        # write data
+        only_data_replaced = True  # keep .xdmf if some data is only replaced in .hdf5 file and no new data is added
+
+        for idx, dat in enumerate(data):
+            if replace_array_in_file:
+                try:
+                    del h5_data[f'/data/{datatype}/' + data_names[idx]]
+                except KeyError:
+                    only_data_replaced = False
+
+            else:
+                only_data_replaced = False
+
+            h5_data.create_dataset(f'/data/{datatype}/' + data_names[idx], data=data[idx])
+
+    if not only_data_replaced:
+        data_dims = [dat.shape[1] if dat.ndim > 1 else 1 for dat in data]
+        pynibs.write_xdmf_surf(data_hdf_fn_out, data_names, data_xdmf_fn, geo_hdf_fn, data_dims)
+
+
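
Chaining the surface geometry and data writers yields an .hdf5/.xdmf pair that Paraview can load; a sketch, assuming both files live in the same directory so the relative links in the .xdmf resolve (file names hypothetical):

    import numpy as np
    from pynibs.hdf5_io.hdf5_io import write_geo_hdf5_surf, write_data_hdf5_surf

    points = np.array([[0., 0., 0.], [1., 0., 0.], [0., 1., 0.]])
    con = np.array([[0, 1, 2]])
    write_geo_hdf5_surf("./roi_geo.hdf5", points, con)

    data = np.random.rand(con.shape[0])  # one scalar per triangle
    write_data_hdf5_surf(data=data, data_names="E_mag",
                         data_hdf_fn_out="./roi_data.hdf5",
                         geo_hdf_fn="./roi_geo.hdf5", replace=True)
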
+def write_data_hdf5_vol(data, data_names, data_hdf_fn_out, geo_hdf_fn, replace=False, replace_array_in_file=True):
+    """
+    Saves volume data to a .hdf5 data file and generates a corresponding .xdmf file linking both.
+    The directory of data_hdf_fn_out and geo_hdf_fn should be the same, as only basenames of files are stored
+    in the .xdmf file.
+
+    Parameters
+    ----------
+    data : np.ndarray or list
+        (N_points_ROI, N_components) Data to map on the volume.
+    data_names : str or list
+        Names for datasets.
+    data_hdf_fn_out : str
+        Filename of .hdf5 data file.
+    geo_hdf_fn : str
+        Filename of .hdf5 geo file containing the geometry information (has to exist).
+    replace : bool, default: False
+        Replace existing .hdf5 and .xdmf file completely.
+    replace_array_in_file : bool, default: True
+        Replace existing array in file.
+
+    Returns
+    -------
+    <File> : .hdf5 file
+        data_hdf_fn_out.hdf5 containing data
+    <File> : .xdmf file
+        data_hdf_fn_out.xdmf containing information about the .hdf5 file structure for Paraview.
+
+    Example
+    -------
+    File structure of .hdf5 data file
+
+    .. code-block:: sh
+
+        /data
+        |---/tets
+        |    |---dataset_0    [dataset_0]    (size: N_dataset_0 x M_dataset_0)
+        |    |--- ...
+        |    |---dataset_K    [dataset_K]    (size: N_dataset_K x M_dataset_K)
+    """
+    # Check if files already exist
+    if data_hdf_fn_out[-4:] != 'hdf5':
+        data_hdf_fn_out += '.hdf5'
+
+    data_xdmf_fn = data_hdf_fn_out[:-4] + 'xdmf'
+    if os.path.exists(data_hdf_fn_out):
+        if replace:
+            os.remove(data_hdf_fn_out)
+        elif not replace and replace_array_in_file:
+            pass
+        else:
+            warnings.warn(data_hdf_fn_out + " already exists. Quitting")
+            return
+    if os.path.exists(data_xdmf_fn):
+        if replace:
+            os.remove(data_xdmf_fn)
+        elif not replace and replace_array_in_file:
+            pass
+        else:
+            warnings.warn(data_xdmf_fn + " already exists. Quitting")
+            return
+
+    # Check for correct data and data_names
+    if type(data) is np.ndarray:
+        data = [data]
+    elif type(data) is list:
+        for dat in data:
+            if type(dat) is not np.ndarray:
+                raise NotImplementedError
+    else:
+        raise NotImplementedError
+
+    if type(data_names) is str:
+        data_names = [data_names]
+    elif type(data_names) is not list:
+        raise NotImplementedError
+
+    if len(data) != len(data_names):
+        raise ValueError(f'Dimension mismatch, data (len: {len(data)}) <-> data_names (len: {len(data_names)})')
+
+    with h5py.File(data_hdf_fn_out, 'a') as h5_data, \
+            h5py.File(geo_hdf_fn, 'r') as h5_geo:
+
+        # if the geo file exists in the same folder as the data file, only use the relative path
+        if os.path.split(data_hdf_fn_out)[0] == os.path.split(geo_hdf_fn)[0]:
+            geo_hdf_fn = os.path.basename(geo_hdf_fn)
+
+        # write data
+        only_data_replaced = True  # keep .xdmf if some data is only replaced in .hdf5 file and no new data is added
+
+        for idx, dat in enumerate(data):
+            if replace_array_in_file:
+                try:
+                    del h5_data['/data/tets/' + data_names[idx]]
+                except KeyError:
+                    only_data_replaced = False
+
+            else:
+                only_data_replaced = False
+
+            h5_data.create_dataset('/data/tets/' + data_names[idx], data=data[idx])
+
+        if not only_data_replaced:
+            with open(data_xdmf_fn, 'w') as xdmf:
+                # write xdmf file linking the data to the volumes in geo_hdf_fn
+                xdmf.write('<?xml version="1.0"?>\n')
+                xdmf.write('<!DOCTYPE Xdmf SYSTEM "Xdmf.dtd" []>\n')
+                xdmf.write('<Xdmf Version="2.0" xmlns:xi="http://www.w3.org/2001/XInclude">\n')
+                xdmf.write('<Domain>\n')
+
+                # one collection grid
+                xdmf.write('<Grid\nCollectionType="Spatial"\nGridType="Collection"\nName="Collection">\n')
+
+                # read all available volumes
+                volume = []
+                lookup_str = 'tetrahedra_number_list_'
+                lookup_str_node = 'node_coord_'
+                lookup_str_tri = 'tet_tissue_type_'
+
+                keys = list(h5_geo['mesh/elm/'].keys())
+                for key in keys:
+                    idx = key.find(lookup_str)
+                    if idx >= 0:
+                        volume.append(key[(idx + len(lookup_str)):])
+
+                if not volume:
+                    volume = []
+                    lookup_str = 'tetrahedra_number_list'
+                    lookup_str_node = 'node_coord'
+                    lookup_str_tri = 'tet_tissue_type'
+                    keys = list(h5_geo['mesh/elm/'].keys())
+                    for key in keys:
+                        idx = key.find(lookup_str)
+                        if idx >= 0:
+                            volume.append(key[(idx + len(lookup_str)):])
+
+                data_written = False
+
+                for vol in volume:
+
+                    n_tets = len(h5_geo['/mesh/elm/' + lookup_str + vol][:])
+                    n_nodes = len(h5_geo['/mesh/nodes/' + lookup_str_node + vol][:])
+                    assert n_tets and n_nodes
+
+                    # one grid per volume ...
+                    ###########################
+                    xdmf.write('<Grid Name="tris" GridType="Uniform">\n')
+                    xdmf.write('<Topology NumberOfElements="' + str(n_tets) +
+                               '" TopologyType="Tetrahedron" Name="' + vol + '_Tet">\n')
+                    xdmf.write('<DataItem Format="HDF" Dimensions="' + str(n_tets) + ' 4">\n')
+                    xdmf.write(geo_hdf_fn + ':' + '/mesh/elm/' + lookup_str + vol + '\n')
+                    xdmf.write('</DataItem>\n')
+                    xdmf.write('</Topology>\n')
+
+                    # nodes
+                    xdmf.write('<Geometry GeometryType="XYZ">\n')
+                    xdmf.write('<DataItem Format="HDF" Dimensions="' + str(n_nodes) + ' 3">\n')
+                    xdmf.write(geo_hdf_fn + ':' + '/mesh/nodes/' + lookup_str_node + vol + '\n')
+                    xdmf.write('</DataItem>\n')
+                    xdmf.write('</Geometry>\n')
+
+                    # data
+                    for idx, dat in enumerate(data):
+                        data_dim = dat.shape[1] if dat.ndim > 1 else 1
+
+                        xdmf.write('<Attribute Name="' + data_names[idx] + '" AttributeType="Scalar" Center="Cell">\n')
+                        xdmf.write('<DataItem Format="HDF" Dimensions="' + str(n_tets) + ' ' + str(data_dim) + '">\n')
+                        xdmf.write(os.path.basename(data_hdf_fn_out) + ':' + '/data/tets/' + data_names[idx] + '\n')
+                        xdmf.write('</DataItem>\n')
+                        xdmf.write('</Attribute>\n')
+
+                    # tissue_type
+                    xdmf.write('<Attribute Name="tissue_type" AttributeType="Scalar" Center="Node">\n')
+                    xdmf.write('<DataItem Format="HDF" Dimensions="' + str(n_nodes) + ' 1">\n')
+                    xdmf.write(geo_hdf_fn + ':' + '/mesh/elm/' + lookup_str_tri + vol + '\n')
+                    xdmf.write('</DataItem>\n')
+                    xdmf.write('</Attribute>\n')
+                    xdmf.write('</Grid>\n')
+
+                xdmf.write('</Grid>\n')
+                xdmf.write('</Domain>\n')
+                xdmf.write('</Xdmf>\n')
+
+
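
The volume variant pairs with write_geo_hdf5_vol, whose dataset names match the lookup strings above; a sketch with a single tetrahedron (hypothetical file names, same directory):

    import numpy as np
    from pynibs.hdf5_io.hdf5_io import write_geo_hdf5_vol, write_data_hdf5_vol

    points = np.array([[0., 0., 0.], [1., 0., 0.], [0., 1., 0.], [0., 0., 1.]])
    con = np.array([[0, 1, 2, 3]])
    write_geo_hdf5_vol("vol_geo.hdf5", points, con)

    data = np.random.rand(con.shape[0])  # one scalar per tetrahedron
    write_data_hdf5_vol(data=data, data_names="E_mag",
                        data_hdf_fn_out="vol_data.hdf5",
                        geo_hdf_fn="vol_geo.hdf5", replace=True)
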
+def print_attrs(name, obj):
+    """
+    Helper function for :py:meth:`hdf_2_ascii()`. To be called from ``h5py.Group.visititems()``.
+
+    Parameters
+    ----------
+    name : str
+        Name of structural element.
+    obj : object
+        Structural element.
+
+    Returns
+    -------
+    <Print> : Structure of .hdf5 file.
+    """
+    import h5py
+
+    if isinstance(obj, h5py.Dataset):
+        print('/' + name + '\t\t ' + str(obj.shape))
+    else:
+        print('\n/' + name)
+
+    for key, val in obj.attrs.items():
+        print(" %s: %s" % (key, val))
+
+
+def hdf_2_ascii(hdf5_fn):
+    """
+    Prints out the structure of a given .hdf5 file.
+
+    Parameters
+    ----------
+    hdf5_fn : str
+        Filename of .hdf5 file.
+
+    Returns
+    -------
+    <Print> : Structure of .hdf5 file.
+    """
+    print('/')
+    with h5py.File(hdf5_fn, 'r') as h5:
+        h5.visititems(print_attrs)
+
+
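
Quick structure inspection from Python, analogous to HDFView (a sketch, hypothetical file name):

    import h5py
    import numpy as np
    from pynibs.hdf5_io.hdf5_io import hdf_2_ascii

    with h5py.File("inspect_me.hdf5", "w") as f:
        f.create_dataset("mesh/nodes/node_coord", data=np.random.rand(4, 3))

    hdf_2_ascii("inspect_me.hdf5")  # prints each group and dataset shape
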
def simnibs_results_msh2hdf5_workhorse(fn_msh, fn_hdf5, session, pos_tms_idx, pos_local_idx, subject, mesh_idx,
|
|
936
|
+
mode_xdmf="r+",
|
|
937
|
+
verbose=False, overwrite=False, mid2roi=False):
|
|
938
|
+
"""
|
|
939
|
+
Converts simnibs .msh results file to .hdf5 (including midlayer data if desired)
|
|
940
|
+
|
|
941
|
+
Parameters
|
|
942
|
+
----------
|
|
943
|
+
fn_msh : list of str
|
|
944
|
+
Filenames of .msh results files from SimNIBS.
|
|
945
|
+
fn_hdf5 : str or list of str
|
|
946
|
+
Filenames of .hdf5 results files.
|
|
947
|
+
session : Simnibs Session object
|
|
948
|
+
Simnibs session the simulations were conducted with.
|
|
949
|
+
pos_tms_idx : list of int
|
|
950
|
+
Index of the simulation w.r.t. to the simnibs TMSList (inside ``session``).
|
|
951
|
+
For every coil a separate TMSList exists, which contains multiple coil positions.
|
|
952
|
+
pos_local_idx : list of int
|
|
953
|
+
Index of the simulation w.r.t. to the simnibs POSlist in the TMSList (inside ``session``).
|
|
954
|
+
For every coil a separate TMSList exists, which contains multiple coil positions.
|
|
955
|
+
subject : Subject object
|
|
956
|
+
pynibs.Subject.
|
|
957
|
+
mesh_idx : int or str
|
|
958
|
+
Mesh index or id.
|
|
959
|
+
mode_xdmf : str, default: "r+"
|
|
960
|
+
Mode to open hdf5_geo file to write xdmf. If hdf5_geo is already separated in tets and tris etc.,
|
|
961
|
+
the file is not changed, use "r" to avoid IOErrors in case of parallel computing.
|
|
962
|
+
verbose : bool, default: False
|
|
963
|
+
Print output messages.
|
|
964
|
+
overwrite: bool, default: False
|
|
965
|
+
Overwrite .hdf5 file if existing.
|
|
966
|
+
mid2roi : bool, list of string, or string, default: False
|
|
967
|
+
If the mesh contains ROIs and the e-field was calculated in the midlayer using SimNIBS
|
|
968
|
+
(``S.map_to_surf = True``), the midlayer results will be mapped from the simnibs midlayer to the ROIs
|
|
969
|
+
(takes some time for large ROIs).
|
|
970
|
+
|
|
971
|
+
Returns
|
|
972
|
+
-------
|
|
973
|
+
<File> : .hdf5 file
|
|
974
|
+
.hdf5 file containing the results. An .xdmf file is also created to link the results with the mesh .hdf5 file
|
|
975
|
+
of the subject.
|
|
976
|
+
"""
|
|
977
|
+
import simnibs
|
|
978
|
+
if type(fn_msh) is not list:
|
|
979
|
+
fn_msh = [fn_msh]
|
|
980
|
+
|
|
981
|
+
if type(fn_hdf5) is not list:
|
|
982
|
+
fn_hdf5 = [fn_hdf5]
|
|
983
|
+
|
|
984
|
+
if type(mid2roi) is str:
|
|
985
|
+
mid2roi = [mid2roi]
|
|
986
|
+
|
|
987
|
+
# Save results in .hdf5 format
|
|
988
|
+
regex = r"[\d]-[\d]{4}"
|
|
989
|
+
|
|
990
|
+
idx = np.hstack((np.array(pos_tms_idx)[:, None], np.array(pos_local_idx)[:, None]))
|
|
991
|
+
|
|
992
|
+
for f_msh in fn_msh:
|
|
993
|
+
indices = np.array(re.findall(regex, f_msh)[0].split("-")).astype(int) - 1
|
|
994
|
+
f_tms_idx = indices[0]
|
|
995
|
+
f_local_idx = indices[1]
|
|
996
|
+
f_global_idx = np.where((idx == indices).all(axis=1))[0][0]
|
|
997
|
+
|
|
998
|
+
if os.path.exists(fn_hdf5[f_global_idx] + ".hdf5") and not overwrite:
|
|
999
|
+
if verbose:
|
|
1000
|
+
print(f"Skipping {f_msh} --> {fn_hdf5[f_global_idx]}.hdf5")
|
|
1001
|
+
continue
|
|
1002
|
+
|
|
1003
|
+
if verbose:
|
|
1004
|
+
print(f"Transforming {f_msh} --> {fn_hdf5[f_global_idx]}.hdf5")
|
|
1005
|
+
|
|
1006
|
+
# read dipole position and magnitude
|
|
1007
|
+
fn_coil_geo = os.path.join(session.pathfem,
|
|
1008
|
+
os.path.splitext(os.path.split(session.fnamehead)[1])[0] +
|
|
1009
|
+
"_TMS_" +
|
|
1010
|
+
str(f_tms_idx + 1) + "-" + str(f_local_idx + 1).zfill(4) + "_" +
|
|
1011
|
+
os.path.splitext(
|
|
1012
|
+
os.path.splitext(os.path.split(session.poslists[f_tms_idx].fnamecoil)[1])[
|
|
1013
|
+
0])[0] +
|
|
1014
|
+
"_nii_coil_pos.geo")
|
|
1015
|
+
|
|
1016
|
+
# for some reason, the .geo file of the simulation was not saved ...
|
|
1017
|
+
try:
|
|
1018
|
+
dipole_position, dipole_moment_mag = pynibs.read_coil_geo(fn_coil_geo)
|
|
1019
|
+
except FileNotFoundError:
|
|
1020
|
+
dipole_position = np.array([[0, 0, 0]])
|
|
1021
|
+
dipole_moment_mag = np.array([[0]])
|
|
1022
|
+
Warning(f"Coil .geo file not found... Skipping coil positions ({fn_coil_geo})")
|
|
1023
|
+
|
|
1024
|
+
# read .msh results file from SimNIBS
|
|
1025
|
+
msh = simnibs.read_msh(f_msh)
|
|
1026
|
+
|
|
1027
|
+
# collect data
|
|
1028
|
+
data = [dipole_position, dipole_moment_mag]
|
|
1029
|
+
data_names = ["coil/dipole_position", "coil/dipole_moment_mag"]
|
|
1030
|
+
|
|
1031
|
+
for i in range(len(msh.nodedata)):
|
|
1032
|
+
data_names.append("data/nodes/" + msh.nodedata[i].field_name)
|
|
1033
|
+
data.append(msh.nodedata[i].value)
|
|
1034
|
+
|
|
1035
|
+
for i in range(len(msh.elmdata)):
|
|
1036
|
+
data_names.append("data/tris/" + msh.elmdata[i].field_name)
|
|
1037
|
+
data.append(msh.elmdata[i].value[msh.elm.elm_type == 2,])
|
|
1038
|
+
|
|
1039
|
+
data_names.append("data/tets/" + msh.elmdata[i].field_name)
|
|
1040
|
+
data.append(msh.elmdata[i].value[msh.elm.elm_type == 4,])
|
|
1041
|
+
|
|
1042
|
+
# save dadt also in nodes (does require TBs of RAM because of inversion,
|
|
1043
|
+
# there is maybe a more elegant way using SimNIBS directly)
|
|
1044
|
+
# if msh.elmdata[i].field_name == "D":
|
|
1045
|
+
# data_names.append("data/nodes/D")
|
|
1046
|
+
# con_tets = msh.elm.node_number_list[msh.elm.elm_type == 4, ]
|
|
1047
|
+
# data.append(pynibs.mesh.data_elements2nodes(msh.elmdata[i].value[msh.elm.elm_type == 4, ], con_tets))
|
|
1048
|
+
|
|
1049
|
+
# write .hdf5 file data
|
|
1050
|
+
write_data_hdf5(out_fn=os.path.splitext(fn_hdf5[f_global_idx])[0] + ".hdf5",
|
|
1051
|
+
data=data,
|
|
1052
|
+
data_names=data_names,
|
|
1053
|
+
hdf5_path='',
|
|
1054
|
+
mode="w")
|
|
1055
|
+
|
|
1056
|
+
# write .xdmf markup file for paraview
|
|
1057
|
+
pynibs.write_xdmf(hdf5_fn=os.path.splitext(fn_hdf5[f_global_idx])[0] + ".hdf5",
|
|
1058
|
+
hdf5_geo_fn=subject.mesh[mesh_idx]["fn_mesh_hdf5"],
|
|
1059
|
+
overwrite_xdmf=True,
|
|
1060
|
+
verbose=False,
|
|
1061
|
+
mode=mode_xdmf)
|
|
1062
|
+
|
|
1063
|
+
# if calculated from Simnibs copy and crop midlayer results to ROIs
|
|
1064
|
+
if session.map_to_surf and mid2roi is not False:
|
|
1065
|
+
try:
|
|
1066
|
+
|
|
1067
|
+
# load rois
|
|
1068
|
+
if verbose:
|
|
1069
|
+
print(f"Loading ROIs")
|
|
1070
|
+
|
|
1071
|
+
roi = pynibs.load_roi_surface_obj_from_hdf5(fname=subject.mesh[mesh_idx]["fn_mesh_hdf5"])
|
|
1072
|
+
        mesh_folder = os.path.join(subject.subject_folder,
                                   'mesh', str(mesh_idx),
                                   subject.mesh[mesh_idx]["mesh_folder"])

        for roi_idx in roi.keys():
            # skip rois that are not wanted
            if isinstance(mid2roi, list) and roi_idx not in mid2roi:
                continue
            # load freesurfer surface
            if type(roi[roi_idx].gm_surf_fname) is not list:
                roi[roi_idx].gm_surf_fname = [roi[roi_idx].gm_surf_fname]

            points_gm = [None for _ in range(len(roi[roi_idx].gm_surf_fname))]
            con_gm = [None for _ in range(len(roi[roi_idx].gm_surf_fname))]

            max_idx_gm = 0

            if (type(roi[roi_idx].gm_surf_fname) is list and roi[roi_idx].gm_surf_fname[0] is not None) or \
                    (type(roi[roi_idx].gm_surf_fname) is str):
                if type(roi[roi_idx].gm_surf_fname) is str:
                    fn_surface = [roi[roi_idx].gm_surf_fname]
                else:
                    fn_surface = roi[roi_idx].gm_surf_fname

            elif (type(roi[roi_idx].midlayer_surf_fname) is list and
                  roi[roi_idx].gm_surf_fname is not None) or \
                    (type(roi[roi_idx].midlayer_surf_fname) is str):
                if type(roi[roi_idx].midlayer_surf_fname) is str:
                    fn_surface = [roi[roi_idx].midlayer_surf_fname]
                else:
                    fn_surface = roi[roi_idx].midlayer_surf_fname

            for i in range(len(fn_surface)):
                if fn_surface[i].endswith('.gii') or fn_surface[i].endswith('.gii.gz'):
                    gii_obj = nibabel.load(os.path.join(mesh_folder, fn_surface[i]))
                    points_gm[i] = gii_obj.darrays[0].data
                    con_gm[i] = gii_obj.darrays[1].data
                else:
                    points_gm[i], con_gm[i] = nibabel.freesurfer.read_geometry(
                        os.path.join(mesh_folder, fn_surface[i]))

                con_gm[i] = con_gm[i] + max_idx_gm
                max_idx_gm = max_idx_gm + points_gm[i].shape[0]

            # points_gm = np.vstack(points_gm)
            con_gm = np.vstack(con_gm)

            if verbose:
                print(f"Processing data to ROI #{roi_idx}")

            if roi[roi_idx].fn_mask is None or roi[roi_idx].fn_mask == []:
                if roi[roi_idx].X_ROI is None or roi[roi_idx].X_ROI == []:
                    roi[roi_idx].X_ROI = [-np.inf, np.inf]
                if roi[roi_idx].Y_ROI is None or roi[roi_idx].Y_ROI == []:
                    roi[roi_idx].Y_ROI = [-np.inf, np.inf]
                if roi[roi_idx].Z_ROI is None or roi[roi_idx].Z_ROI == []:
                    roi[roi_idx].Z_ROI = [-np.inf, np.inf]

                roi_mask_bool = (roi[roi_idx].node_coord_mid[:, 0] > min(roi[roi_idx].X_ROI)) & \
                                (roi[roi_idx].node_coord_mid[:, 0] < max(roi[roi_idx].X_ROI)) & \
                                (roi[roi_idx].node_coord_mid[:, 1] > min(roi[roi_idx].Y_ROI)) & \
                                (roi[roi_idx].node_coord_mid[:, 1] < max(roi[roi_idx].Y_ROI)) & \
                                (roi[roi_idx].node_coord_mid[:, 2] > min(roi[roi_idx].Z_ROI)) & \
                                (roi[roi_idx].node_coord_mid[:, 2] < max(roi[roi_idx].Z_ROI))
                roi_mask_idx = np.where(roi_mask_bool)

            else:
                if type(roi[roi_idx].fn_mask) is np.ndarray:
                    if roi[roi_idx].fn_mask.ndim == 0:
                        roi[roi_idx].fn_mask = roi[roi_idx].fn_mask.astype(str).tolist()

                # read mask from freesurfer mask file
                mask = nibabel.freesurfer.mghformat.MGHImage.from_filename(
                    os.path.join(mesh_folder, roi[roi_idx].fn_mask)).dataobj[:]
                roi_mask_idx = np.where(mask > 0.5)

            # read results data
            if verbose:
                print("Reading SimNIBS midlayer data")
            e_normal = []
            e_tan = []

            for fn_surf in fn_surface:
                fn_msh_base = os.path.splitext(os.path.split(f_msh)[1])[0]

                if "lh" in os.path.split(fn_surf)[1]:
                    fname_base = os.path.join(os.path.split(f_msh)[0], "subject_overlays", "lh." + fn_msh_base)

                if "rh" in os.path.split(fn_surf)[1]:
                    fname_base = os.path.join(os.path.split(f_msh)[0], "subject_overlays", "rh." + fn_msh_base)

                e_normal.append(
                    nibabel.freesurfer.read_morph_data(fname_base + ".central.E.normal").flatten()[:, np.newaxis])
                e_tan.append(
                    nibabel.freesurfer.read_morph_data(fname_base + ".central.E.tangent").flatten()[:, np.newaxis])

            e_normal = np.vstack(e_normal)
            e_tan = np.vstack(e_tan)

            # transform point data to element data
            if verbose:
                print("Transforming point data to element data")
            e_normal = pynibs.data_nodes2elements(data=e_normal, con=con_gm)
            e_tan = pynibs.data_nodes2elements(data=e_tan, con=con_gm)

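            # For reference, node-to-element mapping of this kind boils down to
            # averaging the values at the three vertices of each triangle; a
            # minimal, self-contained sketch (simple vertex averaging is an
            # assumption here, not necessarily the exact implementation of
            # pynibs.data_nodes2elements):
            #
            #     import numpy as np
            #     node_val = np.array([[0.], [1.], [2.], [3.]])   # one value per node
            #     tris = np.array([[0, 1, 2], [1, 2, 3]])         # triangle connectivity
            #     tri_val = node_val[tris].mean(axis=1)           # -> [[1.], [2.]]
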
            # crop results data to ROI
            # if not roi_mask_bool.all():
            if roi_mask_idx:
                if verbose:
                    print("Cropping data to ROI")

                # get row indices where all three triangle vertices lie inside the ROI
                con_row_idx = [i for i in range(con_gm.shape[0]) if
                               len(np.intersect1d(con_gm[i, ], roi_mask_idx)) == 3]

                e_normal = e_normal[con_row_idx, :].flatten()
                e_tan = e_tan[con_row_idx, :].flatten()

            e_mag = np.linalg.norm(np.vstack([e_normal, e_tan]).transpose(), axis=1).flatten()

            if verbose:
                print("Writing data to .hdf5")

            with h5py.File(os.path.splitext(fn_hdf5[f_global_idx])[0] + ".hdf5", 'a') as f:
                try:
                    del f['data/midlayer/roi_surface/{}/E_mag'.format(roi_idx)]
                    del f['data/midlayer/roi_surface/{}/E_tan'.format(roi_idx)]
                    del f['data/midlayer/roi_surface/{}/E_norm'.format(roi_idx)]
                except KeyError:
                    pass

                f.create_dataset('data/midlayer/roi_surface/{}/E_mag'.format(roi_idx), data=e_mag)
                f.create_dataset('data/midlayer/roi_surface/{}/E_tan'.format(roi_idx), data=e_tan)
                f.create_dataset('data/midlayer/roi_surface/{}/E_norm'.format(roi_idx), data=e_normal)

            del e_mag, e_normal, e_tan
    except KeyError as e:
        warnings.warn(f"Could not map2surf: {e}")

    # Write info in .hdf5 file
    #######################################################################
    with h5py.File(os.path.splitext(fn_hdf5[f_global_idx])[0] + ".hdf5", 'a') as f:
        try:
            del f["info"]
        except KeyError:
            pass

        f.create_dataset("info/date", data=datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"))
        f.create_dataset("info/sigma_WM", data=session.poslists[f_tms_idx].cond[0].value)
        f.create_dataset("info/sigma_GM", data=session.poslists[f_tms_idx].cond[1].value)
        f.create_dataset("info/sigma_CSF", data=session.poslists[f_tms_idx].cond[2].value)
        f.create_dataset("info/sigma_Skull", data=session.poslists[f_tms_idx].cond[3].value)
        f.create_dataset("info/sigma_Scalp", data=session.poslists[f_tms_idx].cond[4].value)
        if len(session.poslists[f_tms_idx].cond) > 5:
            f.create_dataset("info/sigma_EyeBalls", data=session.poslists[f_tms_idx].cond[5].value)
        f.create_dataset("info/fn_coil", data=session.poslists[f_tms_idx].fnamecoil)
        if session.poslists[f_tms_idx].pos[f_local_idx].matsimnibs is not None:
            f.create_dataset("info/matsimnibs", data=session.poslists[f_tms_idx].pos[f_local_idx].matsimnibs)
        f.create_dataset("info/dIdt", data=session.poslists[f_tms_idx].pos[f_local_idx].didt)
        f.create_dataset("info/anisotropy_type", data=session.poslists[f_tms_idx].anisotropy_type)
        f.create_dataset("info/fn_mesh_msh", data=session.fnamehead)

    del f_global_idx

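# The "info" group written above can be inspected later with plain h5py, e.g.
# (the filename is a hypothetical placeholder):
#
#     import h5py
#     with h5py.File("/data/results/sim_0001.hdf5", "r") as f:
#         print(f["info/date"][()], f["info/dIdt"][()])

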
def simnibs_results_msh2hdf5(fn_msh, fn_hdf5, S, pos_tms_idx, pos_local_idx, subject, mesh_idx, mode_xdmf="r+",
                             n_cpu=4, verbose=False, overwrite=False, mid2roi=False):
    """
    Converts SimNIBS .msh results file(s) to .hdf5/.xdmf tuples.

    Parameters
    ----------
    fn_msh : str or list of str
        Filenames (incl. path) of .msh results files from SimNIBS.
    fn_hdf5 : str or list of str
        Filenames (incl. path) of .hdf5 results files.
    S : Simnibs Session object
        Simnibs Session object the simulations are conducted with.
    pos_tms_idx : list of int
        Index of the simulation w.r.t. the simnibs TMSList (inside Session object S).
        For every coil a separate TMSList exists, which contains multiple coil positions.
    pos_local_idx : list of int
        Index of the simulation w.r.t. the simnibs POSlist in the TMSList (inside Session object S).
        For every coil a separate TMSList exists, which contains multiple coil positions.
    subject : pynibs.subject.Subject
        Subject object.
    mesh_idx : int or str
        Mesh id.
    mode_xdmf : str, default: "r+"
        Mode to open the hdf5_geo file to write the xdmf. If hdf5_geo is already separated into tets and tris etc.,
        the file is not changed; use "r" to avoid IOErrors in case of parallel computing.
    n_cpu : int
        Number of processes.
    verbose : bool, default: False
        Print output messages.
    overwrite : bool, default: False
        Overwrite .hdf5 file if existing.
    mid2roi : bool or str, default: False
        If the mesh contains ROIs and the e-field was calculated in the midlayer using simnibs
        (``S.map_to_surf = True``),
        the midlayer results will be mapped from the simnibs midlayer to the ROIs (takes some time for large ROIs).

    Returns
    -------
    <File> : .hdf5 file
        .hdf5 file containing the results. An .xdmf file is also created to link the results with the mesh .hdf5 file
        of the subject.
    """
    n_cpu_available = multiprocessing.cpu_count()
    n_cpu = min(n_cpu, n_cpu_available, len(fn_msh))
    pool = multiprocessing.Pool(n_cpu)
    save_hdf5_partial = partial(simnibs_results_msh2hdf5_workhorse,
                                fn_hdf5=fn_hdf5,
                                session=S,
                                pos_tms_idx=pos_tms_idx,
                                pos_local_idx=pos_local_idx,
                                subject=subject,
                                mesh_idx=mesh_idx,
                                mode_xdmf="r",
                                verbose=verbose,
                                overwrite=overwrite,
                                mid2roi=mid2roi)

    filenames_chunks = pynibs.compute_chunks(fn_msh, n_cpu)
    pool.map(save_hdf5_partial, filenames_chunks)
    pool.close()
    pool.join()

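# A hedged usage sketch for the parallel converter above (paths, the Session
# object `S` and the Subject object `subj` are hypothetical placeholders):
#
#     fn_msh = ["/data/simnibs/sim_0000.msh", "/data/simnibs/sim_0001.msh"]
#     fn_hdf5 = ["/data/results/sim_0000", "/data/results/sim_0001"]
#     simnibs_results_msh2hdf5(fn_msh=fn_msh, fn_hdf5=fn_hdf5, S=S,
#                              pos_tms_idx=[0, 0], pos_local_idx=[0, 1],
#                              subject=subj, mesh_idx=0, n_cpu=2)

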
def msh2hdf5(fn_msh=None, skip_roi=False, skip_layer=True, include_data=False,
             approach="mri2mesh", subject=None, mesh_idx=None):
    """
    Transforms mesh from .msh to .hdf5 format. Mesh is read from subject object or from fn_msh.

    Parameters
    ----------
    fn_msh : str, optional
        Filename of .msh file.
    skip_roi : bool, default: False
        Skip generating ROI in .hdf5.
    skip_layer : bool, default: True
        Don't create gm layers.
    include_data : bool, default: False
        Also convert data in .msh file to .hdf5 file.
    subject : pynibs.Subject, optional
        Subject information, must be set to use skip_roi=False.
    mesh_idx : int or list of int or str or list of str, optional
        Mesh index the conversion from .msh to .hdf5 is conducted for.
    approach : str
        Approach the headmodel was created with ("mri2mesh" or "headreco").

        .. deprecated:: 0.0.1
            Not supported anymore.


    Returns
    -------
    <File> : .hdf5 file
        .hdf5 file with mesh information.
    """
    import simnibs

    if approach is not None:
        warnings.warn("'approach' parameter is deprecated.", category=DeprecationWarning)

    if subject is not None:
        mesh_folder = subject.mesh[mesh_idx]["mesh_folder"]

    # load mesh from .msh file and generate mesh object
    if fn_msh is not None:
        msh = load_mesh_msh(fn_msh)
        print("Loading mesh from file: {}".format(fn_msh))
        out_fn = os.path.splitext(fn_msh)[0] + ".hdf5"

    else:
        print("Loading mesh #{} from .msh file: {}".format(mesh_idx, subject.mesh[mesh_idx]['fn_mesh_msh']))
        msh = load_mesh_msh(subject.mesh[mesh_idx]['fn_mesh_msh'])
        out_fn = subject.mesh[mesh_idx]['fn_mesh_hdf5']

    roi_passed = None
    if not skip_roi:
        assert subject is not None, "'msh2hdf5(skip_roi=False)': Must provide subject object to write out ROIs, " \
                                    "or set 'skip_roi=True'."
        roi = dict()

        if mesh_idx in subject.roi.keys():

            roi[mesh_idx] = dict()

            for roi_idx in subject.roi[mesh_idx].keys():

                print("\t Initializing ROI #{} {} (type: {})".format(roi_idx,
                                                                     subject.roi[mesh_idx][roi_idx]['info'],
                                                                     subject.roi[mesh_idx][roi_idx]['type']))
                # surface ROIs
                if subject.roi[mesh_idx][roi_idx]['type'] == 'surface':
                    print("\t\t Generating ROI")
                    # generate RegionOfInterestSurface object instance
                    roi[mesh_idx][roi_idx] = pynibs.RegionOfInterestSurface()
                    # generate the region
                    refine = False
                    try:
                        refine = subject.roi[mesh_idx][roi_idx]['refine']
                    except KeyError:
                        pass
                    roi[mesh_idx][roi_idx].make_GM_WM_surface(
                        gm_surf_fname=subject.roi[mesh_idx][roi_idx]['gm_surf_fname'],
                        wm_surf_fname=subject.roi[mesh_idx][roi_idx]['wm_surf_fname'],
                        midlayer_surf_fname=subject.roi[mesh_idx][roi_idx]['midlayer_surf_fname'],
                        mesh_folder=mesh_folder,
                        delta=subject.roi[mesh_idx][roi_idx]['delta'],
                        x_roi=subject.roi[mesh_idx][roi_idx]['X_ROI'],
                        y_roi=subject.roi[mesh_idx][roi_idx]['Y_ROI'],
                        z_roi=subject.roi[mesh_idx][roi_idx]['Z_ROI'],
                        layer=subject.roi[mesh_idx][roi_idx]['layer'],
                        fn_mask=subject.roi[mesh_idx][roi_idx]['fn_mask'],
                        refine=refine)
                    volmesh = simnibs.read_msh(subject.mesh[mesh_idx]['fn_mesh_msh'])

                    if not skip_layer:
                        try:
                            print("\t\t Generating cortical layers")
                            roi[mesh_idx][roi_idx].generate_cortical_laminae(volmesh)

                            # write .geo files of layers
                            for layer in roi[mesh_idx][roi_idx].layers:
                                fn_geo = os.path.join(subject.mesh[mesh_idx]["mesh_folder"], "roi", roi_idx,
                                                      f"geo_{layer.id}.hdf5")
                                print(f"\t\t Writing layer {layer.id} to {fn_geo}")
                                points = layer.surface.nodes.node_coord
                                con = layer.surface.elm.node_number_list[:, :3] - 1

                                pynibs.write_geo_hdf5_surf(out_fn=fn_geo,
                                                           points=points,
                                                           con=con,
                                                           replace=True,
                                                           hdf5_path='/mesh')
                                pynibs.write_xdmf(hdf5_fn=fn_geo, hdf5_geo_fn=None, overwrite_xdmf=True,
                                                  overwrite_array=True,
                                                  verbose=False, mode="r+")

                            # find tet_idx for roi in triangle_center of all 3 layers
                            print("\t\t Determining tetrahedra indices of triangle centers of midlayer")
                            roi[mesh_idx][roi_idx].determine_element_idx_in_mesh(msh=msh)
                        except ValueError:
                            print("[WARN] Layer creation requested but no white matter found in ROI bounding box. "
                                  "Cannot create cortical layers without the white matter boundary surface. "
                                  "Layer creation will be skipped...")
                    else:
                        print("\t\t Skipping creation of cortical layers.")

                # volume ROIs
                if subject.roi[mesh_idx][roi_idx]['type'] == 'volume':
                    print("\t\t Generating ROI")
                    # TODO generate ROI here
                    roi[mesh_idx][roi_idx] = pynibs.RegionOfInterestVolume()
                    # TODO: include make_roi_volume_from_points
                    # generate the region
                    roi[mesh_idx][roi_idx].make_roi_volume_from_msh(msh=msh,
                                                                    volume_type=subject.roi[mesh_idx][roi_idx][
                                                                        'volume_type'],
                                                                    x_roi=subject.roi[mesh_idx][roi_idx]['X_ROI'],
                                                                    y_roi=subject.roi[mesh_idx][roi_idx]['Y_ROI'],
                                                                    z_roi=subject.roi[mesh_idx][roi_idx]['Z_ROI'])

            roi_passed = roi[mesh_idx]

    # save mesh with roi information in .hdf5 format
    write_geo_hdf5(out_fn=out_fn,
                   msh=msh,
                   roi_dict=roi_passed,
                   hdf5_path='/mesh')

    hdf5_geo_fn = None
    if include_data:
        # load mesh in .msh format
        msh_simnibs = simnibs.read_msh(fn_msh)

        for field in msh_simnibs.field:

            # write node data
            if isinstance(msh_simnibs.field[field], simnibs.mesh_io.NodeData):

                data = msh_simnibs.field[field].value

                with h5py.File(out_fn, "a") as f:
                    f.create_dataset("data/nodes/" + field, data=data, dtype="float64")

            # write tet and tri data
            elif isinstance(msh_simnibs.field[field], simnibs.mesh_io.ElementData):
                data_tris = msh_simnibs.field[field].value[msh_simnibs.elm.elm_type == 2, ]
                data_tets = msh_simnibs.field[field].value[msh_simnibs.elm.elm_type == 4, ]

                with h5py.File(out_fn, "a") as f:
                    f.create_dataset("data/tets/" + field, data=data_tets)
                    f.create_dataset("data/tris/" + field, data=data_tris)
                hdf5_geo_fn = out_fn

    pynibs.write_xdmf(hdf5_fn=out_fn,
                      hdf5_geo_fn=hdf5_geo_fn,
                      overwrite_xdmf=True,
                      verbose=False)

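# A hedged usage sketch for the conversion above (the .msh path is a
# hypothetical placeholder); without a subject object, ROI generation must be
# skipped, and approach=None avoids the deprecation warning:
#
#     msh2hdf5(fn_msh="/data/subject_0/mesh/0/subject_0.msh",
#              skip_roi=True, approach=None)

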
def write_arr_to_hdf5(fn_hdf5, arr_name, data, overwrite_arr=True, verbose=False, check_file_exist=False):
    """
    Takes an array and adds it to an hdf5 file.

    If data is a list of dicts, ``write_dict_to_hdf5()`` is called for each dict with an adapted hdf5-folder name.
    Otherwise, data is cast to np.ndarray and the dtype of unicode data is cast to ``'|S'``.

    Parameters
    ----------
    fn_hdf5 : str
        Filename of .hdf5 file.
    arr_name : str
        Complete path in .hdf5 file with array name.
    data : ndarray, list or dict
        Data to write.
    overwrite_arr : bool, default: True
        Overwrite existing array.
    verbose : bool, default: False
        Print information.
    check_file_exist : bool, default: False
        Warn if filenames stored in the array do not exist on disk.
    """
    # dictionary
    if isinstance(data, dict):
        write_dict_to_hdf5(fn_hdf5=fn_hdf5,
                           data=data,
                           folder=f"{arr_name}",
                           verbose=verbose,
                           check_file_exist=check_file_exist)
        return

    # list of dictionaries:
    elif isinstance(data, list) and len(data) > 0 and isinstance(data[0], dict):
        for idx, lst in enumerate(data):
            write_dict_to_hdf5(fn_hdf5=fn_hdf5,
                               data=lst,
                               folder=f"{arr_name}/{idx}",
                               verbose=verbose,
                               check_file_exist=check_file_exist)
        return
    elif not isinstance(data, np.ndarray):
        data = np.array(data)

    # do some type casting from numpy/pd -> h5py
    # date column from experiment.csv is O
    # plotsetting["view"] is O list of lists of different length
    # coil1 and coil2 column names from experiment.csv are <U8
    # coil_mean column name from experiment.csv is <U12
    if data.dtype == 'O' or data.dtype.kind == 'U':
        data = data.astype('|S')
        if verbose:
            warnings.warn(f"Converting array '{arr_name}' to string")

    try:
        key = os.path.split(arr_name)[1]
        if key.endswith('fname') or key.startswith('fn'):
            if data.dtype.type == np.bytes_:
                data_l = np.char.decode(data, 'UTF-8').tolist()
                if not isinstance(data_l, list):
                    data_l = [data_l]
                for d in data_l:
                    if d and d != 'None':
                        if isinstance(d, list):
                            for d_sublist in d:
                                if not os.path.exists(d_sublist) and check_file_exist:
                                    warnings.warn(f'Key: {arr_name}: file {d_sublist} does not exist.')
                        else:
                            if not os.path.exists(d) and check_file_exist:
                                warnings.warn(f'Key: {arr_name}: file {d} does not exist.')
    except OSError:
        pass

    with h5py.File(fn_hdf5, 'a') as f:
        # create data_set
        if overwrite_arr:
            try:
                del f[arr_name]
            except KeyError:
                pass
        f.create_dataset(arr_name, data=data)

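# A hedged usage sketch (the filename is a hypothetical placeholder):
#
#     import numpy as np
#     write_arr_to_hdf5(fn_hdf5="/tmp/example.hdf5",
#                       arr_name="data/example_arr",
#                       data=np.arange(10.))

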
def write_dict_to_hdf5(fn_hdf5, data, folder, check_file_exist=False, verbose=False):
    """
    Takes a dict (e.g. from subject.py) and passes each of its keys to write_arr_to_hdf5().

    .. code-block:: python

        fn_hdf5:folder/
               |--key1
               |--key2
               |...

    Parameters
    ----------
    fn_hdf5 : str
        Filename of .hdf5 file.
    data : dict or pynibs.Mesh
        Dictionary to write; one dataset is created per key.
    folder : str
        Folder inside the .hdf5 file to write the keys to.
    verbose : bool
        Print information.
    check_file_exist : bool
        Warn if filenames stored in the data do not exist on disk.
    """
    for key in data.keys():
        write_arr_to_hdf5(fn_hdf5=fn_hdf5,
                          arr_name=f"{folder}/{key}",
                          data=data[key],
                          verbose=verbose,
                          check_file_exist=check_file_exist)

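# A hedged usage sketch (the filename is a hypothetical placeholder); each key
# of the dict ends up as one dataset under the given folder:
#
#     write_dict_to_hdf5(fn_hdf5="/tmp/example.hdf5",
#                        data={"age": 42, "name": "subject_0"},
#                        folder="subject/info")
#     # -> datasets "subject/info/age" and "subject/info/name"

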
def read_dict_from_hdf5(fn_hdf5, folder):
    """
    Reads all arrays from an .hdf5 file folder and returns them as a dict.

    Parameters
    ----------
    fn_hdf5 : str
        Filename of .hdf5 file.
    folder : str
        Folder inside .hdf5 file to read.

    Returns
    -------
    d : dict
        Dictionary from .hdf5 file folder.
    """
    d = dict()
    with h5py.File(fn_hdf5, 'r') as f:
        for key in f[folder].keys():

            # read datasets contained in hdf5 folder
            if isinstance(f[folder][key], h5py.Dataset):

                # converting strings saved as np.bytes_ in hdf5 to str and converting 'None' to None
                if type(f[folder][key][()]) == np.bytes_:

                    d[key] = str(f[folder][key][()].astype(str))

                    # setting None values correctly
                    if d[key] == 'None':
                        d[key] = None

                # reading np.ndarray and looking for strings and None
                elif type(f[folder][key][()]) == np.ndarray:
                    d[key] = read_arr_from_hdf5(fn_hdf5, folder + '/' + key)

                else:
                    d[key] = f[folder][key][()]

            # read datasets contained in (multiple) hdf5 sub-folders
            else:
                # d[key] = list()
                # d[key].append(read_dict_from_hdf5(fn_hdf5, f"{folder}/{key}"))
                d[key] = read_dict_from_hdf5(fn_hdf5, f"{folder}/{key}")

    return d

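# A hedged round-trip sketch with write_dict_to_hdf5() from above (the
# filename is a hypothetical placeholder):
#
#     write_dict_to_hdf5(fn_hdf5="/tmp/example.hdf5",
#                        data={"delta": 0.5, "fn_mask": "None"},
#                        folder="roi/0")
#     d = read_dict_from_hdf5("/tmp/example.hdf5", "roi/0")
#     # d["delta"] == 0.5; d["fn_mask"] is None ('None' strings are mapped back)

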
def read_arr_from_hdf5(fn_hdf5, folder):
    """
    Reads an array from an .hdf5 file and returns it as a list.
    Strings stored as np.bytes_ are converted to str, and 'None' entries to None.

    Parameters
    ----------
    fn_hdf5 : str
        Filename of .hdf5 file.
    folder : str
        Folder inside .hdf5 file to read.

    Returns
    -------
    data_from_hdf5 : list
        List containing data from .hdf5 file.
    """
    arr_1d = False

    with h5py.File(fn_hdf5, 'r') as f:
        a = f[folder][:]

        if a.size == 0:
            return []

        else:
            if a.ndim == 1:
                arr_1d = True
                a = a[np.newaxis, :]

            df = pd.DataFrame(a.tolist())

            with np.nditer(a, op_flags=['readwrite'], flags=["multi_index"]) as it:
                for x in it:
                    if type(x[np.newaxis][0]) == np.bytes_:
                        df.iat[it.multi_index] = str(x.astype(str))

                    try:
                        df.iat[it.multi_index] = float(df.iat[it.multi_index])
                    except ValueError:
                        pass

                    if df.iat[it.multi_index] == 'None':
                        df.iat[it.multi_index] = None

            data_from_hdf5 = df.values.tolist()

            if arr_1d:
                data_from_hdf5 = data_from_hdf5[0]

            return data_from_hdf5

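# A hedged usage sketch (the filename is a hypothetical placeholder), showing
# how mixed string data written with write_arr_to_hdf5() is mapped back:
#
#     write_arr_to_hdf5(fn_hdf5="/tmp/example.hdf5",
#                       arr_name="misc/mixed",
#                       data=["1.5", "None", "text"])
#     read_arr_from_hdf5("/tmp/example.hdf5", "misc/mixed")
#     # -> [1.5, None, 'text']

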
def create_fibre_geo_hdf5(fn_fibres_hdf5, overwrite=True):
    """
    Reformats geometrical fibre data and adds a /plot subfolder containing the fibre points together with their
    connectivity.

    Parameters
    ----------
    fn_fibres_hdf5 : str
        Path to fibre.hdf5 file containing the original fibre data.
    overwrite : bool
        Overwrite existing /plot subfolder in .hdf5 file.
    """
    fibres = []
    # read fibre data from parent folder
    with h5py.File(fn_fibres_hdf5, "a") as f:
        for key in f.keys():
            if key == "plot" and overwrite:
                print("/plot subfolder exists but will be overwritten (overwrite=True)")
                del f[key]
            elif key == "plot" and not overwrite:
                print("/plot subfolder already exists and will not be overwritten (overwrite=False)")
                return None
            else:
                tmp = f[key][:]
                if type(tmp) is np.ndarray and tmp.shape[1] == 3:
                    fibres.append(tmp)

    # concatenate all points
    fibre_points = np.vstack(fibres)

    # create connectivity list
    fibre_con = np.hstack(
        (np.arange(fibre_points.shape[0])[:, np.newaxis], np.arange(fibre_points.shape[0])[:, np.newaxis] + 1))

    # delete connectivities between fibres
    fibre_con = np.delete(fibre_con, np.cumsum([len(fib) for fib in fibres]) - 1, 0)

    # append data to .hdf5
    with h5py.File(fn_fibres_hdf5, "a") as f:
        f.create_dataset(name="plot/fibre_points", data=fibre_points, dtype=float)
        f.create_dataset(name="plot/fibre_con", data=fibre_con, dtype=int)

    # create .xdmf file for plotting
    pynibs.create_fibre_xdmf(fn_fibre_geo_hdf5=fn_fibres_hdf5, overwrite=overwrite)

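# Worked example for the connectivity construction above: two fibres with 3
# and 2 points give 5 stacked points and candidate segments
# [[0, 1], [1, 2], [2, 3], [3, 4], [4, 5]]; np.delete() with
# np.cumsum([3, 2]) - 1 = [2, 4] removes the segment [2, 3] bridging the two
# fibres and the out-of-range segment [4, 5], leaving [[0, 1], [1, 2], [3, 4]].

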
def write_coil_hdf5(tms_coil, fn):
    """
    Creates .hdf5/.xdmf file tuples with information to visualize SimNIBS .tcd coil information.

    Can be visualized with ParaView (use the Glyph filter to view the wires).

    .. figure:: ../doc/images/two_stimulator_coil.png
       :scale: 80 %
       :alt: Example coil visualization

       SimNIBS .tcd coil model from create_two_stimulator_coil.py visualized with ParaView.

    Parameters
    ----------
    tms_coil : SimNIBS.simulation.tms_coil.tms_coil or str
        The coil (object or .tcd file) that shall be written to disk as an .hdf5 file.
    fn : str
        Filename where to write the files.

    Returns
    -------
    fn.hdf5/fn.xdmf : <file>
        ParaView file tuple with casing data.
    fn_wires.hdf5/fn_wires.xdmf : <file>
        ParaView file tuple with wiring data.
    """
    if isinstance(tms_coil, str):
        assert tms_coil.endswith('.tcd'), 'Only .tcd files are supported.'
        import simnibs
        tms_coil = simnibs.simulation.TmsCoil.from_file(tms_coil)

    # take care of output filenames
    folder, fn_coil = os.path.split(fn)
    if not fn_coil.endswith('.hdf5'):
        fn_coil = f"{os.path.splitext(fn_coil)[0]}.hdf5"
    fn_coil_wires = f"{os.path.splitext(fn_coil)[0]}_wires.hdf5"

    # write casing file
    casing_msh = tms_coil.get_mesh(apply_deformation=False,
                                   include_optimization_points=False,
                                   include_coil_elements=False)

    pynibs.write_geo_hdf5_surf(f"{folder}/{fn_coil}",
                               casing_msh.nodes.node_coord,
                               casing_msh.elm.node_number_list - 1,
                               replace=False, hdf5_path='/mesh')
    pynibs.write_xdmf(f"{folder}/{fn_coil}", overwrite_xdmf=True)

    # write file with wiring information
    if isinstance(tms_coil.elements, list):
        wire_nodes = tms_coil.elements[0].points
        wire_data = tms_coil.elements[0].values
        coil_name = np.full((tms_coil.elements[0].points.shape[0]),
                            tms_coil.elements[0].name)
        stim_name = np.full((tms_coil.elements[0].points.shape[0]),
                            tms_coil.elements[0].stimulator.name)
        for idx in range(1, len(tms_coil.elements)):
            wire_nodes = np.vstack((wire_nodes, tms_coil.elements[idx].points))
            wire_data = np.vstack((wire_data, tms_coil.elements[idx].values))
            coil_name = np.concatenate((coil_name,
                                        np.full((tms_coil.elements[idx].points.shape[0]),
                                                tms_coil.elements[idx].name)))
            stim_name = np.concatenate((stim_name,
                                        np.full((tms_coil.elements[idx].points.shape[0]),
                                                tms_coil.elements[idx].stimulator.name)))

    else:
        wire_nodes = tms_coil.elements.points
        wire_data = tms_coil.elements.values
        coil_name = np.full((tms_coil.elements.points.shape[0]), tms_coil.elements.name)
        stim_name = np.full((tms_coil.elements.points.shape[0]), tms_coil.elements.stimulator.name)

    # convert coil element names to their corresponding float values
    unique_strings = np.unique(coil_name)
    string_to_float_map = {string: i for i, string in enumerate(unique_strings)}
    coil_name = np.array([string_to_float_map[string] for string in coil_name])

    # convert stimulator names to their corresponding float values
    unique_strings = np.unique(stim_name)
    string_to_float_map = {string: i for i, string in enumerate(unique_strings)}
    stim_name = np.array([string_to_float_map[string] for string in stim_name])

    n_nodes = wire_nodes.shape[0]
    pynibs.write_arr_to_hdf5(f"{folder}/{fn_coil_wires}", arr_name='mesh/nodes/node_coord', data=wire_nodes)
    pynibs.write_arr_to_hdf5(f"{folder}/{fn_coil_wires}", arr_name='data/nodes/wire', data=wire_data)
    pynibs.write_arr_to_hdf5(f"{folder}/{fn_coil_wires}", arr_name='data/nodes/coil_name', data=coil_name)
    pynibs.write_arr_to_hdf5(f"{folder}/{fn_coil_wires}", arr_name='data/nodes/stim_name', data=stim_name)
    with open(f"{folder}/{fn_coil_wires.replace('.hdf5', '.xdmf')}", 'w') as f:
        f.write('<?xml version="1.0"?>\n')
        f.write('<!DOCTYPE Xdmf>\n')
        f.write('<Xdmf Version="2.0" xmlns:xi="http://www.w3.org/2001/XInclude">\n')
        f.write('<Domain>\n')
        f.write('<Grid CollectionType="Spatial" GridType="Collection" Name="Collection">\n')
        f.write('    <Grid Name="wire" GridType="Uniform">\n')
        f.write(f'        <Topology NumberOfElements="{n_nodes}" TopologyType="Polyvertex" Name="nodes"\n')
        f.write('                  NodesPerElement="1">\n')
        f.write('        </Topology>\n')
        f.write('        <Geometry GeometryType="XYZ">\n')
        f.write(f'            <DataItem Format="HDF" Dimensions="{n_nodes} 3">\n')
        f.write(f'                {fn_coil_wires}:/mesh/nodes/node_coord\n')
        f.write('            </DataItem>\n')
        f.write('        </Geometry>\n')
        f.write('\n')
        f.write('        <Attribute Name="data" AttributeType="Vector" Center="Node">\n')
        f.write(f'            <DataItem Format="HDF" Dimensions="{n_nodes} 3">\n')
        f.write(f'                {fn_coil_wires}:/data/nodes/wire\n')
        f.write('            </DataItem>\n')
        f.write('        </Attribute>\n')
        f.write('        <Attribute Name="coil_id" AttributeType="Scalar" Center="Node">\n')
        f.write(f'            <DataItem Format="HDF" Dimensions="{n_nodes}">\n')
        f.write(f'                {fn_coil_wires}:/data/nodes/coil_name\n')
        f.write('            </DataItem>\n')
        f.write('        </Attribute>\n')
        f.write('        <Attribute Name="stim_id" AttributeType="Scalar" Center="Node">\n')
        f.write(f'            <DataItem Format="HDF" Dimensions="{n_nodes}">\n')
        f.write(f'                {fn_coil_wires}:/data/nodes/stim_name\n')
        f.write('            </DataItem>\n')
        f.write('        </Attribute>\n')
        f.write('    </Grid>\n')
        f.write('\n')
        f.write('</Grid>\n')
        f.write('</Domain>\n')
        f.write('</Xdmf>')