pyNIBS-0.2024.8-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (107)
  1. pyNIBS-0.2024.8.dist-info/LICENSE +623 -0
  2. pyNIBS-0.2024.8.dist-info/METADATA +723 -0
  3. pyNIBS-0.2024.8.dist-info/RECORD +107 -0
  4. pyNIBS-0.2024.8.dist-info/WHEEL +5 -0
  5. pyNIBS-0.2024.8.dist-info/top_level.txt +1 -0
  6. pynibs/__init__.py +34 -0
  7. pynibs/coil.py +1367 -0
  8. pynibs/congruence/__init__.py +15 -0
  9. pynibs/congruence/congruence.py +1108 -0
  10. pynibs/congruence/ext_metrics.py +257 -0
  11. pynibs/congruence/stimulation_threshold.py +318 -0
  12. pynibs/data/configuration_exp0.yaml +59 -0
  13. pynibs/data/configuration_linear_MEP.yaml +61 -0
  14. pynibs/data/configuration_linear_RT.yaml +61 -0
  15. pynibs/data/configuration_sigmoid4.yaml +68 -0
  16. pynibs/data/network mapping configuration/configuration guide.md +238 -0
  17. pynibs/data/network mapping configuration/configuration_TEMPLATE.yaml +42 -0
  18. pynibs/data/network mapping configuration/configuration_for_testing.yaml +43 -0
  19. pynibs/data/network mapping configuration/configuration_modelTMS.yaml +43 -0
  20. pynibs/data/network mapping configuration/configuration_reg_isi_05.yaml +43 -0
  21. pynibs/data/network mapping configuration/output_documentation.md +185 -0
  22. pynibs/data/network mapping configuration/recommendations_for_accuracy_threshold.md +77 -0
  23. pynibs/data/neuron/models/L23_PC_cADpyr_biphasic_v1.csv +1281 -0
  24. pynibs/data/neuron/models/L23_PC_cADpyr_monophasic_v1.csv +1281 -0
  25. pynibs/data/neuron/models/L4_LBC_biphasic_v1.csv +1281 -0
  26. pynibs/data/neuron/models/L4_LBC_monophasic_v1.csv +1281 -0
  27. pynibs/data/neuron/models/L4_NBC_biphasic_v1.csv +1281 -0
  28. pynibs/data/neuron/models/L4_NBC_monophasic_v1.csv +1281 -0
  29. pynibs/data/neuron/models/L4_SBC_biphasic_v1.csv +1281 -0
  30. pynibs/data/neuron/models/L4_SBC_monophasic_v1.csv +1281 -0
  31. pynibs/data/neuron/models/L5_TTPC2_cADpyr_biphasic_v1.csv +1281 -0
  32. pynibs/data/neuron/models/L5_TTPC2_cADpyr_monophasic_v1.csv +1281 -0
  33. pynibs/expio/Mep.py +1518 -0
  34. pynibs/expio/__init__.py +8 -0
  35. pynibs/expio/brainsight.py +979 -0
  36. pynibs/expio/brainvis.py +71 -0
  37. pynibs/expio/cobot.py +239 -0
  38. pynibs/expio/exp.py +1876 -0
  39. pynibs/expio/fit_funs.py +287 -0
  40. pynibs/expio/localite.py +1987 -0
  41. pynibs/expio/signal_ced.py +51 -0
  42. pynibs/expio/visor.py +624 -0
  43. pynibs/freesurfer.py +502 -0
  44. pynibs/hdf5_io/__init__.py +10 -0
  45. pynibs/hdf5_io/hdf5_io.py +1857 -0
  46. pynibs/hdf5_io/xdmf.py +1542 -0
  47. pynibs/mesh/__init__.py +3 -0
  48. pynibs/mesh/mesh_struct.py +1394 -0
  49. pynibs/mesh/transformations.py +866 -0
  50. pynibs/mesh/utils.py +1103 -0
  51. pynibs/models/_TMS.py +211 -0
  52. pynibs/models/__init__.py +0 -0
  53. pynibs/muap.py +392 -0
  54. pynibs/neuron/__init__.py +2 -0
  55. pynibs/neuron/neuron_regression.py +284 -0
  56. pynibs/neuron/util.py +58 -0
  57. pynibs/optimization/__init__.py +5 -0
  58. pynibs/optimization/multichannel.py +278 -0
  59. pynibs/optimization/opt_mep.py +152 -0
  60. pynibs/optimization/optimization.py +1445 -0
  61. pynibs/optimization/workhorses.py +698 -0
  62. pynibs/pckg/__init__.py +0 -0
  63. pynibs/pckg/biosig/biosig4c++-1.9.5.src_fixed.tar.gz +0 -0
  64. pynibs/pckg/libeep/__init__.py +0 -0
  65. pynibs/pckg/libeep/pyeep.so +0 -0
  66. pynibs/regression/__init__.py +11 -0
  67. pynibs/regression/dual_node_detection.py +2375 -0
  68. pynibs/regression/regression.py +2984 -0
  69. pynibs/regression/score_types.py +0 -0
  70. pynibs/roi/__init__.py +2 -0
  71. pynibs/roi/roi.py +895 -0
  72. pynibs/roi/roi_structs.py +1233 -0
  73. pynibs/subject.py +1009 -0
  74. pynibs/tensor_scaling.py +144 -0
  75. pynibs/tests/data/InstrumentMarker20200225163611937.xml +19 -0
  76. pynibs/tests/data/TriggerMarkers_Coil0_20200225163443682.xml +14 -0
  77. pynibs/tests/data/TriggerMarkers_Coil1_20200225170337572.xml +6373 -0
  78. pynibs/tests/data/Xdmf.dtd +89 -0
  79. pynibs/tests/data/brainsight_niiImage_nifticoord.txt +145 -0
  80. pynibs/tests/data/brainsight_niiImage_nifticoord_largefile.txt +1434 -0
  81. pynibs/tests/data/brainsight_niiImage_niifticoord_mixedtargets.txt +47 -0
  82. pynibs/tests/data/create_subject_testsub.py +332 -0
  83. pynibs/tests/data/data.hdf5 +0 -0
  84. pynibs/tests/data/geo.hdf5 +0 -0
  85. pynibs/tests/test_coil.py +474 -0
  86. pynibs/tests/test_elements2nodes.py +100 -0
  87. pynibs/tests/test_hdf5_io/test_xdmf.py +61 -0
  88. pynibs/tests/test_mesh_transformations.py +123 -0
  89. pynibs/tests/test_mesh_utils.py +143 -0
  90. pynibs/tests/test_nnav_imports.py +101 -0
  91. pynibs/tests/test_quality_measures.py +117 -0
  92. pynibs/tests/test_regressdata.py +289 -0
  93. pynibs/tests/test_roi.py +17 -0
  94. pynibs/tests/test_rotations.py +86 -0
  95. pynibs/tests/test_subject.py +71 -0
  96. pynibs/tests/test_util.py +24 -0
  97. pynibs/tms_pulse.py +34 -0
  98. pynibs/util/__init__.py +4 -0
  99. pynibs/util/dosing.py +233 -0
  100. pynibs/util/quality_measures.py +562 -0
  101. pynibs/util/rotations.py +340 -0
  102. pynibs/util/simnibs.py +763 -0
  103. pynibs/util/util.py +727 -0
  104. pynibs/visualization/__init__.py +2 -0
  105. pynibs/visualization/para.py +4372 -0
  106. pynibs/visualization/plot_2D.py +137 -0
  107. pynibs/visualization/render_3D.py +347 -0
pynibs/hdf5_io/xdmf.py ADDED
@@ -0,0 +1,1542 @@
+ """
+ The `xdmf.py` module provides utilities for creating, writing, and manipulating XDMF files. XDMF (eXtensible
+ Data Model and Format) is a format for exchanging scientific data between High Performance Computing
+ codes and tools for visualization, analysis, and data processing.
+
+ The module includes functions for:
+
+ - Writing XDMF files for surfaces, such as ROIs (`write_xdmf_surf`).
+ - Creating XDMF markup files for given HDF5 files, mainly for ParaView visualization (`write_xdmf`).
+ - Creating XDMF markup files for given ROI HDF5 data files with 4D data (`write_temporal_xdmf`).
+ - Creating one XDMF file that allows ParaView plots of coil position paths (`create_position_path_xdmf`).
+ - Superimposing data stored in HDF5 files, omitting regions where `data_substitute` is found (`data_superimpose`).
+ - Writing coordinates to an XDMF file for visualization (`write_xdmf_coordinates`).
+ - Creating an XDMF file to plot fibres in ParaView (`create_fibre_xdmf`).
+
+ This module is primarily used for handling and visualizing data related to neuroimaging and brain stimulation studies.
+ """
18
+ import warnings
19
+ import os
20
+ import h5py
21
+ import numpy as np
22
+ import pynibs
23
+
24
+
25
+ def write_xdmf_surf(data_hdf_fn_out, data_names, data_xdmf_fn, geo_hdf_fn, data_dims,
26
+ data_in_tris=True, data_prefix='/data/tris/'):
+ """
+ Write an XDMF file for surfaces, such as ROIs.
+
+ Parameters
+ ----------
+ data_hdf_fn_out : str
+ Filename of the .hdf5 file containing the data.
+ data_names : list of str
+ Names of the datasets stored under ``data_prefix`` in ``data_hdf_fn_out``.
+ data_xdmf_fn : str
+ Filename of the .xdmf file to write.
+ geo_hdf_fn : str
+ Filename of the .hdf5 file containing the geometry.
+ data_dims : list of int
+ The data dimensions, one entry per dataset in ``data_names``.
+ data_in_tris : bool, default: True
+ Data is defined in triangles (element-wise); node-wise data is not implemented.
+ data_prefix : str, default: '/data/tris/'
+ Path prefix of the datasets within ``data_hdf_fn_out``.
+
+ Returns
+ -------
+ <File> : .xdmf file
+ Descriptor file pointing to the geo and data .hdf5 files.
+ """
52
+ if not data_in_tris:
53
+ raise NotImplementedError
54
+ # if the geo file is in the same folder as the data file, use only its relative path
55
+ if os.path.split(data_hdf_fn_out)[0] == os.path.split(geo_hdf_fn)[0]:
56
+ geo_hdf_fn_xdmf = os.path.basename(geo_hdf_fn)
57
+ else:
58
+ geo_hdf_fn_xdmf = geo_hdf_fn
59
+
60
+ with open(data_xdmf_fn, 'w') as xdmf, h5py.File(geo_hdf_fn, 'r') as h5_geo:
61
+ # write xdmf file linking the data to the surfaces in geo_hdf_fn
62
+ xdmf.write('<?xml version="1.0"?>\n')
63
+ xdmf.write('<!DOCTYPE Xdmf SYSTEM "Xdmf.dtd" []>\n')
64
+ xdmf.write('<Xdmf Version="2.0" xmlns:xi="http://www.w3.org/2001/XInclude">\n')
65
+ xdmf.write('<Domain>\n')
66
+
67
+ # one collection grid
68
+ xdmf.write('<Grid\nCollectionType="Spatial"\nGridType="Collection"\nName="Collection">\n')
69
+
70
+ # read all available surfaces
71
+ surface = []
72
+ lookup_str = 'triangle_number_list_'
73
+ lookup_str_node = 'node_coord_'
74
+ lookup_str_tri = 'tri_tissue_type_'
75
+
76
+ keys = list(h5_geo['mesh/elm/'].keys())
77
+ for key in keys:
78
+ idx = key.find(lookup_str)
79
+ if idx >= 0:
80
+ surface.append(key[(idx + len(lookup_str)):])
81
+
82
+ if not surface:
83
+ surface = []
84
+ lookup_str = 'triangle_number_list'
85
+ lookup_str_node = 'node_coord'
86
+ lookup_str_tri = 'tri_tissue_type'
87
+ keys = list(h5_geo['mesh/elm/'].keys())
88
+ for key in keys:
89
+ idx = key.find(lookup_str)
90
+ if idx >= 0:
91
+ surface.append(key[(idx + len(lookup_str)):])
92
+
93
+ for surf in surface:
94
+ n_tris = len(h5_geo['/mesh/elm/' + lookup_str + surf][:])
95
+ n_nodes = len(h5_geo['/mesh/nodes/' + lookup_str_node + surf][:])
96
+ assert n_tris and n_nodes, "surface has no triangles or nodes"
97
+
98
+ # one grid for triangles...
99
+ ###########################
100
+ xdmf.write('<Grid Name="tris" GridType="Uniform">\n')
101
+ xdmf.write('<Topology NumberOfElements="' + str(n_tris) +
102
+ '" TopologyType="Triangle" Name="' + surf + '_Tri">\n')
103
+ xdmf.write('<DataItem Format="HDF" Dimensions="' + str(n_tris) + ' 3">\n')
104
+ xdmf.write(geo_hdf_fn_xdmf + ':' + '/mesh/elm/' + lookup_str + surf + '\n')
105
+ xdmf.write('</DataItem>\n')
106
+ xdmf.write('</Topology>\n')
107
+
108
+ # nodes
109
+ xdmf.write('<Geometry GeometryType="XYZ">\n')
110
+ xdmf.write('<DataItem Format="HDF" Dimensions="' + str(n_nodes) + ' 3">\n')
111
+ xdmf.write(geo_hdf_fn_xdmf + ':' + '/mesh/nodes/' + lookup_str_node + surf + '\n')
112
+ xdmf.write('</DataItem>\n')
113
+ xdmf.write('</Geometry>\n')
114
+
115
+ # data
116
+ for idx, dat in enumerate(data_dims):
117
+ xdmf.write(f'<Attribute Name="{data_names[idx]}" AttributeType="Scalar" Center="Cell">\n')
118
+ xdmf.write(f'<DataItem Format="HDF" Dimensions="{str(n_tris)} {str(data_dims[idx])}">\n')
119
+ xdmf.write(os.path.basename(data_hdf_fn_out) + ':' + data_prefix + data_names[idx] + '\n')
120
+ xdmf.write('</DataItem>\n')
121
+ xdmf.write('</Attribute>\n')
122
+
123
+ # tissue_type
124
+ xdmf.write('<Attribute Name="tissue_type" AttributeType="Scalar" Center="Node">\n')
125
+ xdmf.write('<DataItem Format="HDF" Dimensions="' + str(n_nodes) + ' 1">\n')
126
+ xdmf.write(geo_hdf_fn_xdmf + ':' + '/mesh/elm/' + lookup_str_tri + surf + '\n')
127
+ xdmf.write('</DataItem>\n')
128
+ xdmf.write('</Attribute>\n')
129
+ xdmf.write('</Grid>\n')
130
+
131
+ xdmf.write('</Grid>\n')
132
+ xdmf.write('</Domain>\n')
133
+ xdmf.write('</Xdmf>\n')
134
+
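A hedged usage sketch for `write_xdmf_surf` (file and dataset names continue the toy example near the top of the file; the function does not move data, so the datasets must already sit under `data_prefix`):

```python
# Sketch: link the element-wise dataset /data/tris/c in data.hdf5 to the ROI
# surface in geo.hdf5. Assumes /data/tris/c exists (e.g. sorted there by
# write_xdmf() below); names are illustrative only.
from pynibs.hdf5_io.xdmf import write_xdmf_surf

write_xdmf_surf(
    data_hdf_fn_out="data.hdf5",
    data_names=["c"],
    data_xdmf_fn="data_surf.xdmf",
    geo_hdf_fn="geo.hdf5",
    data_dims=[1],              # one scalar column per triangle
    data_in_tris=True,
    data_prefix="/data/tris/",
)
```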
135
+
136
+ def write_xdmf(hdf5_fn, hdf5_geo_fn=None, overwrite_xdmf=False, overwrite_array=False, verbose=False, mode="r+"):
+ """
+ Creates an .xdmf markup file for a given hdf5 file, mainly for ParaView visualization. Checks if triangles and
+ tetrahedra already exist as distinct arrays in ``hdf5_fn``. If not, these are added to the .hdf5 file and
+ rebased to 0 (from 1). If only ``hdf5_fn`` is provided, spatial information has to be present as arrays for tris
+ and tets in this dataset.
+
+ Parameters
+ ----------
+ hdf5_fn : str
+ Filename of hdf5 file containing the data.
+ hdf5_geo_fn : str, optional
+ Filename of hdf5 file containing the geometry.
+ overwrite_xdmf : bool, default: False
+ Overwrite existing .xdmf file if present.
+ overwrite_array : bool, default: False
+ Overwrite existing arrays if present.
+ verbose : bool, default: False
+ Print output.
+ mode : str, default: "r+"
+ Mode to open the hdf5_geo file. If hdf5_geo is already separated into tets and tris etc., nothing has to be
+ written; use "r" to avoid IOErrors in case of parallel computing.
+
+ Returns
+ -------
+ xdmf_fn : str
+ Filename of the created .xdmf file.
+ <File> : .xdmf file
+ ``hdf5_fn`` with its extension replaced by .xdmf (data only, if ``hdf5_geo_fn`` is provided).
+ <File> : .hdf5 file
+ ``hdf5_fn``, changed if necessary.
+ <File> : .hdf5 file
+ ``hdf5_geo_fn`` containing the spatial data.
+ """
170
+
171
+ if os.path.splitext(hdf5_fn)[1] not in ['.hdf5', '.h5', '.hdf']:
172
+ print("Provide .hdf5 filename for existing file.")
173
+ return
174
+
175
+ xdmf_fn = os.path.splitext(hdf5_fn)[0] + '.xdmf'
176
+ try:
177
+ hdf5 = h5py.File(hdf5_fn, mode)
178
+ hdf5_geo = hdf5 # geometry defaults to the data file; reassigned below if hdf5_geo_fn is given
179
+ except IOError:
180
+ print(f"Error opening file: {hdf5_fn} ... Quitting")
181
+ raise IOError
182
+
183
+ hdf5_fn = os.path.basename(hdf5_fn)
184
+
185
+ if verbose:
186
+ print("Creating " + xdmf_fn)
187
+
188
+ if hdf5_geo_fn is not None:
189
+
190
+ try:
191
+ hdf5_geo = h5py.File(hdf5_geo_fn, mode)
192
+ except IOError:
193
+ print(f"Error opening file: {hdf5_geo_fn} ... Quitting")
194
+ hdf5.close()
195
+ hdf5_geo.close()
196
+ raise IOError
197
+ else:
198
+ hdf5_geo_fn = os.path.basename(hdf5_fn)
199
+
200
+ if os.path.isfile(xdmf_fn) and not overwrite_xdmf:
201
+ hdf5.close()
202
+ hdf5_geo.close()
203
+ raise FileExistsError(f'{xdmf_fn} already exists. Remove or set overwrite_xdmf=True. Quitting.')
204
+
205
+ # check if triangle and tetra data is already in 2 dataframes in hdf5
206
+ # /mesh/elm or /elm/?
207
+ if "/elm/" in hdf5_geo:
208
+ path_elm = '/elm'
209
+ else:
210
+ path_elm = '/mesh/elm'
211
+
212
+ if "/nodes/" in hdf5_geo:
213
+ node_path = '/nodes'
214
+ else:
215
+ node_path = '/mesh/nodes'
216
+
217
+ if path_elm not in hdf5_geo:
218
+ print(f"Not elements (triangles or tetrahedra) present in {hdf5_geo_fn}")
219
+ triangles, tetrahedra = None, None
220
+
221
+ elif path_elm + "/triangle_number_list" not in hdf5_geo:
222
+ if verbose:
223
+ print(("triangle_number_list and tetrahedra_number_list do not exist. Adding to " + hdf5_geo_fn + "."))
224
+
225
+ # get tris and tets from node_number list ... take the triangle ones
226
+ triangles = (hdf5_geo[path_elm + '/node_number_list'][:][hdf5_geo[path_elm + '/elm_type'][:] == 2][:, 0:3])
227
+ tetrahedra = (hdf5_geo[path_elm + '/node_number_list'][:][hdf5_geo[path_elm + '/elm_type'][:] == 4])
228
+
229
+ # add to hdf5_fn and rebase to 0
230
+ hdf5_geo.create_dataset(path_elm + '/triangle_number_list', data=triangles - 1)
231
+ hdf5_geo.create_dataset(path_elm + '/tetrahedra_number_list', data=tetrahedra - 1)
232
+
233
+ else:
234
+ triangles = hdf5_geo[path_elm + '/triangle_number_list']
235
+ try:
236
+ tetrahedra = hdf5_geo[path_elm + '/tetrahedra_number_list']
237
+ except KeyError:
238
+ tetrahedra = None
239
+
240
+ # check if data is divided into tets and tris
241
+
242
+ # get information for .xdmf
243
+ n_nodes = len(hdf5_geo[node_path + '/node_coord'])
244
+ try:
245
+ n_tets = len(tetrahedra)
246
+ except TypeError:
247
+ n_tets = -1
248
+
249
+ try:
250
+ n_tris = len(triangles)
251
+ except TypeError:
252
+ n_tris = -1
253
+
+ if path_elm + "/tri_tissue_type" not in hdf5_geo:
+ if n_tris > -1 or n_tets > -1:
256
+ if verbose:
257
+ print("elm data is not divided into tris and tets. Doing that now")
258
+ if 'tag1' in hdf5_geo[path_elm + '/']:
259
+ hdf5_geo.create_dataset(path_elm + '/tri_tissue_type',
260
+ data=hdf5_geo[path_elm + '/tag1'][:][hdf5_geo[path_elm + '/elm_type'][:] == 2])
261
+ hdf5_geo.create_dataset(path_elm + '/tet_tissue_type',
262
+ data=hdf5_geo[path_elm + '/tag1'][:][hdf5_geo[path_elm + '/elm_type'][:] == 4])
263
+
264
+ hdf5_geo.create_dataset(path_elm + '/tri_elm_type',
265
+ data=hdf5_geo[path_elm + '/elm_type'][:][hdf5_geo[path_elm + '/elm_type'][:] == 2])
266
+ hdf5_geo.create_dataset(path_elm + '/tet_elm_type',
267
+ data=hdf5_geo[path_elm + '/elm_type'][:][hdf5_geo[path_elm + '/elm_type'][:] == 4])
268
+
269
+ if "data" in hdf5:
270
+ for data in hdf5['/data/']:
271
+ value = "" # remove .value structure and save data directly in /data/dataname array
272
+ try:
273
+ if 'value' in list(hdf5['/data/' + data].keys()):
274
+ value = '/value'
275
+ except (KeyError, AttributeError):
276
+ pass
277
+ if verbose:
278
+ print(('Processing ' + data))
279
+ if len(hdf5['/data/' + data + value]) == n_tris:
280
+ if not "data/tris/" + data in hdf5:
281
+ if verbose:
282
+ print(('Writing /data/tris/' + data))
283
+ hdf5.create_dataset('/data/tris/' + data, data=hdf5['/data/' + data + value][:])
284
+
285
+ elif len(hdf5['/data/' + data + value]) == n_tets:
286
+ if not "data/tets/" + data in hdf5:
287
+ if verbose:
288
+ print(('Writing /data/tets/' + data))
289
+ hdf5.create_dataset('/data/tets/' + data, data=hdf5['/data/' + data + value][:])
290
+
291
+ elif len(hdf5['/data/' + data + value]) == n_tris + n_tets and n_tets > 0:
292
+ if not "data/tris" + data in hdf5:
293
+ if verbose:
294
+ print(('Writing /data/tris/' + data))
295
+ hdf5.create_dataset('/data/tris/' + data,
296
+ data=hdf5['/data/' + data + value][:][hdf5_geo[path_elm + '/elm_type'][:] == 2])
297
+
298
+ if not "data/tets/" + data in hdf5:
299
+ if verbose:
300
+ print(('Writing /data/tets/' + data))
301
+ hdf5.create_dataset('/data/tets/' + data,
302
+ data=hdf5['/data/' + data + value][:][hdf5_geo[path_elm + '/elm_type'][:] == 4])
303
+
304
+ elif len(hdf5['/data/' + data + value]) == n_nodes:
305
+ if not "data/nodes" + data in hdf5:
306
+ if verbose:
307
+ print(("Writing /data/nodes/" + data))
308
+ if overwrite_array:
309
+ try:
310
+ del hdf5[f'/data/nodes/{data}']
311
+ except KeyError:
312
+ pass
313
+ try:
314
+ hdf5.create_dataset('/data/nodes/' + data, data=hdf5['/data/' + data + value][:])
315
+ except RuntimeError:
316
+ print(('/data/nodes/' + data + " already exists"))
317
+ elif verbose:
318
+ print((data + " not fitting to triangle or tetrahedra or total number. Ignoring."))
319
+
320
+ if '/mesh/fields' in hdf5:
321
+ for field in hdf5['/mesh/fields']:
322
+ if verbose:
323
+ print(('Processing ' + field))
324
+ if '/data/tris/' + field not in hdf5:
325
+ hdf5.create_dataset('/data/tris/' + field,
326
+ data=hdf5['/mesh/fields/' + field + '/value'][:][
327
+ hdf5_geo[path_elm + '/elm_type'][:] == 2])
328
+ if '/data/tets/' + field not in hdf5:
329
+ hdf5.create_dataset('/data/tets/' + field,
330
+ data=hdf5['/mesh/fields/' + field + '/value'][:][
331
+ hdf5_geo[path_elm + '/elm_type'][:] == 4])
332
+
333
+ if '/elmdata/' in hdf5:
334
+ for field in hdf5['/elmdata']:
335
+ if verbose:
336
+ print(('Processing ' + field))
337
+ if '/data/tris/' + field not in hdf5:
338
+ # sometimes data is stored in a 'value' subfolder
339
+ try:
340
+ subfolder = '/value'
341
+ _ = hdf5['/elmdata/' + field + subfolder][0]
342
+ # ... sometimes not
343
+ except KeyError:
344
+ subfolder = ''
345
+ hdf5.create_dataset('/data/tris/' + field,
346
+ data=hdf5[f'/elmdata/{field}{subfolder}'][:][
347
+ hdf5_geo[f'{path_elm}/elm_type'][:] == 2])
348
+ if '/data/tets/' + field not in hdf5:
349
+ try:
350
+ subfolder = '/value'
351
+ _ = hdf5[f'/elmdata/{field}{subfolder}'][0]
352
+ except KeyError:
353
+ subfolder = ''
354
+ hdf5.create_dataset(f'/data/tets/{field}',
355
+ data=hdf5[f'/elmdata/{field}{subfolder}'][:][
356
+ hdf5_geo[f'{path_elm}/elm_type'][:] == 4])
357
+
358
+ # create .xdmf file
359
+ f = open(xdmf_fn, 'w')
360
+ space = '\t'
361
+
362
+ # header
363
+ f.write('<?xml version="1.0"?>\n')
364
+ # f.write('<!DOCTYPE Xdmf SYSTEM "Xdmf.dtd" []>\n')
365
+ f.write('<!DOCTYPE Xdmf>\n')
366
+ f.write('<Xdmf Version="2.0" xmlns:xi="http://www.w3.org/2001/XInclude">\n')
367
+ f.write('<Domain>\n')
368
+
369
+ # one collection grid
370
+ f.write('<Grid CollectionType="Spatial" GridType="Collection" Name="Collection">\n')
371
+
372
+ # one grid for triangles...
373
+ ###########################
374
+ f.write(f'{space}<Grid Name="tris" GridType="Uniform">\n')
375
+ space += '\t'
376
+ f.write(f'{space}<Topology NumberOfElements="{n_tris}" TopologyType="Triangle" Name="Tri">\n')
377
+ space += '\t'
378
+ f.write(f'{space}<DataItem Format="HDF" Dimensions="{n_tris} 3">\n')
379
+ f.write(f'{space}{hdf5_geo_fn}:{path_elm}/triangle_number_list\n')
380
+ f.write(f'{space}</DataItem>\n')
381
+ space = space[:-1]
382
+ f.write(f'{space}</Topology>\n\n')
383
+
384
+ # nodes
385
+ f.write(f'{space}<Geometry GeometryType="XYZ">\n')
386
+ space += '\t'
387
+ f.write(f'{space}<DataItem Format="HDF" Dimensions="{n_nodes} 3">\n')
388
+ f.write(f'{space}{hdf5_geo_fn}:{node_path}/node_coord\n')
389
+ f.write(f'{space}</DataItem>\n')
390
+ space = space[:-1]
391
+ f.write(f'{space}</Geometry>\n\n')
392
+
393
+ # link tissue type to tris geometry for visualization
394
+ if n_tris > -1 and 'tri_tissue_type' in hdf5_geo[path_elm + '/']:
395
+ f.write(f'{space}<Attribute Name="tissue_type" AttributeType="Scalar" Center="Cell">\n')
396
+ space += '\t'
397
+ f.write(f'{space}<DataItem Format="HDF" Dimensions="{n_tris} 1">\n')
398
+ f.write(f'{space}{hdf5_geo_fn}:{path_elm}/tri_tissue_type\n')
399
+ f.write(f'{space}</DataItem>\n')
400
+ space = space[:-1]
401
+ f.write(f'{space}</Attribute>\n\n')
402
+ # link data in tris to geometry
403
+ if '/data/tris' in hdf5:
404
+
405
+ # elm type
406
+ if 'tri_elm_type' in hdf5_geo[path_elm + '/']:
407
+ f.write(f'{space}<Attribute Name="elm_type" AttributeType="Scalar" Center="Cell">\n')
408
+ space += '\t'
409
+ f.write(f'{space}<DataItem Format="HDF" Dimensions="{n_tris} 1">\n')
410
+ f.write(f'{space}{hdf5_geo_fn}:{path_elm}/tri_elm_type\n')
411
+ f.write(f'{space}</DataItem>\n')
412
+ space = space[:-1]
413
+ f.write(f'{space}</Attribute>\n')
414
+
415
+ for key, data in hdf5['/data/tris'].items():
416
+
417
+ value = ""
418
+ try:
419
+ if 'value' in list(data.keys()):
420
+ data = data['value']
421
+ value = '/value'
422
+ except (KeyError, AttributeError):
423
+ pass
424
+ if hasattr(data, 'shape') and len(data.shape) > 1:
425
+ if data.shape[1] == 3:
426
+ attr_type = "Vector"
427
+ dim = 3
428
+ elif data.shape[1] == 1:
429
+ attr_type = "Scalar"
430
+ dim = 1
431
+ else:
+ # unsupported trailing dimension (neither 1 nor 3):
+ # raise instead of quit() so callers can handle the error
+ raise ValueError("Data shape unknown: " + str(data.shape[1]))
435
+ else:
436
+ attr_type = "Scalar"
437
+ dim = 1
438
+
439
+ f.write(f'{space}<Attribute Name="{key}" AttributeType="{attr_type}" Center="Cell">\n')
440
+ space += '\t'
441
+ f.write(f'{space}<DataItem Format="HDF" Dimensions="{n_tris} {dim} ">\n')
442
+ f.write(f'{space}{hdf5_fn}:/data/tris/{key}{value}\n')
443
+ f.write(f'{space}</DataItem>\n')
444
+ space = space[:-1]
445
+ f.write(f'{space}</Attribute>\n\n')
446
+
447
+ # node data
448
+ if '/data/nodes' in hdf5:
449
+ # data sets (mostly statistics)
450
+ space += '\t'
451
+ for key, data in hdf5['/data/nodes'].items():
452
+ value = ""
453
+ try:
454
+ if 'value' in list(data.keys()):
455
+ data = data['value']
456
+ value = '/value'
457
+ except (KeyError, AttributeError):
458
+ pass
459
+ if hasattr(data, 'shape') and len(data.shape) > 1:
460
+ if data.shape[1] == 3:
461
+ attr_type = "Vector"
462
+ dim = 3
463
+ elif data.shape[1] == 1:
464
+ attr_type = "Scalar"
465
+ dim = 1
466
+ else:
+ # unsupported trailing dimension (neither 1 nor 3):
+ # raise instead of quit() so callers can handle the error
+ raise ValueError("Data shape unknown: " + str(data.shape[1]))
470
+
471
+ else:
472
+ attr_type = "Scalar"
473
+ dim = 1
474
+
475
+ f.write(f'{space}<Attribute Name="{key}" AttributeType="{attr_type}" Center="Node">\n')
476
+ f.write(f'{space}<DataItem Format="HDF" Dimensions="{n_nodes} {dim}">\n')
477
+ f.write(f'{hdf5_fn}:/data/nodes/{key}{value}\n')
478
+
479
+ f.write('</DataItem>\n')
480
+ f.write('</Attribute>\n')
481
+ space = space[:-1]
482
+ f.write(f'{space}</Grid>\n\n')
483
+
484
+ # ...one grid for tetrahedra...
485
+ ##################################
486
+ if n_tets > 0:
487
+ f.write(f'{space}<Grid Name="tets" GridType="Uniform">\n')
488
+ space += '\t'
489
+ f.write(f'{space}<Topology NumberOfElements="{n_tets}" TopologyType="Tetrahedron" Name="Tet">\n')
490
+ space += '\t'
491
+ f.write(f'{space}<DataItem Format="HDF" Dimensions="{n_tets} 4">\n')
492
+ f.write(f'{space}{hdf5_geo_fn}:{path_elm}/tetrahedra_number_list\n')
493
+ f.write(f'{space}</DataItem>\n')
494
+ space = space[:-1]
495
+ f.write(f'{space}</Topology>\n\n')
496
+
497
+ # nodes
498
+ f.write(f'{space}<Geometry GeometryType="XYZ">\n')
499
+ space += '\t'
500
+ f.write(f'{space}<DataItem Format=\"HDF\" Dimensions=\"{n_nodes} 3">\n')
501
+ f.write(f'{space}{hdf5_geo_fn}:{node_path}/node_coord\n')
502
+ f.write(f'{space}</DataItem>\n')
503
+ space = space[:-1]
504
+ f.write(f'{space}</Geometry>\n')
505
+
506
+ # link tissue type to tets geometry for visualization
507
+ if 'tet_tissue_type' in hdf5_geo[path_elm + '/']:
508
+ f.write(f'{space}<Attribute Name="tissue_type" AttributeType="Scalar" Center="Cell">\n')
509
+ space += '\t'
510
+ f.write(f'{space}<DataItem Format=\"HDF\" Dimensions=\"{n_tets} 1\">\n')
511
+ f.write(f'{space}{hdf5_geo_fn}:{path_elm}/tet_tissue_type\n')
512
+
513
+ f.write(f'{space}</DataItem>\n')
514
+ space = space[:-1]
515
+ f.write(f'{space}</Attribute>\n')
516
+
517
+ # data in tets
518
+ if '/data/tets' in hdf5 or '/data/nodes' in hdf5 or '/mesh/fields' in hdf5:
519
+
520
+ # elm type
521
+ if 'tet_elm_type' in hdf5_geo[path_elm + '/']:
522
+ f.write('<Attribute Name="elm_type" AttributeType="Scalar" Center="Cell">\n')
523
+ f.write(f'<DataItem Format=\"HDF\" Dimensions=\"{n_tets} 1\">\n')
524
+ f.write(f'{hdf5_geo_fn}:{path_elm}/tet_elm_type\n')
525
+ f.write(f'</DataItem>\n')
526
+ f.write(f'</Attribute>\n')
527
+
528
+ # link tet data to geometry
529
+ if '/data/tets' in hdf5:
530
+ # data sets (mostly statistics)
531
+ for key, data in hdf5['/data/tets'].items():
532
+ value = ""
533
+ try:
534
+ if 'value' in list(data.keys()):
535
+ data = data['value']
536
+ value = '/value'
537
+ except (KeyError, AttributeError):
538
+ pass
539
+ if hasattr(data, 'shape') and len(data.shape) > 1:
540
+ if data.shape[1] == 3:
541
+ attr_type = "Vector"
542
+ dim = 3
543
+ elif data.shape[1] == 1:
544
+ attr_type = "Scalar"
545
+ dim = 1
546
+ else:
+ # unsupported trailing dimension (neither 1 nor 3):
+ # raise instead of quit() so callers can handle the error
+ raise ValueError("Data shape unknown: " + str(data.shape[1]))
550
+ else:
551
+ attr_type = "Scalar"
552
+ dim = 1
553
+
554
+ f.write('<Attribute Name="' + key + '" AttributeType="' + attr_type + '" Center="Cell">\n')
555
+ f.write('<DataItem Format="HDF" Dimensions="' + str(n_tets) + ' ' + str(dim) + '">\n')
556
+ f.write(hdf5_fn + ':/data/tets/' + key + value + '\n')
557
+
558
+ f.write('</DataItem>\n')
559
+ f.write('</Attribute>\n')
560
+ space = space[:-1]
561
+ f.write(f'{space}</Grid>\n')
562
+ # end tetrahedra data
563
+
564
+ # one grid for coil dipole nodes...store data hdf5.
565
+ #######################################################
566
+ if '/coil' in hdf5:
567
+ f.write('<Grid Name="coil" GridType="Uniform">\n')
568
+ f.write('<Topology NumberOfElements="' + str(len(hdf5['/coil/dipole_position'][:])) +
569
+ '" TopologyType="Polyvertex" Name="Tri">\n')
570
+ f.write('<DataItem Format="XML" Dimensions="' + str(len(hdf5['/coil/dipole_position'][:])) + ' 1">\n')
571
+ # f.write(hdf5_fn + ':' + path + '/triangle_number_list\n')
572
+ np.savetxt(f, list(range(len(hdf5['/coil/dipole_position'][:]))), fmt='%d',
573
+ delimiter=' ') # 1 2 3 4 ... N_Points
574
+ f.write('</DataItem>\n')
575
+ f.write('</Topology>\n')
576
+
577
+ # nodes
578
+ f.write('<Geometry GeometryType="XYZ">\n')
579
+ f.write('<DataItem Format="HDF" Dimensions="' + str(len(hdf5['/coil/dipole_position'][:])) + ' 3">\n')
580
+ f.write(hdf5_fn + ':' + '/coil/dipole_position\n')
581
+ f.write('</DataItem>\n')
582
+ f.write('</Geometry>\n')
583
+
584
+ # data
585
+ if '/coil/dipole_moment_mag' in hdf5:
586
+ # dipole magnitude
587
+ f.write('<Attribute Name="dipole_mag" AttributeType="Scalar" Center="Cell">\n')
588
+ f.write('<DataItem Format="HDF" Dimensions="' + str(len(hdf5['/coil/dipole_moment_mag'][:])) + ' 1">\n')
589
+ f.write(hdf5_fn + ':' + '/coil/dipole_moment_mag\n')
590
+
591
+ f.write('</DataItem>\n')
592
+ f.write('</Attribute>\n')
593
+
594
+ f.write('</Grid>\n')
595
+ # end coil dipole data
596
+
597
+ # footer
598
+ f.write('</Grid>\n')
599
+ f.write('</Domain>\n')
600
+ f.write('</Xdmf>\n')
601
+ f.close()
602
+
603
+ return xdmf_fn
604
+
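A usage sketch (same toy files as in the sketch near the top of the file; `write_xdmf` sorts `/data/c` into `/data/tris/c` and returns the .xdmf filename):

```python
# Sketch: create data.xdmf next to data.hdf5, pointing into geo.hdf5.
# Open the returned file in ParaView.
from pynibs.hdf5_io.xdmf import write_xdmf

fn_xdmf = write_xdmf(
    hdf5_fn="data.hdf5",
    hdf5_geo_fn="geo.hdf5",
    overwrite_xdmf=True,
    verbose=True,
)
print(fn_xdmf)  # -> data.xdmf
```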
605
+
606
+ def write_temporal_xdmf(hdf5_fn, data_folder='c', coil_center_folder=None, coil_ori_0_folder=None,
607
+ coil_ori_1_folder=None, coil_ori_2_folder=None, coil_current_folder=None, hdf5_geo_fn=None,
608
+ overwrite_xdmf=True, verbose=False, xdmf_fn=None):
+ """
+ Creates an .xdmf markup file for a given ROI hdf5 data file with 4D data. This was written to visualize data
+ from the permutation analysis of the regression approach.
+ It expects an .hdf5 file with a data group containing (many) subarrays. The N subarrays should be named
+ 0 to N-1, and each subarray has shape ``(N_elems, 1)``.
+
+ Not tested for whole brain.
+
+ .. code-block:: text
+
+ hdf5:/data_folder/0
+ /1
+ /2
+ /3
+ /4
+ ...
+
+ Parameters
+ ----------
+ hdf5_fn : str
+ Filename of hdf5 file containing the data.
+ data_folder : str or list of str, default: 'c'
+ Path within hdf5 to the group of dataframes.
+ hdf5_geo_fn : str, optional
+ Filename of hdf5 file containing the geometry.
+ overwrite_xdmf : bool, default: True
+ Overwrite existing .xdmf file if present.
+ coil_center_folder : str, optional
+ Path within hdf5 to the coil center positions per timestep.
+ coil_ori_0_folder : str, optional
+ Path within hdf5 to the first coil orientation vectors per timestep.
+ coil_ori_1_folder : str, optional
+ Path within hdf5 to the second coil orientation vectors per timestep.
+ coil_ori_2_folder : str, optional
+ Path within hdf5 to the third coil orientation vectors per timestep.
+ coil_current_folder : str, optional
+ Path within hdf5 to the stimulator currents per timestep.
+ xdmf_fn : str, optional
+ Filename of the temporal xdmf file. If not given, it is derived from ``hdf5_fn``.
+ verbose : bool, default: False
+ Print output.
+
+ Returns
+ -------
+ <File> : .xdmf file
+ ``hdf5_fn`` with its extension replaced by .xdmf
+ """
651
+ hdf5_fn_full = hdf5_fn
652
+
653
+ if os.path.splitext(hdf5_fn)[1] not in ['.hdf5', '.h5', '.hdf']:
654
+ raise ValueError("Provide .hdf5 filename for existing file.")
655
+
656
+ if xdmf_fn is None:
657
+ xdmf_fn = os.path.splitext(hdf5_fn)[0] + '.xdmf'
658
+ else:
659
+ if not os.path.isabs(xdmf_fn):
660
+ xdmf_fn = os.path.join(os.path.split(hdf5_fn_full)[0], xdmf_fn)
661
+
662
+ if hdf5_geo_fn is None:
663
+ hdf5_geo_fn = hdf5_fn
664
+
665
+ with h5py.File(hdf5_fn, 'r+') as hdf5, h5py.File(hdf5_geo_fn, 'r+') as hdf5_geo:
666
+ # hdf5 = h5py.File(hdf5_fn, 'r+')
667
+ # hdf5_geo = h5py.File(hdf5_geo_fn, 'r+')
668
+
669
+ if os.path.split(hdf5_fn)[0] == os.path.split(hdf5_geo_fn)[0]:
670
+ hdf5_geo_fn = os.path.basename(hdf5_geo_fn)
671
+
672
+ hdf5_fn = os.path.basename(hdf5_fn)
673
+ if os.path.isfile(xdmf_fn) and not overwrite_xdmf:
674
+ print((xdmf_fn + ' already exists. Remove or set overwrite_xdmf=True. Quitting.'))
675
+ return
676
+
677
+ # check if triangle and tetra data is already in 2 dataframes in hdf5
678
+ # /mesh/elm or /elm/?
679
+ if "/elm/" in hdf5_geo:
680
+ path = '/elm'
681
+ else:
682
+ path = '/mesh/elm'
683
+
684
+ if "/nodes/" in hdf5_geo:
685
+ node_path = '/nodes'
686
+ else:
687
+ node_path = '/mesh/nodes'
688
+
689
+ if path + "/triangle_number_list" not in hdf5_geo:
690
+
691
+ # if not, create
692
+ if verbose:
693
+ print(("triangle_number_list and tetrahedra_number_list do not exist. Adding to " + hdf5_geo_fn + "."))
694
+
695
+ # get tris and tets
696
+ triangles = (hdf5_geo[path + '/node_number_list'][:] # from node_number list...
697
+ [hdf5_geo[path + '/elm_type'][:] == 2] # ... take the triangle ones...
698
+ [:, 0:3])
699
+ tetrahedra = (hdf5_geo[path + '/node_number_list'][:] # same with tetrahedra nodelist
700
+ [hdf5_geo[path + '/elm_type'][:] == 4])
701
+
702
+ # add to hdf5_fn and rebase to 0
703
+ hdf5_geo.create_dataset(f'{path}/triangle_number_list', data=triangles - 1)
704
+ hdf5_geo.create_dataset(f'{path}/tetrahedra_number_list', data=tetrahedra - 1)
705
+ n_tets = len(tetrahedra)
706
+
707
+ else:
708
+ triangles = hdf5_geo[f'{path}/triangle_number_list']
709
+ try:
710
+ tetrahedra = hdf5_geo[path + '/tetrahedra_number_list']
711
+ n_tets = len(tetrahedra)
712
+ except KeyError:
713
+ tetrahedra = None
714
+ n_tets = 0
715
+
716
+ # check if data is divided into tets and tris
717
+
718
+ # get information for .xdmf
719
+ n_nodes = len(hdf5_geo[f'{node_path}/node_coord'])
720
+ n_tris = len(triangles)
721
+
722
+ # get shape of temporal information
723
+ dimensions = dict()
724
+ if data_folder is not None:
725
+ if isinstance(data_folder, list):
726
+ allkeys = [set(hdf5[dat_folder].keys()) for dat_folder in data_folder]
727
+ data_keys = set.intersection(*allkeys)
728
+ allkeys = set.union(*allkeys)
729
+ dif = allkeys.difference(data_keys)
730
+ if len(dif) != 0:
731
+ warnings.warn(f"Unequal sets of keys found. Missing keys: {dif}")
732
+
733
+ # get first value from dict to get shape of data array
734
+ for dat_folder in data_folder:
735
+ dimensions[dat_folder] = next(iter(hdf5[dat_folder].values())).shape[0]
736
+
737
+ else:
738
+ data_keys = hdf5[data_folder].keys()
739
+ dimensions[data_folder] = next(iter(hdf5[data_folder].values())).shape[0]
740
+ else:
741
+ data_keys = hdf5[coil_center_folder].keys()
742
+ dimensions[coil_center_folder] = next(iter(hdf5[coil_center_folder].values())).shape[0]
743
+
744
+ # create .xdmf file
745
+ f = open(xdmf_fn, 'w')
746
+ space = '\t'
747
+ # header
748
+ f.write('<?xml version="1.0"?>\n')
749
+ f.write('<!DOCTYPE Xdmf SYSTEM "Xdmf.dtd" []>\n')
750
+ f.write('<Xdmf Version="2.0" xmlns:xi="http://www.w3.org/2001/XInclude">\n')
751
+ f.write('<Domain>\n')
752
+
753
+ # one collection grid
754
+ # f.write('<Grid\nCollectionType="Spatial"\nGridType="Collection"\nName="Collection">\n')
755
+ f.write(f'{space}<Grid Name="GridTime" GridType="Collection" CollectionType="Temporal">\n')
756
+ space += '\t'
757
+ for i in data_keys:
758
+ f.write("\n<!-- " + '#' * 20 + f" Timestep {i:0>3}/{len(data_keys): >3} " + '#' * 20 + ' -->\n')
759
+ if data_folder is not None:
760
+ # one grid for triangles...
761
+ ###########################
762
+ f.write(f'{space}<Grid Name="tris" GridType="Uniform">\n')
763
+ space += '\t'
764
+ f.write(f'{space}<Time Value="{i}" /> \n')
765
+ f.write(f'{space}<Topology NumberOfElements="{n_tris}" TopologyType="Triangle" Name="Tri">\n')
766
+ space += '\t'
767
+ f.write(f'{space}<DataItem Format="HDF" Dimensions="{n_tris} 3">\n')
768
+ space += '\t'
769
+ f.write(f'{space}{hdf5_geo_fn}:{path}/triangle_number_list\n')
770
+ space = space[:-1]
771
+ f.write(f'{space}</DataItem>\n')
772
+ space = space[:-1]
773
+ f.write(f'{space}</Topology>\n\n')
774
+
775
+ # nodes
776
+ f.write(f'{space}<Geometry GeometryType="XYZ">\n')
777
+ space += '\t'
778
+ f.write(f'{space}<DataItem Format="HDF" Dimensions="{n_nodes} 3">\n')
779
+ f.write(f'{space}{hdf5_geo_fn}:{node_path}/node_coord\n')
780
+ f.write(f'{space}</DataItem>\n')
781
+ space = space[:-1]
782
+ f.write(f'{space}</Geometry>\n\n')
783
+
784
+ # data
785
+ if isinstance(data_folder, list):
786
+ for dat_folder in data_folder:
787
+ attribute_name = dat_folder.replace('/', '_').replace('\\', '_')
788
+ # scalar or vector
789
+ if len(next(iter(hdf5[dat_folder].values())).shape) > 1:
790
+ attrtype = 'Vector'
791
+ data_dims = 3
792
+ else:
793
+ attrtype = 'Scalar'
794
+ data_dims = 1
795
+
796
+ f.write(f'{space}<Attribute Name="{attribute_name}" AttributeType="{attrtype}" Center="Cell">\n')
797
+ space += '\t'
798
+ f.write(f'{space}<DataItem Format="HDF" Dimensions="{dimensions[dat_folder]} {data_dims}">\n')
799
+ f.write(f'{space}{hdf5_fn}:{dat_folder}/{i}\n')
800
+ f.write(f'{space}</DataItem>\n')
801
+ space = space[:-1]
802
+ f.write(f'{space}</Attribute>\n\n')
803
+ else:
804
+ attribute_name = data_folder.replace('/', '_').replace('\\', '_')
805
+ # scalar or vector
806
+ if len(np.squeeze(next(iter(hdf5[data_folder].values()))).shape) > 1:
807
+ attrtype = 'Vector'
808
+ data_dims = 3
809
+ else:
810
+ attrtype = 'Scalar'
811
+ data_dims = 1
812
+ f.write(f'{space}<Attribute Name="{attribute_name}" AttributeType="{attrtype}" Center="Cell">\n')
813
+ space += '\t'
814
+ f.write(f'{space}<DataItem Format="HDF" Dimensions="{dimensions[data_folder]} {data_dims}">\n')
815
+ f.write(f'{space}{hdf5_fn}:{data_folder}/{i}\n')
816
+ f.write(f'{space}</DataItem>\n')
817
+
818
+ space = space[:-1]
819
+ f.write(f'{space}</Attribute>\n\n')
820
+
821
+ # for key, data in hdf5['/data/tris'].items():
822
+ #
823
+ # value = ""
824
+ # try:
825
+ # if 'value' in list(data.keys()):
826
+ # data = data['value']
827
+ # value = '/value'
828
+ # except (KeyError, AttributeError):
829
+ # pass
830
+ # if hasattr(data, 'shape') and len(data.shape) > 1:
831
+ # if data.shape[1] == 3:
832
+ # attr_type = "Vector"
833
+ # dim = 3
834
+ # elif data.shape[1] == 1:
835
+ # attr_type = "Scalar"
836
+ # dim = 1
837
+ # else:
838
+ # print(("Data shape unknown: " + str(data.shape[1])))
839
+ # quit()
840
+ # else:
841
+ # attr_type = "Scalar"
842
+ # dim = 1
843
+ # assert attr_type
844
+ # assert dim
845
+ # # except IndexError or AttributeError:
846
+ # # AttrType = "Scalar"
847
+ # # dim = 1
848
+ #
849
+ # f.write('<Attribute Name="' + key + '" AttributeType="' + attr_type + '" Center="Cell">\n')
850
+ # f.write('<DataItem Format="HDF" Dimensions="' + str(n_tris) + ' ' + str(dim) + '">\n')
851
+ # f.write(hdf5_fn + ':/data/tris/' + key + value + '\n')
852
+ # f.write('</DataItem>\n')
853
+ # f.write('</Attribute>\n')
854
+ # # node data
855
+ # if '/data/nodes' in hdf5:
856
+ # # data sets (mostly statistics)
857
+ # for key, data in hdf5['/data/nodes'].items():
858
+ # value = ""
859
+ # try:
860
+ # if 'value' in list(data.keys()):
861
+ # data = data['value']
862
+ # value = '/value'
863
+ # except (KeyError, AttributeError):
864
+ # pass
865
+ # if hasattr(data, 'shape') and len(data.shape) > 1:
866
+ # if data.shape[1] == 3:
867
+ # attr_type = "Vector"
868
+ # dim = 3
869
+ # elif data.shape[1] == 1:
870
+ # attr_type = "Scalar"
871
+ # dim = 1
872
+ # else:
873
+ # print(("Data shape unknown: " + str(data.shape[1])))
874
+ # quit()
875
+ #
876
+ # # except IndexError or AttributeError:
877
+ # else:
878
+ # attr_type = "Scalar"
879
+ # dim = 1
880
+ #
881
+ # f.write('<Attribute Name="' + key + '" AttributeType="' + attr_type + '" Center="Node">\n')
882
+ # f.write('<DataItem Format="HDF" Dimensions="' + str(n_nodes) + ' ' + str(dim) + '">\n')
883
+ # f.write(hdf5_fn + ':/data/nodes/' + key + value + '\n')
884
+ #
885
+ # f.write('</DataItem>\n')
886
+ # f.write('</Attribute>\n')
887
+ # f.write('</Grid>\n')
888
+
889
+ # # ...one grid for tetrahedra...
890
+ # ##################################
891
+ # f.write('<Grid Name="tets" GridType="Uniform">\n')
892
+ # f.write('<Topology NumberOfElements="' + str(n_tris) + '" TopologyType="Tetrahedron" Name="Tet">\n')
893
+ # f.write('<DataItem Format="HDF" Dimensions="' + str(n_tets) + ' 4">\n')
894
+ # f.write(hdf5_geo_fn + ':' + path + '/tetrahedra_number_list\n')
895
+ # f.write('</DataItem>\n')
896
+ # f.write('</Topology>\n')
897
+
898
+ # nodes
899
+ f.write(f'{space}<Geometry GeometryType="XYZ">\n')
900
+ space += '\t'
901
+ f.write(f'{space}<DataItem Format="HDF" Dimensions="{n_nodes} 3">\n')
902
+ f.write(f'{space}{hdf5_geo_fn}:{node_path}/node_coord\n')
903
+ f.write(f'{space}</DataItem>\n')
904
+ space = space[:-1]
905
+ f.write(f'{space}</Geometry>\n')
906
+
907
+ # link tissue type to tets geometry for visualization
908
+ # if 'tet_tissue_type' in hdf5_geo[path + '/']:
909
+ # f.write(f'{space}<Attribute Name="tissue_type" AttributeType="Scalar" Center="Cell">\n')
910
+ # space += '\t'
911
+ # f.write(f'{space}<DataItem Format=\"HDF\" Dimensions=\"{n_tets} 1\">\n')
912
+ # f.write(f'{space}{hdf5_geo_fn}:{path}/tet_tissue_type\n')
913
+ # f.write(f'{space}</DataItem>\n')
914
+ # space = space[:-1]
915
+ # f.write(f'{space}</Attribute>\n')
916
+ # space = space[:-1]
917
+ # if 'tet_tissue_type' in hdf5_geo[path + '/']:
918
+ if 'tri_tissue_type' in hdf5_geo[path].keys():
919
+ f.write(f'{space}<Attribute Name="tissue_type" AttributeType="Scalar" Center="Cell">\n')
920
+ space += '\t'
921
+ f.write(f'{space}<DataItem Format=\"HDF\" Dimensions=\"{n_tris} 1\">\n')
922
+ f.write(f'{space}{hdf5_geo_fn}:{path}/tri_tissue_type\n')
923
+ f.write(f'{space}</DataItem>\n')
924
+ space = space[:-1]
925
+ f.write(f'{space}</Attribute>\n')
926
+ space = space[:-1]
927
+
928
+ # data in tets
929
+ if '/data/tets' in hdf5 or '/data/nodes' in hdf5 or '/mesh/fields' in hdf5:
930
+
931
+ # elm type
932
+ if 'tet_elm_type' in hdf5_geo[path + '/']:
933
+ f.write('<Attribute Name="elm_type" AttributeType="Scalar" Center="Cell">\n')
934
+ f.write(f'<DataItem Format=\"HDF\" Dimensions=\"{str(n_tets)} 1\">\n')
935
+ f.write(hdf5_geo_fn + ':' + path + '/tet_elm_type\n')
936
+ f.write('</DataItem>\n')
937
+ f.write('</Attribute>\n\n')
938
+
939
+ # link tet data to geometry
940
+ if '/data/tets' in hdf5:
941
+ # data sets (mostly statistics)
942
+ for key, data in hdf5['/data/tets'].items():
943
+ value = ""
944
+ try:
945
+ if 'value' in list(data.keys()):
946
+ data = data['value']
947
+ value = '/value'
948
+ except (KeyError, AttributeError):
949
+ pass
950
+ if hasattr(data, 'shape') and len(data.shape) > 1:
951
+ if data.shape[1] == 3:
952
+ attr_type = "Vector"
953
+ dim = 3
954
+ elif data.shape[1] == 1:
955
+ attr_type = "Scalar"
956
+ dim = 1
957
+ else:
+ # unsupported trailing dimension (neither 1 nor 3):
+ # raise instead of quit() so callers can handle the error
+ raise ValueError("Data shape unknown: " + str(data.shape[1]))
961
+ else:
962
+ attr_type = "Scalar"
963
+ dim = 1
964
+
965
+ f.write('<Attribute Name="' + key + '" AttributeType="' + attr_type + '" Center="Cell">\n')
966
+ f.write(f'<DataItem Format=\"HDF\" Dimensions=\"{n_tets} {dim}\">\n')
967
+ f.write(hdf5_fn + ':/data/tets/' + key + value + '\n')
968
+
969
+ f.write('</DataItem>\n')
970
+ f.write('</Attribute>\n')
971
+
972
+ f.write(f'{space}</Grid>\n\n')
973
+ # end tetrahedra data
974
+
975
+ # footer
976
+ space = space[:-1]
977
+ f.write(f'{space}</Grid>\n\n')
978
+
979
+ # one grid for coil dipole nodes...store data hdf5.
980
+ #######################################################
981
+ if '/coil' in hdf5:
982
+ for i in data_keys:
983
+ f.write(f'{space}<Grid Name="GridTime" GridType="Collection" CollectionType="Temporal">\n')
984
+
985
+ f.write(f'{space}<Grid Name="coil" GridType="Uniform">\n')
986
+ space += '\t'
987
+ f.write(f'{space}<Time Value="{i}" /> \n')
988
+ f.write(f'{space}<Topology NumberOfElements="' + str(len(hdf5[f'/coil/dipole_position/{i}'][:])) +
989
+ '" TopologyType="Polyvertex" Name="Tri">\n')
990
+ space += '\t'
991
+ f.write(
992
+ f'{space}<DataItem Format="XML" Dimensions="' + str(
993
+ len(hdf5[f'/coil/dipole_position/{i}'][:])) + ' 1">\n')
994
+ # f.write(hdf5_fn + ':' + path + '/triangle_number_list\n')
995
+ np.savetxt(f, list(range(len(hdf5[f'/coil/dipole_position/{i}'][:]))), fmt='%d',
996
+ delimiter=' ') # 1 2 3 4 ... N_Points
997
+ f.write(f'{space}</DataItem>\n')
998
+ space = space[:-1]
999
+ f.write(f'{space}</Topology>\n\n')
1000
+
1001
+ # nodes
1002
+ f.write(f'{space}<Geometry GeometryType="XYZ">\n')
1003
+ space += '\t'
1004
+ f.write(
1005
+ f'{space}<DataItem Format="HDF" Dimensions="' + str(
1006
+ len(hdf5[f'/coil/dipole_position/{i}'][:])) + ' 3">\n')
1007
+
1008
+ f.write(space + hdf5_fn + ':' + f'/coil/dipole_position/{i}\n')
1009
+
1010
+ f.write(f'{space}</DataItem>\n')
1011
+ space = space[:-1]
1012
+ f.write(f'{space}</Geometry>\n\n')
1013
+ space = space[:-1]
1014
+ # data
1015
+ if '/coil/dipole_moment_mag' in hdf5:
1016
+ # dipole magnitude
1017
+ f.write('<Attribute Name="dipole_mag" AttributeType="Scalar" Center="Cell">\n')
1018
+ f.write('<DataItem Format="HDF" Dimensions="' + str(
1019
+ len(hdf5[f'/coil/dipole_moment_mag/{i}'][:])) + ' 1">\n')
1020
+ f.write(hdf5_fn + ':' + f'/coil/dipole_moment_mag/{i}\n')
1021
+
1022
+ f.write('</DataItem>\n')
1023
+ f.write('</Attribute>\n')
1024
+
1025
+ f.write(f'{space}</Grid>\n')
1026
+ # end coil dipole data
1027
+ f.write(f'{space}</Grid>\n\n')
1028
+
1029
+ # one grid for coil dipole nodes...store data hdf5.
1030
+ #######################################################
1031
+ if coil_center_folder is not None:
1032
+ f.write(f'{space}<Grid Name="GridTime" GridType="Collection" CollectionType="Temporal">\n')
1033
+ for i in data_keys:
1034
+ space += '\t'
1035
+ with h5py.File(hdf5_fn_full, "r") as g:
1036
+ n_coil_pos = g[f"{coil_center_folder}/{i}"][:].shape[0]
1037
+ f.write(f'{space}<Grid Name="stimsites" GridType="Uniform">\n')
1038
+ space += '\t'
1039
+ f.write(f'{space}<Time Value="{i}" /> \n')
1040
+
1041
+ f.write(
1042
+ f'{space}<Topology NumberOfElements="' + str(
1043
+ n_coil_pos) + '" TopologyType="Polyvertex" Name="Tri">\n')
1044
+ space += '\t'
1045
+ f.write(f'{space}<DataItem Format="XML" Dimensions="' + str(n_coil_pos) + ' 1">\n')
1046
+ space += '\t'
1047
+ np.savetxt(f, list(range(n_coil_pos)), fmt='%d', delimiter=' ') # 1 2 3 4 ... N_Points
1048
+ f.write(f'{space}</DataItem>\n')
1049
+ space = space[:-1]
1050
+ f.write(f'{space}</Topology>\n\n')
1051
+
1052
+ # nodes
1053
+ f.write(f'{space}<Geometry GeometryType="XYZ">\n')
1054
+ space += '\t'
1055
+ f.write(f'{space}<DataItem Format="HDF" Dimensions="' + str(n_coil_pos) + ' 3">\n')
1056
+ space += '\t'
1057
+ f.write(f'{space}{hdf5_fn}:{coil_center_folder}/{i}\n')
1058
+ space = space[:-1]
1059
+ f.write(f'{space}</DataItem>\n')
1060
+ space = space[:-1]
1061
+ f.write(f'{space}</Geometry>\n\n')
1062
+
1063
+ coil_ori_folder = [coil_ori_0_folder, coil_ori_1_folder, coil_ori_2_folder]
1064
+
1065
+ for j in range(3):
1066
+ f.write(f'{space}<Attribute Name="dir_' + str(j) + '" AttributeType="Vector" Center="Cell">\n')
1067
+ space += '\t'
1068
+ f.write(f'{space}<DataItem Format="HDF" Dimensions="' + str(n_coil_pos) + ' 3">\n')
1069
+ space += '\t'
1070
+ f.write(f'{space}{hdf5_fn}:{coil_ori_folder[j]}/{i}\n')
1071
+ space = space[:-1]
1072
+ f.write(f'{space}</DataItem>\n')
1073
+ space = space[:-1]
1074
+ f.write(f'{space}</Attribute>\n\n')
1075
+
1076
+ if coil_current_folder is not None:
1077
+ f.write(f'{space}<Attribute Name="current" AttributeType="Scalar" Center="Cell">\n')
1078
+ space += '\t'
1079
+ f.write(f'{space}<DataItem Format="HDF" Dimensions="' + str(n_coil_pos) + ' 1">\n')
1080
+ space += '\t'
1081
+ f.write(f'{space}{hdf5_fn}:{coil_current_folder}/{i}\n')
1082
+ space = space[:-1]
1083
+ f.write(f'{space}</DataItem>\n')
1084
+ space = space[:-1]
1085
+ f.write(f'{space}</Attribute>\n\n')
1086
+
1087
+ space = space[:-1]
1088
+ f.write(f'{space}</Grid>\n\n')
1089
+ space = space[:-1]
1090
+ f.write(f'{space}</Grid>\n\n')
1091
+
1092
+ f.write('</Domain>\n')
1093
+ f.write('</Xdmf>\n')
1094
+ f.close()
1095
+
1096
+ hdf5.close()
1097
+ hdf5_geo.close()
1098
+
1099
+
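A hedged sketch matching the layout from the docstring (the group name 'c' is the function's default `data_folder`; sizes and file names are made up):

```python
# Sketch: store per-timestep scalars under /c/0, /c/1, ... and write a
# temporal .xdmf for ParaView. roi_data.hdf5 is assumed to also carry the
# ROI mesh (otherwise pass hdf5_geo_fn=...).
import h5py
import numpy as np
from pynibs.hdf5_io.xdmf import write_temporal_xdmf

n_tris, n_steps = 1234, 3  # toy sizes
with h5py.File("roi_data.hdf5", "a") as f:
    for t in range(n_steps):
        f[f"/c/{t}"] = np.random.rand(n_tris, 1)

write_temporal_xdmf("roi_data.hdf5", data_folder="c", overwrite_xdmf=True)
```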
1100
+ def create_position_path_xdmf(sorted_fn, coil_pos_fn, output_xdmf, stim_intens=None,
1101
+ coil_sorted='/0/0/coil_seq'):
+ """
+ Creates one .xdmf file that allows ParaView plots of coil position paths.
+
+ .. figure:: ../../doc/images/create_position_path_xdmf.png
+ :scale: 50 %
+ :alt: A set of coil positions plotted to show the path of coil movement.
+
+ ParaView can be used to visualize the order of realized stimulation positions.
+
+ Parameters
+ ----------
+ sorted_fn : str
+ .hdf5 filename with position indices, values, intensities from ``pynibs.sort_opt_coil_positions()``.
+ coil_pos_fn : str
+ .hdf5 filename with the original set of coil positions. Indices from sorted_fn are mapped to this.
+ Expects '/m0', '/m1', and '/m2' datasets with the coil orientation vectors.
+ output_xdmf : str
+ Filename of the .xdmf file to write.
+ stim_intens : int, optional
+ Intensities are multiplied by this factor.
+
+ Returns
+ -------
+ <File> : .xdmf file
+ Written to ``output_xdmf``.
+
+ Other Parameters
+ ----------------
+ coil_sorted : str
+ Path to the coil positions in sorted_fn.
+ """
1131
+ # get datasets for nodes used in path, goal value, intensity
1132
+ sorted_data = h5py.File(sorted_fn, 'r')[coil_sorted][:]
1133
+ nodes_idx, goal_val, intens = sorted_data[:, 0].astype(int), sorted_data[:, 1], sorted_data[:, 2]
1134
+
1135
+ # get direction vectors (for all positions)
1136
+ with h5py.File(coil_pos_fn, 'r') as f:
1137
+ m0 = f['/m0'][:]
1138
+ m1 = f['/m1'][:]
1139
+ m2 = f['/m2'][:]
1140
+
1141
+ if stim_intens is not None and stim_intens != 0:
1142
+ intens *= stim_intens
1143
+ write_coil_sequence_xdmf(coil_pos_fn, intens, m0, m1, m2, output_xdmf)
1144
+
1145
+
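A hedged sketch of the expected inputs (dataset paths follow the code above; the files themselves are hypothetical):

```python
# Sketch: turn a sorted coil-position sequence into a ParaView path plot.
# sorted.hdf5 holds an (n, 3) array [position index, goal value, intensity]
# at coil_sorted; coil_positions.hdf5 holds /m0, /m1, /m2 and /centers.
from pynibs.hdf5_io.xdmf import create_position_path_xdmf

create_position_path_xdmf(
    sorted_fn="sorted.hdf5",
    coil_pos_fn="coil_positions.hdf5",
    output_xdmf="coil_path.xdmf",
    stim_intens=100,                 # e.g. scale intensities to percent
    coil_sorted="/0/0/coil_seq",
)
```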
1146
+ def write_coil_sequence_xdmf(coil_pos_fn, data, vec1, vec2, vec3, output_xdmf):
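+ """
+ Write an .xdmf file that renders a sequence of coil positions as a polyline path plus per-position markers.
+
+ Parameters
+ ----------
+ coil_pos_fn : str
+ .hdf5 file with the coil positions; node coordinates are read from its '/centers' dataset.
+ data : np.ndarray
+ One scalar per position (e.g. stimulation intensity), attached to the path segments and markers.
+ vec1, vec2, vec3 : np.ndarray
+ (n_positions, 3) coil orientation vectors, exported as attributes 'dir_0' to 'dir_2'.
+ output_xdmf : str
+ Filename of the .xdmf file to write.
+ """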
1147
+ # get path from node to node
1148
+ n_nodes = vec2.shape[0]
1149
+ nodes_path = []
1150
+ for i in range(n_nodes - 1):
1151
+ nodes_path.append([i, i + 1])
1152
+ nodes_path = np.array(nodes_path).astype(int)
1153
+
1154
+ # write .xdmf file
1155
+ with open(output_xdmf, 'w') as f:
1156
+ # header
1157
+ f.writelines('<?xml version="1.0"?>\n'
1158
+ '<!DOCTYPE Xdmf SYSTEM "Xdmf.dtd" []>\n'
1159
+ '<Xdmf Version="2.0" xmlns:xi="http://www.w3.org/2001/XInclude">\n'
1160
+ '<Domain>\n')
1161
+
1162
+ # one collection grid for everything
1163
+ f.writelines(' <Grid CollectionType="Spatial" GridType="Collection" Name="Collection">\n')
1164
+
1165
+ # one grid for the lines
1166
+ f.writelines(' <Grid Name="path" GridType="Uniform">\n')
+ f.writelines(f' <Topology NumberOfElements="{nodes_path.shape[0]}" TopologyType="Polyline" Name="Tri" '
+ 'NodesPerElement="2">\n')
1169
+ f.writelines(f' <DataItem Format="XML" Dimensions="{nodes_path.shape[0]} 2">\n')
1170
+ for node_path in nodes_path:
1171
+ f.writelines(f" {node_path[0]} {node_path[1]}\n")
1172
+ f.writelines(' </DataItem>\n')
1173
+ f.writelines(' </Topology>\n')
1174
+ f.writelines(' <Geometry GeometryType="XYZ">\n')
1175
+ f.writelines(f' <DataItem Format="HDF" Dimensions="{vec2.shape[0]} 3">\n')
1176
+ f.writelines(f' {coil_pos_fn}:/centers\n')
1177
+ f.writelines(' </DataItem>\n')
1178
+ f.writelines(' </Geometry>\n')
1179
+
1180
+ f.writelines(' <Attribute Name="Stimulation #" AttributeType="Scalar" Center="Cell">\n')
1181
+ f.writelines(f' <DataItem Format="XML" Dimensions="{nodes_path.shape[0]}">\n')
1182
+ for i in range(n_nodes - 1):
1183
+ f.writelines(f" {i + 1}\n")
1184
+ f.writelines(' </DataItem>\n')
1185
+ f.writelines(' </Attribute>\n')
1186
+
1187
+ f.writelines(' <Attribute Name="line" AttributeType="Scalar" Center="Cell">\n')
1188
+ f.writelines(f' <DataItem Format="XML" Dimensions="{nodes_path.shape[0]}">\n')
1189
+ for i in range(n_nodes - 1):
1190
+ f.writelines(f" {i + 1}\n")
1191
+ f.writelines(' </DataItem>\n')
1192
+ f.writelines(' </Attribute>\n')
1193
+
1194
+ f.writelines(' <Attribute Name="data" AttributeType="Scalar" Center="Cell">\n')
1195
+ f.writelines(f' <DataItem Format="XML" Dimensions="{nodes_path.shape[0]}">\n')
1196
+ for i in data[:-1]:
1197
+ f.writelines(f" {i}\n")
1198
+ f.writelines(' </DataItem>\n')
1199
+ f.writelines(' </Attribute>\n')
1200
+
1201
+ f.writelines(' </Grid>\n')
1202
+
1203
+ # one grid for the spheres
1204
+ f.writelines(' <Grid Name="nodes" GridType="Uniform">\n')
+ f.writelines(f' <Topology NumberOfElements="{n_nodes}" TopologyType="Polyvertex" Name="nodes" '
+ 'NodesPerElement="2">\n')
1207
+ f.writelines(f' <DataItem Format="XML" Dimensions="{nodes_path.shape[0]} 1">\n')
1208
+ for i in range(n_nodes):
1209
+ f.writelines(f" {int(i)}\n")
1210
+ f.writelines(' </DataItem>\n\n')
1211
+ f.writelines(' </Topology>\n\n')
1212
+ f.writelines(' <Geometry GeometryType="XYZ">\n')
1213
+ f.writelines(f' <DataItem Format="HDF" Dimensions="{vec2.shape[0]} 3">\n')
1214
+ f.writelines(f' {coil_pos_fn}:/centers\n')
1215
+ f.writelines(' </DataItem>\n')
1216
+ f.writelines(' </Geometry>\n')
1217
+
1218
+ # intensity dataset for the spheres
1219
+ f.writelines(' <Attribute Name="data" AttributeType="Scalar" Center="Cell">\n')
1220
+ f.writelines(f' <DataItem Format="XML" Dimensions="{n_nodes}">\n')
1221
+ for i in data:
1222
+ f.writelines(f" {i}\n")
1223
+ f.writelines(' </DataItem>\n')
1224
+ f.writelines(' </Attribute>\n')
1225
+
1226
+ f.writelines(' <Attribute Name="Stimulation #" AttributeType="Scalar" Center="Cell">\n')
1227
+ f.writelines(f' <DataItem Format="XML" Dimensions="{n_nodes}">\n')
1228
+ for i in range(n_nodes):
1229
+ f.writelines(f" {int(i)}\n")
1230
+ f.writelines(' </DataItem>\n')
1231
+ f.writelines(' </Attribute>\n')
1232
+
1233
+ f.writelines(' <Attribute Name="sphere" AttributeType="Scalar" Center="Cell">\n')
1234
+ f.writelines(f' <DataItem Format="XML" Dimensions="{n_nodes}">\n')
1235
+ for i in range(n_nodes):
1236
+ f.writelines(f" {int(i)}\n")
1237
+ f.writelines(' </DataItem>\n')
1238
+ f.writelines(' </Attribute>\n')
1239
+
1240
+ # direction dataset for spheres
1241
+ for idx, vecs in enumerate([vec1, vec2, vec3]):
1242
+ f.writelines(f' <Attribute Name="dir_{idx}" AttributeType="Vector" Center="Cell">\n')
1243
+ f.writelines(f' <DataItem Format="XML" Dimensions="{n_nodes} 3">\n')
1244
+ for i in range(n_nodes):
1245
+ f.writelines(f" {vecs[i][0]} {vecs[i][1]} {vecs[i][2]} \n")
1246
+ f.writelines(' </DataItem>\n')
1247
+ f.writelines(' </Attribute>\n')
1248
+ f.writelines(' \n')
1249
+ f.writelines('\n')
1250
+ f.writelines(' </Grid>\n')
1251
+
1252
+ # collection grid close
1253
+ f.writelines(' </Grid>\n')
1254
+ f.writelines('</Domain>\n</Xdmf>')
1255
+
1256
+
1257
+ def create_fibre_xdmf(fn_fibre_geo_hdf5, fn_fibre_data_hdf5=None, overwrite=True, fibre_points_path="fibre_points",
1258
+ fibre_con_path="fibre_con", fibre_data_path=""):
+ """
+ Creates an .xdmf file to plot fibres in ParaView.
+
+ Parameters
+ ----------
+ fn_fibre_geo_hdf5 : str
+ Path to fibre_geo.hdf5 file containing the geometry (in /plot subfolder created with create_fibre_geo_hdf5()).
+ fn_fibre_data_hdf5 : str, optional
+ Path to fibre_data.hdf5 file containing the data to plot (in parent folder).
+ overwrite : bool, default: True
+ Overwrite an existing .xdmf file.
+ fibre_points_path : str, default: "fibre_points"
+ Path to the fibre point array in the .hdf5 file.
+ fibre_con_path : str, default: "fibre_con"
+ Path to the fibre connectivity array in the .hdf5 file.
+ fibre_data_path : str, default: ""
+ Path to the parent data folder in the data.hdf5 file (default: no parent folder).
+
+ Returns
+ -------
+ <File> : .xdmf file for ParaView
+ """
1279
+ if fn_fibre_data_hdf5 is None:
1280
+ fn_xdmf = os.path.splitext(fn_fibre_geo_hdf5)[0] + ".xdmf"
1281
+
1282
+ else:
1283
+ fn_xdmf = os.path.splitext(fn_fibre_data_hdf5)[0] + ".xdmf"
1284
+
1285
+ data_dict = dict()
+ if fn_fibre_data_hdf5 is not None: # only load data if a data file was given
+ with h5py.File(fn_fibre_data_hdf5, "r") as f:
+ for key in f.keys():
+ data_dict[key] = f[key][:]
1289
+
1290
+ with h5py.File(fn_fibre_geo_hdf5, "r") as f:
1291
+ n_con = f[fibre_con_path][:].shape[0]
1292
+ n_points = f[fibre_points_path][:].shape[0]
1293
+
1294
+ if os.path.exists(fn_xdmf) and not overwrite:
1295
+ print("Aborting .xdmf file already exists (overwrite=False)")
1296
+ return
1297
+
1298
+     with open(fn_xdmf, 'w') as xdmf:
+         # Header
+         xdmf.write('<?xml version="1.0"?>\n')
+         xdmf.write('<!DOCTYPE Xdmf SYSTEM "Xdmf.dtd" []>\n')
+         xdmf.write('<Xdmf Version="2.0" xmlns:xi="http://www.w3.org/2001/XInclude">\n')
+
+         # Domain and Grid
+         xdmf.write('<Domain>\n')
+         xdmf.write('<Grid\n')
+         xdmf.write('CollectionType="Spatial"\n')
+         xdmf.write('GridType="Collection"\n')
+         xdmf.write('Name="Collection">\n')
+         xdmf.write('<Grid Name="fibres" GridType="Uniform">\n')
+
+         # Topology (connectivity)
+         xdmf.write(f'<Topology NumberOfElements="{n_con}" TopologyType="Polyline" NodesPerElement="2" Name="fibres">\n')
+         xdmf.write(f'<DataItem Format="HDF" Dimensions="{n_con} 2">\n')
+         xdmf.write(f'{fn_fibre_geo_hdf5}:{fibre_con_path}\n')
+         xdmf.write('</DataItem>\n')
+         xdmf.write('</Topology>\n')
+
+         # Geometry (points)
+         xdmf.write('<Geometry GeometryType="XYZ">\n')
+         xdmf.write(f'<DataItem Format="HDF" Dimensions="{n_points} 3">\n')
+         xdmf.write(f'{fn_fibre_geo_hdf5}:{fibre_points_path}\n')
+         xdmf.write('</DataItem>\n')
+         xdmf.write('</Geometry>\n')
+
+         # Data (one attribute per dataset in the data file)
+         if fn_fibre_data_hdf5 is not None:
+             for data_name, data_arr in data_dict.items():
+                 data_shape_0 = data_arr.shape[0]
+                 data_shape_1 = 1 if data_arr.ndim < 2 else data_arr.shape[1]
+
+                 xdmf.write(f'<Attribute Name="{data_name}" AttributeType="Scalar" Center="Cell">\n')
+                 xdmf.write(f'<DataItem Format="HDF" Dimensions="{data_shape_0} {data_shape_1}">\n')
+                 xdmf.write(f'{fn_fibre_data_hdf5}:{fibre_data_path}/{data_name}\n')
+                 xdmf.write('</DataItem>\n')
+                 xdmf.write('</Attribute>\n')
+
+         xdmf.write('</Grid>\n')
+         xdmf.write('</Grid>\n')
+         xdmf.write('</Domain>\n')
+         xdmf.write('</Xdmf>\n')
+
+
+ def data_superimpose(fn_in_hdf5_data, fn_in_geo_hdf5, fn_out_hdf5_data, data_hdf5_path='/data/tris/',
+                      data_substitute=-1, normalize=False):
+     """
+     Overlays data stored in several .hdf5 data files that share a common geometry. Elements where
+     data_substitute is found are omitted from the analysis and set to data_substitute in the output.
+
+     Parameters
+     ----------
+     fn_in_hdf5_data : list of str
+         Filenames of .hdf5 data files with common geometry, e.g. generated by pynibs.data_sub2avg(...).
+     fn_in_geo_hdf5 : str
+         Geometry .hdf5 file, which corresponds to the .hdf5 data files.
+     fn_out_hdf5_data : str
+         Filename of .hdf5 data output file containing the superimposed data.
+     data_hdf5_path : str
+         Path in .hdf5 data file where data is stored (e.g. ``'/data/tris/'``).
+     data_substitute : float or np.NaN, default: -1
+         Substitute value marking all points (e.g. of the inflated brain) that do not belong to
+         the given data set.
+     normalize : bool or str, default: False
+         Decide if individual datasets are normalized w.r.t. their maximum values before they are superimposed.
+
+         * 'global': global normalization w.r.t. maximum value over all datasets and subjects
+         * 'dataset': dataset-wise normalization w.r.t. maximum of each dataset individually (over subjects)
+         * 'subject': subject-wise normalization (over datasets)
+
+     Returns
+     -------
+     <File> : .hdf5 file
+         Overlaid data.
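+
+     Example
+     -------
+     A minimal usage sketch (illustrative file names; all data files share one geometry):
+
+     >>> data_superimpose(fn_in_hdf5_data=["S01/data.hdf5", "S02/data.hdf5"],
+     ...                  fn_in_geo_hdf5="avg/geo.hdf5",
+     ...                  fn_out_hdf5_data="avg/data_superimposed.hdf5",
+     ...                  normalize="dataset")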
+     """
+
+     n_subjects = len(fn_in_hdf5_data)
+     data_dic = [dict() for _ in range(n_subjects)]
+     labels = [''] * n_subjects
+     percentile = [99]  # robust maximum: the 99th percentile
+
+     # load .hdf5 data files and save them in dictionaries
+     for i, filename in enumerate(fn_in_hdf5_data):
+         with h5py.File(filename, 'r') as f:
+             labels[i] = list(f[data_hdf5_path].keys())
+             for label in labels[i]:
+                 data_dic[i][label] = f[data_hdf5_path + label][:]
+
+     # find labels present in all datasets
+     data_labels = sorted(set.intersection(*(set(lbl) for lbl in labels)))
+
+     # reform data: data[i] has shape (n_elements, n_subjects)
+     data = [np.zeros((data_dic[0][data_labels[i]].shape[0], n_subjects)) for i in range(len(data_labels))]
+     for i, label in enumerate(data_labels):
+         for j in range(n_subjects):
+             data[i][:, j] = data_dic[j][label].flatten()
+
+     del data_dic
+
+     # Normalize each dataset over subjects to 1
+     if normalize == 'dataset':
+         for i in range(len(data_labels)):
+             mask = np.all(data[i] != data_substitute, axis=1)
+             data[i][mask, :] = data[i][mask, :] / np.percentile(data[i][mask, :], percentile)[0]
+
+             # trim values > 1 (above the percentile) to 1
+             data[i][mask, :] = np.minimum(data[i][mask, :], 1.)
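+
+         # illustrative example (hypothetical numbers): for masked values
+         # [[0.5, 2.0], [1.0, 4.0]] the 99th percentile is ~3.94; division yields
+         # [[0.127, 0.508], [0.254, 1.015]] and the clipping maps 1.015 to 1.0,
+         # so all normalized values lie in [0, 1]
+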
+     elif normalize == 'subject':
+         # subject-wise
+         for i_subj in range(n_subjects):
+             sub_data = np.array(())
+             mask = np.array(())
+             max_val = []
+
+             # dataset-wise
+             for i_data in range(len(data_labels)):
+                 mask = np.append(mask, np.all(data[i_data] != data_substitute, axis=1))
+                 sub_data = np.append(sub_data, data[i_data][:, i_subj])
+                 max_val.append(np.percentile(sub_data[mask == 1.], percentile)[0])
+
+             # max(max) over all datasets
+             max_val = np.max(max_val)
+             for i_data in range(len(data_labels)):
+                 mask = np.all(data[i_data] != data_substitute, axis=1)
+                 data[i_data][mask, i_subj] /= max_val
+
+                 # trim values > 1 (above the percentile) to 1
+                 data[i_data][mask, i_subj] = np.minimum(data[i_data][mask, i_subj], 1.)
+
+     # Find max over all datasets and all subjects and normalize w.r.t. this value
+     elif normalize == 'global':
+         data_max = []
+
+         # mag, norm, tan
+         for i in range(len(data_labels)):
+             mask = np.all(data[i] != data_substitute, axis=1)
+             # take max over the subject-wise 99th percentiles
+             data_max.append(np.max(np.percentile(data[i][mask, :], percentile, axis=0)[0]))
+
+         # find maximum over mag, norm, tan
+         data_max = np.max(data_max)
+
+         # normalize
+         for i in range(len(data_labels)):
+             mask = np.all(data[i] != data_substitute, axis=1)
+             data[i][mask, :] = data[i][mask, :] / data_max
+
+             # trim values > 1 (above the percentile) to 1
+             data[i][mask, :] = np.minimum(data[i][mask, :], 1.)
+
+     # average data in regions where values are defined in every dataset
+     data_mean = [np.ones(data[i].shape[0]) * data_substitute for i in range(len(data_labels))]
+
+     for i in range(len(data_labels)):
+         mask = np.all(data[i] != data_substitute, axis=1)
+         data_mean[i][mask] = np.mean(data[i][mask, :], axis=1)
+
+     # create results directory
+     os.makedirs(os.path.split(fn_out_hdf5_data)[0], exist_ok=True)
+
+     # copy the .hdf5 geometry file to the results folder and rename it to match the .hdf5 data file
+     import shutil  # local import; used only for this file copy
+     fn_in_geo_hdf5_new = os.path.splitext(fn_out_hdf5_data)[0] + '_geo.hdf5'
+     shutil.copy(fn_in_geo_hdf5, fn_in_geo_hdf5_new)
+
+     # write data to .hdf5 data file
+     pynibs.write_data_hdf5_surf(data=data_mean,
+                                 data_names=data_labels,
+                                 data_hdf_fn_out=fn_out_hdf5_data,
+                                 geo_hdf_fn=fn_in_geo_hdf5_new,
+                                 replace=True)
+
+
+ def write_xdmf_coordinates(fn_xdmf, coords_center):
+     """
+     Writes coordinates to an .xdmf file for visualization in ParaView.
+
+     Parameters
+     ----------
+     fn_xdmf : str
+         The filename of the XDMF file to be written.
+     coords_center : np.ndarray
+         The coordinates to be written to the XDMF file.
+         This should be a 2D array where each row represents a point in 3D space.
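+
+     Example
+     -------
+     A minimal usage sketch (hypothetical coordinates, in mm):
+
+     >>> import numpy as np
+     >>> coords = np.array([[10.0, -42.5, 68.2], [12.3, -40.1, 67.9]])
+     >>> write_xdmf_coordinates("stimsites.xdmf", coords)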
1501
+ """
1502
+ with open(fn_xdmf, 'w') as f:
1503
+ # header
1504
+ f.write('<?xml version="1.0"?>\n')
1505
+ f.write('<!DOCTYPE Xdmf SYSTEM "Xdmf.dtd" []>\n')
1506
+ f.write('<Xdmf Version="2.0" xmlns:xi="http://www.w3.org/2001/XInclude">\n')
1507
+ f.write('<Domain>\n')
1508
+ f.write('<Grid CollectionType="Spatial" GridType="Collection" Name="Collection">\n')
1509
+
1510
+ # one grid for coil dipole nodes...store data hdf5.
1511
+ #######################################################
1512
+ f.write('<Grid Name="stimsites" GridType="Uniform">\n')
1513
+ f.write(f'<Topology NumberOfElements="{coords_center.shape[0]}" TopologyType="Polyvertex" Name="Tri">\n')
1514
+ f.write(f'\t<DataItem Format="XML" Dimensions="{coords_center.shape[0]} 1">\n')
1515
+ np.savetxt(f, list(range(coords_center.shape[0])), fmt='\t%d', delimiter=' ') # 1 2 3 4 ... N_Points
1516
+ f.write('\t</DataItem>\n')
1517
+ f.write('</Topology>\n\n')
1518
+
1519
+ # nodes
1520
+ f.write('<Geometry GeometryType="XYZ">\n')
1521
+ f.write(f'\t<DataItem Format="XML" Dimensions="{coords_center.shape[0]} 3">\n')
1522
+ np.savetxt(f, coords_center, fmt='\t%d', delimiter=' ') # 1 2 3 4 ... N_Points
1523
+ f.write('\t</DataItem>\n')
1524
+ f.write('</Geometry>\n\n')
1525
+
1526
+ # data
1527
+ # dipole magnitude
1528
+ # the 4 vectors
1529
+ f.write(f'\t\t<Attribute Name="id" AttributeType="Scalar" Center="Cell">\n')
1530
+ f.write('\t\t\t<DataItem Format="XML" Dimensions="' + str(coords_center.shape[0]) + ' 1">\n')
1531
+ for i in range(coords_center.shape[0]):
1532
+ f.write(f'\t\t\t{i}\n')
1533
+ f.write('\t\t\t</DataItem>\n')
1534
+ f.write('\t\t</Attribute>\n\n')
1535
+
1536
+ f.write('</Grid>\n')
1537
+ # end coil dipole data
1538
+
1539
+ # footer
1540
+ f.write('</Grid>\n')
1541
+ f.write('</Domain>\n')
1542
+ f.write('</Xdmf>\n')