pyNIBS 0.2024.8-py3-none-any.whl → 0.2026.1-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pynibs/__init__.py +26 -14
- pynibs/coil/__init__.py +6 -0
- pynibs/{coil.py → coil/coil.py} +213 -543
- pynibs/coil/export.py +508 -0
- pynibs/congruence/__init__.py +4 -1
- pynibs/congruence/congruence.py +37 -45
- pynibs/congruence/ext_metrics.py +40 -11
- pynibs/congruence/stimulation_threshold.py +1 -2
- pynibs/expio/Mep.py +120 -370
- pynibs/expio/__init__.py +10 -0
- pynibs/expio/brainsight.py +34 -37
- pynibs/expio/cobot.py +25 -25
- pynibs/expio/exp.py +10 -7
- pynibs/expio/fit_funs.py +3 -0
- pynibs/expio/invesalius.py +70 -0
- pynibs/expio/localite.py +190 -91
- pynibs/expio/neurone.py +139 -0
- pynibs/expio/signal_ced.py +345 -2
- pynibs/expio/visor.py +16 -15
- pynibs/freesurfer.py +34 -33
- pynibs/hdf5_io/hdf5_io.py +149 -132
- pynibs/hdf5_io/xdmf.py +35 -31
- pynibs/mesh/__init__.py +1 -1
- pynibs/mesh/mesh_struct.py +77 -92
- pynibs/mesh/transformations.py +121 -21
- pynibs/mesh/utils.py +191 -99
- pynibs/models/_TMS.py +2 -1
- pynibs/muap.py +1 -2
- pynibs/neuron/__init__.py +10 -0
- pynibs/neuron/models/mep.py +566 -0
- pynibs/neuron/neuron_regression.py +98 -8
- pynibs/optimization/__init__.py +12 -2
- pynibs/optimization/{optimization.py → coil_opt.py} +157 -133
- pynibs/optimization/multichannel.py +1174 -24
- pynibs/optimization/workhorses.py +7 -8
- pynibs/regression/__init__.py +4 -2
- pynibs/regression/dual_node_detection.py +229 -219
- pynibs/regression/regression.py +92 -61
- pynibs/roi/__init__.py +4 -1
- pynibs/roi/roi_structs.py +19 -21
- pynibs/roi/{roi.py → roi_utils.py} +56 -33
- pynibs/subject.py +24 -14
- pynibs/util/__init__.py +20 -4
- pynibs/util/dosing.py +4 -5
- pynibs/util/quality_measures.py +39 -38
- pynibs/util/rotations.py +116 -9
- pynibs/util/{simnibs.py → simnibs_io.py} +29 -19
- pynibs/util/{util.py → utils.py} +20 -22
- pynibs/visualization/para.py +4 -4
- pynibs/visualization/render_3D.py +4 -4
- pynibs-0.2026.1.dist-info/METADATA +105 -0
- pynibs-0.2026.1.dist-info/RECORD +69 -0
- {pyNIBS-0.2024.8.dist-info → pynibs-0.2026.1.dist-info}/WHEEL +1 -1
- pyNIBS-0.2024.8.dist-info/METADATA +0 -723
- pyNIBS-0.2024.8.dist-info/RECORD +0 -107
- pynibs/data/configuration_exp0.yaml +0 -59
- pynibs/data/configuration_linear_MEP.yaml +0 -61
- pynibs/data/configuration_linear_RT.yaml +0 -61
- pynibs/data/configuration_sigmoid4.yaml +0 -68
- pynibs/data/network mapping configuration/configuration guide.md +0 -238
- pynibs/data/network mapping configuration/configuration_TEMPLATE.yaml +0 -42
- pynibs/data/network mapping configuration/configuration_for_testing.yaml +0 -43
- pynibs/data/network mapping configuration/configuration_modelTMS.yaml +0 -43
- pynibs/data/network mapping configuration/configuration_reg_isi_05.yaml +0 -43
- pynibs/data/network mapping configuration/output_documentation.md +0 -185
- pynibs/data/network mapping configuration/recommendations_for_accuracy_threshold.md +0 -77
- pynibs/data/neuron/models/L23_PC_cADpyr_biphasic_v1.csv +0 -1281
- pynibs/data/neuron/models/L23_PC_cADpyr_monophasic_v1.csv +0 -1281
- pynibs/data/neuron/models/L4_LBC_biphasic_v1.csv +0 -1281
- pynibs/data/neuron/models/L4_LBC_monophasic_v1.csv +0 -1281
- pynibs/data/neuron/models/L4_NBC_biphasic_v1.csv +0 -1281
- pynibs/data/neuron/models/L4_NBC_monophasic_v1.csv +0 -1281
- pynibs/data/neuron/models/L4_SBC_biphasic_v1.csv +0 -1281
- pynibs/data/neuron/models/L4_SBC_monophasic_v1.csv +0 -1281
- pynibs/data/neuron/models/L5_TTPC2_cADpyr_biphasic_v1.csv +0 -1281
- pynibs/data/neuron/models/L5_TTPC2_cADpyr_monophasic_v1.csv +0 -1281
- pynibs/tests/data/InstrumentMarker20200225163611937.xml +0 -19
- pynibs/tests/data/TriggerMarkers_Coil0_20200225163443682.xml +0 -14
- pynibs/tests/data/TriggerMarkers_Coil1_20200225170337572.xml +0 -6373
- pynibs/tests/data/Xdmf.dtd +0 -89
- pynibs/tests/data/brainsight_niiImage_nifticoord.txt +0 -145
- pynibs/tests/data/brainsight_niiImage_nifticoord_largefile.txt +0 -1434
- pynibs/tests/data/brainsight_niiImage_niifticoord_mixedtargets.txt +0 -47
- pynibs/tests/data/create_subject_testsub.py +0 -332
- pynibs/tests/data/data.hdf5 +0 -0
- pynibs/tests/data/geo.hdf5 +0 -0
- pynibs/tests/test_coil.py +0 -474
- pynibs/tests/test_elements2nodes.py +0 -100
- pynibs/tests/test_hdf5_io/test_xdmf.py +0 -61
- pynibs/tests/test_mesh_transformations.py +0 -123
- pynibs/tests/test_mesh_utils.py +0 -143
- pynibs/tests/test_nnav_imports.py +0 -101
- pynibs/tests/test_quality_measures.py +0 -117
- pynibs/tests/test_regressdata.py +0 -289
- pynibs/tests/test_roi.py +0 -17
- pynibs/tests/test_rotations.py +0 -86
- pynibs/tests/test_subject.py +0 -71
- pynibs/tests/test_util.py +0 -24
- /pynibs/{regression/score_types.py → neuron/models/m1_montbrio.py} +0 -0
- {pyNIBS-0.2024.8.dist-info → pynibs-0.2026.1.dist-info/licenses}/LICENSE +0 -0
- {pyNIBS-0.2024.8.dist-info → pynibs-0.2026.1.dist-info}/top_level.txt +0 -0
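Several modules move in this release: `pynibs/coil.py` becomes `pynibs/coil/coil.py`, `optimization/optimization.py` becomes `optimization/coil_opt.py`, `roi/roi.py` becomes `roi/roi_utils.py`, `util/simnibs.py` becomes `util/simnibs_io.py`, and `util/util.py` becomes `util/utils.py`. Downstream code that imports from the old paths may need a small compatibility shim. The sketch below is illustrative only; it assumes the moved functions keep their names (which the hunks further down suggest for `read_coil_geo`), so check the new `pynibs/__init__.py` re-exports before relying on it.

```python
# Hypothetical compatibility shim for the util/simnibs.py -> util/simnibs_io.py rename.
try:
    from pynibs.util.simnibs_io import read_coil_geo   # 0.2026.1 layout
except ImportError:
    from pynibs.util.simnibs import read_coil_geo      # 0.2024.8 layout
```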
pynibs/hdf5_io/hdf5_io.py
CHANGED
@@ -156,7 +156,7 @@ def load_mesh_msh(fname):
     triangles_regions = msh_msh.elm.tag2[msh_msh.elm.elm_type == 2]
-    obj = pynibs.
+    obj = pynibs.mesh.TetrahedraLinear(points, triangles, triangles_regions, tetrahedra, tetrahedra_regions)
     return obj

@@ -229,7 +229,7 @@ def load_mesh_hdf5(fname):
     tetrahedra = np.array(f['elm/tetrahedra_number_list'])  # node_number_list[elm_type == 4, ]
     triangles_regions = np.array(f['elm/tri_tissue_type'])
     tetrahedra_regions = np.array(f['elm/tet_tissue_type'])
-    obj = pynibs.
+    obj = pynibs.mesh.TetrahedraLinear(points, triangles, triangles_regions, tetrahedra, tetrahedra_regions)
     return obj
@@ -326,32 +326,30 @@ def write_geo_hdf5(out_fn, msh, roi_dict=None, hdf5_path='/mesh'):
         os.remove(out_fn)

     with h5py.File(out_fn, 'w') as f:
-        f.create_dataset(hdf5_path
-        f.create_dataset(hdf5_path
-        f.create_dataset(hdf5_path
-        f.create_dataset(hdf5_path
-        f.create_dataset(hdf5_path
-        f.create_dataset(hdf5_path
-        f.create_dataset(hdf5_path + '/elm/tetrahedra_number_list', data=msh.tetrahedra)
-        f.create_dataset(hdf5_path + '/elm/tet_tissue_type', data=msh.tetrahedra_regions.flatten())
+        f.create_dataset(f'{hdf5_path}/elm/elm_number', data=np.arange(msh.N_tet + msh.N_tri) + 1)
+        f.create_dataset(f'{hdf5_path}/elm/elm_type', data=np.array([2] * msh.N_tri + [4] * msh.N_tet))
+        f.create_dataset(f'{hdf5_path}/elm/tag1', data=np.hstack((msh.triangles_regions + 1000, msh.tetrahedra_regions)).flatten())
+        f.create_dataset(f'{hdf5_path}/elm/tag2', data=np.hstack((msh.triangles_regions, msh.tetrahedra_regions)).flatten())
+        f.create_dataset(f'{hdf5_path}/elm/triangle_number_list', data=msh.triangles)
+        f.create_dataset(f'{hdf5_path}/elm/tri_tissue_type', data=msh.triangles_regions.flatten())
+        f.create_dataset(f'{hdf5_path}/elm/tetrahedra_number_list', data=msh.tetrahedra)
+        f.create_dataset(f'{hdf5_path}/elm/tet_tissue_type', data=msh.tetrahedra_regions.flatten())
         if msh.tetrahedra.size != 0:
             if msh.triangles.size != 0:
-                f.create_dataset(hdf5_path
+                f.create_dataset(f'{hdf5_path}/elm/node_number_list', data=np.vstack(
+                    (np.hstack((msh.triangles, np.zeros((msh.N_tri, 1)))),
+                     msh.tetrahedra)).astype(int))
             else:
-                f.create_dataset(hdf5_path
+                f.create_dataset(f'{hdf5_path}/elm/node_number_list', data=msh.tetrahedra).astype(int)
         else:
-            f.create_dataset(hdf5_path
-            f.create_dataset(hdf5_path
-            f.create_dataset(hdf5_path
-            # f.
-            f.create_dataset(hdf5_path
-            f.create_dataset(hdf5_path
+            f.create_dataset(f'{hdf5_path}/elm/node_number_list', data=np.vstack(
+                (np.hstack((msh.triangles, np.zeros((msh.N_tri, 1)))),
+                 )).astype(int))
+        f.create_dataset(f'{hdf5_path}/nodes/node_coord', data=msh.points)
+        f.create_dataset(f'{hdf5_path}/nodes/node_number', data=np.arange(msh.N_points) + 1)
+        # f.create_datasef'{t}5_path + '/nodes/units', data=['mm'])
+        f.create_dataset(f'{hdf5_path}/elm/tet_elm_type', data=np.array([4] * msh.N_tet))
+        f.create_dataset(f'{hdf5_path}/elm/tri_elm_type', data=np.array([2] * msh.N_tri))

         if roi_dict is not None:
             for roi_key in roi_dict.keys():
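The rewritten `write_geo_hdf5` block spells out the geometry layout inside the `.hdf5` file: element data under `<hdf5_path>/elm/...` and node data under `<hdf5_path>/nodes/...` (default `hdf5_path='/mesh'`). A quick way to inspect a written file is to walk it with h5py; the snippet below is a sketch that uses only dataset paths visible in the hunk, and the file name is a placeholder.

```python
import h5py

# Sketch: list the datasets created under the default '/mesh' path and read two of them.
with h5py.File("subject_geo.hdf5", "r") as f:                        # placeholder file name
    f["/mesh"].visititems(lambda name, obj: print(name, getattr(obj, "shape", "")))
    node_coord = f["/mesh/nodes/node_coord"][:]                       # (N_points, 3) coordinates
    tet_conn = f["/mesh/elm/tetrahedra_number_list"][:]               # tetrahedra connectivity
```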
@@ -644,21 +642,21 @@ def write_data_hdf5_surf(data, data_names, data_hdf_fn_out, geo_hdf_fn,
     except IndexError:
         data_hdf_fn_out += '.hdf5'

-    data_xdmf_fn = data_hdf_fn_out[:-4]
+    data_xdmf_fn = f'{data_hdf_fn_out[:-4]}xdmf'
     if os.path.exists(data_hdf_fn_out):
         if replace:
             os.remove(data_hdf_fn_out)
         elif not replace and replace_array_in_file:
             pass
         else:
-            raise FileExistsError(data_hdf_fn_out
+            raise FileExistsError(f"{data_hdf_fn_out} already exists")
     if os.path.exists(data_xdmf_fn):
         if replace:
             os.remove(data_xdmf_fn)
         elif not replace and replace_array_in_file:
             pass
         else:
-            raise FileExistsError(data_xdmf_fn
+            raise FileExistsError(f"{data_xdmf_fn} already exists")

     # Check for correct data and data_names
     if type(data) is np.ndarray:

@@ -666,7 +664,7 @@ def write_data_hdf5_surf(data, data_names, data_hdf_fn_out, geo_hdf_fn,
     elif type(data) is list:
         for dat in data:
             if type(dat) is not np.ndarray:
-                raise
+                raise ValueError(f'Expected np.ndarray, got {type(dat)}')
     else:
         raise NotImplementedError
@@ -686,18 +684,19 @@ def write_data_hdf5_surf(data, data_names, data_hdf_fn_out, geo_hdf_fn,
     for idx, dat in enumerate(data):
         if replace_array_in_file:
             try:
-                del h5_data[f'/data/{datatype}/
+                del h5_data[f'/data/{datatype}/{data_names[idx]}']
             except KeyError:
                 only_data_replaced = False
         else:
             only_data_replaced = False

-        h5_data.create_dataset(f'/data/{datatype}/
+        h5_data.create_dataset(f'/data/{datatype}/{data_names[idx]}', data=data[idx])

     if not only_data_replaced:
         data_dims = [dat.shape[1] if dat.ndim > 1 else 1 for dat in data]
-        pynibs.write_xdmf_surf(data_hdf_fn_out, data_names,
+        pynibs.write_xdmf_surf(data_hdf=data_hdf_fn_out, data_names=data_names,
+                               data_xdmf_fn=data_xdmf_fn, geo_hdf_fn=geo_hdf_fn, data_dims=data_dims)


 def write_data_hdf5_vol(data, data_names, data_hdf_fn_out, geo_hdf_fn, replace=False, replace_array_in_file=True):
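`write_data_hdf5_surf` now builds the `.xdmf` file name explicitly, stores each array under `/data/<datatype>/<data_name>`, and forwards everything to `pynibs.write_xdmf_surf` with keyword arguments. A call might look like the sketch below; it uses only the parameters visible in the hunk header, the file names are placeholders, and it assumes the function is reachable at package level (otherwise import it from `pynibs.hdf5_io.hdf5_io`).

```python
import numpy as np
import pynibs

e_mag = np.random.rand(1234)                    # one value per ROI element (toy data)
pynibs.write_data_hdf5_surf(data=[e_mag],       # list of np.ndarray, as enforced above
                            data_names=["E_mag"],
                            data_hdf_fn_out="subject_data.hdf5",   # placeholder
                            geo_hdf_fn="subject_geo.hdf5",         # placeholder
                            replace=True)
```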
@@ -754,7 +753,7 @@ def write_data_hdf5_vol(data, data_names, data_hdf_fn_out, geo_hdf_fn, replace=F
         elif not replace and replace_array_in_file:
             pass
         else:
-            warnings.warn(data_hdf_fn_out
+            warnings.warn(f"{data_hdf_fn_out} already exists. Quitting")
             return
     if os.path.exists(data_xdmf_fn):
         if replace:

@@ -762,7 +761,7 @@ def write_data_hdf5_vol(data, data_names, data_hdf_fn_out, geo_hdf_fn, replace=F
         elif not replace and replace_array_in_file:
             pass
         else:
-            warnings.warn(data_xdmf_fn
+            warnings.warn(f"{data_xdmf_fn} already exists. Quitting")
             return

     # Check for correct data and data_names

@@ -843,16 +842,16 @@ def write_data_hdf5_vol(data, data_names, data_hdf_fn_out, geo_hdf_fn, replace=F
     for vol in volume:

-        n_tets = len(h5_geo['/mesh/elm/
-        n_nodes = len(h5_geo['/mesh/nodes/
+        n_tets = len(h5_geo[f'/mesh/elm/{lookup_str + vol}'][:])
+        n_nodes = len(h5_geo[f'/mesh/nodes/{lookup_str_node + vol}'][:])
         assert n_tets, n_nodes

         # one grid for triangles...
         ###########################
         xdmf.write('<Grid Name="tris" GridType="Uniform">\n')
-        xdmf.write('<Topology NumberOfElements="' +
-                   '" TopologyType="Tetrahedron" Name="
-        xdmf.write('<DataItem Format="HDF" Dimensions="' + str(n_tets) + ' 4">\n')
+        xdmf.write(f'<Topology NumberOfElements="{n_tets}' +
+                   f'" TopologyType="Tetrahedron" Name="{vol}_Tet">\n')
+        xdmf.write(f'<DataItem Format="HDF" Dimensions="' + str(n_tets) + ' 4">\n')
         xdmf.write(geo_hdf_fn + ':' + '/mesh/elm/' + lookup_str + vol + '\n')
         xdmf.write('</DataItem>\n')
         xdmf.write('</Topology>\n')
@@ -932,9 +931,9 @@ def hdf_2_ascii(hdf5_fn):
     h5.visititems(print_attrs)


-def simnibs_results_msh2hdf5_workhorse(fn_msh, fn_hdf5, session, pos_tms_idx, pos_local_idx, subject,
+def simnibs_results_msh2hdf5_workhorse(fn_msh, fn_hdf5, session, pos_tms_idx, pos_local_idx, subject, mesh_id=None,
                                        mode_xdmf="r+",
-                                       verbose=False, overwrite=False, mid2roi=False):
+                                       verbose=False, overwrite=False, mid2roi=False, mesh_idx=None):
     """
     Converts simnibs .msh results file to .hdf5 (including midlayer data if desired)

@@ -954,7 +953,7 @@ def simnibs_results_msh2hdf5_workhorse(fn_msh, fn_hdf5, session, pos_tms_idx, po
         For every coil a separate TMSList exists, which contains multiple coil positions.
     subject : Subject object
         pynibs.Subject.
+    mesh_id : int or str
         Mesh index or id.
     mode_xdmf : str, default: "r+"
         Mode to open hdf5_geo file to write xdmf. If hdf5_geo is already separated in tets and tris etc.,

@@ -967,6 +966,8 @@ def simnibs_results_msh2hdf5_workhorse(fn_msh, fn_hdf5, session, pos_tms_idx, po
         If the mesh contains ROIs and the e-field was calculated in the midlayer using SimNIBS
         (``S.map_to_surf = True``), the midlayer results will be mapped from the simnibs midlayer to the ROIs
         (takes some time for large ROIs).
+    mesh_idx : int or str, optional
+        Deprecated, use mesh_id instead.

     Returns
     -------

@@ -974,6 +975,10 @@ def simnibs_results_msh2hdf5_workhorse(fn_msh, fn_hdf5, session, pos_tms_idx, po
     .hdf5 file containing the results. An .xdmf file is also created to link the results with the mesh .hdf5 file
     of the subject.
     """
+    if mesh_idx is not None:
+        warnings.warn("simnibs_results_msh2hdf5_workhorse(): Use 'mesh_id' instead of 'mesh_idx'", DeprecationWarning)
+        mesh_id = mesh_idx
+
     import simnibs
     if type(fn_msh) is not list:
         fn_msh = [fn_msh]
@@ -1009,13 +1014,13 @@ def simnibs_results_msh2hdf5_workhorse(fn_msh, fn_hdf5, session, pos_tms_idx, po
                        "_TMS_" +
                        str(f_tms_idx + 1) + "-" + str(f_local_idx + 1).zfill(4) + "_" +
                        os.path.splitext(
+                           os.path.splitext(os.path.split(session.poslists[f_tms_idx].fnamecoil)[1])[
+                               0])[0] +
                        "_nii_coil_pos.geo")

         # for some reason, the .geo file of the simulation was not saved ...
         try:
-            dipole_position, dipole_moment_mag = pynibs.read_coil_geo(fn_coil_geo)
+            dipole_position, dipole_moment_mag = pynibs.util.simnibs_io.read_coil_geo(fn_coil_geo)
         except FileNotFoundError:
             dipole_position = np.array([[0, 0, 0]])
             dipole_moment_mag = np.array([[0]])

@@ -1055,10 +1060,10 @@ def simnibs_results_msh2hdf5_workhorse(fn_msh, fn_hdf5, session, pos_tms_idx, po
         # write .xdmf markup file for paraview
         pynibs.write_xdmf(hdf5_fn=os.path.splitext(fn_hdf5[f_global_idx])[0] + ".hdf5",
+                          hdf5_geo_fn=subject.mesh[mesh_id]["fn_mesh_hdf5"],
+                          overwrite_xdmf=True,
+                          verbose=False,
+                          mode=mode_xdmf)

         # if calculated from Simnibs copy and crop midlayer results to ROIs
         if session.map_to_surf and mid2roi is not False:
@@ -1068,38 +1073,40 @@ def simnibs_results_msh2hdf5_workhorse(fn_msh, fn_hdf5, session, pos_tms_idx, po
             if verbose:
                 print(f"Loading ROIs")

-            roi = pynibs.load_roi_surface_obj_from_hdf5(fname=subject.mesh[
+            roi = pynibs.load_roi_surface_obj_from_hdf5(fname=subject.mesh[mesh_id]["fn_mesh_hdf5"])
             mesh_folder = os.path.join(subject.subject_folder,
-                                       'mesh', str(
-                                       subject.mesh[
+                                       'mesh', str(mesh_id),
+                                       subject.mesh[mesh_id]["mesh_folder"])

-            for
+            for roi_id in roi.keys():
                 # skip rois that are not wanted
-                if isinstance(mid2roi, list) and
+                if isinstance(mid2roi, list) and roi_id not in mid2roi:
                     continue
                 # load freesurfer surface
-                if type(roi[
-                    roi[
+                if type(roi[roi_id].gm_surf_fname) is not list:
+                    roi[roi_id].gm_surf_fname = [roi[roi_id].gm_surf_fname]

-                points_gm = [None for _ in range(len(roi[
-                con_gm = [None for _ in range(len(roi[
+                points_gm = [None for _ in range(len(roi[roi_id].gm_surf_fname))]
+                con_gm = [None for _ in range(len(roi[roi_id].gm_surf_fname))]

                 max_idx_gm = 0

-                if (type(roi[
-                        (type(roi[
-                    if type(roi[
-                        fn_surface = [roi[
+                if (type(roi[roi_id].gm_surf_fname) is list and roi[roi_id].gm_surf_fname[0] is not None) or \
+                        (type(roi[roi_id].gm_surf_fname) is str):
+                    if type(roi[roi_id].gm_surf_fname) is str:
+                        fn_surface = [roi[roi_id].gm_surf_fname]
                     else:
-                        fn_surface = roi[
+                        fn_surface = roi[roi_id].gm_surf_fname

-                elif (type(roi[
-                        roi[
-                        (type(roi[
-                    if type(roi[
-                        fn_surface = [roi[
+                elif (type(roi[roi_id].midlayer_surf_fname) is list and
+                      roi[roi_id].gm_surf_fname is not None) or \
+                        (type(roi[roi_id].midlayer_surf_fname) is str):
+                    if type(roi[roi_id].midlayer_surf_fname) is str:
+                        fn_surface = [roi[roi_id].midlayer_surf_fname]
                     else:
-                        fn_surface = roi[
+                        fn_surface = roi[roi_id].midlayer_surf_fname
+                else:
+                    raise ValueError

                 for i in range(len(fn_surface)):
                     if fn_surface[i].endswith('.gii') or fn_surface[i].endswith('.gii.gz'):
@@ -1108,7 +1115,7 @@ def simnibs_results_msh2hdf5_workhorse(fn_msh, fn_hdf5, session, pos_tms_idx, po
                         con_gm[i] = gii_obj.darrays[1].data
                     else:
                         points_gm[i], con_gm[i] = nibabel.freesurfer.read_geometry(
+                            os.path.join(mesh_folder, fn_surface[i]))

                     con_gm[i] = con_gm[i] + max_idx_gm
                     max_idx_gm = max_idx_gm + points_gm[i].shape[0]

@@ -1117,32 +1124,32 @@ def simnibs_results_msh2hdf5_workhorse(fn_msh, fn_hdf5, session, pos_tms_idx, po
                 con_gm = np.vstack(con_gm)

                 if verbose:
-                    print(f"Processing data to ROI #{
-                if roi[
-                    if roi[
-                        roi[
-                    if roi[
-                        roi[
-                    if roi[
-                        roi[
-                    roi_mask_bool = (roi[
-                        roi[
-                        (roi[
-                        roi[
-                        (roi[
-                        roi[
+                    print(f"Processing data to ROI #{roi_id}")
+
+                if roi[roi_id].fn_mask is None or roi[roi_id].fn_mask == []:
+                    if roi[roi_id].X_ROI is None or roi[roi_id].X_ROI == []:
+                        roi[roi_id].X_ROI = [-np.inf, np.inf]
+                    if roi[roi_id].Y_ROI is None or roi[roi_id].Y_ROI == []:
+                        roi[roi_id].Y_ROI = [-np.inf, np.inf]
+                    if roi[roi_id].Z_ROI is None or roi[roi_id].Z_ROI == []:
+                        roi[roi_id].Z_ROI = [-np.inf, np.inf]
+
+                    roi_mask_bool = (roi[roi_id].node_coord_mid[:, 0] > min(roi[roi_id].X_ROI)) & (
+                            roi[roi_id].node_coord_mid[:, 0] < max(roi[roi_id].X_ROI)) & \
+                                    (roi[roi_id].node_coord_mid[:, 1] > min(roi[roi_id].Y_ROI)) & (
+                            roi[roi_id].node_coord_mid[:, 1] < max(roi[roi_id].Y_ROI)) & \
+                                    (roi[roi_id].node_coord_mid[:, 2] > min(roi[roi_id].Z_ROI)) & (
+                            roi[roi_id].node_coord_mid[:, 2] < max(roi[roi_id].Z_ROI))
                     roi_mask_idx = np.where(roi_mask_bool)

                 else:
-                    if type(roi[
-                        if roi[
-                            roi[
+                    if type(roi[roi_id].fn_mask) is np.ndarray:
+                        if roi[roi_id].fn_mask.ndim == 0:
+                            roi[roi_id].fn_mask = roi[roi_id].fn_mask.astype(str).tolist()

                     # read mask from freesurfer mask file
                     mask = nibabel.freesurfer.mghformat.MGHImage.from_filename(
+                        os.path.join(mesh_folder, roi[roi_id].fn_mask)).dataobj[:]
                     roi_mask_idx = np.where(mask > 0.5)

                 # read results data
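The restored block above builds an axis-aligned bounding-box mask over the midlayer node coordinates, with unset `X_ROI`/`Y_ROI`/`Z_ROI` bounds defaulting to ±inf. A compact, self-contained version of that logic (a sketch, not the pynibs implementation itself) reads:

```python
import numpy as np

def bbox_mask(coords, x_roi=None, y_roi=None, z_roi=None):
    """Boolean mask of points inside an axis-aligned box; open bounds default to +/- inf.

    coords plays the role of roi[roi_id].node_coord_mid, shape (N, 3). Sketch only.
    """
    mask = np.ones(coords.shape[0], dtype=bool)
    for axis, bounds in enumerate((x_roi, y_roi, z_roi)):
        if not bounds:                               # None or [] -> unconstrained axis
            bounds = [-np.inf, np.inf]
        mask &= (coords[:, axis] > min(bounds)) & (coords[:, axis] < max(bounds))
    return mask
```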
@@ -1157,15 +1164,17 @@ def simnibs_results_msh2hdf5_workhorse(fn_msh, fn_hdf5, session, pos_tms_idx, po
                     if "lh" in os.path.split(fn_surf)[1]:
                         fname_base = os.path.join(os.path.split(f_msh)[0], "subject_overlays", "lh." + fn_msh_base)

+                    elif "rh" in os.path.split(fn_surf)[1]:
                         fname_base = os.path.join(os.path.split(f_msh)[0], "subject_overlays", "rh." + fn_msh_base)
+                    else:
+                        raise ValueError

                     e_normal.append(
+                        nibabel.freesurfer.read_morph_data(f"{fname_base}.central.E.normal").flatten()[:,
+                        np.newaxis])
                     e_tan.append(
+                        nibabel.freesurfer.read_morph_data(f"{fname_base}.central.E.tangent").flatten()[:,
+                        np.newaxis])

                 e_normal = np.vstack(e_normal)
                 e_tan = np.vstack(e_tan)

@@ -1173,8 +1182,8 @@ def simnibs_results_msh2hdf5_workhorse(fn_msh, fn_hdf5, session, pos_tms_idx, po
                 # transform point data to element data
                 if verbose:
                     print("Transforming point data to element data")
-                e_normal = pynibs.data_nodes2elements(data=e_normal, con=con_gm)
-                e_tan = pynibs.data_nodes2elements(data=e_tan, con=con_gm)
+                e_normal = pynibs.mesh.data_nodes2elements(data=e_normal, con=con_gm)
+                e_tan = pynibs.mesh.data_nodes2elements(data=e_tan, con=con_gm)

                 # crop results data to ROI
                 # if not roi_mask_bool.all():
@@ -1196,15 +1205,15 @@ def simnibs_results_msh2hdf5_workhorse(fn_msh, fn_hdf5, session, pos_tms_idx, po
                 with h5py.File(os.path.splitext(fn_hdf5[f_global_idx])[0] + ".hdf5", 'a') as f:
                     try:
-                        del f['data/midlayer/roi_surface/{}/E_mag'
-                        del f['data/midlayer/roi_surface/{}/E_tan'
-                        del f['data/midlayer/roi_surface/{}/E_norm'
+                        del f[f'data/midlayer/roi_surface/{roi_id}/E_mag']
+                        del f[f'data/midlayer/roi_surface/{roi_id}/E_tan']
+                        del f[f'data/midlayer/roi_surface/{roi_id}/E_norm']
                     except KeyError:
                         pass

-                    f.create_dataset('data/midlayer/roi_surface/{}/E_mag'
-                    f.create_dataset('data/midlayer/roi_surface/{}/E_tan'
-                    f.create_dataset('data/midlayer/roi_surface/{}/E_norm'
+                    f.create_dataset(f'data/midlayer/roi_surface/{roi_id}/E_mag', data=e_mag)
+                    f.create_dataset(f'data/midlayer/roi_surface/{roi_id}/E_tan', data=e_tan)
+                    f.create_dataset(f'data/midlayer/roi_surface/{roi_id}/E_norm', data=e_normal)

                     del e_mag, e_normal, e_tan
             except KeyError as e:

@@ -1236,8 +1245,8 @@ def simnibs_results_msh2hdf5_workhorse(fn_msh, fn_hdf5, session, pos_tms_idx, po
     del f_global_idx


-def simnibs_results_msh2hdf5(fn_msh, fn_hdf5, S, pos_tms_idx, pos_local_idx, subject,
-                             n_cpu=4, verbose=False, overwrite=False, mid2roi=False):
+def simnibs_results_msh2hdf5(fn_msh, fn_hdf5, S, pos_tms_idx, pos_local_idx, subject, mesh_id=None, mode_xdmf="r+",
+                             n_cpu=4, verbose=False, overwrite=False, mid2roi=False, mesh_idx=None):
     """
     Converts simnibs .msh results file(s) to .hdf5 / .xdmf tuple.
@@ -1257,7 +1266,7 @@ def simnibs_results_msh2hdf5(fn_msh, fn_hdf5, S, pos_tms_idx, pos_local_idx, sub
         For every coil a separate TMSList exists, which contains multiple coil positions.
     subject : pynibs.subject.Subject
         Subject object.
+    mesh_id : int or str
         Mesh id.
     mode_xdmf : str, default: "r+"
         Mode to open hdf5_geo file to write xdmf. If hdf5_geo is already separated in tets and tris etc.,

@@ -1272,6 +1281,8 @@ def simnibs_results_msh2hdf5(fn_msh, fn_hdf5, S, pos_tms_idx, pos_local_idx, sub
         If the mesh contains ROIs and the e-field was calculated in the midlayer using simnibs
         (``S.map_to_surf = True``),
         the midlayer results will be mapped from the simnibs midlayer to the ROIs (takes some time for large ROIs).
+    mesh_idx : int or str,
+        Deprecated, use mesh_id instead.

     Returns
     -------

@@ -1279,22 +1290,27 @@ def simnibs_results_msh2hdf5(fn_msh, fn_hdf5, S, pos_tms_idx, pos_local_idx, sub
     .hdf5 file containing the results. An .xdmf file is also created to link the results with the mesh .hdf5 file
     of the subject.
     """
+    if mesh_idx is not None:
+        warnings.warn("Use 'mesh_id' instead of 'mesh_idx'", DeprecationWarning)
+        mesh_id = mesh_idx
     n_cpu_available = multiprocessing.cpu_count()
     n_cpu = min(n_cpu, n_cpu_available, len(fn_msh))
     pool = multiprocessing.Pool(n_cpu)
+    if mid2roi and not S.map_to_surf:
+        warnings.warn("mid2roi is set to True, but S.map_to_surf is False. mid2roi will be ignored.")
     save_hdf5_partial = partial(simnibs_results_msh2hdf5_workhorse,
                                 fn_hdf5=fn_hdf5,
                                 session=S,
                                 pos_tms_idx=pos_tms_idx,
                                 pos_local_idx=pos_local_idx,
                                 subject=subject,
+                                mesh_id=mesh_id,
                                 mode_xdmf="r",
                                 verbose=verbose,
                                 overwrite=overwrite,
                                 mid2roi=mid2roi)

-    filenames_chunks = pynibs.compute_chunks(fn_msh, n_cpu)
+    filenames_chunks = pynibs.util.utils.compute_chunks(fn_msh, n_cpu)
     pool.map(save_hdf5_partial, filenames_chunks)
     pool.close()
     pool.join()
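Both converter functions now take `mesh_id` and keep `mesh_idx` only as a deprecated alias that raises a `DeprecationWarning` and is reassigned to `mesh_id`; the wrapper additionally warns when `mid2roi=True` but `S.map_to_surf` is not set. A caller-side migration might look like the sketch below; all values are placeholders, and the package-level access path is an assumption (otherwise import the function from `pynibs.hdf5_io.hdf5_io`).

```python
# Hypothetical call showing the mesh_idx -> mesh_id migration.
pynibs.simnibs_results_msh2hdf5(fn_msh=fn_msh_list,
                                fn_hdf5=fn_hdf5_list,
                                S=session,
                                pos_tms_idx=pos_tms_idx,
                                pos_local_idx=pos_local_idx,
                                subject=subject,
                                mesh_id="mesh0",     # was mesh_idx="mesh0" (deprecated)
                                n_cpu=4,
                                mid2roi=True)        # warns and is ignored unless S.map_to_surf is True
```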
@@ -1377,17 +1393,17 @@ def msh2hdf5(fn_msh=None, skip_roi=False, skip_layer=True, include_data=False,
                 except KeyError:
                     pass
                 roi[mesh_idx][roi_idx].make_GM_WM_surface(
+                    gm_surf_fname=subject.roi[mesh_idx][roi_idx]['gm_surf_fname'],
+                    wm_surf_fname=subject.roi[mesh_idx][roi_idx]['wm_surf_fname'],
+                    midlayer_surf_fname=subject.roi[mesh_idx][roi_idx]['midlayer_surf_fname'],
+                    mesh_folder=mesh_folder,
+                    delta=subject.roi[mesh_idx][roi_idx]['delta'],
+                    x_roi=subject.roi[mesh_idx][roi_idx]['X_ROI'],
+                    y_roi=subject.roi[mesh_idx][roi_idx]['Y_ROI'],
+                    z_roi=subject.roi[mesh_idx][roi_idx]['Z_ROI'],
+                    layer=subject.roi[mesh_idx][roi_idx]['layer'],
+                    fn_mask=subject.roi[mesh_idx][roi_idx]['fn_mask'],
+                    refine=refine)
             volmesh = simnibs.read_msh(subject.mesh[mesh_idx]['fn_mesh_msh'])

             if not skip_layer:

@@ -1470,9 +1486,9 @@ def msh2hdf5(fn_msh=None, skip_roi=False, skip_layer=True, include_data=False,
         hdf5_geo_fn = out_fn

     pynibs.write_xdmf(hdf5_fn=out_fn,
+                      hdf5_geo_fn=hdf5_geo_fn,
+                      overwrite_xdmf=True,
+                      verbose=False)


 def write_arr_to_hdf5(fn_hdf5, arr_name, data, overwrite_arr=True, verbose=False, check_file_exist=False):
@@ -1480,7 +1496,7 @@ def write_arr_to_hdf5(fn_hdf5, arr_name, data, overwrite_arr=True, verbose=False
     Takes an array and adds it to an hdf5 file.

     If data is list of dict, ``write_dict_to_hdf5()`` is called for each dict with adapted hdf5-folder name
-    Otherwise, data is
+    Otherwise, data is cast to np.ndarray and dtype of unicode data cast to ``'|S'``.

     Parameters
     ----------
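The clarified docstring states that unicode data is cast to `'|S'` byte strings before writing, which is the usual way to make string arrays acceptable to h5py. As a small illustration (not pynibs code):

```python
import numpy as np

names = np.array(["lh.central", "rh.central"])   # dtype '<U10' (unicode)
names_bytes = names.astype("|S")                 # dtype 'S10', h5py-friendly byte strings
print(names_bytes.dtype)                         # |S10
```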
@@ -1573,7 +1589,6 @@ def write_dict_to_hdf5(fn_hdf5, data, folder, check_file_exist=False, verbose=Fa
     folder : str
     verbose : bool
     check_file_exist : bool
-
     """
     for key in data.keys():
         write_arr_to_hdf5(fn_hdf5=fn_hdf5,

@@ -1585,7 +1600,7 @@ def write_dict_to_hdf5(fn_hdf5, data, folder, check_file_exist=False, verbose=Fa

 def read_dict_from_hdf5(fn_hdf5, folder):
     """
-    Read all arrays from
+    Read all arrays from hdf5 file and return them as dict

     Parameters
     ----------

@@ -1646,7 +1661,7 @@ def read_arr_from_hdf5(fn_hdf5, folder):
     Returns
     -------
     data_from_hdf5 : list
+        Contains data from .hdf5 file.
     """
     arr_1d = False

@@ -1715,7 +1730,7 @@ def create_fibre_geo_hdf5(fn_fibres_hdf5, overwrite=True):
     # create connectivity list
     fibre_con = np.hstack(
+        (np.arange(fibre_points.shape[0])[:, np.newaxis], np.arange(fibre_points.shape[0])[:, np.newaxis] + 1))

     # delete connectivities between fibres
     fibre_con = np.delete(fibre_con, np.cumsum([len(fib) for fib in fibres]) - 1, 0)
@@ -1735,7 +1750,7 @@ def write_coil_hdf5(tms_coil, fn):
     Can be visualized with ParaView (use Glyph plugin to view wires).

-    .. figure::
+    .. figure:: ../../doc/images/two_stimulator_coil.png
        :scale: 80 %
        :alt: Example coil visualization

@@ -1795,21 +1810,23 @@ def write_coil_hdf5(tms_coil, fn):
                                     np.full((tms_coil.elements[idx].points.shape[0]),
                                             tms_coil.elements[idx].stimulator.name)))

     else:
         wire_nodes = tms_coil.elements.points
         wire_data = tms_coil.elements.values
         coil_name = np.full((tms_coil.elements.points.shape[0]), tms_coil.elements.name)
         stim_name = np.full((tms_coil.elements.points.shape[0]), tms_coil.elements.stimulator.name)

+    coil_names_ordered = [i.name for i in tms_coil.elements]
+    stim_names_ordered = [i.stimulator.name for i in tms_coil.elements]
+
     # Convert strings to their corresponding float values
-    unique_strings = np.unique(coil_name)
-    string_to_float_map = {string: i for i, string in enumerate(
+    # unique_strings = np.unique(coil_name)
+    string_to_float_map = {string: i for i, string in enumerate(coil_names_ordered)}
     coil_name = np.array([string_to_float_map[string] for string in coil_name])

     # Convert strings to their corresponding float values
-    unique_strings = np.unique(stim_name)
-    string_to_float_map = {string: i for i, string in enumerate(
+    # unique_strings = np.unique(stim_name)
+    string_to_float_map = {string: i for i, string in enumerate(stim_names_ordered)}
     stim_name = np.array([string_to_float_map[string] for string in stim_name])

     n_nodes = wire_nodes.shape[0]
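The last hunk replaces `np.unique` (which sorts labels alphabetically) with the element order of the coil when mapping coil and stimulator names to integer codes, so the codes written to the `.hdf5` file follow `tms_coil.elements` order. The difference is easy to see on toy data:

```python
import numpy as np

labels = np.array(["stim_B", "stim_A", "stim_B"])             # per-node labels (toy data)

sorted_map = {s: i for i, s in enumerate(np.unique(labels))}   # old: alphabetical codes
ordered_names = ["stim_B", "stim_A"]                           # e.g. stimulator names in element order
ordered_map = {s: i for i, s in enumerate(ordered_names)}      # new: order-preserving codes

print([sorted_map[s] for s in labels])    # [1, 0, 1]
print([ordered_map[s] for s in labels])   # [0, 1, 0]
```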