pyNIBS 0.2024.8__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pyNIBS-0.2024.8.dist-info/LICENSE +623 -0
- pyNIBS-0.2024.8.dist-info/METADATA +723 -0
- pyNIBS-0.2024.8.dist-info/RECORD +107 -0
- pyNIBS-0.2024.8.dist-info/WHEEL +5 -0
- pyNIBS-0.2024.8.dist-info/top_level.txt +1 -0
- pynibs/__init__.py +34 -0
- pynibs/coil.py +1367 -0
- pynibs/congruence/__init__.py +15 -0
- pynibs/congruence/congruence.py +1108 -0
- pynibs/congruence/ext_metrics.py +257 -0
- pynibs/congruence/stimulation_threshold.py +318 -0
- pynibs/data/configuration_exp0.yaml +59 -0
- pynibs/data/configuration_linear_MEP.yaml +61 -0
- pynibs/data/configuration_linear_RT.yaml +61 -0
- pynibs/data/configuration_sigmoid4.yaml +68 -0
- pynibs/data/network mapping configuration/configuration guide.md +238 -0
- pynibs/data/network mapping configuration/configuration_TEMPLATE.yaml +42 -0
- pynibs/data/network mapping configuration/configuration_for_testing.yaml +43 -0
- pynibs/data/network mapping configuration/configuration_modelTMS.yaml +43 -0
- pynibs/data/network mapping configuration/configuration_reg_isi_05.yaml +43 -0
- pynibs/data/network mapping configuration/output_documentation.md +185 -0
- pynibs/data/network mapping configuration/recommendations_for_accuracy_threshold.md +77 -0
- pynibs/data/neuron/models/L23_PC_cADpyr_biphasic_v1.csv +1281 -0
- pynibs/data/neuron/models/L23_PC_cADpyr_monophasic_v1.csv +1281 -0
- pynibs/data/neuron/models/L4_LBC_biphasic_v1.csv +1281 -0
- pynibs/data/neuron/models/L4_LBC_monophasic_v1.csv +1281 -0
- pynibs/data/neuron/models/L4_NBC_biphasic_v1.csv +1281 -0
- pynibs/data/neuron/models/L4_NBC_monophasic_v1.csv +1281 -0
- pynibs/data/neuron/models/L4_SBC_biphasic_v1.csv +1281 -0
- pynibs/data/neuron/models/L4_SBC_monophasic_v1.csv +1281 -0
- pynibs/data/neuron/models/L5_TTPC2_cADpyr_biphasic_v1.csv +1281 -0
- pynibs/data/neuron/models/L5_TTPC2_cADpyr_monophasic_v1.csv +1281 -0
- pynibs/expio/Mep.py +1518 -0
- pynibs/expio/__init__.py +8 -0
- pynibs/expio/brainsight.py +979 -0
- pynibs/expio/brainvis.py +71 -0
- pynibs/expio/cobot.py +239 -0
- pynibs/expio/exp.py +1876 -0
- pynibs/expio/fit_funs.py +287 -0
- pynibs/expio/localite.py +1987 -0
- pynibs/expio/signal_ced.py +51 -0
- pynibs/expio/visor.py +624 -0
- pynibs/freesurfer.py +502 -0
- pynibs/hdf5_io/__init__.py +10 -0
- pynibs/hdf5_io/hdf5_io.py +1857 -0
- pynibs/hdf5_io/xdmf.py +1542 -0
- pynibs/mesh/__init__.py +3 -0
- pynibs/mesh/mesh_struct.py +1394 -0
- pynibs/mesh/transformations.py +866 -0
- pynibs/mesh/utils.py +1103 -0
- pynibs/models/_TMS.py +211 -0
- pynibs/models/__init__.py +0 -0
- pynibs/muap.py +392 -0
- pynibs/neuron/__init__.py +2 -0
- pynibs/neuron/neuron_regression.py +284 -0
- pynibs/neuron/util.py +58 -0
- pynibs/optimization/__init__.py +5 -0
- pynibs/optimization/multichannel.py +278 -0
- pynibs/optimization/opt_mep.py +152 -0
- pynibs/optimization/optimization.py +1445 -0
- pynibs/optimization/workhorses.py +698 -0
- pynibs/pckg/__init__.py +0 -0
- pynibs/pckg/biosig/biosig4c++-1.9.5.src_fixed.tar.gz +0 -0
- pynibs/pckg/libeep/__init__.py +0 -0
- pynibs/pckg/libeep/pyeep.so +0 -0
- pynibs/regression/__init__.py +11 -0
- pynibs/regression/dual_node_detection.py +2375 -0
- pynibs/regression/regression.py +2984 -0
- pynibs/regression/score_types.py +0 -0
- pynibs/roi/__init__.py +2 -0
- pynibs/roi/roi.py +895 -0
- pynibs/roi/roi_structs.py +1233 -0
- pynibs/subject.py +1009 -0
- pynibs/tensor_scaling.py +144 -0
- pynibs/tests/data/InstrumentMarker20200225163611937.xml +19 -0
- pynibs/tests/data/TriggerMarkers_Coil0_20200225163443682.xml +14 -0
- pynibs/tests/data/TriggerMarkers_Coil1_20200225170337572.xml +6373 -0
- pynibs/tests/data/Xdmf.dtd +89 -0
- pynibs/tests/data/brainsight_niiImage_nifticoord.txt +145 -0
- pynibs/tests/data/brainsight_niiImage_nifticoord_largefile.txt +1434 -0
- pynibs/tests/data/brainsight_niiImage_niifticoord_mixedtargets.txt +47 -0
- pynibs/tests/data/create_subject_testsub.py +332 -0
- pynibs/tests/data/data.hdf5 +0 -0
- pynibs/tests/data/geo.hdf5 +0 -0
- pynibs/tests/test_coil.py +474 -0
- pynibs/tests/test_elements2nodes.py +100 -0
- pynibs/tests/test_hdf5_io/test_xdmf.py +61 -0
- pynibs/tests/test_mesh_transformations.py +123 -0
- pynibs/tests/test_mesh_utils.py +143 -0
- pynibs/tests/test_nnav_imports.py +101 -0
- pynibs/tests/test_quality_measures.py +117 -0
- pynibs/tests/test_regressdata.py +289 -0
- pynibs/tests/test_roi.py +17 -0
- pynibs/tests/test_rotations.py +86 -0
- pynibs/tests/test_subject.py +71 -0
- pynibs/tests/test_util.py +24 -0
- pynibs/tms_pulse.py +34 -0
- pynibs/util/__init__.py +4 -0
- pynibs/util/dosing.py +233 -0
- pynibs/util/quality_measures.py +562 -0
- pynibs/util/rotations.py +340 -0
- pynibs/util/simnibs.py +763 -0
- pynibs/util/util.py +727 -0
- pynibs/visualization/__init__.py +2 -0
- pynibs/visualization/para.py +4372 -0
- pynibs/visualization/plot_2D.py +137 -0
- pynibs/visualization/render_3D.py +347 -0
pynibs/expio/exp.py
ADDED
|
@@ -0,0 +1,1876 @@
|
|
|
1
|
+
import os
|
|
2
|
+
import csv
|
|
3
|
+
import copy
|
|
4
|
+
import nibabel
|
|
5
|
+
import warnings
|
|
6
|
+
import datetime
|
|
7
|
+
import numpy as np
|
|
8
|
+
import pandas as pd
|
|
9
|
+
import matplotlib.pyplot as plt
|
|
10
|
+
from matplotlib.colors import Normalize
|
|
11
|
+
from collections import OrderedDict
|
|
12
|
+
from fsl.transform.flirt import fromFlirt
|
|
13
|
+
from fsl.data.image import Image
|
|
14
|
+
import pynibs
|
|
15
|
+
|
|
16
|
+
try:
|
|
17
|
+
from pynibs.pckg import libeep
|
|
18
|
+
except (ImportError, SyntaxError):
|
|
19
|
+
pass
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
def read_exp_stimulations(fname_results_conditions, fname_simpos, filter_bad_trials=False, drop_idx=None):
    """
    Read results_conditions.csv and simPos.csv and return the stimulation data.

    Parameters
    ----------
    fname_results_conditions : str
        Filename of results_conditions.csv file.
    fname_simpos : str
        Filename of simPos.csv file.
    filter_bad_trials : bool, default: False
        If True, some filtering will be done to exclude erroneous data.
    drop_idx : list, optional
        Indices of trials to drop (only supported together with ``filter_bad_trials=True``).

    Returns
    -------
    positions_all : list of np.ndarrays of float
        (N_zaps, (4, 4)) List of position matrices of TMS coil, formatted in simnibs style.

        .. math::
            \\begin{bmatrix}
            | & | & | & | \\\\
            x & y & z & pos \\\\
            | & | & | & | \\\\
            0 & 0 & 0 & 1 \\\\
            \\end{bmatrix}

    conditions : list of str
        (N_zaps) Str labels of the condition corresponding to each zap.
    position_list : list of float and str
        (N_zaps x 55) List of data stored in results_conditions.csv (condition, MEP amplitude,
        locations of neuronavigation trackers).
    mep_amp : np.array of float
        (N_zaps) MEP amplitude in [V] corresponding to each zap.
    intensities : np.ndarray of float
        (N_zaps) Stimulator intensities corresponding to each zap.
    fails_idx : np.ndarray
        (N_fails_idx, 1) Which trials were dropped through filtering (only if filter_bad_trials).

    Raises
    ------
    NotImplementedError
        If ``drop_idx`` is given but ``filter_bad_trials`` is False.
    """
    if drop_idx is None:
        drop_idx = []
    if not isinstance(drop_idx, list):
        drop_idx = [drop_idx]

    # read results_conditions.csv; each row is one trial.
    # NOTE: csv.reader requires a text-mode file in Python 3 (the old 'rb'
    # mode raised "iterator should return strings, not bytes"); newline=''
    # is what the csv module documentation prescribes for open().
    position_list = []
    with open(fname_results_conditions, 'r', newline='') as positions:
        posreader = csv.reader(positions, delimiter=',', quotechar='|')
        next(posreader, None)  # skip header
        for row in posreader:
            position_list.append(row)

    # read simPos.csv (one row of coil-center coordinates per trial, no header)
    sim_pos_list = []
    with open(fname_simpos, 'r', newline='') as sim_pos_file:
        posreader = csv.reader(sim_pos_file, delimiter=',', quotechar='|')
        for row in posreader:
            sim_pos_list.append(row)

    # fixed column layout of results_conditions.csv:
    # col 3: stimulator intensity, col 48: MEP amplitude, col 49: time difference,
    # col 54: frame time, third-to-last column: condition label
    conditions = [position_list[i][len(position_list[0]) - 3] for i in range(len(position_list))]
    positions_all = []
    mep_amp = [float(position_list[i][48]) for i in range(len(position_list))]

    frametime = [float(cell[54]) for cell in position_list]
    intensities = [float(position_list[i][3]) for i in range(len(position_list))]
    time_diff = [float(cell[49]) for cell in position_list]

    fails_idx = None
    if filter_bad_trials:
        # convert to masked arrays so bad trials can be masked and compressed away
        mep_amp = np.ma.array(mep_amp, mask=False)
        frametime = np.ma.array(frametime, mask=False)
        time_diff = np.ma.array(time_diff, mask=False)
        intensities = np.ma.array(intensities, mask=False)
        conditions = np.ma.array(conditions, mask=False)

        # heuristics for erroneous trials: implausible MEP amplitude, frame
        # timing outside the expected window, too-low stimulator intensity,
        # or overly long time differences
        fails_idx = np.where((mep_amp < 0) |
                             (mep_amp > 30) |
                             (frametime > 0.235) |
                             (frametime < 0.218) |
                             (intensities < 20) |
                             (time_diff > 100))[0]
        for idx in drop_idx:
            fails_idx = np.append(fails_idx, idx)

        # mask the bad trials ...
        intensities.mask[fails_idx] = True
        conditions.mask[fails_idx] = True
        mep_amp.mask[fails_idx] = True

        # ... and remove them
        intensities = intensities.compressed()
        mep_amp = mep_amp.compressed()
        conditions = conditions.compressed()

        position_list_filtered = []
        sim_pos_list_filtered = []
        for idx in range(len(position_list)):
            if idx not in fails_idx and idx not in drop_idx:
                position_list_filtered.append(position_list[idx])
                sim_pos_list_filtered.append(sim_pos_list[idx])

        position_list = position_list_filtered
        sim_pos_list = sim_pos_list_filtered

    elif len(drop_idx) > 0 and not filter_bad_trials:
        raise NotImplementedError

    for idx, row in enumerate(position_list):
        # reorder rotation axes from results_conditions.csv into SimNIBS
        # convention (x <-> z swapped, y negated):
        # simnibs:     02 -01 00 03 | 12 -11 10 13 | 22 -21 20 23 | 0 0 0 1
        # results.csv: 38 -37 36    | 42 -41 40    | 46 -45 44
        # the translation column (coil center) comes from simPos.csv
        positions_all.append([[float(row[38]), - float(row[37]), float(row[36]), float(sim_pos_list[idx][0])],
                              [float(row[42]), - float(row[41]), float(row[40]), float(sim_pos_list[idx][1])],
                              [float(row[46]), - float(row[45]), float(row[44]), float(sim_pos_list[idx][2])],
                              [0, 0, 0, 1]])

    if filter_bad_trials:
        return positions_all, conditions, position_list, \
            np.array(mep_amp).astype(float), np.array(intensities).astype(float), fails_idx

    else:
        return positions_all, conditions, position_list, \
            np.array(mep_amp).astype(float), np.array(intensities).astype(float)
|
|
156
|
+
def sort_data_by_condition(conditions, return_alph_sorted=True, conditions_selected=None, *data):
    """
    Sort data by condition and return tuples of data with corresponding labels.

    Parameters
    ----------
    conditions : list of str
        (N_zaps) Str labels of the condition corresponding to each data.
    return_alph_sorted : bool, default: True
        Shall returns be in alphabetical or original order.
    conditions_selected : list of str, optional
        List of conditions returned by the function (in this order), the others are omitted.
    data : tuple of data indexed by condition
        (N_data, N_zaps, m) Data to sort.

    Returns
    -------
    cond_labels : list of str
        (N_cond) Labels of conditions.
    data_sorted : tuple of sorted data
        (N_cond, N_data, N_zaps, m) Sorted data by condition.
    """
    # unique condition labels (alphabetical) plus first-occurrence indices,
    # which allow restoring the original ordering if requested
    cond_labels, first_idx = np.unique(conditions, return_index=True)

    if not return_alph_sorted:
        cond_labels = np.array(conditions)[np.sort(first_idx)]

    # group every dataset by condition label
    data_sorted = []
    for dataset in data:
        per_condition = []
        for cond in cond_labels:
            sel = [i for i, c in enumerate(conditions) if c == cond]
            per_condition.append(dataset[sel,])
        data_sorted.append(per_condition)

    if not conditions_selected:
        return cond_labels, data_sorted

    # keep only the requested conditions, in the requested order
    data_sorted_selected = [[0] * len(conditions_selected) for _ in range(len(data))]
    for i_data in range(len(data)):
        for i_sel, wanted in enumerate(conditions_selected):
            for i_all, known in enumerate(cond_labels):
                if known == wanted:
                    data_sorted_selected[i_data][i_sel] = data_sorted[i_data][i_all]

    return conditions_selected, data_sorted_selected
|
|
211
|
+
def outliers_mask(data, m=2.):
    """
    Compute an inlier mask using the Median Absolute Deviation (MAD) criterion.

    .. note::
        Despite the name, the returned mask is True for values to KEEP
        (inliers) and False for outliers, because the comparison is
        ``deviation / MAD < m``.

    Parameters
    ----------
    data : array-like
        Input data for which outliers are to be detected.
    m : float, default: 2.0
        Threshold multiplier: values whose absolute deviation from the median
        exceeds ``m`` times the MAD are flagged as outliers.

    Returns
    -------
    mask : np.ndarray of bool
        (len(data),) True for inliers, False for outliers. If the MAD is zero
        (at least half of the values equal the median), every value is kept.
    """
    data = np.asarray(data)
    d = np.abs(data - np.median(data))
    mdev = np.median(d)
    if not mdev:
        # Degenerate distribution: MAD is 0 and the ratio is undefined.
        # The previous implementation returned a scalar True here; return a
        # proper all-True mask instead so callers can always index with it.
        return np.ones(data.shape, dtype=bool)
    return d / mdev < m
|
|
234
|
+
def square(x, a, b, c):
    """
    Evaluate the parametrized quadratic function.

    .. math::
        y = ax^2+bx+c

    Parameters
    ----------
    x : np.ndarray of float
        (N_x) X-values the function is evaluated in.
    a : float
        Slope parameter of x^2.
    b : float
        Slope parameter of x.
    c : float
        Offset parameter.

    Returns
    -------
    y : np.ndarray of float
        (N_x) Function value at argument x.
    """
    return a * x ** 2 + b * x + c
|
|
261
|
+
def splitext_niigz(fn):
    """
    Split extension(s) from a .nii or .nii.gz file.

    Parameters
    ----------
    fn : str
        Filename of input image .nii or .nii.gz file.

    Returns
    -------
    path : str
        Path and filename without extension(s).
    ext : str
        Extension, either .nii or .nii.gz.

    Raises
    ------
    ValueError
        If the filename ends in neither .nii nor .nii.gz.
    """
    path, filename = os.path.split(fn)

    file0, ext0 = os.path.splitext(filename)

    if ext0 == '.gz':
        file1, ext1 = os.path.splitext(file0)
        # require the inner extension to actually be .nii; previously any
        # *.gz file (e.g. .img.gz) was silently accepted
        if ext1 != '.nii':
            raise ValueError('File extension is neither .nii nor .nii.gz!')
        return os.path.join(path, file1), ext1 + ext0
    elif ext0 == '.nii':
        return os.path.join(path, file0), ext0
    else:
        # ValueError instead of bare Exception: still caught by existing
        # `except Exception` callers, but far more specific
        raise ValueError('File extension is neither .nii nor .nii.gz!')
|
|
291
|
+
def toRAS(fn_in, fn_out):
    """
    Transform an MRI .nii image to RAS space.

    The image axes are permuted and flipped (pure axis reordering, no
    resampling) so that voxel axes align with Right-Anterior-Superior
    orientation; the q-form/s-form are updated accordingly.

    Parameters
    ----------
    fn_in : str
        Filename of input image .nii file.
    fn_out : str
        Filename of output image .nii file in RAS space.

    Returns
    -------
    <File> : .nii file
        .nii image in RAS space with filename fn_out.
    """

    # read image data
    img_in = nibabel.load(fn_in)
    img_in_hdr = img_in.header
    img_out = copy.deepcopy(img_in)

    # read and invert q-form of original image (world -> voxel mapping)
    m_qform_in = img_in.get_qform()
    m_qform_inv_in = np.linalg.inv(m_qform_in)

    # identify axes to flip: sign of the dominant component per column
    mathlp = np.sign(m_qform_inv_in)

    # ras_dim[i]: which voxel axis maps to world axis i;
    # ras_sign[i]: its orientation (+1 / -1)
    ras_dim = np.zeros(3)
    ras_sign = np.zeros(3)

    for i in range(3):
        # the row with the largest absolute value in column i is the voxel
        # axis most aligned with world axis i
        ras_dim[i] = np.where(np.abs(m_qform_inv_in[:, i]) == np.max(np.abs(m_qform_inv_in[:, i])))[0]
        ras_sign[i] = mathlp[int(ras_dim[i]), i]

    ras_dim = ras_dim.astype(int)

    # apply sorting to qform: first permute, then flip
    m_perm = np.zeros((4, 4))
    m_perm[3, 3] = 1

    for i in range(3):
        m_perm[ras_dim[i], i] = 1

    # voxel dimensions, reordered to the permuted axes
    imgsize = img_in_hdr['dim'][1:4]
    imgsize = imgsize[ras_dim]

    m_flip = np.eye(4)

    for i in range(3):
        if ras_sign[i] < 0:
            # flipping axis i also shifts the origin to the far voxel edge
            m_flip[i, i] = -1
            m_flip[i, 3] = imgsize[i] - 1

    m_qform_out = np.dot(np.dot(m_qform_in, m_perm), m_flip)
    img_out.set_qform(m_qform_out)
    img_out.set_sform(m_qform_out)
    # m_toORG = np.dot(m_perm, m_flip)

    # apply sorting to image: first permute, then flip
    img_out_data = np.transpose(img_in.get_fdata(), ras_dim)

    for i in range(3):
        if ras_sign[i] < 0:
            img_out_data = np.flip(img_out_data, i)

    # save transformed image in .nii file
    img_out = nibabel.Nifti1Image(img_out_data, img_out.affine, img_out.header)
    nibabel.save(img_out, fn_out)
|
|
363
|
+
def get_coil_flip_m(source_system='simnibs', target_system=None):
    """
    Return a 4x4 flip matrix to convert coil axis definitions between systems.

    Parameters
    ----------
    source_system : str, default: 'simnibs'
        Atm only possible source: ``'simnibs'``.
    target_system : str, optional
        tmsnavigator, visor, brainsight.

    Returns
    -------
    flip_m : np.ndarray
        (4, 4) Flip matrix.

    Raises
    ------
    NotImplementedError
        If the source or target system is unknown (or ``target_system`` is None).
    """
    if source_system.lower() == 'simnibs':
        # guard against the default None: previously this crashed with an
        # unhelpful AttributeError on None.lower()
        if target_system is None:
            raise NotImplementedError(
                "Neuronavigation system: {} not implemented! ('tmsnavigator', 'Visor' or 'brainsight')".format(
                    target_system))

        if target_system.lower() in ["localite", "tmsnavigator"]:
            # simnibs -> Localite/TMSNavigator: swap x and z, negate y
            return np.array([[0, 0, 1, 0],
                             [0, -1, 0, 0],
                             [1, 0, 0, 0],
                             [0, 0, 0, 1]])

        elif target_system.lower() in ["visor", "brainsight"]:
            # simnibs -> Visor/Brainsight: negate x and z
            return np.array([[-1, 0, 0, 0],
                             [0, 1, 0, 0],
                             [0, 0, -1, 0],
                             [0, 0, 0, 1]])

        else:
            raise NotImplementedError(
                "Neuronavigation system: {} not implemented! ('tmsnavigator', 'Visor' or 'brainsight')".format(
                    target_system))

    raise NotImplementedError(
        "Source system: {} not implemented! ('simnibs')".format(source_system))
|
|
401
|
+
def nnav2simnibs(fn_exp_nii, fn_conform_nii, m_nnav, nnav_system, mesh_approach="headreco",
                 fiducials=None, orientation='RAS', fsl_cmd=None, target='simnibs', temp_dir=None, rem_tmp=False,
                 verbose=True):
    """
    Transform TMS coil positions from neuronavigation to simnibs space.

    The experimental image is first reoriented to RAS; if it differs from the
    SimNIBS conform image, FSL FLIRT is run (via ``os.system``) to coregister
    the two, and the resulting world-to-world transform is applied to every
    coil matrix together with the system-specific coil-axis flip.

    Parameters
    ----------
    fn_exp_nii : str
        Filename of .nii file the experiments were conducted with.
    fn_conform_nii : str
        Filename of .nii file from SimNIBS mri2msh function,
        e.g. ``".../fs_subjectID/subjectID_T1fs_conform.nii.gz"``.
    m_nnav : np.ndarray
        (4, 4, N) Position matrices from neuronavigation.
    nnav_system : str
        Neuronavigation system:

        * "Localite" ... Localite neuronavigation system
        * "Visor" ... Visor neuronavigation system from ANT
        * "Brainsight" ... Brainsight neuronavigation system from Rogue Research
    mesh_approach : str, default: "headreco"
        Approach the mesh is generated with (``"headreco"`` or ``"mri2mesh"``).
        NOTE(review): currently unused in this function body — confirm intent.
    fiducials : np.ndarray of float, optional
        (3, 3) Fiducial points in ANT nifti space from file,
        e.g.: ``"/data/pt_01756/probands/33791.b8/exp/1/33791.b8_recording/MRI/33791.b8_recording.mri"``
        (x frontal-occipital, y right-left, z inferior-superior).
        NOTE(review): currently unused in this function body — confirm intent.

        .. code-block:: sh
            VoxelOnPositiveXAxis (Nasion, first row)
            221 131 127
            VoxelOnPositiveYAxis (left ear, second row)
            121 203 105
            VoxelOnNegativeYAxis (right ear, third row)
            121 57 105
    orientation : str, default: 'RAS'
        Orientation convention (``'RAS'`` or ``'LPS'``), can be read from neuronavigation .xml
        file under ``coordinateSpace="RAS"``. Only ``'RAS'`` is implemented.
    fsl_cmd : str, optional
        bash command to start FSL environment (required when a FLIRT
        coregistration is needed, i.e. when exp and conform images differ).
    target : str, default: 'simnibs'
        Either transform to ``'simnibs'`` or to ``'nnav'`` space.
    temp_dir : str, optional
        Directory to save temporary files (transformation .nii and .mat files) (fn_exp_mri_nii folder).
    rem_tmp : bool, default: False
        Remove temporary files from registration.
    verbose : bool, default: True
        Print output.

    Returns
    -------
    m_simnibs : np.ndarray of float
        (4, 4, N) Transformed coil positions.
    """
    assert len(m_nnav.shape) == 3, f"m_nnav needs to be in shape [4, 4, N]"
    assert m_nnav.shape[:2] == (4, 4), f"m_nnav needs to be in shape [4, 4, N]"

    if temp_dir is None:
        temp_dir = os.path.split(fn_exp_nii)[0]

    assert target in ['nnav', 'simnibs']
    # get original qform without RAS
    exp_nii_original = nibabel.load(fn_exp_nii)
    conform_nii_original = nibabel.load(fn_conform_nii)
    m_qform_exp_original = exp_nii_original.get_qform()
    m_qform_conform_original = conform_nii_original.get_qform()

    # check if conform_nii and exp_nii are the same and have the same q-form;
    # if so, no FLIRT coregistration is needed
    skip_flirt = np.all((np.isclose(m_qform_conform_original, m_qform_exp_original)))

    # filename for the RAS-reoriented copy of the experimental image,
    # placed inside temp_dir
    fn_exp_nii_ras = os.path.join(temp_dir,
                                  os.path.split(splitext_niigz(fn_exp_nii)[0] +
                                                '_RAS' +
                                                splitext_niigz(fn_exp_nii)[1])[1])

    if not os.path.exists(temp_dir):
        os.makedirs(temp_dir)

    # transform exp to RAS
    toRAS(fn_exp_nii, fn_exp_nii_ras)

    # load .nii files
    if verbose:
        print('Loading .nii files:')
        print((' > {}'.format(fn_exp_nii_ras)))
        # print((' > {}'.format(fn_conform_nii)))
    exp_nii = nibabel.load(fn_exp_nii_ras)

    if verbose:
        print('Gathering header information...')

    # construct flip matrix (coil-axis convention of the nnav system)
    if verbose:
        print(' > flip matrix')

    m_flip = get_coil_flip_m(target_system=nnav_system)

    # construct flip matrix
    if verbose:
        print(' > RAS matrix')
    if orientation != 'RAS':
        raise NotImplementedError(f"Orientation {orientation} not implemented.")

    # construct flirt transformation matrix if necessary
    if verbose:
        print(' > flirt transformation matrix')
    if skip_flirt:
        if verbose:
            print(' - experimental and simnibs .nii files are equal... Accelerating process')
        m_exp2conf = np.eye(4)

    else:
        if verbose:
            print(' - starting coregistration of exp and conform .nii images')
        fn_flip = os.path.join(temp_dir, os.path.split(splitext_niigz(fn_exp_nii_ras)[0] + '_flipped_temp.nii')[1])
        fn_out_fslmaths = os.path.join(temp_dir, os.path.split(splitext_niigz(fn_conform_nii)[0] +
                                                               '_binarized_temp')[1])
        fn_mat_m_2conform = os.path.join(temp_dir,
                                         os.path.split(splitext_niigz(fn_exp_nii_ras)[0] + '_m_2conform_temp')[1])
        dof = 6  # rigid-body registration (6 degrees of freedom)

        # define binarization threshold on the image. .80 seems to work.
        # thresh = np.quantile(conform_nii.get_data(), 0.80)

        # flip image of exp along first dimension and save it (to LAS, radiologic)
        data_exp_flipped = np.flip(exp_nii.get_fdata(), axis=0)
        exp_flipped_nii = nibabel.Nifti1Image(data_exp_flipped, exp_nii.affine, exp_nii.header)
        nibabel.save(exp_flipped_nii, fn_flip)

        # call FSL to align exp to conform (skipped if a .mat from a previous
        # run already exists — acts as a cache)
        if not os.path.exists(fn_mat_m_2conform + '.mat'):
            cmdstr = ['' for _ in range(3)]
            cmdstr[0] = fsl_cmd + ' fslorient -setqformcode 1 ' + fn_flip
            cmdstr[1] = fsl_cmd + ' fslorient -forceradiological ' + fn_flip
            # this doesn't work stable for all images
            # cmdstr[2] = f'{fsl_cmd} fslmaths {fn_conform_nii} -thr {thresh} -bin -s 1 {fn_out_fslmaths}.nii.gz'
            cmdstr[2] = f'{fsl_cmd} flirt -in {fn_flip} -ref {fn_conform_nii} ' \
                        f' -searchrx -30 30 ' \
                        f'-searchry -30 30 -searchrz -30 30 -interp sinc ' \
                        f'-cost mutualinfo -searchcost mutualinfo -dof {str(dof)} ' \
                        f'-omat {fn_mat_m_2conform}.mat -out {fn_mat_m_2conform}.nii.gz'

            # execute FSL commands
            if verbose:
                print(' - Executing:')
            for i in range(len(cmdstr)):
                if verbose:
                    print((' > {}'.format(cmdstr[i])))
                os.system(cmdstr[i])

        m_2conform = np.loadtxt(f'{fn_mat_m_2conform}.mat')

        exp_ras_img = Image(fn_exp_nii_ras)
        conform_img = Image(fn_conform_nii)

        # convert the FLIRT matrix into a world-to-world transform
        m_exp2conf = fromFlirt(m_2conform, exp_ras_img, conform_img, from_='world', to='world')

        if rem_tmp:
            for f in [fn_exp_nii_ras,
                      f'{fn_mat_m_2conform}.mat',
                      f'{fn_mat_m_2conform}.nii.gz',
                      f"{fn_out_fslmaths}.nii.gz",
                      fn_flip]:
                try:
                    os.unlink(f)
                except FileNotFoundError:
                    print(f"Cannot remove {f}: File not found.")

    # if nnav_system.lower() == "brainsight":
    #     m_brainsight2simnibs = np.array([[-1, 0, 0, 0],
    #                                      [0, -1, 0, 0],
    #                                      [0, 0, 1, 0],
    #                                      [0, 0, 0, 1]])
    #     m_brainsight2simnibs = np.dot(m_brainsight2simnibs, exp_nii.affine)
    #     # m_brainsight2simnibs = np.dot(np.linalg.inv(exp_nii.affine), m_brainsight2simnibs)
    #     m_exp2conf = np.dot(m_exp2conf, m_brainsight2simnibs)

    # apply the exp2conf matrix to the data...
    if target == 'nnav':
        # inverse transform: simnibs -> nnav
        m_exp2conf = np.linalg.inv(m_exp2conf)
    m_simnibs = np.dot(np.dot(m_exp2conf, m_nnav.transpose([2, 0, 1])).transpose([1, 0, 2]),
                       m_flip).transpose([1, 2, 0])  # ...and the coil axis flip
    # TODO: check transformation for conform == exp
    # check headreco and mri2mesh meshes

    return m_simnibs
|
|
588
|
+
def add_sigmoidal_bestfit(mep, p0, constraints=None):
    """
    Attach the best-fitting sigmoidal function to a Mep instance (multistart fit).

    Parameters
    ----------
    mep : pynibs.expio.mep.Mep
        Mep.
    p0 : float
        Initial guess for the parameters of the sigmoidal function.
    constraints : dict, optional
        Dictionary with parameter names as keys and [min, max] values as constraints.

    Returns
    -------
    mep : object
        Updated Mep object class instance with the following attributes.

    Notes
    -----
    Adds Attributes:

    Mep.fun_sig : function
        Sigmoid function
    Mep.popt_sig : np.ndarray of float
        (3) Parameters of sigmoid function
    """
    sigmoid = pynibs.expio.fit_funs.sigmoid
    mep.fun_sig = sigmoid

    # sample the currently fitted curve and re-fit a sigmoid to those samples
    x = np.linspace(mep.x_limits[0], mep.x_limits[1], 100)
    y = mep.eval(x, mep.popt)
    mep.fit_sig = mep.run_fit_multistart(sigmoid, x=x, y=y, p0=p0, constraints=constraints)

    # parameter names of the sigmoid (first positional arg is x, skip it)
    code = sigmoid.__code__
    argnames = code.co_varnames[1:code.co_argcount]

    # collect the optimal parameters of the best fit in argument order
    mep.popt_sig = np.asarray([mep.fit_sig.best_values[name] for name in argnames])
    mep.cvar_sig = mep.fit_sig.covar
    mep.pstd_sig = np.sqrt(np.diag(mep.cvar_sig))

    return mep
|
|
645
|
+
def get_cnt_infos(fn_cnt):
    """
    Read some meta information from a .cnt file.

    Parameters
    ----------
    fn_cnt : str
        Path to the .cnt file.

    Returns
    -------
    d : dict
        A dictionary containing the meta-information
        (sampling_rate, trigger_count, sample_count, channel_count, channel_names).
    """
    cnt = libeep.read_cnt(fn_cnt)
    d = {
        'sampling_rate': cnt.get_sample_frequency(),
        'trigger_count': cnt.get_trigger_count(),
        'sample_count': cnt.get_sample_count(),
        'channel_count': cnt.get_channel_count(),
    }
    # channel name is the first element of each channel tuple; lower-cased
    d['channel_names'] = [cnt.get_channel(i)[0].lower() for i in range(d['channel_count'])]
    return d
|
|
670
|
+
def get_coil_sn_lst(fn_coil):
    """
    Extract coil serial numbers from a list of coil paths.

    The serial number is taken as the 4 characters immediately preceding the
    4-character file extension of the first path in each sub-list.

    Parameters
    ----------
    fn_coil : list of list of str
        List containing coil path information.

    Returns
    -------
    coil_sn_lst : list of str
        A list of coil serial numbers extracted from the provided coil paths.
    """
    # characters [-8:-4] of the first path = 4-digit serial before the extension
    return [coil_paths[0][-8:-4] for coil_paths in fn_coil]
|
|
688
|
+
|
|
689
|
+
|
|
690
|
+
# def calc_p2p(sweep):
|
|
691
|
+
# """
|
|
692
|
+
# Calc peak-to-peak values of an mep sweep.
|
|
693
|
+
#
|
|
694
|
+
# Parameters
|
|
695
|
+
# ----------
|
|
696
|
+
# sweep : np.array of float [Nx1]
|
|
697
|
+
# Input curve
|
|
698
|
+
#
|
|
699
|
+
# Returns
|
|
700
|
+
# -------
|
|
701
|
+
# p2p : float
|
|
702
|
+
# Peak-to-peak value of input curve
|
|
703
|
+
# """
|
|
704
|
+
#
|
|
705
|
+
# # Filter requirements.
|
|
706
|
+
# order = 6
|
|
707
|
+
# fs = 16000 # sample rate, Hz
|
|
708
|
+
# cutoff = 2000 # desired cutoff frequency of the filter, Hz
|
|
709
|
+
#
|
|
710
|
+
# # Get the filter coefficients so we can check its frequency response.
|
|
711
|
+
# # import matplotlib.pyplot as plt
|
|
712
|
+
# # b, a = butter_lowpass(cutoff, fs, order)
|
|
713
|
+
# #
|
|
714
|
+
# # # Plot the frequency response.
|
|
715
|
+
# # w, h = freqz(b, a, worN=8000)
|
|
716
|
+
# # plt.subplot(2, 1, 1)
|
|
717
|
+
# # plt.plot(0.5 * fs * w / np.pi, np.abs(h), 'b')
|
|
718
|
+
# # plt.plot(cutoff, 0.5 * np.sqrt(2), 'ko')
|
|
719
|
+
# # plt.axvline(cutoff, color='k')
|
|
720
|
+
# # plt.xlim(0, 0.5 * fs)
|
|
721
|
+
# # plt.title("Lowpass Filter Frequency Response")
|
|
722
|
+
# # plt.xlabel('Frequency [Hz]')
|
|
723
|
+
# # plt.grid()
|
|
724
|
+
#
|
|
725
|
+
# sweep_filt = butter_lowpass_filter(sweep, cutoff, fs, order)
|
|
726
|
+
#
|
|
727
|
+
# # get indices for max
|
|
728
|
+
# index_max_begin = np.argmin(sweep) + 40 # get TMS impulse # int(0.221 / 0.4 * sweep.size)
|
|
729
|
+
# index_max_end = sweep_filt.size # int(0.234 / 0.4 * sweep.size) + 1
|
|
730
|
+
# if index_max_begin >= index_max_end:
|
|
731
|
+
# index_max_begin = index_max_end-1
|
|
732
|
+
# # index_max_end = index_max_begin + end_mep
|
|
733
|
+
#
|
|
734
|
+
# # get maximum and max index
|
|
735
|
+
# sweep_max = np.amax(sweep_filt[index_max_begin:index_max_end])
|
|
736
|
+
# sweep_max_index = index_max_begin + np.argmax(sweep_filt[index_max_begin:index_max_end])
|
|
737
|
+
#
|
|
738
|
+
# # if list of indices then get last value
|
|
739
|
+
# if sweep_max_index.size > 1:
|
|
740
|
+
# sweep_max_index = sweep_max_index[0]
|
|
741
|
+
#
|
|
742
|
+
# # get minimum and mix index
|
|
743
|
+
# index_min_begin = sweep_max_index # int(sweep_max_index + 0.002 / 0.4 * sweep_filt.size)
|
|
744
|
+
# index_min_end = sweep_max_index + 40 # int(sweep_max_index + 0.009 / 0.4 * sweep_filt.size) + 1
|
|
745
|
+
#
|
|
746
|
+
# # Using the same window as the max should make this more robust
|
|
747
|
+
# # index_min_begin = index_max_begi
|
|
748
|
+
# sweep_min = np.amin(sweep_filt[index_min_begin:index_min_end])
|
|
749
|
+
#
|
|
750
|
+
# return sweep_max - sweep_min
|
|
751
|
+
|
|
752
|
+
|
|
753
|
+
def print_time(relation, tms_time, tms_time_index, mep_time, mep_time_index, time_bnd_l, time_bnd_h):
    """
    Print timestamps that do not match.

    Parameters
    ----------
    relation : str
        'bigger' or 'smaller'; any other value prints nothing.
    tms_time : datetime.timedelta
        TMS timestamps.
    tms_time_index : int
        Index of tms timestamp.
    mep_time : datetime.timedelta
        Mep timestamps.
    mep_time_index : int
        Index of mep timestamps.
    time_bnd_l : datetime.timedelta
        Lowest datetime timestamp for matching.
    time_bnd_h : datetime.timedelta
        Highest datetime timestamp for matching.

    Returns
    -------
    int
        Always 0.
    """
    # both branches emit the same report, differing only in the wording and
    # the comparison sign, so dispatch via a small table
    for rel_name, sign in (('bigger', '>'), ('smaller', '<')):
        if relation == rel_name:
            print('tms_time is {}. Difference: {}s. TMS Nav idx: {}. MEP idx: {}'.format(
                rel_name,
                (tms_time - mep_time).total_seconds(),
                tms_time_index,
                mep_time_index))
            print(" ({} {} {} [{} - {})".format(tms_time,
                                                sign,
                                                mep_time,
                                                time_bnd_l,
                                                time_bnd_h))
    return 0
|
|
793
|
+
|
|
794
|
+
|
|
795
|
+
def get_trial_data_from_csv(behavior_fn, cond, drop_trial_idx=None, only_corr=True, startatzero=True):
    """
    Reads trial data from csv file. Reaction time is in ``[0.1ms]``.

    Parameters
    ----------
    behavior_fn : str
        Filename with columns: 'trialtype', 'onset_time', 'rt'
        (plus 'trialnum', 'wrong', 'isi').
    cond : str
        Which condition to choose from the .csv file ('trialtype' column).
    drop_trial_idx : list of int, optional
        'trialnum' values to remove.
    only_corr : bool, default: True
        Only return correct trials (wrong == 0 and 100 < rt < 1000).
    startatzero : bool, default: True
        Shift the onset_time axis so the first trial starts at zero.

    Returns
    -------
    rt : list of float
        Reaction times of the remaining trials.
    onsets : list of float
        Onset times of the remaining trials.
    mean_isi : float
        Mean inter-stimulus interval in [s].
    """
    df = pd.read_csv(behavior_fn)

    if startatzero:
        # reference all onsets to the very first trial
        df['onset_time'] -= df['onset_time'].values[0]

    # drop explicitly blacklisted trials (handles both None and [])
    if drop_trial_idx:
        df = df[~df['trialnum'].isin(drop_trial_idx)]

    if only_corr:
        # correct response and plausible reaction time only
        df = df[(df['wrong'] == 0) & (df['rt'] > 100) & (df['rt'] < 1000)]

    # restrict to the condition of interest
    df = df[df['trialtype'] == cond]

    return df['rt'].to_list(), df['onset_time'].to_list(), df['isi'].mean() / 1000
|
|
845
|
+
|
|
846
|
+
|
|
847
|
+
def get_ft_data_from_csv(behavior_fn, cond, drop_trial_idx=None, startatzero=True):
    """
    Reads behavioral trial data from a csv file.

    Parameters
    ----------
    behavior_fn : str
        Filename with columns: 'TMS_onset_time' (in 0.1 ms), 'trialnum',
        and the behavioral column named by *cond*.
    cond : str
        Name of the behavioral output column to extract.
    drop_trial_idx : list of int, optional
        'trialnum' values to remove.
    startatzero : bool, default: True
        Shift the TMS onset-time axis so the first trial starts at zero.

    Returns
    -------
    ft : list of float
        Values of the *cond* column.
    onsets : list of float
        TMS onset times in [ms].
    mean_isi : float
        Mean inter-stimulus interval in [s].
    """
    df = pd.read_csv(behavior_fn)

    if startatzero:
        # reference all onsets to the very first trial
        df['TMS_onset_time'] -= df['TMS_onset_time'].values[0]

    # drop explicitly blacklisted trials (handles both None and [])
    if drop_trial_idx:
        df = df[~df['trialnum'].isin(drop_trial_idx)]

    ft = df[cond].to_list()
    onsets = (df['TMS_onset_time'] / 10).to_list()  # 0.1 ms -> ms
    mean_isi = np.diff(df['TMS_onset_time'] / 10000).mean()  # 0.1 ms -> s

    return ft, onsets, mean_isi
|
|
888
|
+
|
|
889
|
+
|
|
890
|
+
def write_csv(csv_output_path, dict_lst):
    """
    Write a list of dictionaries into a .csv-file.

    A canonical set of well-known field names is written first (in a fixed
    order); any additional keys found in the dictionaries are appended to the
    header in order of first appearance. Each dictionary is augmented
    **in place** with a running 'number' index before being written.

    Parameters
    ----------
    csv_output_path : str
        Path to output-file.
    dict_lst : list of dict
        Fields of the .csv-file, one dict per row.

    Returns
    -------
    int
        0 on success.
    """
    # newline='' is required by the csv module; without it, extra blank
    # lines / doubled '\r' appear on Windows.
    with open(csv_output_path, 'w', newline='') as csv_file:
        fieldnames = ['number', 'patient_id', 'condition', 'current', 'mep', 'coil_sn', 'coil0_00', 'coil0_01',
                      'coil0_02', 'coil0_03', 'coil0_10',
                      'coil0_11', 'coil0_12', 'coil0_13', 'coil0_20', 'coil0_21', 'coil0_22', 'coil0_23', 'coil0_30',
                      'coil0_31', 'coil0_32', 'coil0_33', 'coil1_00', 'coil1_01', 'coil1_02', 'coil1_03', 'coil1_10',
                      'coil1_11', 'coil1_12', 'coil1_13', 'coil1_20', 'coil1_21', 'coil1_22', 'coil1_23', 'coil1_30',
                      'coil1_31', 'coil1_32', 'coil1_33', 'coil_mean_00', 'coil_mean_01', 'coil_mean_02',
                      'coil_mean_03',
                      'coil_mean_10', 'coil_mean_11', 'coil_mean_12', 'coil_mean_13', 'coil_mean_20', 'coil_mean_21',
                      'coil_mean_22', 'coil_mean_23', 'coil_mean_30', 'coil_mean_31', 'coil_mean_32', 'coil_mean_33',
                      'ts_tms', 'time_tms', 'time_mep', 'time_diff', 'date']

        # Collect extra keys from *all* rows (not only the first one) so that
        # DictWriter does not raise ValueError on rows with additional keys.
        # This also makes an empty dict_lst a no-op instead of an IndexError.
        known = set(fieldnames)
        for record in dict_lst:
            for field in record:
                if field not in known:
                    fieldnames.append(field)
                    known.add(field)

        writer = csv.DictWriter(csv_file, fieldnames=fieldnames)
        writer.writeheader()

        for index, dictionary in enumerate(dict_lst):
            # side effect kept from the original implementation:
            # the caller's dicts gain a 'number' key
            dictionary.update({'number': index})
            writer.writerow(dictionary)
    return 0
|
|
927
|
+
|
|
928
|
+
|
|
929
|
+
def read_csv(csv_path):
    """
    Read dictionary from .csv-file.

    Each column of the file becomes one key in the returned dictionary with
    all of its row values collected into a list. Values are converted to int,
    then float, where possible; the 'patient_id' column always stays str.
    The per-element coil columns ('coil0_00' ... 'coil_mean_33') are finally
    folded into 4x4 matrices by ``get_csv_matrix``, which removes the
    individual element keys.

    Parameters
    ----------
    csv_path : str
        Path to .csv-file.

    Returns
    -------
    dict_lst : dict of list
        Field name of the .csv-file as the key.
    """

    dictionary = {}
    with open(csv_path) as csv_handle:
        # read csv file
        csv_file = csv.reader(csv_handle)
        # get fieldnames by reading the raw header line off the file handle
        # (not via csv_file, so the reader position is controlled by seek below)
        csv_fieldnames = next(csv_handle)
        csv_fieldnames = csv_fieldnames.split(',')
        # remove unnecessary characters (quotes and line terminators)
        for index in range(len(csv_fieldnames)):
            csv_fieldnames[index] = csv_fieldnames[index].replace('"', '')
            csv_fieldnames[index] = csv_fieldnames[index].replace('\n', '')
            csv_fieldnames[index] = csv_fieldnames[index].replace('\r', '')
        # iterate over rows, one full file pass per column
        # NOTE(review): this rewinds and re-parses the file once per field,
        # i.e. O(columns * rows) — fine for small files, slow for large ones.
        for index, field in enumerate(csv_fieldnames):
            row_array = []
            # rewind file so csv_file starts again at the header
            csv_handle.seek(0)
            for row_index, row in enumerate(csv_file):
                # do not get fieldname (skip the header row)
                if row_index == 0:
                    continue
                value = row[index]
                # do not convert patient_id (it must remain a string)
                if field != 'patient_id':
                    # try to convert into integer
                    try:
                        value = int(value)
                    except ValueError:
                        # try to convert into float
                        try:
                            value = float(value)
                        except ValueError:
                            pass
                row_array.append(value)
            dictionary.update({field: row_array})
    # fold 'coilX_mn' element columns into 4x4 matrices (mutates dictionary)
    dictionary = get_csv_matrix(dictionary)
    return dictionary
|
|
981
|
+
|
|
982
|
+
|
|
983
|
+
def get_csv_matrix(dictionary):
    """
    Fold per-element coil columns into lists of 4x4 matrices.

    For each coil prefix ('coil0_', 'coil1_', 'coil_mean_') the scalar
    entries '<prefix>00' ... '<prefix>33' are combined, row-wise, into one
    ``np.ndarray`` of shape (4, 4) per list entry and stored under
    '<prefix>matrix'. The now redundant scalar keys are removed.
    The input dictionary is modified in place and also returned.

    Parameters
    ----------
    dictionary : dict
        The input dictionary containing the data.

    Returns
    -------
    dict
        The input dictionary updated with the newly created matrices and
        redundant keys removed.
    """
    prefixes = ('coil0_', 'coil1_', 'coil_mean_')

    for prefix in prefixes:
        n_entries = len(dictionary[prefix + '00'])
        matrices = []
        for entry in range(n_entries):
            # assemble one 4x4 matrix from the 16 scalar element lists
            mat = np.array([[float(dictionary["{}{}{}".format(prefix, row, col)][entry])
                             for col in range(4)]
                            for row in range(4)])
            matrices.append(mat)
        dictionary[prefix + 'matrix'] = matrices

    # drop the scalar element entries, now redundant
    for prefix in prefixes:
        for row in range(4):
            for col in range(4):
                del dictionary["{}{}{}".format(prefix, row, col)]

    return dictionary
|
|
1015
|
+
|
|
1016
|
+
|
|
1017
|
+
def sort_by_condition(exp, conditions_selected=None):
    """
    Sort experimental dictionary from experimental.csv into a list by conditions.

    Conditions keep their order of first appearance in ``exp['condition']``.

    Parameters
    ----------
    exp : dict
        Dictionary of lists containing the experimental data information.
    conditions_selected : str or list of str, optional
        List of conditions returned by the function (in this order), the
        others are omitted. If None, all conditions are returned.

    Returns
    -------
    exp_cond : list of dict
        List of dictionaries containing the experimental data information
        sorted by condition.
    """
    # unique conditions, ordered by their first occurrence
    _, first_idx = np.unique(exp['condition'], return_index=True)
    conds = list(np.array(exp['condition'])[np.sort(first_idx)])

    exp_cond = []
    for cond in conds:
        # indices of all zaps belonging to this condition
        sel = [i for i, c in enumerate(exp['condition']) if c == cond]
        # restrict every key of exp to those indices
        exp_cond.append({key: [exp[key][i] for i in sel] for key in exp})

    if conditions_selected is None:
        return exp_cond

    if type(conditions_selected) is not list:
        conditions_selected = [conditions_selected]
    # pick requested conditions in the requested order
    # (IndexError if a requested condition is absent, as before)
    return [[d for d in exp_cond if d['condition'][0] == c][0]
            for c in conditions_selected]
|
|
1063
|
+
|
|
1064
|
+
|
|
1065
|
+
def coil_outlier_correction_cond(exp=None, fn_exp=None, fn_exp_out=None, outlier_angle=5., outlier_loc=3.):
    """
    Searches and removes outliers of coil orientation and location w.r.t. the average orientation and location from
    all zaps. It generates plots of the individual conditions showing the outliers in the folder of fn_exp_out.
    Depending on if exp (dict containing lists) or fn_exp (csv file) is provided it returns the outlier corrected dict
    or writes a new <fn_exp_out>.csv file.
    If _exp_ is provided, all keys are kept.

    Parameters
    ----------
    exp : list of dict, optional
        List of dictionaries containing the experimental data.
    fn_exp : str, optional
        Filename (incl. path) of experimental .csv file.
    fn_exp_out : str, optional
        Filename (incl. path) of corrected experimental .csv file.
    outlier_angle : float, default: 5.
        Coil orientation outlier "cone" around axes in +- deg.
        All zaps with coil orientations outside of this cone are removed.
    outlier_loc : float, default: 3.
        Coil position outlier "sphere" in +- mm.
        All zaps with coil locations outside of this sphere are removed.

    Returns
    -------
    <File>: .csv file
        experimental_oc.csv file with outlier corrected coil positions.
    <Files>: .png files
        Plot showing the coil orientations and locations (folder_of_fn_exp_out/COND_X_coil_position.png).
    or
    exp : dict
        Dictionary containing the outlier corrected experimental data.

    Raises
    ------
    IOError
        If neither `exp` nor `fn_exp` is provided.
    """
    # normalize input: always work on a dict of lists
    if exp is not None:
        if type(exp) is list:
            exp = pynibs.list2dict(exp)
    elif fn_exp is not None:
        exp = read_csv(fn_exp)
    else:
        raise IOError("Please provide either dictionary containing the experimental data or the filename "
                      "of the experimental.csv file")

    # read and sort by condition
    exp_by_cond = sort_by_condition(exp)
    exp_cond_corr = []

    # convert the cone half-angle to the equivalent radial deviation of a unit axis vector
    bound_radius = np.sin(outlier_angle / 180 * np.pi)

    for cond in exp_by_cond:
        # concatenate all matrices in one tensor (4x4xN, as calc_outlier expects)
        n_coords = len(cond["coil_mean"])

        coil_coords = np.zeros((4, 4, n_coords))

        for i in range(n_coords):
            coil_coords[:, :, i] = cond["coil_mean"][i]

        # call plot function
        # idx_keep, _, _ = calc_outlier(coords=coil_coords, dev_location=outlier_loc, dev_radius=bound_radius,
        #                               fn_out=os.path.join(os.path.split(fn_exp_out)[0],
        #                                                   str(cond["condition"][0]) + "_coil_position.png"))

        # outlier detection only (no figure written here; see commented call above)
        idx_keep, _, _ = calc_outlier(coords=coil_coords, dev_location=outlier_loc, dev_radius=bound_radius)

        # remove outlier and rebuilt dictionary with lists
        exp_cond_corr.append(OrderedDict())

        for key in list(cond.keys()):
            exp_cond_corr[-1][key] = []
            for i in idx_keep:
                exp_cond_corr[-1][key].append(cond[key][i])

    # corrected exp dictionary: concatenate per-condition results back into one dict of lists
    exp_corr = OrderedDict()
    keys = list(exp.keys())
    for i_cond in range(len(exp_cond_corr)):
        for k in keys:
            if i_cond == 0:
                exp_corr[k] = exp_cond_corr[i_cond][k]
            else:
                exp_corr[k] = exp_corr[k] + exp_cond_corr[i_cond][k]

    if fn_exp is not None:
        # reformat results to save new .csv file:
        # expand each 4x4 matrix back into 16 scalar columns per coil
        coil0_keys = ['coil0_' + str(int(m)) + str(int(n)) for m in range(4) for n in range(4)]
        coil1_keys = ['coil1_' + str(int(m)) + str(int(n)) for m in range(4) for n in range(4)]
        coil_mean_keys = ['coil_mean_' + str(int(m)) + str(int(n)) for m in range(4) for n in range(4)]

        # deep copy so the matrix lists in exp_corr stay untouched
        exp_corr_formatted = copy.deepcopy(exp_corr)
        del exp_corr_formatted['coil0_matrix']
        del exp_corr_formatted['coil1_matrix']
        del exp_corr_formatted['coil_mean_matrix']

        for i_key in range(len(coil0_keys)):
            # matrix row/column encoded in the last two key characters
            m = int(coil0_keys[i_key][-2])
            n = int(coil0_keys[i_key][-1])

            exp_corr_formatted[coil0_keys[i_key]] = [exp_corr['coil0_matrix'][i_zap][m, n]
                                                     for i_zap in range(len(exp_corr['coil0_matrix']))]

            exp_corr_formatted[coil1_keys[i_key]] = [exp_corr['coil1_matrix'][i_zap][m, n]
                                                     for i_zap in range(len(exp_corr['coil1_matrix']))]

            exp_corr_formatted[coil_mean_keys[i_key]] = [exp_corr['coil_mean_matrix'][i_zap][m, n]
                                                         for i_zap in range(len(exp_corr['coil_mean_matrix']))]

        # reformat from dict containing lists to list containing dicts to write csv file
        exp_corr_list = []
        for i in range(len(exp_corr_formatted['coil_mean_00'])):
            exp_corr_list.append(OrderedDict())
            for key in list(exp_corr_formatted.keys()):
                exp_corr_list[-1][key] = exp_corr_formatted[key][i]

        # save experimental csv
        # NOTE(review): when fn_exp is given this branch returns None implicitly
        write_csv(fn_exp_out, exp_corr_list)
    else:
        return exp_corr
|
|
1182
|
+
|
|
1183
|
+
|
|
1184
|
+
def calc_outlier(coords, dev_location, dev_radius, target=None, fn_out=None, verbose=True):
    """
    Computes median coil position and angle, identifies outliers, plots neat figure.
    Returns a list of idx that are not outliers.

    Zaps whose 4x4 matrix is an identity-diagonal placeholder (no tracking
    information) are collected in ``idx_zero`` and excluded from the median.
    Deviations are measured against ``target`` if given, otherwise against
    the median of the tracked zaps.

    Parameters
    ----------
    coords : np.ndarray
        (4, 4, n_zaps); an (n_zaps, 4, 4) array is rolled into this layout.
    dev_location : float
        Max allowed location deviation.
    dev_radius : float
        Max allowed radius deviation (of the rotated unit axis vectors).
    target : np.ndarray, optional
        (4, 4) matrix with target coordinates.
    fn_out : string, optional
        Output filename for the diagnostic figure (created if given).
    verbose : bool, default: True
        Flag indicating verbosity. Values > 1 additionally print each outlier.

    Returns
    -------
    idx_keep : list of int
        Indices of tracked zaps within the allowed deviations.
    idx_zero : list of int
        Indices of zaps without tracking information.
    idx_outlier : list of int
        Indices of tracked zaps exceeding the allowed deviations.
    """
    # accept (n_zaps, 4, 4) input and roll it to (4, 4, n_zaps)
    if coords.shape[:2] != (4, 4):
        print(f"plot_coords is expecting a 4x4xn_zaps array. Found {coords.shape}. Trying to resize")
        if len(coords.shape) != 3:
            raise NotImplementedError
        elif coords.shape[1:] != (4, 4):
            raise NotImplementedError
        coords = np.rollaxis(coords, 0, coords.ndim)

    # remove idx with no tracking information
    idx_zero = []
    # NOTE(review): result of this np.where is discarded — the statement is a no-op
    np.where(coords[0, 3, :] == 0)
    for i in range(coords.shape[2]):
        # an all-ones diagonal marks an untracked (placeholder identity) zap
        if np.all(np.diag(coords[:, :, i]) == np.array([1, 1, 1, 1])):
            idx_zero.append(i)
    # coords = np.delete(coords, idx_zero, axis=2)
    # determine mean coil orientation and location (median over tracked zaps only)
    idx_nonzero = np.setdiff1d(range(coords.shape[2]), idx_zero)
    n_coords = coords.shape[2]
    coil_coords_median = np.median(coords[:, :, idx_nonzero], axis=2)
    coil_coords_0 = np.zeros((4, 4, n_coords))
    coil_coords_0[3, 3, :] = 1.0
    if target is not None:
        # express positions relative to the target location
        for i in range(n_coords):
            coil_coords_0[:3, 3, i] = coords[:3, 3, i] - target[:3, 3]
    else:
        # shift all coil_coords (zero-mean)
        for i in range(n_coords):
            coil_coords_0[:3, 3, i] = coords[:3, 3, i] - coil_coords_median[:3, 3]

    if verbose:
        print(f"{n_coords} coil positions found, {len(idx_nonzero)} tracked. Detecting outliers...")
        print(f"Max allowed location/angle deviation: {dev_location}, {dev_radius}")
        print(f"Median location original data: {np.round(coil_coords_median[0:3, 3], 2)}")
        print(
            f"Median orientation original data: {np.round(coil_coords_median[0:3, 0], 2)}, "
            f"{np.round(coil_coords_median[0:3, 1], 2)}")

    # rotate all coil_coords to median orientation
    idx_keep = []
    idx_outlier = []
    for i in range(n_coords):
        # R * R_ref^T maps the reference frame to identity; deviations then show
        # up as off-axis components of the rotated unit vectors
        if target is not None:
            coil_coords_0[:3, :3, i] = np.dot(coords[:3, :3, i], np.transpose(target[:3, :3]))
        else:
            coil_coords_0[:3, :3, i] = np.dot(coords[:3, :3, i], np.transpose(coil_coords_median[:3, :3]))

        # radial (off-axis) deviation of each rotated axis vector
        dev_ori_x = np.sqrt(coil_coords_0[1, 0, i] ** 2 + coil_coords_0[2, 0, i] ** 2)
        dev_ori_y = np.sqrt(coil_coords_0[0, 1, i] ** 2 + coil_coords_0[2, 1, i] ** 2)
        dev_ori_z = np.sqrt(coil_coords_0[0, 2, i] ** 2 + coil_coords_0[1, 2, i] ** 2)
        dev_pos = np.linalg.norm(coil_coords_0[:3, 3, i])

        # keep tracked zaps within all bounds; tracked zaps outside are outliers;
        # untracked zaps end up in neither list (they are in idx_zero)
        if (i in idx_nonzero) and not (
                dev_ori_x > dev_radius or dev_ori_y > dev_radius or dev_ori_z > dev_radius or dev_pos > dev_location):
            idx_keep.append(i)
        elif i in idx_nonzero:
            idx_outlier.append(i)
            if verbose > 1:
                print(f"Outlier in coil position or orientation detected, removing data point. cond: zap #{i}")
    # with a target, plot/report in absolute coordinates instead of relative ones
    if target is not None:
        coil_coords_0 = coords
    # NOTE(review): median over idx_keep — empty idx_keep would yield NaNs/warnings
    coil_coords_median = np.median(coil_coords_0[:, :, idx_keep], axis=2)
    if fn_out is not None:
        coil_coords_0_keep = coil_coords_0[:, :, idx_keep]
        coil_coords_0_outlier = coil_coords_0[:, :, idx_outlier]

        fig = plt.figure(figsize=[10, 5.5])  # fig.add_subplot(121, projection='3d')
        ax = fig.add_subplot(121, projection='3d')
        try:
            # some matplotlib versions do not support equal aspect on 3D axes
            ax.set_aspect("equal")
        except NotImplementedError:
            pass

        # draw sphere
        if target is not None:
            ax.scatter(target[0, 3], target[1, 3], target[2, 3], color='y', s=400)
        if dev_location != np.inf:
            # wireframe sphere with radius dev_location around the reference
            u, v = np.mgrid[0:2 * np.pi:20j, 0:np.pi:10j]
            x = dev_location * np.cos(u) * np.sin(v)
            y = dev_location * np.sin(u) * np.sin(v)
            z = dev_location * np.cos(v)
            ax.plot_wireframe(x, y, z, color="k")
            ax.set_xlim([-dev_location * 1.1, dev_location * 1.1])
            ax.set_ylim([-dev_location * 1.1, dev_location * 1.1])
            ax.set_zlim([-dev_location * 1.1, dev_location * 1.1])
        else:
            # no location bound: fit the limits to the kept positions
            limits_x = [np.min(coil_coords_0_keep[0, 3, :]) - 2, np.max(coil_coords_0_keep[0, 3, :]) + 2]
            limits_y = [np.min(coil_coords_0_keep[1, 3, :]) - 2, np.max(coil_coords_0_keep[1, 3, :]) + 2]
            limits_z = [np.min(coil_coords_0_keep[2, 3, :]) - 2, np.max(coil_coords_0_keep[2, 3, :]) + 2]
            if target is not None:
                # make sure the target marker stays inside the view
                limits_x = [np.min((limits_x[0], target[0, 3] - 2)), np.max((limits_x[1], target[0, 3] + 2))]
                limits_y = [np.min((limits_y[0], target[1, 3] - 2)), np.max((limits_y[1], target[1, 3] + 2))]
                limits_z = [np.min((limits_z[0], target[2, 3] - 2)), np.max((limits_z[1], target[2, 3] + 2))]

            ax.set_xlim(limits_x)
            ax.set_ylim(limits_y)
            ax.set_zlim(limits_z)

        # color bar + scaling for quiver
        cm = plt.cm.get_cmap('cool')
        norm = Normalize()
        norm.autoscale(range(coil_coords_0_keep.shape[2]))

        # draw coil center locations (kept colored by zap order, outliers in red)
        ax.scatter(coil_coords_0_keep[0, 3, :], coil_coords_0_keep[1, 3, :], coil_coords_0_keep[2, 3, :],
                   c=range(coil_coords_0_keep.shape[2]), cmap=cm)
        ax.scatter(coil_coords_0_outlier[0, 3, :], coil_coords_0_outlier[1, 3, :], coil_coords_0_outlier[2, 3, :],
                   color='r')

        ax.set_xlabel("x")
        ax.set_ylabel("y")
        ax.set_zlabel("z")
        ax.set_title("Coil location")
        # ax.annotate(f'median: {np.round(coil_coords_median[0:3,3],2)}\n'
        #             f'std: {np.round(np.std(coords[0:3,3,idx_keep],axis=1),4)}',xy=(10,-150),
        #             annotation_clip=False,xycoords='axes pixels',
        #             bbox=OrderedDict(boxstyle='square', facecolor='wheat', alpha=1),fontfamily='monospace' )
        med_pos = np.round(coil_coords_median[0:3, 3], 2)
        std_pos = np.round(np.std(coords[0:3, 3, idx_keep], axis=1), 4)
        anot = f'median(pos): [{med_pos[0]: 3.3f}, {med_pos[1]: 3.3f}, {med_pos[2]: 3.3f}]\n' \
               f' std(pos): [{std_pos[0]: 7.3f}, {std_pos[1]: 7.3f}, {std_pos[2]: 7.3f}]'

        if target is not None:
            # per-zap euclidean distance of kept positions to the target
            pos_dif = np.linalg.norm(((coords[0:3, 3, idx_keep].transpose() - target[0:3, 3]).transpose()), axis=0)
            anot += f'\nmin/med/max (std) dif: ' \
                    f'{np.min(pos_dif):2.2f}, {np.median(pos_dif):2.2f}, ' \
                    f'{np.max(pos_dif):2.2f} ({np.std(pos_dif):2.2f})'

        ax.annotate(anot,
                    xy=(30, -250),
                    annotation_clip=False, xycoords='axes pixels',
                    bbox=OrderedDict(boxstyle='square', facecolor='wheat', alpha=1), font='monospace')

        # draw coil orientations
        ax = fig.add_subplot(122, projection='3d')
        try:
            ax.set_aspect("equal")
        except NotImplementedError:
            pass

        # kept zaps: one quiver per axis vector (matrix columns 0, 1, 2)
        for i in range(coil_coords_0_keep.shape[2]):
            ax.quiver(0, 0, 0, coil_coords_0_keep[0, 0, i], coil_coords_0_keep[1, 0, i], coil_coords_0_keep[2, 0, i],
                      color=cm(norm(range(coil_coords_0_keep.shape[2])))),
            ax.quiver(0, 0, 0, coil_coords_0_keep[0, 1, i], coil_coords_0_keep[1, 1, i], coil_coords_0_keep[2, 1, i],
                      color=cm(norm(range(coil_coords_0_keep.shape[2])))),
            ax.quiver(0, 0, 0, coil_coords_0_keep[0, 2, i], coil_coords_0_keep[1, 2, i], coil_coords_0_keep[2, 2, i],
                      color=cm(norm(range(coil_coords_0_keep.shape[2])))),
        # outliers in red
        for i in range(coil_coords_0_outlier.shape[2]):
            ax.quiver(0, 0, 0, coil_coords_0_outlier[0, 0, i], coil_coords_0_outlier[1, 0, i],
                      coil_coords_0_outlier[2, 0, i], color='r')
            ax.quiver(0, 0, 0, coil_coords_0_outlier[0, 1, i], coil_coords_0_outlier[1, 1, i],
                      coil_coords_0_outlier[2, 1, i], color='r')
            ax.quiver(0, 0, 0, coil_coords_0_outlier[0, 2, i], coil_coords_0_outlier[1, 2, i],
                      coil_coords_0_outlier[2, 2, i], color='r')

        # target orientation (dotted blue)
        if target is not None:
            ax.quiver(0, 0, 0, target[0, 0], target[1, 0], target[2, 0], color='b', linestyle='dotted')
            ax.quiver(0, 0, 0, target[0, 1], target[1, 1], target[2, 1], color='b', linestyle='dotted')
            ax.quiver(0, 0, 0, target[0, 2], target[1, 2], target[2, 2], color='b', linestyle='dotted')

        ax.set_xlim([-1.2, 1.2])
        ax.set_ylim([-1.2, 1.2])
        ax.set_zlim([-1.2, 1.2])
        ax.set_xlabel("x")
        ax.set_ylabel("y")
        ax.set_zlabel("z")
        ax.set_title("Coil orientation")
        # summary statistics of the kept x- and y-axis vectors
        med_pos = np.round(np.median(coil_coords_0_keep[0:3, 0], axis=1), 2)
        med_rot = np.round(np.median(coil_coords_0_keep[0:3, 1], axis=1), 2)
        std_pos = np.round(np.std(coords[0:3, 0, idx_keep], axis=1), 4)
        std_rot = np.round(np.std(coords[0:3, 1, idx_keep], axis=1), 4)
        ax.annotate(f'median(x): [{med_pos[0]: 2.3f}, {med_pos[1]: 2.3f}, {med_pos[2]: 2.3f}]\n'
                    f' std(x): [{std_pos[0]: 2.3f}, {std_pos[1]: 2.3f}, {std_pos[2]: 2.3f}]\n'
                    f'median(y): [{med_rot[0]: 2.3f}, {med_rot[1]: 2.3f}, {med_rot[2]: 2.3f}]\n'
                    f' std(y): [{std_rot[0]: 2.3f}, {std_rot[1]: 2.3f}, {std_rot[2]: 2.3f}]',
                    xy=(30, -250),
                    annotation_clip=False, xycoords='axes pixels',
                    bbox=OrderedDict(boxstyle='square', facecolor='wheat', alpha=1), font='monospace')

        # these are matplotlib.patch.Patch properties
        props = OrderedDict(boxstyle='round', facecolor='wheat', alpha=0.5)

        # place a text box in upper left in axes coords
        plt.figtext(0.5, .7,
                    f"n_pos: {n_coords}\n"
                    f"n_zero: {len(idx_zero)}\n"
                    f"n_outlier: {len(idx_outlier)}\n"
                    f"n_keep: {len(idx_keep)}", bbox=props, family='monospace')
        # plt.tight_layout(rect=[0.1, 0.03, 1, 0.95])
        # make sure the output directory exists before saving
        if not os.path.exists(os.path.split(fn_out)[0]):
            os.makedirs(os.path.split(fn_out)[0])
        plt.savefig(fn_out, dpi=300)

    if verbose:
        print(f"{len(idx_outlier)} outliers/zero zaps detected and removed.")
        print(f"Median location w/o outliers: {np.round(coil_coords_median[0:3, 3], 2)}")
        print(f"Median orientation w/o outliers: {np.round(coil_coords_median[0:3, 0], 2)}, "
              f"{np.round(coil_coords_median[0:3, 1], 2)}")

    return idx_keep, idx_zero, idx_outlier
|
|
1407
|
+
|
|
1408
|
+
|
|
1409
|
+
def write_triggermarker_stats(tm_array, idx_keep, idx_outlier, idx_zero, fn, **kwargs):
    """
    Write some stats about the triggermarker analyses to a .csv.
    Use kwargs to add some more information, like subject id, experiment, conditions, etc.

    Parameters
    ----------
    tm_array : np.ndarray
        (N_zaps, 4, 4) The input array containing the triggermarker data.
    idx_keep : list
        List of indices to keep in the array for calculation.
    idx_outlier : list
        List of outlier indices in the array.
    idx_zero : list
        List of zero indices in the array.
    fn : str
        File name (including path) for the CSV file to be written.
    kwargs : dict, optional
        Additional information to be written to the CSV file. Example: subject=subject_id, experiment=experiment_name.

    Example
    -------

    .. code-block:: python

        pynibs.write_triggermarker_stats(tm_array, idx_keep, idx_outlier, idx_zero,
                                         fn=f"{output_folder}/coil_stats.csv",subject=subject_id,
                                         experiment=exp, cond=cond)
    """
    # all zaps except the ones flagged as zero triggermarkers
    idx_nonzero = np.setdiff1d(range(tm_array.shape[0]), idx_zero)

    # positions live in column 3 of the 4x4 matrices; the coil x-axis (rotation) in column 0
    res = {
        'n_zaps': [tm_array.shape[0]],
        'n_zero': [len(idx_zero)],
        'n_outlier': [len(idx_outlier)],
        'median_pos_nonzero_x': [np.median(tm_array[idx_nonzero, 0, 3])],
        'median_pos_nonzero_y': [np.median(tm_array[idx_nonzero, 1, 3])],
        'median_pos_nonzero_z': [np.median(tm_array[idx_nonzero, 2, 3])],
        'median_pos_keep_x': [np.median(tm_array[idx_keep, 0, 3])],
        'median_pos_keep_y': [np.median(tm_array[idx_keep, 1, 3])],
        'median_pos_keep_z': [np.median(tm_array[idx_keep, 2, 3])],
        'std_pos_nonzero_x': [np.std(tm_array[idx_nonzero, 0, 3])],
        'std_pos_nonzero_y': [np.std(tm_array[idx_nonzero, 1, 3])],
        'std_pos_nonzero_z': [np.std(tm_array[idx_nonzero, 2, 3])],
        'std_pos_keep_x': [np.std(tm_array[idx_keep, 0, 3])],
        'std_pos_keep_y': [np.std(tm_array[idx_keep, 1, 3])],
        'std_pos_keep_z': [np.std(tm_array[idx_keep, 2, 3])],
        # fixed: these previously read the position column (tm_array[idx_nonzero, 0, 3])
        # instead of the x-axis rotation column, matching the *_keep_* entries below
        'median_angle_x_nonzero_x': [np.median(tm_array[idx_nonzero, 0, 0])],
        'median_angle_x_nonzero_y': [np.median(tm_array[idx_nonzero, 1, 0])],
        'median_angle_x_nonzero_z': [np.median(tm_array[idx_nonzero, 2, 0])],
        'median_angle_x_keep_x': [np.median(tm_array[idx_keep, 0, 0])],
        'median_angle_x_keep_y': [np.median(tm_array[idx_keep, 1, 0])],
        'median_angle_x_keep_z': [np.median(tm_array[idx_keep, 2, 0])],
        'std_angle_x_nonzero_x': [np.std(tm_array[idx_nonzero, 0, 0])],
        'std_angle_x_nonzero_y': [np.std(tm_array[idx_nonzero, 1, 0])],
        'std_angle_x_nonzero_z': [np.std(tm_array[idx_nonzero, 2, 0])],
        'std_angle_x_keep_x': [np.std(tm_array[idx_keep, 0, 0])],
        'std_angle_x_keep_y': [np.std(tm_array[idx_keep, 1, 0])],
        'std_angle_x_keep_z': [np.std(tm_array[idx_keep, 2, 0])],
    }

    # add kwargs (e.g. subject, experiment, condition) as extra single-value columns
    for key, val in kwargs.items():
        res[key] = [val]

    # save csv (one row)
    pd.DataFrame.from_dict(res).to_csv(fn, index=False)
|
|
1479
|
+
|
|
1480
|
+
|
|
1481
|
+
def coil_distance_correction(exp=None, fn_exp=None, fn_exp_out=None, fn_geo_hdf5=None,
                             remove_coil_skin_distance_outlier=False, fn_plot=None, min_dist=-5, max_dist=2):
    """
    Corrects the distance between the coil and the head assuming that the coil is touching the head surface during
    the experiments. This is done since the different coil tracker result in different coil head distances due to
    tracking inaccuracies. Also averages positions and orientations over the respective condition and writes both
    mean position and orientation for every condition in ``fn_exp_out``.

    Depending on if exp (dict containing lists) or fn_exp (csv file) is provided it returns the outlier corrected dict
    or writes a new ``<fn_exp_out>.csv`` file.

    Parameters
    ----------
    exp : list of dict or dict of list, optional
        List of dictionaries containing the experimental data.
    fn_exp : str, optional
        Filename (incl. path) of experimental .csv file.
    fn_exp_out : str, optional
        Filename (incl. path) of distance corrected experimental .csv file.
    fn_geo_hdf5 : str, optional
        Filename (incl. path) of geometry mesh file (.hdf5).
    remove_coil_skin_distance_outlier : bool, default: False
        Remove coil positions located outside (min_dist, max_dist) of the zero mean skin surface distance.
    fn_plot : str, optional
        Folder where plots will be saved in (fn_geo_hdf5 folder).
    min_dist : int, default: -5
        Lower boundary for distance correction.
    max_dist : int, default: 2
        Upper boundary for distance correction.

    Returns
    -------
    <File>: .csv file
        experimental_dc.csv file with distance corrected coil positions.
    or
    exp : dict
        Dictionary containing the outlier corrected experimental data.
    """
    if exp is not None:
        if type(exp) is list:
            exp = pynibs.list2dict(exp)
    elif fn_exp is not None:
        exp = read_csv(fn_exp)
    else:
        raise IOError("Please provide either dictionary containing the experimental data or the filename "
                      "of the experimental.csv file")

    if fn_plot is None:
        fn_plot = os.path.split(fn_geo_hdf5)[0]

    # read and sort by condition
    exp_cond = sort_by_condition(exp)
    n_conditions = len(exp_cond)

    # read head mesh and extract skin surface (region 1005)
    msh = pynibs.load_mesh_hdf5(fn_geo_hdf5)
    triangles = msh.triangles[msh.triangles_regions == 1005]
    point_idx_unique = np.unique(triangles)
    points = msh.points[point_idx_unique, :]

    # get mean coil orientation and position per condition
    ori_mean = [np.mean(np.array(exp_cond[i]['coil_mean'])[:, 0:3, 0:3], axis=0) for i in range(n_conditions)]
    pos_mean = [np.mean(np.array(exp_cond[i]['coil_mean'])[:, 0:3, 3], axis=0) for i in range(n_conditions)]

    # determine distance between coil plane and skin surface and set coil to it
    coil_normal = [np.zeros(3) for _ in range(n_conditions)]
    distance = np.zeros(n_conditions)

    pos_mean_corrected = [np.zeros(3) for _ in range(n_conditions)]

    for i_cond in range(n_conditions):
        # determine coil normal pointing to subject
        coil_normal[i_cond] = ori_mean[i_cond][:, 2] / np.linalg.norm(ori_mean[i_cond][:, 2])

        # determine minimal distance between coil and skin surface
        distance[i_cond] = np.min(np.dot((points - pos_mean[i_cond]), coil_normal[i_cond]))

        # move coil in normal direction by this distance
        pos_mean_corrected[i_cond] = pos_mean[i_cond] + distance[i_cond] * coil_normal[i_cond]

    # outlier detection on the zero-mean distance distribution
    if remove_coil_skin_distance_outlier:
        distance_mean = np.median(distance)
        distance_zm = distance - distance_mean
        coil_pos_selected = np.logical_and(min_dist < distance_zm, distance_zm < max_dist)

        # distance distribution (original)
        plt.hist(distance, bins=50, density=True)
        plt.hist(distance[coil_pos_selected], bins=50, density=True, alpha=0.6)
        plt.xlabel("distance in (mm)")
        plt.ylabel("number of stimulations")
        plt.title(f"Distance histogram (original, mean: {distance_mean:.2f}mm)")
        plt.legend(["original", "outlier corrected"])
        plt.savefig(os.path.join(fn_plot, "distance_histogram_orig.png"), dpi=300)
        plt.close()

        # distance distribution (zero mean)
        plt.hist(distance_zm, bins=50, density=True)
        plt.hist(distance_zm[coil_pos_selected], bins=50, density=True, alpha=0.6)
        plt.xlabel("distance in (mm)")
        plt.ylabel("number of stimulations")
        plt.title("Distance histogram (zero mean)")
        plt.legend(["zero mean", "outlier corrected"])
        plt.savefig(os.path.join(fn_plot, "distance_histogram_zm.png"), dpi=300)
        plt.close()

    else:
        coil_pos_selected = [True] * n_conditions

    # write results in exp_corr: every zap of a condition gets the condition-mean corrected matrix
    exp_cond_corr = copy.deepcopy(exp_cond)

    for i_cond in range(n_conditions):
        exp_cond_corr[i_cond]['coil_mean'] = [np.vstack((np.hstack((ori_mean[i_cond],
                                                                    pos_mean_corrected[i_cond][:, np.newaxis])),
                                                         [0, 0, 0, 1]))] * len(
                exp_cond_corr[i_cond]['coil_mean'])

        exp_cond_corr[i_cond]['coil_0'] = exp_cond_corr[i_cond]['coil_mean']
        exp_cond_corr[i_cond]['coil_1'] = exp_cond_corr[i_cond]['coil_mean']

    # filter out valid coil positions
    exp_cond_corr_selected = []
    for i_cond in range(n_conditions):
        if coil_pos_selected[i_cond]:
            exp_cond_corr_selected.append(exp_cond_corr[i_cond])
        else:
            # fixed: report the actual condition index and the configured bounds
            # (message previously hard-coded "-5mm < distance < 3mm")
            print(f"Removing coil position #{i_cond} ({min_dist}mm < distance < {max_dist}mm from zero mean "
                  f"coil <-> skin distance distribution)")

    exp_corr = dict()
    keys = list(exp.keys())
    for i_cond in range(len(exp_cond_corr_selected)):
        for k in keys:
            if i_cond == 0:
                exp_corr[k] = exp_cond_corr_selected[i_cond][k]
            else:
                exp_corr[k] = exp_corr[k] + exp_cond_corr_selected[i_cond][k]

    if fn_exp_out is not None:
        # reformat results to save new .csv file (one column per matrix element)
        coil0_keys = ['coil0_' + str(int(m)) + str(int(n)) for m in range(4) for n in range(4)]
        coil1_keys = ['coil1_' + str(int(m)) + str(int(n)) for m in range(4) for n in range(4)]
        coil_mean_keys = ['coil_mean_' + str(int(m)) + str(int(n)) for m in range(4) for n in range(4)]

        exp_corr_formatted = copy.deepcopy(exp_corr)  # type: dict
        del exp_corr_formatted['coil_0']
        del exp_corr_formatted['coil_1']
        del exp_corr_formatted['coil_mean']

        for i_key in range(len(coil0_keys)):
            m = int(coil0_keys[i_key][-2])
            n = int(coil0_keys[i_key][-1])

            exp_corr_formatted[coil0_keys[i_key]] = [exp_corr['coil_0'][i_zap][m, n]
                                                     for i_zap in range(len(exp_corr['coil_0']))]

            exp_corr_formatted[coil1_keys[i_key]] = [exp_corr['coil_1'][i_zap][m, n]
                                                     for i_zap in range(len(exp_corr['coil_1']))]

            exp_corr_formatted[coil_mean_keys[i_key]] = [exp_corr['coil_mean'][i_zap][m, n]
                                                         for i_zap in range(len(exp_corr['coil_mean']))]

        exp_corr_list = []
        for i in range(len(exp_corr_formatted['coil_mean_00'])):
            exp_corr_list.append(dict())
            for key in list(exp_corr_formatted.keys()):
                exp_corr_list[-1][key] = exp_corr_formatted[key][i]

        # save experimental csv
        write_csv(fn_exp_out, exp_corr_list)
    else:
        return exp_corr
|
|
1657
|
+
|
|
1658
|
+
|
|
1659
|
+
def coil_distance_correction_matsimnibs(matsimnibs, fn_mesh_hdf5, distance=0, remove_coil_skin_distance_outlier=False,
                                        min_dist=-5, max_dist=2):
    """
    Corrects the distance between the coil and the head assuming that the coil is located at a distance "d"
    with respect to the head surface during the experiments. This is done since the different coil tracker result in
    different coil head distances due to tracking inaccuracies.

    Parameters
    ----------
    matsimnibs : np.ndarray of float
        (4, 4) or (4, 4, n_mat) Tensor containing matsimnibs matrices.
    fn_mesh_hdf5 : str
        .hdf5 file containing the head mesh.
    distance : float or list of float, default: 0
        Target distance in (mm) between coil and head due to hair layer. All coil positions are moved to this distance.
        If ``distance`` is list: ``len(distance) == n_mat``.
    remove_coil_skin_distance_outlier : bool, default: False
        Remove coil positions located outside (min_dist, max_dist) of the skin surface.
    min_dist : float, default: -5
        Lower outlier boundary (mm) of the coil-skin distance (only used with remove_coil_skin_distance_outlier).
    max_dist : float, default: 2
        Upper outlier boundary (mm) of the coil-skin distance (only used with remove_coil_skin_distance_outlier).

    Returns
    -------
    matsimnibs : np.ndarray of float
        (4, 4, n_mat) Tensor containing matsimnibs matrices with distance corrected coil positions.
    """
    if matsimnibs.ndim == 2:
        matsimnibs = matsimnibs[:, :, np.newaxis]

    n_matsimnibs = matsimnibs.shape[2]
    matsimnibs_corrected = copy.deepcopy(matsimnibs)

    # read head mesh and extract skin surface
    msh = pynibs.load_mesh_hdf5(fn_mesh_hdf5)
    triangles = msh.triangles[msh.triangles_regions == 1005]  # this is skin
    point_idx_unique = np.unique(triangles)
    points = msh.points[point_idx_unique, :]
    coil_pos_selected = [0 for _ in range(n_matsimnibs)]

    # broadcast a scalar target distance to all matrices
    distance = np.atleast_1d(np.array(distance))
    if distance.shape[0] == 1:
        distance = np.repeat(distance, n_matsimnibs)
    assert distance.shape[0] == n_matsimnibs

    # determine distance between coil plane and skin surface and set coil to it
    for i_mat in range(n_matsimnibs):
        # determine coil normal pointing to subject
        coil_normal = matsimnibs[0:3, 2, i_mat] / np.linalg.norm(matsimnibs[0:3, 2, i_mat])

        # determine minimal distance between coil and skin surface (minus target hair-layer distance)
        distance_coil_skin = np.min(np.dot((points - matsimnibs[0:3, 3, i_mat]),
                                           coil_normal)
                                    ) - distance[i_mat]

        # move coil in normal direction by this distance
        matsimnibs_corrected[0:3, 3, i_mat] = matsimnibs[0:3, 3, i_mat] + distance_coil_skin * coil_normal

        # check if distance is too big -> outlier
        if remove_coil_skin_distance_outlier:
            coil_pos_selected[i_mat] = np.logical_and(min_dist < distance_coil_skin, distance_coil_skin < max_dist)
            if not coil_pos_selected[i_mat]:
                print(f"Removing coil position #{i_mat} "
                      f"(distance is outside {min_dist}mm < distance < {max_dist}mm from skin surface)")
        else:
            coil_pos_selected[i_mat] = True

    # select valid coil positions
    matsimnibs_corrected = matsimnibs_corrected[:, :, coil_pos_selected]

    return matsimnibs_corrected
|
|
1726
|
+
|
|
1727
|
+
|
|
1728
|
+
def save_matsimnibs_txt(fn_matsimnibs, matsimnibs):
    """
    Saving matsimnibs matrices in .txt file.

    Each matrix is written as one space-delimited ``%.8f`` row per line, followed by a blank line
    (the format expected by :func:`load_matsimnibs_txt`).

    Parameters
    ----------
    fn_matsimnibs : str
        Filename of .txt file the matsimnibs matrices are stored in.
    matsimnibs : np.ndarray of float
        (4, 4) or (4, 4, n_mat) Tensor containing matsimnibs matrices.

    Returns
    -------
    <File>: .txt file
        Textfile containing the matsimnibs matrices.
    """
    if matsimnibs.ndim == 2:
        matsimnibs = matsimnibs[:, :, np.newaxis]

    # open once instead of re-opening per matrix; avoids the deprecated np.matrix/np.savetxt combo
    with open(fn_matsimnibs, "w") as f:
        for i_mat in range(matsimnibs.shape[2]):
            for row in matsimnibs[:, :, i_mat]:
                f.write(" ".join(f"{v:.8f}" for v in row) + "\n")
            # blank line separates consecutive matrices
            f.write("\n")
|
|
1757
|
+
|
|
1758
|
+
|
|
1759
|
+
def load_matsimnibs_txt(fn_matsimnibs):
    """
    Loading matsimnibs matrices from .txt file.

    The file is expected to contain one whitespace-delimited matrix row per line, with a blank
    line separating consecutive matrices (as written by :func:`save_matsimnibs_txt`).

    Parameters
    ----------
    fn_matsimnibs : str
        Filename of .txt file the matsimnibs matrices are stored in.

    Returns
    -------
    matsimnibs : np.ndarray of float
        (4, 4, n_mat) Tensor containing matsimnibs matrices (n_mat == 1 for a single matrix).
    """
    # fixed: the previous parser compared a float ndarray against the string "\n"
    # (undefined/deprecated NumPy semantics); parse line-by-line instead, using
    # blank lines as matrix delimiters
    matsimnibs_list = []
    mat_rows = []

    with open(fn_matsimnibs, "r") as f:
        for raw_line in f:
            fields = raw_line.strip().split()
            if fields:
                mat_rows.append([float(v) for v in fields])
            elif mat_rows:
                # blank line closes the current matrix
                matsimnibs_list.append(np.array(mat_rows))
                mat_rows = []

    # file may not end with a trailing blank line
    if mat_rows:
        matsimnibs_list.append(np.array(mat_rows))

    matsimnibs = np.zeros((matsimnibs_list[0].shape[0], matsimnibs_list[0].shape[1], len(matsimnibs_list)))

    for i, m in enumerate(matsimnibs_list):
        matsimnibs[:, :, i] = m

    return matsimnibs
|
|
1802
|
+
|
|
1803
|
+
|
|
1804
|
+
# TODO: The MEP amplitudes are still missing in the phys_data/postproc/zap_idx/EMG_p2p folder in the hdf5
|
|
1805
|
+
def convert_csv_to_hdf5(fn_csv, fn_hdf5, overwrite_arr=True, verbose=False):
|
|
1806
|
+
"""
|
|
1807
|
+
Wrapper from experiment.csv to experiment.hdf5.
|
|
1808
|
+
Saves all relevant columns from the (old) experiment.csv file to an .hdf5 file.
|
|
1809
|
+
|
|
1810
|
+
.. code-block:: sh
|
|
1811
|
+
|
|
1812
|
+
fn_hdf5:/stim_data/
|
|
1813
|
+
|--coil_sn
|
|
1814
|
+
|--current
|
|
1815
|
+
|--date
|
|
1816
|
+
|--time_diff
|
|
1817
|
+
|--time_mep
|
|
1818
|
+
|--time_tms
|
|
1819
|
+
|--ts_tms
|
|
1820
|
+
|--coil0 # <- all coil0_** columns
|
|
1821
|
+
|--coil1 # <- all coil1_** columns
|
|
1822
|
+
|--coil_mean # <- all coil_mean_** columns
|
|
1823
|
+
|
|
1824
|
+
All columns not found in experiment.csv are ignored (and a warning is thrown).
|
|
1825
|
+
|
|
1826
|
+
Parameters
|
|
1827
|
+
----------
|
|
1828
|
+
fn_csv: str
|
|
1829
|
+
experiment.csv filename.
|
|
1830
|
+
fn_hdf5: str
|
|
1831
|
+
experiment.hdf5 filename. File is created if not existing.
|
|
1832
|
+
overwrite_arr: bool, default: True.
|
|
1833
|
+
Overwrite existing arrays. Otherwise: fail.
|
|
1834
|
+
verbose: bool, default: False
|
|
1835
|
+
Flag indicating verbosity.
|
|
1836
|
+
|
|
1837
|
+
"""
|
|
1838
|
+
# fn_csv = "/data/pt_01756/tmp/write_exp_hdf/experiment_oc_dc.csv"
|
|
1839
|
+
# fn_hdf5 = "/data/pt_01756/tmp/write_exp_hdf/experiment.hdf5"
|
|
1840
|
+
# verbose = True
|
|
1841
|
+
csv_data = pd.read_csv(fn_csv)
|
|
1842
|
+
|
|
1843
|
+
# save the following columns to hdf5
|
|
1844
|
+
cols2save = ["coil_sn", "current", "date", "time_diff", "time_mep", "time_tms", "ts_tms"]
|
|
1845
|
+
for missing_col in set(cols2save) - set(csv_data.columns):
|
|
1846
|
+
warnings.warn(f"{missing_col} not found in {fn_csv}")
|
|
1847
|
+
cols2save = list(set(cols2save) & set(csv_data.columns))
|
|
1848
|
+
|
|
1849
|
+
for col_name, data in csv_data[cols2save].iteritems():
|
|
1850
|
+
if verbose:
|
|
1851
|
+
print(f"Adding {col_name} to {fn_hdf5}:/stim_data/{col_name}")
|
|
1852
|
+
|
|
1853
|
+
data = data.values
|
|
1854
|
+
pynibs.write_arr_to_hdf5(fn_hdf5, f"/stim_data/{col_name}", data, overwrite_arr=overwrite_arr, verbose=verbose)
|
|
1855
|
+
|
|
1856
|
+
# save coil coordinate information hdf5
|
|
1857
|
+
cols2save = ["coil0", "coil1", "coil_mean"]
|
|
1858
|
+
|
|
1859
|
+
# the coil coordinates are stored as one column per cell, so get all columns that belong to coilX
|
|
1860
|
+
for col_name in cols2save:
|
|
1861
|
+
cols = [col for col in csv_data if col.startswith(col_name)]
|
|
1862
|
+
if not cols:
|
|
1863
|
+
warnings.warn(f"{col_name} not found in {fn_csv}")
|
|
1864
|
+
continue
|
|
1865
|
+
if verbose:
|
|
1866
|
+
print(f"Adding {col_name} to {fn_hdf5}:/stim_data/{col_name}")
|
|
1867
|
+
data = csv_data[cols].values
|
|
1868
|
+
|
|
1869
|
+
if col_name == "coil0":
|
|
1870
|
+
col_name = "coil_0"
|
|
1871
|
+
if col_name == "coil1":
|
|
1872
|
+
col_name = "coil_1"
|
|
1873
|
+
|
|
1874
|
+
pynibs.write_arr_to_hdf5(fn_hdf5, f"/stim_data/{col_name}", data, overwrite_arr=True, verbose=True)
|
|
1875
|
+
pynibs.write_arr_to_hdf5(fn_hdf5, f"/stim_data/{col_name}_columns", np.array(cols), overwrite_arr=True,
|
|
1876
|
+
verbose=True)
|